├── dusty ├── __init__.py ├── commands │ ├── __init__.py │ ├── version.py │ ├── client.py │ ├── shell.py │ ├── logs.py │ ├── doctor.py │ ├── status.py │ ├── dump.py │ ├── manage_config.py │ ├── scripts.py │ ├── utils.py │ ├── bundles.py │ ├── assets.py │ └── disk.py ├── compiler │ ├── __init__.py │ └── compose │ │ └── common.py ├── systems │ ├── __init__.py │ ├── docker │ │ ├── common.py │ │ ├── files.py │ │ ├── cleanup.py │ │ └── config.py │ ├── nfs │ │ ├── __init__.py │ │ └── client.py │ ├── config_file │ │ └── __init__.py │ ├── hosts │ │ └── __init__.py │ ├── known_hosts │ │ └── __init__.py │ ├── nginx │ │ └── __init__.py │ └── rsync │ │ └── __init__.py ├── cli │ ├── __main__.py │ ├── shutdown.py │ ├── version.py │ ├── dump.py │ ├── status.py │ ├── stop.py │ ├── shell.py │ ├── doctor.py │ ├── validate.py │ ├── logs.py │ ├── upgrade.py │ ├── config.py │ ├── scripts.py │ ├── restart.py │ ├── setup.py │ ├── up.py │ ├── bundles.py │ ├── env.py │ ├── disk.py │ ├── assets.py │ ├── repos.py │ ├── cp.py │ └── test.py ├── schemas │ ├── __init__.py │ ├── asset_schema.py │ ├── bundle_schema.py │ ├── lib_schema.py │ ├── test_schema.py │ └── app_schema.py ├── resources │ ├── nginx_base_config.txt │ ├── nginx_502_page.html │ └── 502.js ├── memoize.py ├── warnings.py ├── path.py ├── parallel.py ├── changeset.py ├── payload.py ├── log.py └── subprocess.py ├── tests ├── __init__.py ├── unit │ ├── __init__.py │ ├── cli │ │ ├── __init__.py │ │ └── cp_test.py │ ├── commands │ │ ├── __init__.py │ │ ├── shell_test.py │ │ ├── cp_test.py │ │ ├── upgrade_test.py │ │ ├── assets_test.py │ │ ├── bundles_test.py │ │ ├── scripts_test.py │ │ ├── logs_test.py │ │ └── manage_config_test.py │ ├── schemas │ │ └── __init__.py │ ├── systems │ │ ├── __init__.py │ │ ├── docker │ │ │ ├── __init__.py │ │ │ ├── .DS_Store │ │ │ └── config_test.py │ │ ├── hosts │ │ │ ├── __init__.py │ │ │ └── init_test.py │ │ ├── nfs │ │ │ ├── __init__.py │ │ │ ├── client_test.py │ │ │ └── server_test.py │ │ ├── nginx │ │ 
│ └── __init__.py │ │ ├── config_file │ │ │ ├── __init__.py │ │ │ └── init_test.py │ │ ├── known_hosts │ │ │ ├── __init__.py │ │ │ └── init_test.py │ │ └── virtualbox │ │ │ ├── __init__.py │ │ │ └── init_test.py │ ├── compiler │ │ ├── nginx │ │ │ └── __init__.py │ │ ├── compose │ │ │ └── __init__.py │ │ ├── port_spec │ │ │ └── __init__.py │ │ ├── test_configs │ │ │ ├── simple │ │ │ │ ├── bundles │ │ │ │ │ └── simple.yml │ │ │ │ ├── docker-compose.yml │ │ │ │ ├── apps │ │ │ │ │ └── simpleapp.yml │ │ │ │ ├── nginx.conf │ │ │ │ └── assembled_spec.yml │ │ │ ├── recursive_apps │ │ │ │ ├── bundles │ │ │ │ │ └── simple.yml │ │ │ │ ├── docker-compose.yml │ │ │ │ ├── apps │ │ │ │ │ ├── app2.yml │ │ │ │ │ └── simpleapp.yml │ │ │ │ ├── nginx.conf │ │ │ │ └── assembled_spec.yml │ │ │ ├── recursive_libs │ │ │ │ ├── bundles │ │ │ │ │ └── simple.yml │ │ │ │ ├── libs │ │ │ │ │ ├── lib2.yml │ │ │ │ │ └── lib1.yml │ │ │ │ ├── docker-compose.yml │ │ │ │ ├── apps │ │ │ │ │ └── simpleapp.yml │ │ │ │ ├── nginx.conf │ │ │ │ └── assembled_spec.yml │ │ │ ├── bundle_without_app │ │ │ │ ├── bundles │ │ │ │ │ ├── simple.yml │ │ │ │ │ └── serviceonly.yml │ │ │ │ ├── services │ │ │ │ │ └── bundle-specified-service.yml │ │ │ │ ├── docker-compose.yml │ │ │ │ ├── apps │ │ │ │ │ └── simpleapp.yml │ │ │ │ ├── nginx.conf │ │ │ │ └── assembled_spec.yml │ │ │ └── bundle_with_service │ │ │ │ ├── services │ │ │ │ └── bundle-specified-service.yml │ │ │ │ ├── bundles │ │ │ │ └── simple.yml │ │ │ │ ├── docker-compose.yml │ │ │ │ ├── apps │ │ │ │ └── simpleapp.yml │ │ │ │ ├── nginx.conf │ │ │ │ └── assembled_spec.yml │ │ └── __init__.py │ ├── preflight_test.py │ ├── memoize_test.py │ ├── path_test.py │ ├── config_test.py │ ├── utils.py │ ├── warnings_test.py │ ├── changeset_test.py │ ├── parallel_test.py │ └── payload_test.py ├── integration │ ├── __init__.py │ ├── cli │ │ ├── __init__.py │ │ ├── dump_test.py │ │ ├── disk_test.py │ │ ├── config_test.py │ │ ├── logs_test.py │ │ ├── up_test.py │ │ ├── 
validate_test.py │ │ ├── env_test.py │ │ ├── test_test.py │ │ ├── scripts_test.py │ │ ├── bundles_test.py │ │ ├── stop_test.py │ │ └── setup_test.py │ └── systems │ │ ├── __init__.py │ │ └── nfs │ │ └── __init__.py ├── upgrade_integration │ ├── __init__.py │ └── upgrade_test.py ├── hfs_nfs_insync │ ├── Test_2 │ │ └── sub_test_3 │ │ │ └── empty │ └── test_1 │ │ ├── sub_test_1 │ │ └── empty │ │ └── Sub_test_2 │ │ └── sub_sub_test_1 │ │ └── empty └── run_integration_tests.sh ├── setup.cfg ├── setup ├── bin │ └── dusty ├── binary-hook.py ├── brew-install.sh ├── create_binaries.sh ├── dusty.rb ├── com.gamechanger.dusty.plist ├── dusty.spec ├── release.py └── install.sh ├── requirements-dev.txt ├── docs ├── assets │ ├── howdy.png │ ├── architecture.png │ ├── fileserver-1.png │ ├── fileserver-2.png │ └── flask-hello-world.png ├── specs │ ├── service-specs.md │ ├── bundle-specs.md │ ├── index.md │ └── lib-specs.md ├── index.md ├── setup.md ├── installation.md └── faq.md ├── MANIFEST.in ├── requirements.txt ├── testing.yml ├── mkdocs.yml ├── .gitignore ├── LICENSE ├── setup.py └── README.md /dusty/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dusty/commands/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dusty/compiler/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /dusty/systems/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/cli/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/integration/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/integration/cli/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/commands/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/schemas/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/systems/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore=E501 3 | -------------------------------------------------------------------------------- /tests/integration/systems/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/compiler/nginx/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/systems/docker/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/systems/hosts/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/systems/nfs/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/systems/nginx/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/upgrade_integration/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/integration/systems/nfs/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/compiler/compose/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/compiler/port_spec/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/systems/config_file/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /tests/unit/systems/known_hosts/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/systems/virtualbox/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/hfs_nfs_insync/Test_2/sub_test_3/empty: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/hfs_nfs_insync/test_1/sub_test_1/empty: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /setup/bin/dusty: -------------------------------------------------------------------------------- 1 | from dusty.cli import main 2 | main() 3 | -------------------------------------------------------------------------------- /tests/hfs_nfs_insync/test_1/Sub_test_2/sub_sub_test_1/empty: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | -e . 2 | nose==1.3.7 3 | mock==1.0.1 4 | python-dateutil==2.8. 
5 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/simple/bundles/simple.yml: -------------------------------------------------------------------------------- 1 | apps: 2 | - simpleapp 3 | -------------------------------------------------------------------------------- /docs/assets/howdy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gamechanger/dusty/HEAD/docs/assets/howdy.png -------------------------------------------------------------------------------- /dusty/cli/__main__.py: -------------------------------------------------------------------------------- 1 | from . import main 2 | 3 | if __name__ == '__main__': 4 | main() 5 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/recursive_apps/bundles/simple.yml: -------------------------------------------------------------------------------- 1 | apps: 2 | - simpleapp 3 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/recursive_libs/bundles/simple.yml: -------------------------------------------------------------------------------- 1 | apps: 2 | - simpleapp 3 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/bundle_without_app/bundles/simple.yml: -------------------------------------------------------------------------------- 1 | apps: 2 | - simpleapp 3 | -------------------------------------------------------------------------------- /docs/assets/architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gamechanger/dusty/HEAD/docs/assets/architecture.png -------------------------------------------------------------------------------- /docs/assets/fileserver-1.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/gamechanger/dusty/HEAD/docs/assets/fileserver-1.png -------------------------------------------------------------------------------- /docs/assets/fileserver-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gamechanger/dusty/HEAD/docs/assets/fileserver-2.png -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/bundle_with_service/services/bundle-specified-service.yml: -------------------------------------------------------------------------------- 1 | image: someimage 2 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/bundle_without_app/services/bundle-specified-service.yml: -------------------------------------------------------------------------------- 1 | image: someimage 2 | -------------------------------------------------------------------------------- /docs/assets/flask-hello-world.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gamechanger/dusty/HEAD/docs/assets/flask-hello-world.png -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/recursive_libs/libs/lib2.yml: -------------------------------------------------------------------------------- 1 | repo: github.com/gamechanger/simpleapp 2 | mount: /lib2 3 | -------------------------------------------------------------------------------- /tests/unit/systems/docker/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gamechanger/dusty/HEAD/tests/unit/systems/docker/.DS_Store -------------------------------------------------------------------------------- 
/tests/unit/compiler/test_configs/bundle_without_app/bundles/serviceonly.yml: -------------------------------------------------------------------------------- 1 | services: 2 | - bundle-specified-service 3 | -------------------------------------------------------------------------------- /dusty/schemas/__init__.py: -------------------------------------------------------------------------------- 1 | from .app_schema import app_schema 2 | from .lib_schema import lib_schema 3 | from .bundle_schema import bundle_schema 4 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/bundle_with_service/bundles/simple.yml: -------------------------------------------------------------------------------- 1 | apps: 2 | - simpleapp 3 | services: 4 | - bundle-specified-service 5 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/recursive_libs/libs/lib1.yml: -------------------------------------------------------------------------------- 1 | repo: github.com/gamechanger/lib1 2 | mount: /lib1 3 | depends: 4 | libs: 5 | - lib2 6 | -------------------------------------------------------------------------------- /setup/binary-hook.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import dusty.constants 4 | dusty.constants.BINARY = True 5 | 6 | dusty.constants.PRERELEASE = os.getenv('PRERELEASE') == 'true' 7 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/simple/docker-compose.yml: -------------------------------------------------------------------------------- 1 | simpleapp: 2 | image: docker.gamechanger.io/simpleapp 3 | command: run-script.sh 4 | environment: 5 | gcenv: local 6 | -------------------------------------------------------------------------------- /MANIFEST.in: 
-------------------------------------------------------------------------------- 1 | include *.rst *.txt 2 | recursive-exclude tests * 3 | recursive-exclude * __pycache__ 4 | recursive-exclude * *.py[co] 5 | 6 | recursive-include docs *.rst conf.py Makefile make.bat 7 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/recursive_apps/docker-compose.yml: -------------------------------------------------------------------------------- 1 | simpleapp: 2 | image: docker.gamechanger.io/simpleapp 3 | command: run-script.sh 4 | environment: 5 | gcenv: local 6 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/recursive_libs/docker-compose.yml: -------------------------------------------------------------------------------- 1 | simpleapp: 2 | image: docker.gamechanger.io/simpleapp 3 | command: run-script.sh 4 | environment: 5 | gcenv: local 6 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | docker-py==1.7.2 2 | PyYAML==3.11 3 | PrettyTable==0.7.2 4 | github3.py==1.3.0 5 | GitPython==2.1.9 6 | docopt==0.6.2 7 | Schemer==0.2.9 8 | psutil==2.2.1 9 | Flask==0.10.1 10 | -------------------------------------------------------------------------------- /dusty/commands/version.py: -------------------------------------------------------------------------------- 1 | from ..constants import VERSION 2 | from ..log import log_to_client 3 | from ..payload import daemon_command 4 | 5 | @daemon_command 6 | def version(): 7 | log_to_client('Dusty daemon version: {}'.format(VERSION)) 8 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/bundle_with_service/docker-compose.yml: -------------------------------------------------------------------------------- 1 | 
simpleapp: 2 | image: docker.gamechanger.io/simpleapp 3 | command: run-script.sh 4 | environment: 5 | gcenv: local 6 | bundle-specified-service: 7 | image: someimage 8 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/bundle_without_app/docker-compose.yml: -------------------------------------------------------------------------------- 1 | simpleapp: 2 | image: docker.gamechanger.io/simpleapp 3 | command: run-script.sh 4 | environment: 5 | gcenv: local 6 | bundle-specified-service: 7 | image: someimage 8 | -------------------------------------------------------------------------------- /dusty/systems/docker/common.py: -------------------------------------------------------------------------------- 1 | def spec_for_service(app_or_lib_name, expanded_specs): 2 | if app_or_lib_name in expanded_specs['apps']: 3 | return expanded_specs['apps'][app_or_lib_name] 4 | return expanded_specs['libs'][app_or_lib_name] 5 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/recursive_apps/apps/app2.yml: -------------------------------------------------------------------------------- 1 | repo: docker.gamechanger.io/app2 2 | image: docker.gamechanger.io/app2 3 | mount: /app2 4 | commands: 5 | always: 6 | - run-script.sh 7 | compose: 8 | environment: 9 | gcenv: local 10 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/recursive_libs/apps/simpleapp.yml: -------------------------------------------------------------------------------- 1 | image: docker.gamechanger.io/simpleapp 2 | commands: 3 | always: 4 | - run-script.sh 5 | compose: 6 | environment: 7 | gcenv: local 8 | depends: 9 | libs: 10 | - lib1 11 | -------------------------------------------------------------------------------- /dusty/schemas/asset_schema.py: 
-------------------------------------------------------------------------------- 1 | from schemer import Schema, Array 2 | 3 | asset_schema = Schema({ 4 | 'name': {'type': basestring, 'required': True}, 5 | 'path': {'type': basestring, 'required': True}, 6 | 'required': {'type': bool, 'default': True} 7 | }) 8 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/simple/apps/simpleapp.yml: -------------------------------------------------------------------------------- 1 | repo: github.com/gamechanger/simpleapp 2 | image: docker.gamechanger.io/simpleapp 3 | mount: /simpleapp 4 | commands: 5 | always: 6 | - run-script.sh 7 | compose: 8 | environment: 9 | gcenv: local 10 | -------------------------------------------------------------------------------- /tests/integration/cli/dump_test.py: -------------------------------------------------------------------------------- 1 | from ...testcases import DustyIntegrationTestCase 2 | 3 | class TestDumpCLI(DustyIntegrationTestCase): 4 | def test_dump(self): 5 | result = self.run_command('dump') 6 | self.assertInSameLine(result, 'COMMAND', 'Dusty Version') 7 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/bundle_with_service/apps/simpleapp.yml: -------------------------------------------------------------------------------- 1 | repo: github.com/gamechanger/simpleapp 2 | image: docker.gamechanger.io/simpleapp 3 | mount: /simpleapp 4 | commands: 5 | always: 6 | - run-script.sh 7 | compose: 8 | environment: 9 | gcenv: local 10 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/bundle_without_app/apps/simpleapp.yml: -------------------------------------------------------------------------------- 1 | repo: github.com/gamechanger/simpleapp 2 | image: docker.gamechanger.io/simpleapp 3 | mount: /simpleapp 4 | commands: 5 | always: 6 | - 
run-script.sh 7 | compose: 8 | environment: 9 | gcenv: local 10 | -------------------------------------------------------------------------------- /dusty/systems/nfs/__init__.py: -------------------------------------------------------------------------------- 1 | from . import client 2 | from . import server 3 | 4 | def configure_nfs(): 5 | server.configure_nfs_server() 6 | client.mount_active_repos() 7 | 8 | def update_nfs_with_repos(repos): 9 | server.add_exports_for_repos(repos) 10 | client.remount_repos(repos) 11 | -------------------------------------------------------------------------------- /dusty/commands/client.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | 3 | def call_command_from_client(shell_args, env=None): 4 | print "Running command: {}".format(' '.join(shell_args)) 5 | try: 6 | subprocess.call(shell_args, env=env) 7 | except KeyboardInterrupt: 8 | print "KeyboardInterrupt; terminating" 9 | -------------------------------------------------------------------------------- /dusty/cli/shutdown.py: -------------------------------------------------------------------------------- 1 | """Shut down the Dusty VM. 
2 | 3 | Usage: 4 | shutdown 5 | """ 6 | 7 | from docopt import docopt 8 | 9 | from ..payload import Payload 10 | from ..commands.run import shutdown_dusty_vm 11 | 12 | def main(argv): 13 | args = docopt(__doc__, argv) 14 | return Payload(shutdown_dusty_vm) 15 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/recursive_apps/apps/simpleapp.yml: -------------------------------------------------------------------------------- 1 | repo: docker.gamechanger.io/simpleapp 2 | image: docker.gamechanger.io/simpleapp 3 | mount: /simpleapp 4 | commands: 5 | always: 6 | - run-script.sh 7 | compose: 8 | environment: 9 | gcenv: local 10 | depends: 11 | apps: 12 | - app2 13 | -------------------------------------------------------------------------------- /docs/specs/service-specs.md: -------------------------------------------------------------------------------- 1 | # Service Specs 2 | 3 | Services represent running containers for which you do not manage the source code. 4 | A typical use case for services is to run a database container using publicly 5 | available Docker images. 6 | 7 | Services are defined using [Docker Compose specs](https://docs.docker.com/compose/yml/). 
8 | -------------------------------------------------------------------------------- /setup/brew-install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | cp $1/com.gamechanger.dusty.plist /Library/LaunchDaemons/com.gamechanger.dusty.plist 6 | launchctl unload /Library/LaunchDaemons/com.gamechanger.dusty.plist || true 7 | chmod +x $1/dusty 8 | $1/dusty -d --preflight-only 9 | launchctl load /Library/LaunchDaemons/com.gamechanger.dusty.plist 10 | -------------------------------------------------------------------------------- /dusty/cli/version.py: -------------------------------------------------------------------------------- 1 | """Print Dusty daemon's current version 2 | 3 | Usage: 4 | version 5 | """ 6 | 7 | from docopt import docopt 8 | 9 | from ..commands.version import version 10 | from ..constants import VERSION 11 | from ..payload import Payload 12 | 13 | def main(argv): 14 | args = docopt(__doc__, argv) 15 | print 'Dusty client version: {}'.format(VERSION) 16 | return Payload(version) 17 | -------------------------------------------------------------------------------- /dusty/cli/dump.py: -------------------------------------------------------------------------------- 1 | """Output diagnostic data, useful for filing bug reports. 2 | 3 | Usage: 4 | dump 5 | 6 | Commands: 7 | dump Output diagnostic data from your system. 
8 | """ 9 | 10 | from docopt import docopt 11 | 12 | from ..payload import Payload 13 | from ..commands.dump import dump_diagnostics 14 | 15 | def main(argv): 16 | args = docopt(__doc__, argv) 17 | return Payload(dump_diagnostics) 18 | -------------------------------------------------------------------------------- /testing.yml: -------------------------------------------------------------------------------- 1 | image: docker.gamechanger.io/python2.7 2 | script: 3 | - mkdir /gc 4 | - cd /gc 5 | - cd $REPO 6 | - python setup.py install 7 | - pip install logilab-common==0.63.0 8 | - pip install pylint nose 9 | - python setup.py nosetests --tests tests/unit 10 | - | 11 | pylint --ignored-classes=_socketobject --msg-template="{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}" --reports=y -E y $REPO/dusty 12 | -------------------------------------------------------------------------------- /dusty/cli/status.py: -------------------------------------------------------------------------------- 1 | """Give information on activated apps, services and 2 | libs. Will present which ones are running in a 3 | container and name to use when calling addressing them. 
4 | 5 | Usage: 6 | status 7 | """ 8 | 9 | from docopt import docopt 10 | 11 | from ..payload import Payload 12 | from ..commands.status import get_dusty_status 13 | 14 | def main(argv): 15 | docopt(__doc__, argv) 16 | return Payload(get_dusty_status) 17 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/simple/nginx.conf: -------------------------------------------------------------------------------- 1 | worker_processes 1; 2 | 3 | error_log /var/log/dusty/nginx/error.log; 4 | 5 | pid /var/log/dusty/nginx/nginx.pid; 6 | 7 | events { 8 | worker_connections 1024; 9 | } 10 | 11 | http { 12 | include mime.types; 13 | default_type application/octet-stream; 14 | 15 | access_log /var/log/dusty/nginx/access.log; 16 | 17 | sendfile on; 18 | 19 | keepalive_timeout 65; 20 | 21 | } 22 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/recursive_apps/nginx.conf: -------------------------------------------------------------------------------- 1 | worker_processes 1; 2 | 3 | error_log /var/log/dusty/nginx/error.log; 4 | 5 | pid /var/log/dusty/nginx/nginx.pid; 6 | 7 | events { 8 | worker_connections 1024; 9 | } 10 | 11 | http { 12 | include mime.types; 13 | default_type application/octet-stream; 14 | 15 | access_log /var/log/dusty/nginx/access.log; 16 | 17 | sendfile on; 18 | 19 | keepalive_timeout 65; 20 | 21 | } 22 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/recursive_libs/nginx.conf: -------------------------------------------------------------------------------- 1 | worker_processes 1; 2 | 3 | error_log /var/log/dusty/nginx/error.log; 4 | 5 | pid /var/log/dusty/nginx/nginx.pid; 6 | 7 | events { 8 | worker_connections 1024; 9 | } 10 | 11 | http { 12 | include mime.types; 13 | default_type application/octet-stream; 14 | 15 | access_log /var/log/dusty/nginx/access.log; 16 | 17 
| sendfile on; 18 | 19 | keepalive_timeout 65; 20 | 21 | } 22 | -------------------------------------------------------------------------------- /tests/unit/preflight_test.py: -------------------------------------------------------------------------------- 1 | from ..testcases import DustyTestCase 2 | from dusty.preflight import _assert_executable_exists, PreflightException 3 | 4 | class PreflightTest(DustyTestCase): 5 | def test_assert_executable_exists(self): 6 | _assert_executable_exists('python') 7 | 8 | def test_assert_executable_exists_fails(self): 9 | with self.assertRaises(PreflightException): 10 | _assert_executable_exists('somecrazythingwhichforsuredoesnotexist') 11 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/bundle_with_service/nginx.conf: -------------------------------------------------------------------------------- 1 | worker_processes 1; 2 | 3 | error_log /var/log/dusty/nginx/error.log; 4 | 5 | pid /var/log/dusty/nginx/nginx.pid; 6 | 7 | events { 8 | worker_connections 1024; 9 | } 10 | 11 | http { 12 | include mime.types; 13 | default_type application/octet-stream; 14 | 15 | access_log /var/log/dusty/nginx/access.log; 16 | 17 | sendfile on; 18 | 19 | keepalive_timeout 65; 20 | 21 | } 22 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/bundle_without_app/nginx.conf: -------------------------------------------------------------------------------- 1 | worker_processes 1; 2 | 3 | error_log /var/log/dusty/nginx/error.log; 4 | 5 | pid /var/log/dusty/nginx/nginx.pid; 6 | 7 | events { 8 | worker_connections 1024; 9 | } 10 | 11 | http { 12 | include mime.types; 13 | default_type application/octet-stream; 14 | 15 | access_log /var/log/dusty/nginx/access.log; 16 | 17 | sendfile on; 18 | 19 | keepalive_timeout 65; 20 | 21 | } 22 | -------------------------------------------------------------------------------- 
/setup/create_binaries.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | venv_name=venv 4 | rm -rf $venv_name 5 | if [[ $(/usr/bin/python --version 2>&1) =~ .*2\.7.* ]]; then # --version outputs to stderr 6 | virtualenv --python=/usr/bin/python $venv_name 7 | else 8 | virtualenv $venv_name 9 | fi 10 | $venv_name/bin/pip install pyinstaller==3.4 11 | $venv_name/bin/pip install . 12 | $venv_name/bin/pyinstaller -F --runtime-hook=setup/binary-hook.py setup/dusty.spec 13 | echo "Binaries can now be found at dist/dusty" 14 | rm -rf $venv_name 15 | -------------------------------------------------------------------------------- /dusty/cli/stop.py: -------------------------------------------------------------------------------- 1 | """Stop containers associated with Dusty apps and services. 2 | 3 | This does not remove the containers unless run with --rm 4 | 5 | Usage: 6 | stop [--rm] [...] 7 | 8 | Options: 9 | --rm remove containers 10 | """ 11 | 12 | from docopt import docopt 13 | 14 | from ..payload import Payload 15 | from ..commands.run import stop_apps_or_services 16 | 17 | def main(argv): 18 | args = docopt(__doc__, argv) 19 | return Payload(stop_apps_or_services, args[''], rm_containers=args['--rm']) 20 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/simple/assembled_spec.yml: -------------------------------------------------------------------------------- 1 | bundles: 2 | simple: 3 | apps: 4 | - simpleapp 5 | apps: 6 | simpleapp: 7 | image: docker.gamechanger.io/simpleapp 8 | commands: 9 | always: 10 | - run-script.sh 11 | once: [] 12 | compose: 13 | environment: 14 | gcenv: local 15 | volumes: [] 16 | depends: 17 | apps: [] 18 | libs: [] 19 | services: [] 20 | libs: {} 21 | services: {} 22 | -------------------------------------------------------------------------------- /dusty/commands/shell.py: 
-------------------------------------------------------------------------------- 1 | from ..compiler.spec_assembler import get_specs 2 | from . import utils 3 | from ..systems.docker import get_dusty_container_name 4 | 5 | def execute_shell(app_or_service_name): 6 | specs = get_specs() 7 | if app_or_service_name not in [spec.name for spec in specs.get_apps_and_services()]: 8 | raise KeyError('No app or service found named {}'.format(app_or_service_name)) 9 | exec_options = utils.exec_docker_options() 10 | utils.exec_docker('exec', exec_options, get_dusty_container_name(app_or_service_name), '/bin/bash') 11 | -------------------------------------------------------------------------------- /dusty/cli/shell.py: -------------------------------------------------------------------------------- 1 | """Open a shell inside a running container. Works with Dusty 2 | apps and services. 3 | 4 | Usage: 5 | shell 6 | 7 | Example: 8 | To start a shell inside a container for a service named `website`: 9 | dusty shell website 10 | """ 11 | 12 | from docopt import docopt 13 | 14 | from ..commands.shell import execute_shell 15 | from ..payload import Payload 16 | 17 | def main(argv): 18 | args = docopt(__doc__, argv) 19 | payload = Payload(execute_shell, args['']) 20 | payload.run_on_daemon = False 21 | return payload 22 | -------------------------------------------------------------------------------- /dusty/cli/doctor.py: -------------------------------------------------------------------------------- 1 | """Attempt to fix networking issues with your Dusty VM 2 | 3 | VirtualBox can get itself in a state where the network between 4 | your host Mac and the Dusty VM stops functioning. This command 5 | automatically tries a few debugging commands which are known 6 | to fix the networking bugs in certain situations. 
7 | 8 | Usage: 9 | doctor 10 | """ 11 | 12 | from docopt import docopt 13 | 14 | from ..payload import Payload 15 | from ..commands.doctor import run_doctor 16 | 17 | def main(argv): 18 | args = docopt(__doc__, argv) 19 | return Payload(run_doctor) 20 | -------------------------------------------------------------------------------- /dusty/schemas/bundle_schema.py: -------------------------------------------------------------------------------- 1 | from schemer import Schema, Array 2 | 3 | def app_or_service_required_validator(): 4 | def validator(document): 5 | if 'apps' not in document and 'services' not in document: 6 | return 'Bundles must specify `apps` or `services`' 7 | return validator 8 | 9 | bundle_schema = Schema({ 10 | 'description': {'type': basestring, 'default': ''}, 11 | 'apps': {'type': Array(basestring), 'default': list}, 12 | 'services': {'type': Array(basestring), 'default': list}, 13 | }, validates=[app_or_service_required_validator()]) 14 | -------------------------------------------------------------------------------- /dusty/cli/validate.py: -------------------------------------------------------------------------------- 1 | """Validates specs to ensure that they're consistent with specifications 2 | 3 | Usage: 4 | validate [] 5 | """ 6 | 7 | from docopt import docopt 8 | 9 | from ..payload import Payload 10 | from ..commands.validate import validate_specs, validate_specs_from_path 11 | 12 | def main(argv): 13 | args = docopt(__doc__, argv) 14 | if args.get(''): 15 | payload = Payload(validate_specs_from_path, args['']) 16 | payload.run_on_daemon = False 17 | return payload 18 | else: 19 | return Payload(validate_specs) 20 | -------------------------------------------------------------------------------- /dusty/cli/logs.py: -------------------------------------------------------------------------------- 1 | """Tail out Docker logs for a container running a Dusty application 2 | or service. 
3 | 4 | Usage: 5 | logs [-f] [-t] [--tail=NUM] 6 | 7 | Options: 8 | -f follow log output 9 | -t show timestamps 10 | --tail=NUM show NUM lines from end of file 11 | 12 | """ 13 | from docopt import docopt 14 | 15 | from ..payload import Payload 16 | from ..commands.logs import tail_container_logs 17 | 18 | def main(argv): 19 | args = docopt(__doc__, argv) 20 | payload = Payload(tail_container_logs, args[''], args['-f'], args['--tail'], args['-t']) 21 | payload.run_on_daemon = False 22 | return payload 23 | -------------------------------------------------------------------------------- /dusty/schemas/lib_schema.py: -------------------------------------------------------------------------------- 1 | from schemer import Schema, Array 2 | 3 | from .test_schema import test_schema 4 | from .asset_schema import asset_schema 5 | 6 | depends_schema = Schema({ 7 | 'libs': {'type': Array(basestring), 'default': list} 8 | }) 9 | 10 | lib_schema = Schema({ 11 | 'repo': {'type': basestring, 'required': True}, 12 | 'mount': {'type': basestring, 'default': '', 'required': True}, 13 | 'install': {'type': Array(basestring), 'default': list}, 14 | 'depends': {'type': depends_schema, 'default': dict}, 15 | 'assets': {'type': Array(asset_schema), 'default': list}, 16 | 'test': {'type': test_schema, 'default': dict} 17 | }) 18 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/bundle_with_service/assembled_spec.yml: -------------------------------------------------------------------------------- 1 | bundles: 2 | simple: 3 | apps: 4 | - simpleapp 5 | services: 6 | - bundle-specified-service 7 | apps: 8 | simpleapp: 9 | image: docker.gamechanger.io/simpleapp 10 | commands: 11 | always: 12 | - run-script.sh 13 | once: [] 14 | compose: 15 | environment: 16 | gcenv: local 17 | volumes: [] 18 | depends: 19 | apps: [] 20 | libs: [] 21 | services: [] 22 | libs: {} 23 | services: 24 | bundle-specified-service: 25 | image: 
someimage 26 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/bundle_without_app/assembled_spec.yml: -------------------------------------------------------------------------------- 1 | bundles: 2 | simple: 3 | apps: 4 | - simpleapp 5 | serviceonly: 6 | services: 7 | - bundle-specified-service 8 | apps: 9 | simpleapp: 10 | image: docker.gamechanger.io/simpleapp 11 | commands: 12 | always: 13 | - run-script.sh 14 | once: [] 15 | compose: 16 | environment: 17 | gcenv: local 18 | volumes: [] 19 | depends: 20 | apps: [] 21 | libs: [] 22 | services: [] 23 | libs: {} 24 | services: 25 | bundle-specified-service: 26 | image: someimage 27 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/recursive_libs/assembled_spec.yml: -------------------------------------------------------------------------------- 1 | bundles: 2 | simple: 3 | apps: 4 | - simpleapp 5 | apps: 6 | simpleapp: 7 | image: docker.gamechanger.io/simpleapp 8 | commands: 9 | always: 10 | - run-script.sh 11 | once: [] 12 | compose: 13 | environment: 14 | gcenv: local 15 | volumes: [] 16 | depends: 17 | libs: !!set 18 | ? lib1 19 | ? 
lib2 20 | apps: [] 21 | services: [] 22 | libs: 23 | lib1: 24 | depends: 25 | libs: 26 | - lib2 27 | lib2: 28 | services: {} 29 | -------------------------------------------------------------------------------- /tests/unit/systems/nfs/client_test.py: -------------------------------------------------------------------------------- 1 | from mock import Mock, patch 2 | 3 | from dusty.systems.nfs import client 4 | from ....testcases import DustyTestCase 5 | 6 | class TestNFSClient(DustyTestCase): 7 | @patch('dusty.systems.nfs.client.get_host_ip') 8 | def test_mount_args_string(self, fake_get_host_ip): 9 | fake_get_host_ip.return_value = '192.168.59.3' 10 | fake_repo = Mock() 11 | fake_repo.local_path = '/repo/local/path' 12 | fake_repo.vm_path = '/persist/repos/remote/path' 13 | expected_mount_args = '-t nfs -o async,udp,noatime,nfsvers=3 192.168.59.3:/repo/local/path /persist/repos/remote/path' 14 | self.assertEqual(expected_mount_args, client._nfs_mount_args_string(fake_repo)) 15 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # Dusty 2 | 3 | .. note:: 4 | 5 | As there are better ways of working with Docker on a Mac now such as 6 | Docker for Mac, Dusty is officially deprecated and is no longer maintained. 7 | For similar capabilities, please check out `docker-compose `_. 8 | 9 | Docker-powered Development Environments 10 | 11 | ## Installation 12 | 13 | See [Installation.](installation.md) 14 | 15 | ## Why should I use Dusty? 16 | 17 | See [Why Dusty?](why-dusty.md) 18 | 19 | ## I have a question! 20 | 21 | Please check out our [FAQ](faq.md) or join us on [Slack.](https://dusty-slackin.herokuapp.com/) 22 | 23 | ## License 24 | 25 | Dusty is available under the MIT License. See LICENSE for full license text. 
26 | -------------------------------------------------------------------------------- /dusty/commands/logs.py: -------------------------------------------------------------------------------- 1 | from ..systems.docker import get_dusty_containers 2 | from . import utils 3 | from ..log import log_to_client 4 | 5 | def tail_container_logs(app_or_service_name, follow=False, lines=None, timestamps=False): 6 | containers = get_dusty_containers([app_or_service_name], include_exited=True) 7 | if len(containers) == 0: 8 | log_to_client('No container exists which corresponds to {}'.format(app_or_service_name)) 9 | return 10 | container = containers[0] 11 | args = ['logs'] 12 | if follow: 13 | args.append('-f') 14 | if timestamps: 15 | args.append('-t') 16 | if lines: 17 | args.append('--tail={}'.format(lines)) 18 | args.append(container['Id']) 19 | utils.exec_docker(*args) 20 | -------------------------------------------------------------------------------- /dusty/resources/nginx_base_config.txt: -------------------------------------------------------------------------------- 1 | user root; 2 | worker_processes 1; 3 | 4 | error_log /var/log/nginx/error.log warn; 5 | pid /var/run/nginx.pid; 6 | 7 | events { 8 | worker_connections 1024; 9 | } 10 | 11 | stream { 12 | include /etc/nginx/conf.d/*.stream.conf; 13 | } 14 | 15 | http { 16 | include /etc/nginx/mime.types; 17 | default_type application/octet-stream; 18 | 19 | log_format main '$remote_addr - $remote_user [$time_local] "$request" ' 20 | '$status $body_bytes_sent "$http_referer" ' 21 | '"$http_user_agent" "$http_x_forwarded_for"'; 22 | 23 | access_log /var/log/nginx/access.log main; 24 | 25 | sendfile on; 26 | 27 | keepalive_timeout 65; 28 | 29 | include /etc/nginx/conf.d/*.http.conf; 30 | } 31 | -------------------------------------------------------------------------------- /dusty/cli/upgrade.py: -------------------------------------------------------------------------------- 1 | """Upgrade Dusty's binaries 2 | 3 | 
Upgrades Dusty to the specified version. If no version is 4 | specified, this will upgrade to the latest version. This command 5 | only works if Dusty is being run as a binary (as opposed to running 6 | from source). 7 | 8 | Usage: 9 | upgrade [] 10 | 11 | Options: 12 | If provided, this version of Dusty will be downloaded 13 | and used (defaults to use the most recent version) 14 | """ 15 | 16 | from docopt import docopt 17 | 18 | from ..payload import Payload 19 | from ..commands.upgrade import upgrade_dusty_binary 20 | 21 | def main(argv): 22 | args = docopt(__doc__, argv) 23 | if args['']: 24 | return Payload(upgrade_dusty_binary, args['']) 25 | return Payload(upgrade_dusty_binary) 26 | -------------------------------------------------------------------------------- /docs/setup.md: -------------------------------------------------------------------------------- 1 | ## Setup 2 | 3 | After you install Dusty, you should run `dusty setup` to do some 4 | necessary configuration. 5 | 6 | #### Username 7 | 8 | Dusty runs some commands as this user that you specify. Usually the default 9 | here should be fine. Setting this to `root` is probably a **bad** idea. 10 | 11 | #### Specs Repo 12 | 13 | Dusty needs to know where your Dusty specs live. Use a pattern similar to: 14 | ``` 15 | repo: file:///local/repo/path 16 | -or- 17 | repo: https://github.com/my-org/my-app.git 18 | -or- 19 | repo: git@github.com:my-org/my-app.git 20 | ``` 21 | to specify the Dusty specs' location. 22 | 23 | If you're new to Dusty and would like to try things out, leave this blank 24 | to use the example specs. 
25 | 26 | #### Tutorial 27 | 28 | If this is your first time using Dusty, you can now run through the [tutorial.](getting-started/index.md) 29 | -------------------------------------------------------------------------------- /setup/dusty.rb: -------------------------------------------------------------------------------- 1 | cask 'dusty' do 2 | version '0.6.5' 3 | sha256 '6e88894c7062f24bb8b4b7b2faa149b6b638c7d3fa2be86a56266e2d1789ea60' 4 | 5 | url "https://github.com/gamechanger/dusty/releases/download/#{version}/dusty.tar.gz" 6 | appcast 'https://github.com/gamechanger/dusty/releases.atom', 7 | :checkpoint => '14dd655e5ed3a55495436c0fc9fb426bedb461a600083d10f72f330c66181d7a' 8 | name 'Dusty' 9 | homepage 'https://github.com/gamechanger/dusty' 10 | license :mit 11 | 12 | depends_on :cask => 'dockertoolbox' 13 | container :type => :tar 14 | 15 | installer :script => 'brew-install.sh', 16 | :args => %W[#{staged_path}], 17 | :must_succeed => true, 18 | :sudo => true 19 | binary 'dusty' 20 | 21 | uninstall :launchctl => 'com.gamechanger.dusty' 22 | 23 | zap :delete => '/etc/dusty' 24 | end 25 | -------------------------------------------------------------------------------- /tests/unit/commands/shell_test.py: -------------------------------------------------------------------------------- 1 | from mock import patch 2 | 3 | from ...testcases import DustyTestCase 4 | from dusty.commands.shell import execute_shell 5 | 6 | class TestShellCommands(DustyTestCase): 7 | def test_execute_script_nonexistent_app(self): 8 | with self.assertRaises(KeyError): 9 | execute_shell('some-nonexistent-app') 10 | 11 | @patch('dusty.commands.utils.exec_docker') 12 | def test_execute_script_valid_app(self, fake_exec_docker): 13 | execute_shell('app-a') 14 | fake_exec_docker.assert_called_once_with('exec', '-i', 'dusty_app-a_1', '/bin/bash') 15 | 16 | @patch('dusty.commands.utils.exec_docker') 17 | def test_execute_script_valid_service(self, fake_exec_docker): 18 | execute_shell('service-a') 
19 | fake_exec_docker.assert_called_once_with('exec', '-i', 'dusty_service-a_1', '/bin/bash') 20 | -------------------------------------------------------------------------------- /dusty/memoize.py: -------------------------------------------------------------------------------- 1 | import functools 2 | import operator 3 | import pickle 4 | 5 | from .payload import function_key 6 | 7 | cache = {} 8 | 9 | def _hash_kwargs(kwargs): 10 | return sorted(kwargs.items(), key=operator.itemgetter(0)) 11 | 12 | def memoized(fn): 13 | """ 14 | Decorator. Caches a function's return value each time it is called. 15 | If called later with the same arguments, the cached value is returned 16 | (not reevaluated). The cache lasts for the duration of each request. 17 | """ 18 | @functools.wraps(fn) 19 | def memoizer(*args, **kwargs): 20 | key = function_key(fn) + pickle.dumps(args) + pickle.dumps(_hash_kwargs(kwargs)) 21 | if key not in cache: 22 | cache[key] = fn(*args, **kwargs) 23 | return cache[key] 24 | return memoizer 25 | 26 | def reset_memoize_cache(): 27 | global cache 28 | cache = {} 29 | -------------------------------------------------------------------------------- /dusty/cli/config.py: -------------------------------------------------------------------------------- 1 | """Configure Dusty. 2 | 3 | For a description of all available config keys, 4 | run `config list`. 5 | 6 | Usage: 7 | config list 8 | config listvalues 9 | config set 10 | 11 | Commands: 12 | list List all config keys with descriptions and current values. 13 | listvalues List all config keys in machine-readable format. 14 | set Set a string config key to a new value. 
15 | """ 16 | 17 | from docopt import docopt 18 | 19 | from ..payload import Payload 20 | from ..commands.manage_config import list_config, list_config_values, save_value 21 | 22 | def main(argv): 23 | args = docopt(__doc__, argv) 24 | if args['list']: 25 | return Payload(list_config) 26 | elif args['listvalues']: 27 | return Payload(list_config_values) 28 | elif args['set']: 29 | return Payload(save_value, args[''], args['']) 30 | -------------------------------------------------------------------------------- /setup/com.gamechanger.dusty.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | EnvironmentVariables 6 | 7 | HOME 8 | /var/root 9 | 10 | KeepAlive 11 | 12 | Label 13 | com.gamechanger.dusty 14 | ProgramArguments 15 | 16 | bash 17 | -c 18 | export PATH=$PATH:/usr/local/bin; dusty -d 19 | 20 | RunAtLoad 21 | 22 | WorkingDirectory 23 | /usr/local 24 | StandardErrorPath 25 | /var/log/dusty.log 26 | StandardOutPath 27 | /var/log/dusty.log 28 | 29 | 30 | -------------------------------------------------------------------------------- /tests/unit/compiler/test_configs/recursive_apps/assembled_spec.yml: -------------------------------------------------------------------------------- 1 | bundles: 2 | simple: 3 | apps: 4 | - simpleapp 5 | apps: 6 | simpleapp: 7 | image: docker.gamechanger.io/simpleapp 8 | commands: 9 | always: 10 | - run-script.sh 11 | once: [] 12 | compose: 13 | environment: 14 | gcenv: local 15 | volumes: [] 16 | depends: 17 | apps: 18 | - app2 19 | libs: [] 20 | services: [] 21 | app2: 22 | image: docker.gamechanger.io/app2 23 | commands: 24 | always: 25 | - run-script.sh 26 | once: [] 27 | compose: 28 | environment: 29 | gcenv: local 30 | volumes: [] 31 | depends: 32 | apps: [] 33 | libs: [] 34 | services: [] 35 | libs: {} 36 | services: {} 37 | -------------------------------------------------------------------------------- /dusty/warnings.py: 
-------------------------------------------------------------------------------- 1 | import textwrap 2 | from collections import defaultdict 3 | 4 | class Warnings(object): 5 | def __init__(self): 6 | self._stored = defaultdict(list) 7 | 8 | @property 9 | def has_warnings(self): 10 | for namespace, warnings in self._stored.iteritems(): 11 | if len(warnings) > 0: 12 | return True 13 | return False 14 | 15 | def clear_namespace(self, namespace): 16 | self._stored[namespace] = [] 17 | 18 | def warn(self, namespace, message): 19 | self._stored[namespace].append(message) 20 | 21 | def pretty(self): 22 | result = '' 23 | for namespace in sorted(self._stored.keys()): 24 | result += ''.join(['WARNING ({}): {}\n'.format(namespace, '\n'.join(textwrap.wrap(message, 80))) 25 | for message in self._stored[namespace]]) 26 | return result 27 | 28 | daemon_warnings = Warnings() 29 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Dusty 2 | theme: readthedocs 3 | repo_url: https://github.com/gamechanger/dusty 4 | repo_name: GitHub 5 | site_author: GameChanger 6 | pages: 7 | - Home: index.md 8 | - Why Dusty?: why-dusty.md 9 | - Installation: 10 | - Installation: installation.md 11 | - Setup: setup.md 12 | - Getting Started: 13 | - Example Specs: getting-started/index.md 14 | - Hello World!: getting-started/hello-world.md 15 | - Developing an App: getting-started/developing.md 16 | - Moving Files Around: getting-started/files.md 17 | - Dusty Specs: 18 | - Specs Overview: specs/index.md 19 | - Bundle Specs: specs/bundle-specs.md 20 | - App Specs: specs/app-specs.md 21 | - Lib Specs: specs/lib-specs.md 22 | - Service Specs: specs/service-specs.md 23 | - Test Specs: specs/test-specs.md 24 | - Usage: usage.md 25 | - Architecture: architecture.md 26 | - FAQ: faq.md 27 | - Changelog: changelog.md 28 | - Contributing: contributing.md 29 | 
-------------------------------------------------------------------------------- /dusty/resources/nginx_502_page.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | Dusty 502 4 | 5 | 6 | 7 | 8 |
9 |
10 |
11 |

Dusty 502 for App:

12 | 13 |
14 |
15 |
16 |
17 |
18 | Stream New Data 19 |
20 |
21 | 22 |

Logs

23 | 24 |
25 |
26 |
27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | .mypy_cache/ 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | !setup/*.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *,cover 47 | .venv 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Mkdocs build 54 | site/ 55 | 56 | # Django stuff: 57 | *.log 58 | 59 | # PyBuilder 60 | target/ 61 | -------------------------------------------------------------------------------- /dusty/cli/scripts.py: -------------------------------------------------------------------------------- 1 | """Execute scripts defined in an app's spec inside a running app container. 2 | 3 | Usage: 4 | scripts [] [...] 
5 | 6 | Options: 7 | Arguments to pass to the script 8 | 9 | Examples: 10 | To get information on all scripts available for an app called `website`: 11 | dusty scripts website 12 | 13 | To run the `rebuild` script defined inside the `website` app spec: 14 | dusty scripts website rebuild 15 | """ 16 | 17 | from docopt import docopt 18 | 19 | from ..payload import Payload 20 | from ..commands.scripts import script_info_for_app, execute_script 21 | 22 | def main(argv): 23 | args = docopt(__doc__, argv, options_first=True) 24 | if not args['']: 25 | return Payload(script_info_for_app, args['']) 26 | else: 27 | payload = Payload(execute_script, args[''], args[''], script_arguments=args['']) 28 | payload.run_on_daemon = False 29 | return payload 30 | -------------------------------------------------------------------------------- /dusty/systems/config_file/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from ... import constants 5 | 6 | 7 | def read(filepath): 8 | with open(filepath, 'r') as f: 9 | return f.read() 10 | 11 | def write(filepath, contents): 12 | with open(filepath, 'w') as f: 13 | f.write(contents) 14 | 15 | def remove_current_dusty_config(config): 16 | """Given a string representing the contents of a 17 | file, this function strips out the Dusty config section 18 | denominated by the Dusty header and footer. 
Returns 19 | the stripped string.""" 20 | return constants.DUSTY_CONFIG_REGEX.sub("", config) 21 | 22 | def create_config_section(contents): 23 | config = constants.DUSTY_CONFIG_BEGIN 24 | config += contents 25 | config += constants.DUSTY_CONFIG_END 26 | return config 27 | 28 | def get_dusty_config_section(file_contents): 29 | m = constants.DUSTY_CONFIG_GROUP_REGEX.match(file_contents) 30 | if not m: 31 | return '' 32 | return m.group('dusty_config') 33 | 34 | -------------------------------------------------------------------------------- /dusty/schemas/test_schema.py: -------------------------------------------------------------------------------- 1 | from schemer import Schema, Array 2 | 3 | def no_all_suite_validator(): 4 | def validator(document): 5 | for suite in document.get('suites', []): 6 | if suite['name'].upper() == 'ALL': 7 | return 'all is a reserved suite name. It cannot be used in a spec.' 8 | return validator 9 | 10 | test_suite_schema = Schema({ 11 | 'name': {'type': basestring, 'required': True}, 12 | 'command': {'type': Array(basestring), 'required': True}, 13 | 'default_args': {'type': basestring, 'default': ''}, 14 | 'description': {'type': basestring, 'default': ''}, 15 | 'compose': {'type': dict, 'default': dict}, 16 | 'services': {'type': Array(basestring), 'default': list}, 17 | }) 18 | 19 | test_schema = Schema({ 20 | 'image': {'type': basestring}, 21 | 'image_requires_login': {'type': bool, 'default': False}, 22 | 'build': {'type': basestring}, 23 | 'once': {'type': Array(basestring), 'default': []}, 24 | 'suites': {'type': Array(test_suite_schema), 'default': list}, 25 | }, validates=[no_all_suite_validator()]) 26 | -------------------------------------------------------------------------------- /setup/dusty.spec: -------------------------------------------------------------------------------- 1 | # -*- mode: python -*- 2 | 3 | import os 4 | 5 | block_cipher = None 6 | 7 | a = Analysis(['bin/dusty'], 8 | pathex=[os.path.abspath('.')], 9 
| binaries=None, 10 | datas=[], 11 | hiddenimports=[], 12 | hookspath=[], 13 | runtime_hooks=['setup/binary-hook.py'], 14 | excludes=[], 15 | win_no_prefer_redirects=False, 16 | win_private_assemblies=False, 17 | cipher=block_cipher) 18 | 19 | for resource in ['502.js', 'jquery-2.2.1.min.js', 'nginx_502_page.html', 20 | 'nginx_base_config.txt', 'skeleton.min.css']: 21 | path = os.path.join('dusty', 'resources', resource) 22 | a.datas += [(path, path, 'DATA')] 23 | 24 | pyz = PYZ(a.pure, a.zipped_data, 25 | cipher=block_cipher) 26 | exe = EXE(pyz, 27 | a.scripts, 28 | a.binaries, 29 | a.zipfiles, 30 | a.datas, 31 | name='dusty', 32 | debug=False, 33 | strip=False, 34 | upx=True, 35 | console=True ) 36 | -------------------------------------------------------------------------------- /dusty/cli/restart.py: -------------------------------------------------------------------------------- 1 | """Restart containers associated with Dusty apps or services. 2 | 3 | Upon restart, an app container will execute the command specified 4 | in its `commands.always` spec key. Restarting app containers will 5 | also perform a NFS mount of repos needed for restarted containers, 6 | using your current repo override settings. 7 | 8 | Usage: 9 | restart ( --repos ... | [...] ) 10 | 11 | Options: 12 | --repos If provided, Dusty will restart any containers 13 | that are using the repos specified. 14 | If provided, Dusty will only restart the given 15 | services. Otherwise, all currently running 16 | services are restarted. 
17 | """ 18 | 19 | from docopt import docopt 20 | 21 | from ..payload import Payload 22 | from ..commands.run import restart_apps_or_services, restart_apps_by_repo 23 | 24 | def main(argv): 25 | args = docopt(__doc__, argv) 26 | if args['--repos']: 27 | return Payload(restart_apps_by_repo, args['--repos']) 28 | return Payload(restart_apps_or_services, args['']) 29 | -------------------------------------------------------------------------------- /tests/unit/memoize_test.py: -------------------------------------------------------------------------------- 1 | from mock import patch, Mock 2 | 3 | from dusty.memoize import memoized, reset_memoize_cache 4 | from ..testcases import DustyTestCase 5 | 6 | class TestMemoize(DustyTestCase): 7 | def setUp(self): 8 | super(TestMemoize, self).setUp() 9 | self.counter = 0 10 | @memoized 11 | def memoized_fn(kw1=None, kw2=None): 12 | self.counter += 1 13 | return self.counter 14 | self.memoized_fn = memoized_fn 15 | 16 | def test_memoize(self): 17 | self.memoized_fn() 18 | self.memoized_fn() 19 | self.assertEqual(self.counter, 1) 20 | 21 | def test_return_value(self): 22 | first = self.memoized_fn() 23 | second = self.memoized_fn() 24 | self.assertEqual(first, second) 25 | 26 | def test_cache_bust(self): 27 | self.memoized_fn() 28 | reset_memoize_cache() 29 | self.memoized_fn() 30 | self.assertEqual(self.counter, 2) 31 | 32 | def test_kwargs_order(self): 33 | self.memoized_fn(kw1=1, kw2=2) 34 | self.memoized_fn(kw2=2, kw1=1) 35 | self.assertEqual(self.counter, 1) 36 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 GameChanger Media 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without 
limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /dusty/systems/hosts/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from ... import constants 4 | from .. import config_file 5 | 6 | 7 | def _dusty_hosts_config(hosts_specs): 8 | """Return a string of all host rules required to match 9 | the given spec. This string is wrapped in the Dusty hosts 10 | header and footer so it can be easily removed later.""" 11 | rules = ''.join(['{} {}\n'.format(spec['forwarded_ip'], spec['host_address']) for spec in hosts_specs]) 12 | return config_file.create_config_section(rules) 13 | 14 | def update_hosts_file_from_port_spec(port_spec): 15 | """Given a port spec, update the hosts file specified at 16 | constants.HOST_PATH to contain the port mappings specified 17 | in the spec. 
def ensure_known_hosts(hosts):
    """Ensure root's known_hosts file contains an entry for every host in
    *hosts*, running ssh-keyscan to fetch keys for any that are missing."""
    hosts_file = _get_known_hosts_path()
    if not os.path.exists(hosts_file):
        # Touch the file so it can be opened read-write below.
        open(hosts_file, 'a+').close()
    dirty = False
    with open(hosts_file, 'r+') as known_hosts:
        contents = known_hosts.read()
        if not contents.endswith('\n'):
            contents += '\n'
        for hostname in hosts:
            # Substring check mirrors the original behavior: any mention of
            # the hostname in the file counts as "already known".
            if hostname in contents:
                continue
            logging.info('Adding {} ssh key to roots ssh known_hosts file'.format(hostname))
            scan_command = ['sh', '-c', 'ssh-keyscan -t rsa {}'.format(hostname)]
            contents += check_output(scan_command, demote=False)
            dirty = True
        if dirty:
            # Content only ever grows, so rewriting from offset 0 without
            # truncating cannot leave stale bytes behind.
            known_hosts.seek(0)
            known_hosts.write(contents)
def read(path):
    """Return the contents of the file at *path* decoded as UTF-8."""
    with open(path, 'rb') as source:
        raw = source.read()
    return raw.decode('utf-8')
@daemon_command
def run_doctor():
    """Attempt to repair a wedged Dusty VM.

    Shuts the VM down, deletes its host-only network interface, starts it
    back up, and regenerates its certificates, then shuts it down again so
    the next `dusty up` performs a fresh initialization.
    """
    log_to_client('Shutting down Dusty VM to perform remedial operations')
    shut_down_docker_vm()
    # Remove and recreate the host-only interface; presumably this clears
    # broken networking state -- confirm against virtualbox helpers.
    delete_docker_vm_host_only_interface()
    ensure_docker_vm_is_started()
    regenerate_docker_vm_certificates()
    # TODO: Remove this step once initialize_docker_vm is smarter
    # The problem is we couldn't do our standard initialization
    # steps (making our symlinks, installing rsync, etc) since
    # the network was having problems. We need to shut the VM
    # down so the next dusty up attempts to do these. Instead
    # of basing init off VM state, we should write a file somewhere
    # and base the operation off of that.
    log_to_client('Doctor operations successful')
    log_to_client('Shutting down VM so next dusty up initializes properly')
    shut_down_docker_vm()
    log_to_client('Run dusty up to bring up the VM')
def case_insensitive_rename(src, dst):
    """Rename *src* to *dst*, working even when only letter case differs
    on a case-insensitive filesystem such as HFS.

    A direct rename that only changes case can be a no-op on such
    filesystems, so we hop through a unique intermediate location.
    Unlike the previous mkdtemp + rmtree + move dance, the intermediate
    path here lives inside a directory we still own, closing the race
    window in which another process could claim the freed temp path.
    """
    staging_root = tempfile.mkdtemp()
    try:
        staging = os.path.join(staging_root, 'case-rename')
        shutil.move(src, staging)
        shutil.move(staging, dst)
    finally:
        # Best-effort cleanup; the directory is empty after a successful
        # double move, and ignore_errors keeps failures from masking
        # an exception raised by the moves themselves.
        shutil.rmtree(staging_root, ignore_errors=True)
tempfile.mkdtemp() 31 | try: 32 | with _cleanup_path(self.temp_dir): 33 | raise RuntimeError() 34 | except: 35 | pass 36 | self.assertFalse(os.path.exists(self.temp_dir)) 37 | -------------------------------------------------------------------------------- /tests/unit/commands/upgrade_test.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | import tempfile 4 | 5 | from mock import patch 6 | 7 | from ...testcases import DustyTestCase 8 | from dusty.commands.upgrade import _move_temp_binary_to_path 9 | 10 | class UpgradeTestCase(DustyTestCase): 11 | def setUp(self): 12 | super(UpgradeTestCase, self).setUp() 13 | self.file1_path = tempfile.mkstemp(suffix='dusty')[1] 14 | self.file2_path = tempfile.mkstemp(suffix='dusty')[1] 15 | 16 | 17 | @patch('dusty.commands.upgrade._get_binary_location') 18 | def test_move_preserves_permissions(self, fake_get_binary_location): 19 | os.chmod(self.file1_path, 0764) 20 | os.chmod(self.file2_path, 0777) 21 | previous_st_mode = os.stat(self.file1_path).st_mode 22 | fake_get_binary_location.return_value = self.file1_path 23 | _move_temp_binary_to_path(self.file2_path) 24 | self.assertEqual(os.stat(self.file1_path).st_mode, previous_st_mode) 25 | 26 | @patch('dusty.commands.upgrade._get_binary_location') 27 | def test_refuses_to_overwrite(self, fake_get_binary_location): 28 | fake_get_binary_location.return_value = self.file1_path.rstrip('dusty') 29 | with self.assertRaises(RuntimeError): 30 | _move_temp_binary_to_path(self.file2_path) 31 | -------------------------------------------------------------------------------- /dusty/cli/up.py: -------------------------------------------------------------------------------- 1 | """Fully initialize all components of the Dusty system. 
def main(argv):
    """Build the ordered payload list for `dusty up`: spec/VM preparation,
    client-side registry login, then container startup."""
    args = docopt(__doc__, argv)
    prep = Payload(prep_for_start_local_env, pull_repos=not args['--no-pull'])
    login = Payload(log_in_to_required_registries)
    # Runs on the client rather than the daemon -- presumably so it can
    # reach the invoking user's registry credentials; confirm upstream.
    login.run_on_daemon = False
    start = Payload(start_local_env, recreate_containers=not args['--no-recreate'])
    return [prep, login, start]
class TestConfigCLI(DustyIntegrationTestCase):
    """Integration tests for the `dusty config` CLI commands."""
    def test_config_list_returns(self):
        result = self.run_command('config list')
        self.assertInSameLine(result, 'Key', 'Description', 'Value')
        self.assertInSameLine(result, 'bundles', '[]')
        self.assertInSameLine(result, 'mac_username', self.tests_user)
        self.assertInSameLine(result, 'setup_has_run', 'True')

    def test_config_listvalues_returns(self):
        # safe_load: the CLI emits plain YAML, and yaml.load without an
        # explicit Loader is unsafe on untrusted input and deprecated
        # since PyYAML 5.1.
        result = yaml.safe_load(self.run_command('config listvalues'))
        self.assertItemsEqual(result, get_config())

    def test_config_set_fails_with_no_args(self):
        with self.assertRaises(docopt.DocoptExit):
            self.run_command('config set')

    def test_config_set_fails_on_nonexistent_key(self):
        with self.assertRaises(self.CommandError):
            self.run_command('config set bacon-level extreme')

    def test_config_set_works_with_valid_input(self):
        self.run_command('config set vm_memory_size 3000')
        result = yaml.safe_load(self.run_command('config listvalues'))
        # Values round-trip through the CLI as strings.
        self.assertEqual(result['vm_memory_size'], '3000')
def _has_active_container(spec_type, service_name):
    """Return whether *service_name* has at least one running Dusty
    container. Libs never get containers of their own."""
    if spec_type == 'lib':
        return False
    return get_dusty_containers([service_name]) != []

@daemon_command
def get_dusty_status():
    """Log a table of every known spec and whether it has an active container."""
    if not docker_vm_is_running():
        log_to_client('Docker VM is powered off. You can start it with `dusty up`')
        return
    assembled_specs = get_assembled_specs()
    status_table = PrettyTable(["Name", "Type", "Has Active Container"])
    # Dusty's own nginx container (host forwarding) belongs to no spec,
    # so its row is added explicitly with an empty type column.
    nginx_active = get_dusty_containers([constants.DUSTY_NGINX_NAME]) != []
    status_table.add_row([constants.DUSTY_NGINX_NAME, '', 'X' if nginx_active else ''])
    for spec in assembled_specs.get_apps_libs_and_services():
        active = _has_active_container(spec.type_singular, spec.name)
        status_table.add_row([spec.name, spec.type_singular, 'X' if active else ''])
    log_to_client(status_table.get_string(sortby="Type"))
def main(argv):
    """Dispatch the `dusty bundles` subcommands (list / activate /
    deactivate) to their daemon payloads."""
    args = docopt(__doc__, argv)
    if args['list']:
        return Payload(list_bundles)
    elif args['activate']:
        # NOTE(review): args[''] looks like an extraction artifact -- the
        # docopt positional was presumably args['<bundles>'] with the angle
        # brackets stripped; confirm against the original usage string.
        return Payload(activate_bundle, args[''], args['--only'])
    elif args['deactivate']:
        return Payload(deactivate_bundle, args[''])
# Publish a Dusty release to GitHub: create the release, then upload the
# setup assets, the standalone binary, and a tarball bundling both.
import os
import tarfile

from github3 import login

# Authenticate against GitHub and locate the target repository.
token = os.getenv('GITHUB_TOKEN')
gh = login(token=token)
repo = gh.repository('gamechanger', 'dusty')

version = os.getenv('VERSION')
prerelease = os.getenv('PRERELEASE') == 'true'

release_name = version
release = repo.create_release(version, name=release_name, prerelease=prerelease)

# Plain-text setup assets are safe to read in text mode.
for setup_file in ['com.gamechanger.dusty.plist', 'install.sh']:
    with open(os.path.join('setup', setup_file), 'r') as f:
        release.upload_asset(content_type='text/plain',
                             name=setup_file,
                             asset=f)

# BUGFIX: binary assets must be opened in binary mode ('rb'); text mode
# corrupts them under Python 3 (decode errors) and on platforms that
# perform newline translation.
for binary in ['dusty']:
    with open(os.path.join('dist', binary), 'rb') as f:
        release.upload_asset(content_type='application/octet-stream',
                             name=binary,
                             asset=f)

with tarfile.open('dusty.tar.gz', 'w:gz') as tarball:
    tarball.add('dist/dusty', arcname='dusty')
    tarball.add('setup/com.gamechanger.dusty.plist', arcname='com.gamechanger.dusty.plist')
    tarball.add('setup/brew-install.sh', arcname='brew-install.sh')

# BUGFIX: the gzip tarball is binary as well -- open with 'rb'.
with open('dusty.tar.gz', 'rb') as f:
    release.upload_asset(content_type='application/octet-stream',
                         name='dusty.tar.gz',
                         asset=f)
def _write_nginx_config(nginx_config, path):
    """Persist a compiled Nginx config string to *path* inside the Dusty
    includes directory (which the main nginx.conf is expected to include)."""
    with open(path, 'w') as config_out:
        config_out.write(nginx_config)

def update_nginx_from_config(nginx_config):
    """Stage the Dusty nginx sub-configs plus the 502 page in a fresh
    temp directory and rsync the whole tree into the VM's nginx config
    directory, where nginx will pick it up."""
    logging.info('Updating nginx with new Dusty config')
    staging_dir = tempfile.mkdtemp()
    os.mkdir(os.path.join(staging_dir, 'html'))
    config_targets = [
        (constants.NGINX_BASE_CONFIG, constants.NGINX_PRIMARY_CONFIG_NAME),
        (nginx_config['http'], constants.NGINX_HTTP_CONFIG_NAME),
        (nginx_config['stream'], constants.NGINX_STREAM_CONFIG_NAME),
    ]
    for contents, filename in config_targets:
        _write_nginx_config(contents, os.path.join(staging_dir, filename))
    _write_nginx_config(constants.NGINX_502_PAGE_HTML,
                        os.path.join(staging_dir, 'html', constants.NGINX_502_PAGE_NAME))
    sync_local_path_to_vm(staging_dir, constants.NGINX_CONFIG_DIR_IN_VM)
Instead of having to activate the five
applications which compose a logical service, the user simply activates
the bundle for that logical service and lets Dusty do the rest.
set -e

# This is set by Jenkins during release
release=
LEGACY_INSTALL_ROOT=/System/Library/LaunchDaemons
LEGACY_PLIST_NAME=org.gamechanger.dusty.plist
INSTALL_ROOT=/Library/LaunchDaemons
PLIST_NAME=com.gamechanger.dusty.plist
TMP_DIR=`mktemp -d`

function bold_echo {
    echo -e "\033[1m$1\033[0m"
}

bold_echo "Downloading dusty binary"
# BUGFIX: -f makes curl exit non-zero on HTTP errors so we never install
# a 404 HTML page as the dusty binary; set -e then aborts the script.
curl -fL https://github.com/gamechanger/dusty/releases/download/$release/dusty > $TMP_DIR/dusty
chmod +x $TMP_DIR/dusty
bold_echo "Authenticating as super user... needed to setup daemon"
sudo -v
bold_echo "Moving dusty binary into place"
sudo mv -f $TMP_DIR/dusty /usr/local/bin/dusty
bold_echo "Resetting dusty daemon"
sudo curl -fL -o $INSTALL_ROOT/$PLIST_NAME https://raw.githubusercontent.com/gamechanger/dusty/$release/setup/$PLIST_NAME
sudo launchctl unload $INSTALL_ROOT/$PLIST_NAME
bold_echo "Testing dusty daemon's preflight..."
# BUGFIX: 'exit 1' inside a (...) subshell only exits the subshell; the
# script previously kept going only because 'set -e' happened to catch the
# non-zero status. A { ...; } group makes the abort explicit and reliable.
sudo -H dusty -d --preflight-only || { bold_echo "Preflight failed; not loading daemon"; exit 1; }
bold_echo "Loading dusty daemon"
sudo launchctl load $INSTALL_ROOT/$PLIST_NAME

# Clean up install from legacy install directory if it exists
if [ -f $LEGACY_INSTALL_ROOT/$LEGACY_PLIST_NAME ]; then
    echo "Removing legacy install plist in $LEGACY_INSTALL_ROOT"
    sudo launchctl unload $LEGACY_INSTALL_ROOT/$LEGACY_PLIST_NAME
    sudo rm $LEGACY_INSTALL_ROOT/$LEGACY_PLIST_NAME
fi
class TestConfig(DustyTestCase):
    """Round-trip tests for saving and reading the Dusty config."""
    def setUp(self):
        super(TestConfig, self).setUp()
        self.test_config = {constants.CONFIG_BUNDLES_KEY: ['bundle-a'],
                            constants.CONFIG_REPO_OVERRIDES_KEY: {'repo-a': '/var/run/repo-a'},
                            constants.CONFIG_MAC_USERNAME_KEY: 'root'}

    def test_save_and_get_config(self):
        config.save_config(self.test_config)
        self.assertItemsEqual(self.test_config, config.get_config())

    def test_get_config_value(self):
        config.save_config(self.test_config)
        # Collection-valued keys are compared item-wise.
        for key, expected in [(constants.CONFIG_BUNDLES_KEY, ['bundle-a']),
                              (constants.CONFIG_REPO_OVERRIDES_KEY, {'repo-a': '/var/run/repo-a'})]:
            self.assertItemsEqual(config.get_config_value(key), expected)
        self.assertEqual(config.get_config_value(constants.CONFIG_MAC_USERNAME_KEY), 'root')

    def test_save_config_value(self):
        config.save_config(self.test_config)
        self.assertItemsEqual(config.get_config_value(constants.CONFIG_BUNDLES_KEY), ['bundle-a'])
        # Overwriting an existing key replaces its value...
        config.save_config_value(constants.CONFIG_BUNDLES_KEY, ['bundle-b'])
        self.assertItemsEqual(config.get_config_value(constants.CONFIG_BUNDLES_KEY), ['bundle-b'])
        # ...and unknown keys are created on demand.
        config.save_config_value('new_key', 'bacon')
        self.assertEqual(config.get_config_value('new_key'), 'bacon')
def get_app_dusty_schema(doc, name=None):
    """Backfill the minimum required app-spec keys on *doc*, then wrap and
    validate it as a DustySchema."""
    if 'image' not in doc and 'build' not in doc:
        doc['image'] = ''
    if 'repo' in doc and 'mount' not in doc:
        doc['mount'] = '/repo'
    if 'mount' in doc and 'repo' not in doc:
        doc['repo'] = '/repo'
    doc.setdefault('commands', {'always': ['sleep 1']})
    return DustySchema(app_schema, doc, name, 'apps')

def get_lib_dusty_schema(doc, name=None):
    """Backfill required lib-spec keys on *doc* and validate it."""
    doc.setdefault('mount', '')
    doc.setdefault('repo', '')
    return DustySchema(lib_schema, doc, name, 'libs')

def get_bundle_dusty_schema(doc):
    """Validate *doc* against the bundle schema."""
    return DustySchema(bundle_schema, doc)

def apply_required_keys(specs):
    """Normalize a raw specs dict in place: guarantee every spec-type key
    exists, then wrap each app/lib/service entry in its DustySchema.
    Returns the same *specs* dict for convenience."""
    for spec_type in ['apps', 'bundles', 'libs', 'services']:
        specs.setdefault(spec_type, {})
    for name, doc in specs['apps'].iteritems():
        specs['apps'][name] = get_app_dusty_schema(doc, name)
    for name, doc in specs['libs'].iteritems():
        specs['libs'][name] = get_lib_dusty_schema(doc, name)
    for name, doc in specs['services'].iteritems():
        # Services have no dedicated schema to validate against.
        specs['services'][name] = DustySchema(None, doc, name, 'services')
    return specs
16 | 17 | ## Dusty Installation 18 | 19 | The easiest way to install Dusty is with [Homebrew](http://brew.sh/): 20 | 21 | ``` 22 | brew cask install dusty 23 | ``` 24 | 25 | If that worked, [continue to Setup.](setup.md) 26 | 27 | ### Manual Installation 28 | 29 | You can also install Dusty manually. First, make sure you have the following requirements installed: 30 | 31 | * [VirtualBox](https://www.virtualbox.org/wiki/VirtualBox) 32 | * [Docker Machine](https://docs.docker.com/machine/) 33 | * [Docker Compose](https://docs.docker.com/compose/) 34 | 35 | These can be obtained all together from [Docker Toolbox](https://www.docker.com/docker-toolbox). 36 | 37 | Then, run the following to download and install Dusty: 38 | 39 | ``` 40 | bash -c "`curl -L https://github.com/gamechanger/dusty/releases/download/0.7.2/install.sh`" 41 | ``` 42 | 43 | This script will install Dusty as a service and run the preflight check to ensure that all 44 | dependencies are installed. If the script throws an error, make sure to resolve that before 45 | continuing. 
class TestHostsSystem(DustyTestCase):
    """Tests for rendering and installing the Dusty section of the hosts file."""
    def setUp(self):
        super(TestHostsSystem, self).setUp()
        # Redirect HOSTS_PATH at a throwaway file so we never touch /etc/hosts.
        self.temp_hosts_path = tempfile.mkstemp()[1]
        self.old_hosts_path = dusty.constants.HOSTS_PATH
        dusty.constants.HOSTS_PATH = self.temp_hosts_path
        self.test_spec = [{'host_address': 'local.gc.com', 'forwarded_ip': '127.0.0.1'},
                          {'host_address': 'local-api.gc.com', 'forwarded_ip': '127.0.0.2'}]
        self.spec_output = ('# BEGIN section for Dusty\n'
                            '127.0.0.1 local.gc.com\n'
                            '127.0.0.2 local-api.gc.com\n'
                            '# END section for Dusty\n')

    def tearDown(self):
        super(TestHostsSystem, self).tearDown()
        os.remove(self.temp_hosts_path)
        dusty.constants.HOSTS_PATH = self.old_hosts_path

    def test_dusty_hosts_config(self):
        self.assertEqual(_dusty_hosts_config(self.test_spec), self.spec_output)

    def test_update_hosts_file_from_port_spec(self):
        update_hosts_file_from_port_spec({'hosts_file': self.test_spec})
        self.assertEqual(config_file.read(dusty.constants.HOSTS_PATH), self.spec_output)
def main(argv):
    """Dispatch the `dusty env` subcommands (list / set / unset) to their
    daemon payloads."""
    args = docopt(__doc__, argv)
    # NOTE(review): the empty-string keys args[''] below look like extraction
    # artifacts -- docopt positionals such as args['<app_or_service>'],
    # args['<var>'] and args['<value>'] were presumably stripped of their
    # angle brackets. Confirm against the original usage string.
    if args['list']:
        if args['']:
            return Payload(env.list_app_or_service, args[''])
        else:
            return Payload(env.list_all)
    elif args['set']:
        if args['--file']:
            # File paths are absolutized client-side before crossing to the daemon.
            return Payload(env.set_from_file, args[''], os.path.abspath(args['']))
        else:
            return Payload(env.set_var, args[''], args[''], args[''])
    elif args['unset']:
        if args['--all']:
            return Payload(env.unset_all, args[''])
        else:
            return Payload(env.unset_var, args[''], args[''])
import subprocess

from .. import constants
from ..log import log_to_client
from ..subprocess import check_output_demoted
from ..warnings import daemon_warnings
from ..payload import daemon_command

# Shell commands whose output is captured verbatim in the diagnostic dump.
DIAGNOSTIC_SUBPROCESS_COMMANDS = [
    ['which', 'rsync'],
    ['VBoxManage', '-v'],
    ['docker-machine', 'version'],
    ['docker-machine', 'ssh', constants.VM_MACHINE_NAME, 'df', '-h', '/dev/sda1'],
    ['docker', '-v'],
    ['docker-compose', '--version'],
    ['cat', constants.HOSTS_PATH],
    ['cat', constants.CONFIG_PATH],
    ['VBoxManage', 'showvminfo', constants.VM_MACHINE_NAME],
    ['ssh-add', '-l']
]

# (title, zero-arg callable) pairs evaluated in-process for the dump.
DIAGNOSTIC_DUSTY_COMMANDS = [
    ('Dusty Version', lambda: constants.VERSION),
    ('Dusty Binary', lambda: constants.BINARY),
    ('Daemon Warnings', daemon_warnings.pretty)
]

@daemon_command
def dump_diagnostics():
    """Log a diagnostic report (versions, config, VM state) to the client."""
    # In-process diagnostics first.
    for title, value_fn in DIAGNOSTIC_DUSTY_COMMANDS:
        log_to_client('COMMAND: {}'.format(title))
        log_to_client('OUTPUT:')
        log_to_client(value_fn())
        log_to_client('')

    # Then external commands; a failing command's captured output is still
    # useful diagnostic information, so it is logged rather than raised.
    for command in DIAGNOSTIC_SUBPROCESS_COMMANDS:
        log_to_client('COMMAND: {}'.format(' '.join(command)))
        log_to_client('OUTPUT:')
        try:
            captured = check_output_demoted(command, redirect_stderr=True)
        except subprocess.CalledProcessError as error:
            captured = error.output
        log_to_client(captured)
        log_to_client('')
@nottest
def resources_for_test_config(test_config):
    """Return {spec_type: {filename: contents}} for a test config's spec dirs."""
    # BUG FIX: `constants` was referenced below but never imported anywhere in
    # this module, which made this function raise NameError when called.
    from dusty import constants
    resources = {}
    for key in [constants.CONFIG_BUNDLES_KEY, 'apps', 'libs', 'services']:
        key_path = 'test_configs/{}/{}'.format(test_config, key)
        if resource_isdir(__name__, key_path):
            resources[key] = {resource_name: resource_string(__name__, '{}/{}'.format(key_path, resource_name))
                              for resource_name in resource_listdir(__name__, key_path)}
    return resources

@nottest
def specs_for_test_config(test_config):
    """Assemble specs directly from a test config's directory on disk."""
    case_path = '{}/test_configs/{}/'.format(__path__[0], test_config)
    return get_specs_from_path(case_path)

@nottest
def assembled_specs_for_test_config(test_config):
    """Load the pre-assembled spec fixture (assembled_spec.yml) for a test config."""
    assembled_file = "{}/test_configs/{}/assembled_spec.yml".format(__path__[0], test_config)
    with open(assembled_file, 'r') as f:
        return yaml.load(f.read())

@nottest
def nginx_config_for_test_config(test_config):
    """Return the expected nginx.conf fixture for a test config."""
    return resource_string(__name__, 'test_configs/{}/nginx.conf'.format(test_config))

@nottest
def docker_compose_yaml_for_test_config(test_config):
    """Return the expected docker-compose.yml fixture for a test config."""
    return resource_string(__name__, 'test_configs/{}/docker-compose.yml'.format(test_config))
import textwrap

from prettytable import PrettyTable

from ..config import get_config, save_config_value, refresh_config_warnings
from .. import constants
from ..log import log_to_client
from ..payload import daemon_command

def _eligible_config_keys_for_setting():
    """Return config keys that may be set: keys that are unset or string-valued."""
    current = get_config()
    return [key for key in sorted(constants.CONFIG_SETTINGS.keys())
            if key not in current or isinstance(current[key], basestring)]

@daemon_command
def list_config():
    """Log a table of every known config key, its description, and current value."""
    current = get_config()
    table = PrettyTable(['Key', 'Description', 'Value'])
    for key, description in constants.CONFIG_SETTINGS.iteritems():
        # Wrap long text so the table stays readable in a terminal.
        wrapped_description = '\n'.join(textwrap.wrap(description, 80))
        wrapped_value = '\n'.join(textwrap.wrap(str(current.get(key)), 80))
        table.add_row([key, wrapped_description, wrapped_value])
    log_to_client(table.get_string(sortby='Key'))

@daemon_command
def list_config_values():
    """Log the raw config dict as-is."""
    log_to_client(get_config())

@daemon_command
def save_value(key, value):
    """Persist a single string config value, then refresh config warnings.

    Raises:
        KeyError: if `key` is not a known setting, or its current value is not
            a string (only string values may be modified through this command).
    """
    current = get_config()
    if key not in constants.CONFIG_SETTINGS:
        raise KeyError('Your key {} must be in the list {}'.format(key, sorted(constants.CONFIG_SETTINGS.keys())))
    if key in current and not isinstance(current[key], basestring):
        raise KeyError('You can only modify string values in your config. {} has type {}'.format(key, type(current[key])))

    save_config_value(key, value)
    log_to_client('Set {} to {} in your config'.format(key, value))
    refresh_config_warnings()
(y/n) ").strip().upper() == 'Y': 41 | return Payload(restore, path) 42 | else: 43 | log_to_client("Restore cancelled") 44 | -------------------------------------------------------------------------------- /docs/specs/index.md: -------------------------------------------------------------------------------- 1 | # Specs Overview 2 | 3 | Dusty uses YAML specifications for configuration of the applications, services, and 4 | libraries used to create your stack. 5 | 6 | An additional entrypoint layer, called a Bundle, is used to provide toggleable entrypoints 7 | into your stack's dependency graph. Users can decide which Bundles they want to run, 8 | and Dusty runs the applications, services, and libraries defined by those Bundles. 9 | 10 | 1. [Bundles](./bundle-specs.md) - Logical groups of applications. These can be 11 | toggled by users at runtime to mix and match parts of the stack. 12 | 1. [Apps](./app-specs.md) - Applications which you actively develop. These may link 13 | to a source repo and install Libs. May include test specifications. 14 | 1. [Services](./service-specs.md) - Applications which you do not develop. Services are 15 | often used to run database containers off of a public Docker image. 16 | 1. [Libs](./lib-specs.md) - Libraries which you actively develop. These may be depended on 17 | by Apps, which will then install them automatically. May include test specifications. 18 | 19 | Additionally, apps and libs share the following common sub-schemata - 20 | 21 | * [Tests](./test-specs.md) - Specifications of test images and suites. Libs and Apps with 22 | test specs can be tested using `dusty test`. 23 | 24 | ## Dusty Specs Repo 25 | 26 | Dusty assumes that your specs are defined in a Git repo with the following format - 27 | 28 | ``` 29 | / 30 | apps/ 31 | bundles/ 32 | libs/ 33 | services/ 34 | ``` 35 | 36 | Each folder must contain YAML files with specs which match the schema for that type. 
"""
Place files in Dusty containers

Assets are files to be put in containers, but which don't live in a repository.
Assets are declared in Dusty specs of apps and libraries, and their values are
managed with the CLI.

Usage:
  assets list [<app_or_lib>]
  assets read <asset_key>
  assets set <asset_key> <local_path>
  assets unset <asset_key>

Commands:
  list     List all assets that are defined in specs for active apps and libs
  read     Print the current value of an asset
  set      Associate an asset with the contents of a local file
  unset    Delete the currently registered value of an asset

Examples:
  To set the value of the asset GITHUB_KEY to the contents of ~/.ssh/id_rsa:
    dusty assets set GITHUB_KEY ~/.ssh/id_rsa
"""

import os
import sys

from docopt import docopt

from ..payload import Payload
from ..commands import assets
from ..log import log_to_client

def main(argv):
    """Dispatch `dusty assets` subcommands (these run client-side, not via Payload).

    NOTE(review): the docopt placeholders (<app_or_lib>, <asset_key>,
    <local_path>) were reconstructed from the call pattern and the GITHUB_KEY
    example; confirm against the canonical source.
    """
    args = docopt(__doc__, argv)
    if args['list']:
        if args['<app_or_lib>']:
            assets.list_by_app_or_lib(args['<app_or_lib>'])
        else:
            assets.list_all()
    elif args['read']:
        assets.read_asset(args['<asset_key>'])
    elif args['set']:
        # Validate readability up front so we fail fast with a clear message.
        if not os.access(args['<local_path>'], os.R_OK):
            log_to_client('Local path {} does not exist, or you don\'t have permission to access it'.format(args['<local_path>']))
            sys.exit(1)
        assets.set_asset(args['<asset_key>'], os.path.abspath(args['<local_path>']))
    elif args['unset']:
        assets.unset_asset(args['<asset_key>'])
import time
import subprocess

from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture

class TestUpCLI(DustyIntegrationTestCase):
    """Integration tests for `dusty up`."""

    def setUp(self):
        super(TestUpCLI, self).setUp()
        # Two busybox bundles give us two app containers to assert against.
        busybox_single_app_bundle_fixture(num_bundles=2)
        self.run_command('bundles activate busyboxa busyboxb')

    def tearDown(self):
        # Best-effort stop; the environment may already be down.
        try:
            self.run_command('stop')
        except Exception:
            pass
        super(TestUpCLI, self).tearDown()

    def test_basic_up_command(self):
        output = self.run_command('up')
        self.assertIn('Your local environment is now started!', output)
        for app in ('busyboxa', 'busyboxb'):
            self.assertContainerRunning(app)

    def test_basic_up_recreate(self):
        self.run_command('up')
        # A second plain `up` tears down and recreates both containers.
        output = self.run_command('up')
        for container in ('dusty_busyboxb_1', 'dusty_busyboxa_1'):
            self.assertIn('Removing {}'.format(container), output)
            self.assertIn('Creating {}'.format(container), output)
        # With --no-recreate the running containers are left alone.
        output = self.run_command('up --no-recreate')
        for container in ('dusty_busyboxb_1', 'dusty_busyboxa_1'):
            self.assertNotIn('Creating {}'.format(container), output)

    def test_basic_up_no_pull(self):
        output = self.run_command('up')
        self.assertIn('Updated managed copy of file:///tmp/fake-repo', output)
        # --no-pull skips updating the managed repo copy.
        output = self.run_command('up --no-pull')
        self.assertNotIn('Updated managed copy of file:///tmp/fake-repo', output)
class TestValidateCLI(DustyIntegrationTestCase):
    """Integration tests for `dusty validate`."""

    def setUp(self):
        super(TestValidateCLI, self).setUp()
        busybox_single_app_bundle_fixture(num_bundles=1)
        # Reserve a writable path but remove it immediately:
        # shutil.copytree requires that its destination not yet exist.
        self.temp_dir = tempfile.mkdtemp()
        shutil.rmtree(self.temp_dir)

    def tearDown(self):
        # The temp dir may or may not exist depending on the test; ignore either way.
        try:
            shutil.rmtree(self.temp_dir)
        except:
            pass
        super(TestValidateCLI, self).tearDown()

    def test_validate_success(self):
        self.run_command('validate')

    def test_validate_failure(self):
        invalid_fixture()
        # The daemon performs validation here, so the failure surfaces as a CommandError.
        with self.assertRaises(self.CommandError):
            self.run_command('validate')

    def test_validate_success_against_location(self):
        shutil.copytree(get_specs_path(), self.temp_dir)
        self.run_command('validate {}'.format(self.temp_dir))

    def test_validate_failure_against_location(self):
        invalid_fixture()
        shutil.copytree(get_specs_path(), self.temp_dir)
        # With an explicit location the client validates locally, so we get
        # schemer's ValidationException rather than a CommandError.
        with self.assertRaises(ValidationException):
            self.run_command('validate {}'.format(self.temp_dir))
class TestConfigFileSystem(DustyTestCase):
    """Unit tests for Dusty-managed config-file section handling."""

    def setUp(self):
        super(TestConfigFileSystem, self).setUp()
        # A canonical Dusty-managed section...
        self.spec_output = textwrap.dedent("""\
            # BEGIN section for Dusty
            127.0.0.1 local.gc.com
            127.0.0.2 local-api.gc.com
            # END section for Dusty
            """)
        # ...and one unmanaged line that must survive section removal.
        self.non_spec_starter = "127.0.0.1 some-host.local.com\n"

    def test_remove_current_dusty_config_from_blank(self):
        stripped = config_file.remove_current_dusty_config(self.spec_output)
        self.assertEqual(stripped, "")

    def test_remove_current_dusty_config_from_starting(self):
        combined = self.non_spec_starter + self.spec_output
        stripped = config_file.remove_current_dusty_config(combined)
        self.assertEqual(stripped, self.non_spec_starter)

    def test_remove_current_dusty_config_from_ending(self):
        combined = self.spec_output + self.non_spec_starter
        stripped = config_file.remove_current_dusty_config(combined)
        self.assertEqual(stripped, self.non_spec_starter)

    def test_create_and_remove_section(self):
        # Creating a section and then stripping it must round-trip to nothing.
        section = config_file.create_config_section('arbitrary contents')
        self.assertEqual(config_file.remove_current_dusty_config(section), "")

    def test_get_dusty_config_section(self):
        dusty_contents = 'dusty contents!!\n'
        section = config_file.create_config_section(dusty_contents)
        file_contents = "leading stuff\n{}trailin_stuff".format(section)
        self.assertEqual(config_file.get_dusty_config_section(file_contents), dusty_contents)
// Return the effective port of the current page, falling back to the
// protocol default when location.port is empty.
var getPort = function() {
    // BUG FIX (idiom): use strict equality (===/!==) instead of loose
    // comparisons to avoid implicit type coercion surprises.
    if (location.port !== '') {
        return location.port;
    } else if (location.protocol === 'http:') {
        return 80;
    } else if (location.protocol === 'https:') {
        return 443;
    }
};

// Register this page as a log consumer with the Dusty daemon, then invoke
// cb(appName, consumerId).
var registerConsumer = function(cb) {
    $.post(
        'http://127.0.0.1:60912/register-consumer',
        {
            hostname: location.hostname,
            port: getPort()
        },
        function(data) {
            return cb(data['app_name'], data['consumer_id']);
        });
};

// Poll the daemon for new log lines and append them to the page, unless the
// "stream new" checkbox is unchecked.
var consumeLogs = function(consumerId) {
    if (!$('#stream-new-checkbox').is(':checked')) {
        return;
    }
    $.ajax({
        url: 'http://127.0.0.1:60912/consume/' + consumerId,
        success: function(data) {
            // BUG FIX: declare logContainer with `var` — it was previously an
            // implicit global leaking out of this callback.
            var logContainer = $('#log-container');
            logContainer.append(data['logs']);
            logContainer.scrollTop(logContainer[0].scrollHeight);
            $('#status').text('Status: ' + data['status']);
        },
        error: function() {
            $('#status').text('Got an error talking to Dusty daemon, streaming is inoperable');
            window.clearInterval(consumeInterval);
        }
    });
};

// Reload the page as soon as the underlying service starts answering requests.
var checkServiceAvailability = function(consumerId) {
    $.ajax({
        url: location.href,
        success: function(data) {
            location.reload();
        }
    });
};
class TaskQueue(Queue, object):
    """Executable task queue used for multithreaded execution of multiple
    function calls. Concurrency is limited by `pool_size`."""

    def __init__(self, pool_size):
        super(TaskQueue, self).__init__()
        self.pool_size = pool_size
        # Exceptions raised by tasks are collected here instead of
        # propagating out of worker threads.
        self.errors = []

    def enqueue_task(self, fn, *args, **kwargs):
        """Queue `fn(*args, **kwargs)` for later parallel execution."""
        self.put((fn, args, kwargs))

    def _task_executor(self, fn, args, kwargs):
        # Runs on a worker thread; capture any failure for post-hoc reporting.
        try:
            fn(*args, **kwargs)
        except Exception as exc:
            self.errors.append(exc)

    def execute(self):
        """Drain the queue through a thread pool, then raise if any task failed."""
        self.pool = multiprocessing.pool.ThreadPool(self.pool_size)
        while not self.empty():
            task, task_args, task_kwargs = self.get()
            self.pool.apply_async(self._task_executor, args=(task, task_args, task_kwargs))
        self.pool.close()
        self.pool.join()

        if not self.errors:
            return
        # Report every collected failure, then signal overall failure once.
        for error in self.errors:
            logging.exception(error)
            log_to_client(error.message or str(error))
        raise RuntimeError("Exceptions encountered during parallel task execution")

@contextmanager
def parallel_task_queue(pool_size=multiprocessing.cpu_count()):
    """Context manager for setting up a TaskQueue. Upon leaving the
    context manager, all tasks that were enqueued will be executed
    in parallel subject to `pool_size` concurrency constraints."""
    task_queue = TaskQueue(pool_size)
    yield task_queue
    task_queue.execute()
from . import exec_in_container, get_container_for_app_or_service
from ...path import parent_dir

def _create_dir_in_container(container, path):
    # mkdir -p: create intermediate directories, succeed if already present.
    return exec_in_container(container, 'mkdir -p', path)

def _remove_path_in_container(container, path):
    return exec_in_container(container, 'rm -rf', path)

def _move_in_container(container, source_path, dest_path):
    return exec_in_container(container, 'mv', source_path, dest_path)

def _recursive_copy_in_container(container, source_path, dest_path):
    return exec_in_container(container, 'cp -r', source_path, dest_path)

def copy_path_inside_container(app_or_service_name, source_path, dest_path):
    """Recursively copy a path to another location inside the same container."""
    target = get_container_for_app_or_service(app_or_service_name, raise_if_not_found=True)
    _create_dir_in_container(target, parent_dir(dest_path))
    _recursive_copy_in_container(target, source_path, dest_path)

def move_dir_inside_container(app_or_service_name, source_path, dest_path):
    """Move a directory inside a container, replacing any existing destination."""
    target = get_container_for_app_or_service(app_or_service_name, raise_if_not_found=True)
    _create_dir_in_container(target, parent_dir(dest_path))
    # Clear the destination first so `mv` replaces rather than nests the source.
    _remove_path_in_container(target, dest_path)
    _move_in_container(target, '{}/'.format(source_path), dest_path)

def move_file_inside_container(app_or_service_name, source_path, dest_path):
    """Move a single file inside a container, creating the destination directory."""
    target = get_container_for_app_or_service(app_or_service_name, raise_if_not_found=True)
    _create_dir_in_container(target, parent_dir(dest_path))
    _move_in_container(target, source_path, dest_path)

def container_path_exists(app_or_service_name, path):
    """Return True if `path` exists inside the named app/service container."""
    target = get_container_for_app_or_service(app_or_service_name, raise_if_not_found=True)
    probe = 'sh -c \'[ -e {} ] && echo "yes" || echo "no"\''.format(path)
    return exec_in_container(target, probe).rstrip() == "yes"
from .compiler.spec_assembler import get_repo_of_app_or_library, get_expanded_libs_specs
from .config import get_config_value, save_config_value
from . import constants

class RepoChangeSet(object):
    """Used for keeping track of the latest SHAs seen for a set
    of repos during a given operation. The observed SHAs are persisted
    in the Dusty config.

    For example, to answer "have any of this app's dependent repos changed
    since the last time a testing image was built for it?":

        RepoChangeSet('testing_image', 'myapp')
    """

    def __init__(self, set_key, app_or_library_name):
        self.set_key = set_key
        self.app_or_library_name = app_or_library_name
        # Track the app/lib's own repo plus the repo of every lib it depends on.
        dep_libs = self.primary_spec['depends']['libs']
        self.repos = {get_repo_of_app_or_library(name)
                      for name in [self.app_or_library_name] + list(dep_libs)}

    @property
    def primary_spec(self):
        # Looked up lazily so we always see the current expanded specs.
        return get_expanded_libs_specs().get_app_or_lib(self.app_or_library_name)

    def _get_current_sha_dict(self):
        return {repo.remote_path: repo.local_commit_sha for repo in self.repos}

    def has_changed(self):
        """Return True if any tracked repo's SHA differs from what was last stored."""
        stored = get_config_value(constants.CONFIG_CHANGESET_KEY) or {}
        previous = stored.get(self.set_key, {}).get(self.app_or_library_name, {})
        return self._get_current_sha_dict() != previous

    def update(self):
        """Persist the current SHAs of all tracked repos to the Dusty config."""
        stored = get_config_value(constants.CONFIG_CHANGESET_KEY) or {}
        stored.setdefault(self.set_key, {})[self.app_or_library_name] = self._get_current_sha_dict()
        save_config_value(constants.CONFIG_CHANGESET_KEY, stored)
import textwrap

from prettytable import PrettyTable

from ..log import log_to_client
from ..compiler.spec_assembler import get_specs
from ..compiler.compose import container_code_path
from . import utils
from ..systems.docker import get_dusty_container_name
from ..command_file import dusty_command_file_name
from .. import constants
from ..payload import daemon_command

@daemon_command
def script_info_for_app(app_name):
    """Log a table of the scripts (name + description) registered for an app.

    Raises:
        KeyError: if no app named `app_name` exists in the specs.
    """
    app_specs = get_specs()['apps'].get(app_name)
    if not app_specs:
        raise KeyError('No app found named {} in specs'.format(app_name))
    if not app_specs['scripts']:
        log_to_client('No scripts registered for app {}'.format(app_name))
        return

    table = PrettyTable(['Script', 'Description'])
    for script_spec in app_specs['scripts']:
        table.add_row([script_spec['name'],
                       '\n'.join(textwrap.wrap(script_spec['description'], 80))])
    log_to_client(table.get_string(sortby='Script'))

def execute_script(app_name, script_name, script_arguments=None):
    """Exec a named script inside the app's running container.

    Args:
        app_name: app whose container the script runs in.
        script_name: name of a script declared in the app's spec.
        script_arguments: optional list of arguments passed through to the script.

    Raises:
        KeyError: if the app or the script is not found in the specs.
    """
    # BUG FIX: the default was previously a shared mutable list ([]); use the
    # None-sentinel idiom instead. Behavior for all existing callers is unchanged.
    if script_arguments is None:
        script_arguments = []
    app_specs = get_specs()['apps'].get(app_name)
    if not app_specs:
        raise KeyError('No app found named {} in specs'.format(app_name))
    if not any(script['name'] == script_name for script in app_specs['scripts']):
        raise KeyError('No script found named {} in specs for app {}'.format(script_name, app_name))

    command_file = '{}/{}'.format(constants.CONTAINER_COMMAND_FILES_DIR,
                                  dusty_command_file_name(app_specs.name, script_name=script_name))
    container_name = get_dusty_container_name(app_name)
    exec_options = utils.exec_docker_options()
    utils.exec_docker('exec', exec_options, container_name, 'sh', command_file, *script_arguments)
#!/usr/bin/env bash

# Run Dusty's integration test suite against a daemon started from source.
# Optional first argument restricts the run to a subdirectory of tests/integration.
#
# FIX: quote all variable expansions (paths under $TMPDIR/$ROOT_DIR may contain
# spaces) and use $(...) instead of backticks for command substitution.

# Check that the script is being run correctly and can find the Dusty executable

if [ -z "${VIRTUAL_ENV+x}" ]; then
    echo "Cowardly refusing to run outside of a virtualenv"
    exit 1
fi

if ! which -s dusty; then
    echo "Could not locate the Dusty executable, are you in a virtualenv with Dusty installed?"
    exit 1
fi

if [ "$(which dusty)" = '/usr/local/bin/dusty' ]; then
    echo "The located version of Dusty seems to be the installed binary, not a version from source. Make sure you're in your Dusty virtualenv."
    exit 1
fi

# Make sure the Python test requirements get installed, because setuptools makes that hard

DUSTY_BIN_PATH="$(which dusty)"
DUSTY_SOCKET_PATH="${TMPDIR}dusty-integration.sock"
ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )"
TEMP_REQUIREMENTS_PATH="${TMPDIR}dusty-test-requirements.txt"
TEST_USER="$(whoami)"

TEST_PATH="$ROOT_DIR/tests/integration"
if [ ! -z "$1" ]; then
    TEST_PATH="$TEST_PATH/$1"
fi

echo "Ensuring Python test requirements are installed..."
pushd "$ROOT_DIR" > /dev/null
python -c "import requirements; f = open('$TEMP_REQUIREMENTS_PATH', 'w'); [f.write('{}\n'.format(lib)) for lib in requirements.test_requires]; f.close();"
pip install -q -r "$TEMP_REQUIREMENTS_PATH"

echo "Starting the Dusty daemon for integration testing, will require root privileges for this..."
sudo -E DUSTY_SOCKET_PATH="$DUSTY_SOCKET_PATH" nohup "$DUSTY_BIN_PATH" -d --suppress-warnings > test-daemon.log 2>&1 &
DUSTYD_PID=$!
# Always tear the daemon down on exit, pass or fail.
trap "sudo kill $DUSTYD_PID" EXIT
sleep 2

sudo -E DUSTY_SOCKET_PATH="$DUSTY_SOCKET_PATH" DUSTY_ALLOW_INTEGRATION_TESTS=yes DUSTY_INTEGRATION_TESTS_USER="$TEST_USER" nosetests -v "$TEST_PATH"

if [ $? -eq 0 ]; then
    echo "TESTS PASSED"
else
    echo "TESTS FAILED"
fi

popd > /dev/null
22 | 23 | To see how Dusty works, see [Architecture](https://dusty.readthedocs.org/en/latest/architecture/) or 24 | [Usage](https://dusty.readthedocs.org/en/latest/usage/). 25 | 26 | # Getting Started 27 | 28 | If you'd like to try out Dusty for yourself, please see the 29 | [Getting Started](http://dusty.readthedocs.org/en/latest/getting-started/) 30 | section of the documentation. 31 | 32 | Dusty's CLI also has help which you can access at any time through `dusty -h`. 33 | 34 | # Contributing 35 | 36 | See [Contributing](https://dusty.readthedocs.org/en/latest/contributing/). 37 | -------------------------------------------------------------------------------- /dusty/cli/repos.py: -------------------------------------------------------------------------------- 1 | """Manage repos referenced in the current Dusty specs. 2 | 3 | By default, Dusty automatically manages the repos referenced 4 | in your app and lib specs. This includes cloning the repo and 5 | pulling updates from master to keep the Dusty-managed copy up-to-date. 6 | 7 | Alternatively, you can override a repo to manage it yourself. This 8 | is useful for actively developing apps and libs that depend on that 9 | repo. To override a repo, use the `override` or `from` commands. 
"""Manage repos referenced in the current Dusty specs.

By default, Dusty automatically manages the repos referenced
in your app and lib specs. This includes cloning the repo and
pulling updates from master to keep the Dusty-managed copy up-to-date.

Alternatively, you can override a repo to manage it yourself. This
is useful for actively developing apps and libs that depend on that
repo. To override a repo, use the `override` or `from` commands.

Usage:
  repos from <source_path>
  repos list
  repos manage (--all | <repo>)
  repos override <repo> <source_path>
  repos update

Commands:
  from      Override all repos from a given directory
  list      Show state of all repos referenced in specs
  manage    Tell Dusty to manage a repo or all repos, removing any overrides
  override  Override a repo with a local copy that you manage
  update    Pull latest master on Dusty-managed repos

Options:
  --all     When provided to manage, dusty will manage all currently overridden repos
"""

from docopt import docopt

from ..payload import Payload
from ..commands.repos import (list_repos, override_repo, manage_repo, manage_all_repos,
                              override_repos_from_directory, update_managed_repos)

def main(argv):
    """Parse `dusty repos` arguments and return the Payload for the daemon.

    NOTE(review): the docopt placeholders (<repo>, <source_path>) were
    reconstructed from the call pattern — the original usage text had them
    stripped; confirm against the canonical source.
    """
    args = docopt(__doc__, argv)
    if args['list']:
        return Payload(list_repos)
    elif args['override']:
        return Payload(override_repo, args['<repo>'], args['<source_path>'])
    elif args['manage']:
        if args['--all']:
            return Payload(manage_all_repos)
        else:
            return Payload(manage_repo, args['<repo>'])
    elif args['from']:
        return Payload(override_repos_from_directory, args['<source_path>'])
    elif args['update']:
        return Payload(update_managed_repos)
@patch('dusty.commands.assets.initialize_docker_vm')
@patch('dusty.commands.assets.asset_is_set')
class TestAssetsCommands(DustyTestCase):
    """Unit tests for `dusty assets` listing commands."""

    def _output_lines(self):
        # Each asset is rendered on its own line of client output.
        return self.last_client_output.splitlines()

    def assertAppOrLibAssetListed(self, asset_name, path):
        # The asset name and its path must appear together on some output line.
        self.assertTrue(any(asset_name in line and path in line
                            for line in self._output_lines()))

    def assertAssetListed(self, asset_name, used_by, required_by):
        used = assets._get_string_of_set(used_by)
        required = assets._get_string_of_set(required_by)
        self.assertTrue(any(asset_name in line and used in line and required in line
                            for line in self._output_lines()))

    def test_list_by_app(self, fake_asset_is_set, *args):
        fake_asset_is_set.return_value = True
        assets.list_by_app_or_lib('app-a')
        self.assertAppOrLibAssetListed('required_asset', 'required_path')
        self.assertAppOrLibAssetListed('optional_asset', 'optional_path')

    def test_list_by_lib(self, fake_asset_is_set, *args):
        fake_asset_is_set.return_value = False
        assets.list_by_app_or_lib('lib-a')
        self.assertAppOrLibAssetListed('required_lib_asset', 'required_path')
        self.assertAppOrLibAssetListed('optional_lib_asset', 'optional_path')

    def test_list(self, fake_asset_is_set, *args):
        fake_asset_is_set.return_value = True
        bundles.activate_bundle(['bundle-a'], False)
        assets.list_all()
        self.assertAssetListed('required_asset', ['app-a'], ['app-a'])
        self.assertAssetListed('optional_asset', ['app-a'], [])
        self.assertAssetListed('required_lib_asset', ['lib-a'], ['lib-a'])
        self.assertAssetListed('optional_lib_asset', ['lib-a'], [])
        self.assertAssetListed('common_asset', ['app-a', 'lib-a'], ['app-a'])
def _executable_path(executable_name):
    """Return the absolute path of an executable, resolved via `which`."""
    return subprocess.check_output(['which', executable_name]).strip()

def exec_docker_options():
    """Build the option string for `docker exec`: always interactive (-i),
    adding a TTY (-t) only when stdout is itself a terminal."""
    exec_options = '-i'
    if sys.stdout.isatty():
        exec_options += 't'
    return exec_options

def exec_docker(*args):
    """Replace the current process with `docker <args>`.

    The Docker client relies on environment variables, so the current
    environment is copied and augmented with get_docker_env().  os.execle
    expects the environment dict as its final positional argument, which is
    why it is appended to args.  This call does not return on success.
    """
    updated_env = copy(os.environ)
    updated_env.update(get_docker_env())
    args += (updated_env,)
    os.execle(_executable_path('docker'), 'docker', *args)

def pty_fork(*args):
    """Runs a subprocess with a PTY attached via fork and exec.
    The output from the PTY is streamed through log_to_client.
    This should not be necessary for most subprocesses, we
    built this to handle Compose up which only streams pull
    progress if it is attached to a TTY."""

    updated_env = copy(os.environ)
    updated_env.update(get_docker_env())
    # os.execle takes the environment dict as its last positional argument.
    args += (updated_env,)
    executable = args[0]
    # Resolve the demotion closure before forking; the child calls it so the
    # spawned program runs as the configured Mac user rather than root.
    demote_fn = demote_to_user(get_config_value(constants.CONFIG_MAC_USERNAME_KEY))

    child_pid, pty_fd = pty.fork()
    if child_pid == 0:
        # Child process: drop privileges, then replace ourselves with the
        # target program (env dict is the trailing element of args).
        demote_fn()
        os.execle(_executable_path(executable), *args)
    else:
        # Parent: stream the child's PTY output one byte at a time so that
        # carriage-return progress redraws reach the client as they happen.
        child_process = psutil.Process(child_pid)
        terminal = os.fdopen(pty_fd, 'r', 0)  # buffering=0: unbuffered (Python 2 fdopen)
        with streaming_to_client():
            while child_process.status() == 'running':
                output = terminal.read(1)
                log_to_client(output)
        # NOTE(review): os.waitpid returns an encoded 16-bit status word, not
        # the raw exit code; nonzero still means failure, but the value handed
        # to CalledProcessError is the encoded status -- confirm if the exact
        # code matters to callers.
        _, exit_code = os.waitpid(child_pid, 0)
        if exit_code != 0:
            raise subprocess.CalledProcessError(exit_code, ' '.join(args[:-1]))
self).setUp() 11 | single_specs_fixture() 12 | self.run_command('repos override github.com/app/a {}'.format(self.fake_local_repo_location)) 13 | self.run_command('bundles activate bundle-a') 14 | 15 | def tearDown(self): 16 | try: 17 | self.run_command('stop') 18 | except: 19 | pass 20 | self.run_command('env unset appa --all') 21 | super(TestEnvCLI, self).tearDown() 22 | 23 | def test_env_put_in_container(self): 24 | self.run_command('env set appa pitcher pedro') 25 | self.run_command('up --no-pull') 26 | self.assertEnvInContainer('appa', 'pitcher', 'pedro') 27 | 28 | def test_env_overrides_spec_environment(self): 29 | self.run_command('env set appa SPEC_VALUE new-value') 30 | self.run_command('up --no-pull') 31 | self.assertEnvInContainer('appa', 'SPEC_VALUE', 'new-value') 32 | self.assertEnvInContainer('appa', 'SPEC_VALUE2', 'spec-specified-value') 33 | 34 | def test_unset_all(self): 35 | self.run_command('env set appa pitcher pedro') 36 | self.run_command('env set appa SPEC_VALUE new-value') 37 | self.run_command('env unset appa --all') 38 | self.run_command('up --no-pull') 39 | self.assertEnvNotInContainer('appa', 'pitcher') 40 | self.assertEnvInContainer('appa', 'SPEC_VALUE', 'spec-specified-value') 41 | 42 | def test_unset_one(self): 43 | self.run_command('env set appa pitcher pedro') 44 | self.run_command('env set appa SPEC_VALUE new-value') 45 | self.run_command('env unset appa SPEC_VALUE') 46 | self.run_command('up --no-pull') 47 | self.assertEnvInContainer('appa', 'pitcher', 'pedro') 48 | self.assertEnvInContainer('appa', 'SPEC_VALUE', 'spec-specified-value') 49 | -------------------------------------------------------------------------------- /dusty/commands/bundles.py: -------------------------------------------------------------------------------- 1 | import textwrap 2 | 3 | from prettytable import PrettyTable 4 | 5 | from ..config import get_config_value, save_config_value 6 | from ..compiler.spec_assembler import get_specs 7 | from ..log import 
def _validate_bundle_names(bundle_names, specs):
    """Raise KeyError if any name in bundle_names is not a bundle in specs.

    Shared by activate_bundle and deactivate_bundle, which previously each
    carried an identical copy of this validation loop.
    """
    for bundle_name in bundle_names:
        if bundle_name not in specs[constants.CONFIG_BUNDLES_KEY]:
            raise KeyError('No bundle exists named {}'.format(bundle_name))

@daemon_command
def list_bundles():
    """Log a table of every known bundle, its (wrapped) description, and
    whether it is currently activated."""
    specs, activated_bundles = get_specs(), get_config_value(constants.CONFIG_BUNDLES_KEY)
    table = PrettyTable(["Name", "Description", "Activated?"])
    for bundle, bundle_spec in specs[constants.CONFIG_BUNDLES_KEY].iteritems():
        table.add_row([bundle,
                       '\n'.join(textwrap.wrap(bundle_spec['description'], 80)),
                       "X" if bundle in activated_bundles else ""])
    log_to_client(table.get_string(sortby="Name"))

@daemon_command
def activate_bundle(bundle_names, exclusive):
    """Activate the named bundles, validating that they all exist first.

    When `exclusive` is True the given bundles replace the activated set;
    otherwise they are added to whatever is already active.
    """
    _validate_bundle_names(bundle_names, get_specs())
    if exclusive:
        activated_bundles = bundle_names
    else:
        activated_bundles = set(get_config_value(constants.CONFIG_BUNDLES_KEY)).union(bundle_names)
    save_config_value(constants.CONFIG_BUNDLES_KEY, list(activated_bundles))
    log_to_client('Activated bundles {}'.format(', '.join(bundle_names)))
    if exclusive:
        log_to_client('All other bundles have been deactivated')

@daemon_command
def deactivate_bundle(bundle_names):
    """Deactivate the named bundles, validating that they all exist first."""
    _validate_bundle_names(bundle_names, get_specs())
    activated_bundles = set(get_config_value(constants.CONFIG_BUNDLES_KEY)).difference(bundle_names)
    save_config_value(constants.CONFIG_BUNDLES_KEY, list(activated_bundles))
    log_to_client('Deactivated bundles {}'.format(', '.join(bundle_names)))
class TestWarnings(DustyTestCase):
    """Unit tests for the Warnings accumulator."""
    def setUp(self):
        super(TestWarnings, self).setUp()
        self.warnings = Warnings()

    def test_warn(self):
        """Messages accumulate in order under their namespace."""
        message_1 = 'Something is wrong, yo'
        message_2 = 'Yo this thing is also wrong'
        self.warnings.warn('test', message_1)
        # assertEqual instead of assertItemsEqual: on dicts, assertItemsEqual
        # compares only the keys, so the stored messages were never checked.
        self.assertEqual(self.warnings._stored, {'test': [message_1]})
        self.warnings.warn('test', message_2)
        self.assertEqual(self.warnings._stored, {'test': [message_1, message_2]})

    def test_has_warnings(self):
        """has_warnings flips to True once anything has been warned."""
        self.assertFalse(self.warnings.has_warnings)
        self.warnings.warn('test', 'yo')
        self.assertTrue(self.warnings.has_warnings)

    def test_pretty_with_no_warnings(self):
        """No warnings renders as the empty string."""
        self.assertEqual(self.warnings.pretty(), "")

    def test_pretty(self):
        """Long messages are wrapped at 80 columns in the pretty output."""
        message_1 = 'Something is wrong, yo'
        message_2 = 'Something is very wrong, and that something takes way more than 80 characters to communicate the fact that it is wrong'
        self.warnings.warn('test', message_1)
        self.warnings.warn('test', message_2)
        self.assertEqual(self.warnings.pretty(), "WARNING (test): Something is wrong, yo\nWARNING (test): Something is very wrong, and that something takes way more than 80 characters to\ncommunicate the fact that it is wrong\n")

    def test_clear_namespace(self):
        """Clearing a namespace empties its message list."""
        self.warnings.warn('test', 'Something is wrong, yo')
        self.assertEqual(len(self.warnings._stored['test']), 1)
        self.warnings.clear_namespace('test')
        self.assertEqual(len(self.warnings._stored['test']), 0)

    def test_clear_namespace_leaves_others_unaffected(self):
        """Clearing one namespace does not touch warnings in another."""
        self.warnings.warn('test', 'Something is wrong, yo')
        self.assertEqual(len(self.warnings._stored['test']), 1)
        self.warnings.clear_namespace('some-other-namespace')
        self.assertEqual(len(self.warnings._stored['test']), 1)
/dusty/commands/assets.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from prettytable import PrettyTable 4 | 5 | from ..compiler.spec_assembler import get_specs, get_assembled_specs 6 | from ..systems.virtualbox import (asset_is_set, asset_value, asset_vm_path, remove_asset, 7 | initialize_docker_vm, docker_vm_is_running) 8 | from ..systems.rsync import sync_local_path_to_vm 9 | from ..log import log_to_client 10 | from .. import constants 11 | 12 | def list_by_app_or_lib(app_or_lib): 13 | initialize_docker_vm() 14 | spec = get_specs().get_app_or_lib(app_or_lib) 15 | table = PrettyTable(["Asset", "Is Set", "Required", "In-Container Path"]) 16 | for asset in spec['assets']: 17 | table.add_row([asset['name'], 'X' if asset_is_set(asset['name']) else '', 'X' if asset['required'] else '', asset['path']]) 18 | log_to_client(table.get_string()) 19 | 20 | def _get_string_of_set(items): 21 | return ', '.join(sorted(items)) 22 | 23 | def list_all(): 24 | initialize_docker_vm() 25 | log_to_client('Listing assets used by active apps and libs') 26 | table = PrettyTable(["Asset", "Is Set", "Used By", "Required By"]) 27 | assembled_specs = get_assembled_specs() 28 | for asset_name, asset_info in assembled_specs['assets'].iteritems(): 29 | used_by_display = _get_string_of_set(asset_info['used_by']) 30 | required_by_display = _get_string_of_set(asset_info['required_by']) 31 | table.add_row([asset_name, 'X' if asset_is_set(asset_name) else '', used_by_display, required_by_display]) 32 | log_to_client(table.get_string()) 33 | 34 | def read_asset(asset_key): 35 | initialize_docker_vm() 36 | if not asset_is_set(asset_key): 37 | log_to_client('Asset {} isn\'t set'.format(asset_key)) 38 | return 39 | log_to_client(asset_value(asset_key)) 40 | 41 | def set_asset(asset_key, local_path): 42 | initialize_docker_vm() 43 | sync_local_path_to_vm(local_path, asset_vm_path(asset_key)) 44 | 45 | def unset_asset(asset_key): 46 | 
initialize_docker_vm() 47 | if not asset_is_set(asset_key): 48 | log_to_client('Asset {} isn\'t set'.format(asset_key)) 49 | return 50 | remove_asset(asset_key) 51 | -------------------------------------------------------------------------------- /tests/unit/changeset_test.py: -------------------------------------------------------------------------------- 1 | from mock import Mock 2 | 3 | from ..testcases import DustyTestCase 4 | from ..fixtures import basic_specs_fixture 5 | from dusty.config import get_config_value 6 | from dusty.changeset import RepoChangeSet 7 | from dusty import constants 8 | from dusty.source import Repo 9 | 10 | class TestRepoChangeset(DustyTestCase): 11 | def setUp(self): 12 | super(TestRepoChangeset, self).setUp() 13 | basic_specs_fixture() 14 | self.changeset = RepoChangeSet('testing', 'app-a') 15 | 16 | mocks = set() 17 | for repo in self.changeset.repos: 18 | mock_repo = Mock() 19 | mock_repo.remote_path = repo.remote_path 20 | mock_repo.local_commit_sha = 'sha_{}'.format(repo.remote_path) 21 | mocks.add(mock_repo) 22 | self.changeset.repos = mocks 23 | 24 | def test_init_on_app_with_no_libs(self): 25 | new = RepoChangeSet('new', 'app-b') 26 | self.assertEqual(len(new.repos), 1) 27 | self.assertIn(Repo('github.com/app/b'), new.repos) 28 | 29 | def test_init_on_app_with_libs(self): 30 | new = RepoChangeSet('new', 'app-a') 31 | self.assertEqual(len(new.repos), 2) 32 | self.assertIn(Repo('github.com/app/a'), new.repos) 33 | self.assertIn(Repo('github.com/lib/a'), new.repos) 34 | 35 | def test_init_on_lib(self): 36 | new = RepoChangeSet('new', 'lib-a') 37 | self.assertEqual(len(new.repos), 1) 38 | self.assertIn(Repo('github.com/lib/a'), new.repos) 39 | 40 | def test_has_changed_when_empty(self): 41 | self.assertTrue(self.changeset.has_changed()) 42 | 43 | def test_has_changed_after_save(self): 44 | self.changeset.update() 45 | self.assertFalse(self.changeset.has_changed()) 46 | 47 | def test_update(self): 48 | current_config = 
class Payload(object):
    """A serializable description of a function call to run on the daemon.

    The client builds Payloads and ships them to the daemon, which looks the
    function back up by key (see daemon_command) and invokes it with the
    stored positional and keyword arguments.
    """
    def __init__(self, fn, *args, **kwargs):
        self.fn = fn
        self.run_on_daemon = True
        self.client_version = VERSION
        self.args = args
        self.kwargs = kwargs
        self.suppress_warnings = False

    def __eq__(self, other):
        # Payloads compare by target function and arguments; flags such as
        # suppress_warnings deliberately do not affect equality.
        if isinstance(other, self.__class__):
            return self.fn == other.fn and self.args == other.args and self.kwargs == other.kwargs
        return False

    def __ne__(self, other):
        # Python 2 does not derive != from __eq__; without this, two equal
        # Payloads would still compare unequal under !=.
        return not self.__eq__(other)

    def run(self):
        """Invoke the wrapped function with the stored arguments."""
        self.fn(*self.args, **self.kwargs)

    def serialize(self):
        """Serialize to a JSON string.

        Raises RuntimeError if the target function was never registered with
        @daemon_command, since the daemon could not look it back up.
        """
        fn_key = function_key(self.fn)
        if fn_key not in _daemon_command_mapping:
            raise RuntimeError('Function key {} not found; you may need to decorate your function'.format(fn_key))
        doc = {'fn_key': fn_key, 'client_version': self.client_version, 'suppress_warnings': self.suppress_warnings,
               'args': self.args, 'kwargs': self.kwargs}
        return json.dumps(doc)

    @staticmethod
    def deserialize(doc):
        # Parsed with yaml.safe_load (JSON is a YAML subset) so the utf-8
        # string constructor installed by init_yaml_constructor applies.
        return yaml.safe_load(doc)

# Registry of functions allowed to run on the daemon, keyed by 'module.name'.
_daemon_command_mapping = {}

def function_key(fn):
    """Return the registry key for fn: '<module>.<name>'."""
    return '{}.{}'.format(fn.__module__, fn.__name__)

def daemon_command(fn):
    """Decorator registering fn as callable via a deserialized Payload.

    Raises RuntimeError when two distinct functions map to the same key.
    """
    key = function_key(fn)
    if key in _daemon_command_mapping and _daemon_command_mapping[key] != fn:
        raise RuntimeError("Function mapping key collision: {}. Name one of the functions something else".format(key))
    _daemon_command_mapping[key] = fn
    return fn

def get_payload_function(fn_key):
    """Look up a registered daemon function by key; raise if unregistered."""
    if fn_key not in _daemon_command_mapping:
        raise RuntimeError('Function key {} not found'.format(fn_key))
    return _daemon_command_mapping[fn_key]

def init_yaml_constructor():
    """
    This dark magic is used to make yaml.safe_load encode all strings as utf-8,
    where otherwise python unicode strings would be returned for non-ascii chars
    """
    def utf_encoding_string_constructor(loader, node):
        return loader.construct_scalar(node).encode('utf-8')
    yaml.SafeLoader.add_constructor(u'tag:yaml.org,2002:str', utf_encoding_string_constructor)
    def test_validate_path_pair_success_local_relative(self):
        # Local (no-container) paths may be relative.
        _validate_path_pair(None, 'a')

    def test_validate_path_pair_success_container(self):
        # Absolute paths are accepted inside a container.
        _validate_path_pair('some-container', '/tmp/a')

    def test_validate_path_pair_relative_in_container(self):
        # Relative paths inside a container are rejected.
        with self.assertRaises(RuntimeError):
            _validate_path_pair('some-container', 'a')

    def test_main_local_to_container(self):
        # local -> container dispatches to copy_from_local.
        result = main(['/tmp/a', 'website:/tmp/b'])
        self.assertEqual(result, Payload(copy_from_local, '/tmp/a', 'website', '/tmp/b'))

    def test_main_container_to_local(self):
        # container -> local dispatches to copy_to_local (local path comes first).
        result = main(['website:/tmp/a', '/tmp/b'])
        self.assertEqual(result, Payload(copy_to_local, '/tmp/b', 'website', '/tmp/a'))

    def test_main_container_to_container(self):
        # container -> container dispatches to copy_between_containers.
        result = main(['website:/tmp/a', 'api:/tmp/b'])
        self.assertEqual(result, Payload(copy_between_containers, 'website', '/tmp/a', 'api', '/tmp/b'))

    def test_main_fails_on_local_copy(self):
        # Purely local copies are refused outright.
        with self.assertRaises(ValueError):
            main(['/tmp/a', '/tmp/b'])
| result = self.run_command('test --recreate busyboxa test3') 20 | self.assertEqual(self.handler.log_to_client_output.count('var\n'), 0) 21 | self.assertEqual(self.handler.log_to_client_output.count('etc\n'), 0) 22 | self.assertEqual(self.handler.log_to_client_output.count('sbin\n'), 0) 23 | self.handler.log_to_client_output = '' 24 | 25 | self.run_command('test --recreate busyboxa test3 /') 26 | self.assertEqual(self.handler.log_to_client_output.count('var\n'), 1) 27 | self.assertEqual(self.handler.log_to_client_output.count('etc\n'), 1) 28 | self.assertEqual(self.handler.log_to_client_output.count('sbin\n'), 1) 29 | 30 | def test_basic_test_all(self): 31 | result = self.run_command('test --recreate busyboxa all') 32 | self.assertEqual(self.handler.log_to_client_output.count('TESTS PASSED'), 1) 33 | self.assertEqual(self.handler.log_to_client_output.count('OK'), 2) 34 | self.assertTrue('Running commands to create new image:' in result) 35 | 36 | 37 | def test_basic_test_no_recreate(self): 38 | result = self.run_command('test --recreate busyboxa test1') 39 | self.assertTrue('Running commands to create new image:' in result) 40 | self.handler.log_to_client_output = '' 41 | result = self.run_command('test busyboxa test1') 42 | self.assertFalse('Running commands to create new image:' in result) 43 | self.assertEqual(self.handler.log_to_client_output.count('TESTS test1 PASSED'), 1) 44 | self.assertEqual(self.handler.log_to_client_output.count('OK'), 1) 45 | -------------------------------------------------------------------------------- /dusty/commands/disk.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import docker 4 | 5 | from .. 
@daemon_command
def cleanup_inactive_containers():
    """Remove every Dusty container whose status is Exited, reporting the count."""
    ensure_docker_vm_is_started()
    log_to_client("Cleaning up exited containers:")
    removed = remove_exited_dusty_containers()
    log_to_client("Done cleaning {} containers".format(len(removed)))

@daemon_command
def cleanup_images():
    """Remove Docker images that no longer back any container."""
    ensure_docker_vm_is_started()
    log_to_client("Cleaning up docker images without containers:")
    removed = remove_images()
    log_to_client("Done removing {} images".format(len(removed)))

@daemon_command
def inspect_vm_disk():
    """Log disk usage information for the Dusty VM."""
    ensure_docker_vm_is_started()
    log_to_client("Dusty VM Disk Usage:")
    log_to_client(get_docker_vm_disk_info())

def _full_backup_dir(path):
    """Append the backup directory name to path unless it is already present."""
    already_full = path.endswith(constants.LOCAL_BACKUP_DIR)
    return path if already_full else os.path.join(path, constants.LOCAL_BACKUP_DIR)

def _ensure_backup_dir_exists(destination_path):
    """Create the backup directory (and any parents) if it is missing."""
    if os.path.exists(destination_path):
        return
    os.makedirs(destination_path)

@daemon_command
def backup(path):
    """Sync the VM's persistent data down into a local backup directory,
    then hand ownership of the copy to the Mac user."""
    backup_dir = _full_backup_dir(path)
    _ensure_backup_dir_exists(backup_dir)
    initialize_docker_vm()
    log_to_client("Syncing data from your VM to {}...".format(backup_dir))
    sync_local_path_from_vm(backup_dir, constants.VM_PERSIST_DIR)
    set_mac_user_ownership(backup_dir)

@daemon_command
def restore(path):
    """Push a previously-made local backup back into the VM's persist dir."""
    backup_dir = _full_backup_dir(path)
    if not os.path.exists(backup_dir):
        log_to_client("Can't find backup data to restore at {}".format(backup_dir))
        return
    initialize_docker_vm()
    log_to_client("Restoring your backup last modified at {}".format(dir_modified_time(backup_dir)))
    sync_local_path_to_vm(backup_dir, constants.VM_PERSIST_DIR)
self.wait_for_exec_docker(timeout=1) 46 | self.assertFileNotInContainer('appa', '/app/a/foo') 47 | 48 | def test_with_flag(self): 49 | self.run_command('scripts appa example_ls') 50 | self.run_command('scripts appa example_ls -l') 51 | self.assertTrue(len(self.exec_docker_processes[0].stdout.read()) < len(self.exec_docker_processes[1].stdout.read())) 52 | 53 | def test_with_flag_and_option(self): 54 | self.assertFileNotInContainer('appa', '/app/a/foo') 55 | self.run_command('scripts appa example_touch -c /app/a/foo') 56 | self.wait_for_exec_docker(timeout=1) 57 | self.assertFileNotInContainer('appa', '/app/a/foo') 58 | -------------------------------------------------------------------------------- /dusty/systems/docker/cleanup.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from ...log import log_to_client 4 | from . import get_dusty_containers, get_dusty_images, get_docker_client 5 | 6 | def get_exited_dusty_containers(): 7 | all_containers = get_dusty_containers(None, include_exited=True) 8 | stopped_containers = [] 9 | for container in all_containers: 10 | if 'Exited' in container['Status']: 11 | stopped_containers.append(container) 12 | return stopped_containers 13 | 14 | def remove_exited_dusty_containers(): 15 | """Removed all dusty containers with 'Exited' in their status""" 16 | client = get_docker_client() 17 | exited_containers = get_exited_dusty_containers() 18 | removed_containers = [] 19 | for container in exited_containers: 20 | log_to_client("Removing container {}".format(container['Names'][0])) 21 | try: 22 | client.remove_container(container['Id'], v=True) 23 | removed_containers.append(container) 24 | except Exception as e: 25 | log_to_client(e.message or str(e)) 26 | return removed_containers 27 | 28 | def _remove_dangling_images(): 29 | client = get_docker_client() 30 | dangling_images = client.images(all=True, filters={'dangling': True}) 31 | removed = [] 32 | for image in 
dangling_images: 33 | try: 34 | client.remove_image(image['Id']) 35 | except Exception as e: 36 | logging.info("Couldn't remove image {}".format(image['RepoTags'])) 37 | else: 38 | log_to_client("Removed Image {}".format(image['RepoTags'])) 39 | removed.append(image) 40 | return removed 41 | 42 | def remove_images(): 43 | """Removes all dangling images as well as all images referenced in a dusty spec; forceful removal is not used""" 44 | client = get_docker_client() 45 | removed = _remove_dangling_images() 46 | dusty_images = get_dusty_images() 47 | all_images = client.images(all=True) 48 | for image in all_images: 49 | if set(image['RepoTags']).intersection(dusty_images): 50 | try: 51 | client.remove_image(image['Id']) 52 | except Exception as e: 53 | logging.info("Couldn't remove image {}".format(image['RepoTags'])) 54 | else: 55 | log_to_client("Removed Image {}".format(image['RepoTags'])) 56 | removed.append(image) 57 | return removed 58 | -------------------------------------------------------------------------------- /tests/integration/cli/bundles_test.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from ...testcases import DustyIntegrationTestCase 4 | from ...fixtures import busybox_single_app_bundle_fixture, unicode_fixture 5 | 6 | class TestBundlesCLI(DustyIntegrationTestCase): 7 | def setUp(self): 8 | super(TestBundlesCLI, self).setUp() 9 | busybox_single_app_bundle_fixture(num_bundles=2) 10 | 11 | def test_bundles_list_returns(self): 12 | result = self.run_command('bundles list') 13 | self.assertIn('busyboxa', result) 14 | self.assertIn('busyboxb', result) 15 | 16 | def test_bundles_activate(self): 17 | self.run_command('bundles activate busyboxa') 18 | result = self.run_command('bundles list') 19 | self.assertInSameLine(result, 'busyboxa', 'X') 20 | self.assertNotInSameLine(result, 'busyboxb', 'X') 21 | 22 | def test_bundles_activate_multiple(self): 23 | self.run_command('bundles 
activate busyboxa busyboxb') 24 | result = self.run_command('bundles list') 25 | self.assertInSameLine(result, 'busyboxa', 'X') 26 | self.assertInSameLine(result, 'busyboxb', 'X') 27 | 28 | def test_bundles_activate_only(self): 29 | self.run_command('bundles activate busyboxa') 30 | self.run_command('bundles activate --only busyboxb') 31 | result = self.run_command('bundles list') 32 | self.assertNotInSameLine(result, 'busyboxa', 'X') 33 | self.assertInSameLine(result, 'busyboxb', 'X') 34 | 35 | def test_bundles_deactivate(self): 36 | self.run_command('bundles activate busyboxa') 37 | self.run_command('bundles deactivate busyboxa') 38 | result = self.run_command('bundles list') 39 | self.assertNotInSameLine(result, 'busyboxa', 'X') 40 | self.assertNotInSameLine(result, 'busyboxb', 'X') 41 | 42 | def test_bundles_deactivate_multiple(self): 43 | self.run_command('bundles activate busyboxb') 44 | self.run_command('bundles deactivate busyboxa busyboxb') 45 | result = self.run_command('bundles list') 46 | self.assertNotInSameLine(result, 'busyboxa', 'X') 47 | self.assertNotInSameLine(result, 'busyboxb', 'X') 48 | 49 | def test_bundles_with_unicode_names(self): 50 | unicode_fixture() 51 | self.run_command('bundles activate bundle-Ɯ') 52 | result = self.run_command('bundles list') 53 | self.assertInSameLine(result, 'bundle-Ɯ', 'X', 'unicode woohoooၕഇഞƺ') 54 | -------------------------------------------------------------------------------- /dusty/cli/cp.py: -------------------------------------------------------------------------------- 1 | """Copy files between your local filesystem and Dusty-managed containers. 2 | This tool also supports copying files directly between two containers. 3 | 4 | To specify a file or directory location, either give just a path to 5 | indicate a location on your local filesystem, or prefix a path with 6 | `:` to indicate a location inside a running container. 
7 | 8 | Usage: 9 | cp 10 | 11 | Examples: 12 | To copy a file from your local filesystem to the container of an app called `website`: 13 | cp /tmp/my-local-file.txt website:/tmp/file-inside-website-container.txt 14 | 15 | To copy a file from that same `website` container back to your local filesystem: 16 | cp website:/tmp/file-inside-website-container.txt /tmp/my-local-file.txt 17 | 18 | To copy a file from the `website` container to a different container called `api`: 19 | cp website:/tmp/website-file.txt api:/different/location/api-file.txt 20 | """ 21 | 22 | import os 23 | 24 | from docopt import docopt 25 | 26 | from ..payload import Payload 27 | from ..commands.cp import copy_between_containers, copy_from_local, copy_to_local 28 | 29 | def _split_path(path): 30 | split = path.split(':') 31 | if len(split) > 2: 32 | raise ValueError('Invalid path specification, expected [container:]path.') 33 | elif len(split) == 2: 34 | return split[0], split[1] 35 | return None, path 36 | 37 | def _resolve_path(path): 38 | return os.path.abspath(path) 39 | 40 | def _validate_path_pair(name, path): 41 | if name and not path.startswith('/'): 42 | raise RuntimeError('You must provide an absolute path inside containers') 43 | 44 | def main(argv): 45 | args = docopt(__doc__, argv) 46 | source_name, source_path = _split_path(args['']) 47 | dest_name, dest_path = _split_path(args['']) 48 | _validate_path_pair(source_name, source_path) 49 | _validate_path_pair(dest_name, dest_path) 50 | source_path, dest_path = _resolve_path(source_path), _resolve_path(dest_path) 51 | if source_name and dest_name: 52 | return Payload(copy_between_containers, source_name, source_path, dest_name, dest_path) 53 | elif dest_name: 54 | return Payload(copy_from_local, source_path, dest_name, dest_path) 55 | elif source_name: 56 | return Payload(copy_to_local, dest_path, source_name, source_path) 57 | else: 58 | raise ValueError('Refusing to copy files between your local filesystem.') 59 | 
class TestStopCLI(DustyIntegrationTestCase):
    """Integration tests for `dusty stop`, with and without the --rm flag."""
    def setUp(self):
        super(TestStopCLI, self).setUp()
        # Three single-app bundles so partial-stop behavior can be observed.
        busybox_single_app_bundle_fixture(num_bundles=3)
        self.run_command('bundles activate busyboxa busyboxb busyboxc')
        self.run_command('up')

    def tearDown(self):
        # Best-effort stop; containers may already be stopped by the test body.
        try:
            self.run_command('stop')
        except Exception:
            pass
        super(TestStopCLI, self).tearDown()

    def test_stop_container(self):
        # Plain `stop` halts containers but leaves them in existence.
        self.assertContainerRunning('busyboxa')
        self.run_command('stop')
        self.assertContainerIsNotRunning('busyboxa')
        self.assertContainerExists('busyboxa')

    def test_stop_container_with_rm_flag(self):
        # `stop --rm` halts and also removes the container.
        self.assertContainerRunning('busyboxa')
        self.run_command('stop --rm')
        self.assertContainerIsNotRunning('busyboxa')
        self.assertContainerDoesNotExist('busyboxa')

    def test_stop_only_one(self):
        # Naming an app stops only that app's container.
        self.assertContainerRunning('busyboxa')
        self.assertContainerRunning('busyboxb')
        self.run_command('stop busyboxa')
        self.assertContainerIsNotRunning('busyboxa')
        self.assertContainerRunning('busyboxb')

    def test_stop_multiple_but_not_all(self):
        # Naming a subset leaves unnamed containers running.
        self.assertContainerRunning('busyboxa')
        self.assertContainerRunning('busyboxb')
        self.assertContainerRunning('busyboxc')
        self.run_command('stop busyboxa busyboxb')
        self.assertContainerIsNotRunning('busyboxa')
        self.assertContainerIsNotRunning('busyboxb')
        self.assertContainerRunning('busyboxc')

    def test_stop_with_rm(self):
        # `stop --rm <app>` removes just that app's container.
        self.assertContainerRunning('busyboxa')
        self.run_command('stop --rm busyboxa')
        self.assertContainerDoesNotExist('busyboxa')
test_stop_multiple_but_not_all_with_rm(self): 52 | self.assertContainerRunning('busyboxa') 53 | self.assertContainerRunning('busyboxb') 54 | self.assertContainerRunning('busyboxc') 55 | self.run_command('stop --rm busyboxa busyboxb') 56 | self.assertContainerDoesNotExist('busyboxa') 57 | self.assertContainerDoesNotExist('busyboxb') 58 | self.assertContainerRunning('busyboxc') 59 | -------------------------------------------------------------------------------- /tests/unit/parallel_test.py: -------------------------------------------------------------------------------- 1 | from mock import Mock, call 2 | 3 | from ..testcases import DustyTestCase 4 | from dusty.parallel import TaskQueue, parallel_task_queue 5 | 6 | global_mock = Mock() 7 | 8 | def _fake_task(*args, **kwargs): 9 | global_mock(*args, **kwargs) 10 | 11 | def _fake_exception(*args, **kwargs): 12 | raise ValueError() 13 | 14 | class TestParallel(DustyTestCase): 15 | def setUp(self): 16 | super(TestParallel, self).setUp() 17 | self.queue = TaskQueue(2) 18 | global_mock.reset_mock() 19 | 20 | def test_enqueue_task(self): 21 | self.queue.enqueue_task(_fake_task, 1, 2, a=3, b=4) 22 | fn, args, kwargs = self.queue.get() 23 | self.assertEqual(fn, _fake_task) 24 | self.assertEqual(args, (1, 2)) 25 | self.assertItemsEqual(kwargs, {'a': 3, 'b': 4}) 26 | 27 | def test_task_executor(self): 28 | self.queue._task_executor(_fake_task, (1,), {'a': 2}) 29 | global_mock.assert_called_with(1, a=2) 30 | 31 | def test_task_executor_exception(self): 32 | self.queue._task_executor(_fake_exception, tuple(), {}) 33 | self.assertEqual(len(self.queue.errors), 1) 34 | 35 | def test_execute_single(self): 36 | self.queue.enqueue_task(_fake_task, 1, a=2) 37 | self.queue.execute() 38 | global_mock.assert_called_with(1, a=2) 39 | 40 | def test_execute_multiple(self): 41 | self.queue.enqueue_task(_fake_task, 1, a=2) 42 | self.queue.enqueue_task(_fake_task, 3, b=4) 43 | self.queue.execute() 44 | global_mock.assert_has_calls([call(1, 
class TestUpgrade(DustyIntegrationTestCase):
    """Integration tests for `dusty upgrade`, run against a packaged daemon binary."""

    def tearDown(self):
        super(TestUpgrade, self).tearDown()
        self.stop_daemon()

    def run_daemon_binary(self, path='./dist/dusty'):
        """Start the packaged daemon binary and give it a moment to come up."""
        self.daemon_process = subprocess.Popen(args=[path, '-d', '--suppress-warnings'], stdout=subprocess.PIPE)
        sleep(1)

    def run_daemon_source(self):
        """Start the daemon from source (the `dusty` entry point on PATH)."""
        self.daemon_process = subprocess.Popen(args=['dusty', '-d', '--suppress-warnings'], stdout=subprocess.PIPE)
        sleep(1)

    def stop_daemon(self):
        # Robustness fix: if a test failed before the daemon was started,
        # `daemon_process` was never set and tearDown previously raised
        # AttributeError, masking the original failure. Also wait() after
        # terminate() so the child does not linger as a zombie.
        process = getattr(self, 'daemon_process', None)
        if process is not None and process.poll() is None:
            process.terminate()
            process.wait()

    def recreate_dusty_binary(self):
        """Rebuild ./dist/dusty so later tests see the current version again."""
        subprocess.check_call(['./setup/create_binaries.sh'], stdout=subprocess.PIPE)

    def test_upgrade_2_1(self):
        self.run_daemon_binary()
        version = '0.2.1'
        output = self.run_command('version')
        self.assertInSameLine(output, 'daemon', 'version')
        output = self.run_command('upgrade {}'.format(version))
        self.assertInSameLine(output, 'Downloading', version)
        self.assertInSameLine(output, 'Finished upgrade', version)
        sleep(2)  # give the upgraded daemon time to restart
        output = self.run_command('version', raise_on_error=False)
        self.assertInSameLine(output, 'daemon', 'version', version)
        self.assertInSameLine(output, 'client', 'version', constants.VERSION)
        self.recreate_dusty_binary()

    def test_upgrade_source_fails(self):
        self.run_daemon_source()
        output = self.run_command('upgrade')
        self.assertTrue('It looks like you\'re running Dusty from source' in output)
        self.assertBinaryVersionUnchanged()

    def test_upgrade_bad_name_fails(self):
        # The upgrade command should refuse when the binary is not named `dusty`.
        shutil.copy('dist/dusty', 'dist/python')
        self.run_daemon_binary(path='./dist/python')
        with self.assertRaises(self.CommandError):
            self.run_command('upgrade 0.2.2')
        self.assertBinaryVersionUnchanged()

    def assertBinaryVersionUnchanged(self):
        """Assert the daemon still reports the current (unupgraded) version."""
        output = self.run_command('version')
        self.assertInSameLine(output, 'daemon', 'version', constants.VERSION)
self.assertIn(bundle, result.splitlines()[output_row]) 17 | check_fn = self.assertIn if activated else self.assertNotIn 18 | check_fn("X", result.splitlines()[output_row]) 19 | 20 | def test_list_bundles_with_none_activated(self): 21 | list_bundles() 22 | self._assert_listed_bundles(self.last_client_output, 23 | [['bundle-a', False], 24 | ['bundle-b', False]]) 25 | 26 | def test_list_bundles_with_one_activated(self): 27 | activate_bundle(['bundle-a'], False) 28 | list_bundles() 29 | self._assert_listed_bundles(self.last_client_output, 30 | [['bundle-a', True], 31 | ['bundle-b', False]]) 32 | 33 | def test_list_bundles_with_both_activated(self): 34 | activate_bundle(['bundle-a', 'bundle-b'], False) 35 | list_bundles() 36 | self._assert_listed_bundles(self.last_client_output, 37 | [['bundle-a', True], 38 | ['bundle-b', True]]) 39 | 40 | def test_activate_bundle(self): 41 | activate_bundle(['bundle-a'], False) 42 | self.assertItemsEqual(get_config_value(constants.CONFIG_BUNDLES_KEY), ['bundle-a']) 43 | 44 | def test_activate_bundle_only(self): 45 | activate_bundle(['bundle-a'], False) 46 | activate_bundle(['bundle-b'], True) 47 | self.assertItemsEqual(get_config_value(constants.CONFIG_BUNDLES_KEY), ['bundle-b']) 48 | 49 | def test_deactivate_bundle(self): 50 | activate_bundle(['bundle-a', 'bundle-b'], False) 51 | self.assertItemsEqual(get_config_value(constants.CONFIG_BUNDLES_KEY), ['bundle-a', 'bundle-b']) 52 | deactivate_bundle(['bundle-a', 'bundle-b']) 53 | self.assertItemsEqual(get_config_value(constants.CONFIG_BUNDLES_KEY), []) 54 | -------------------------------------------------------------------------------- /docs/faq.md: -------------------------------------------------------------------------------- 1 | # Frequently Asked Questions 2 | 3 | ### I'm having trouble running Docker commands on my Mac - what gives?? 
4 | 5 | If you're seeing something like: 6 | ``` 7 | $ docker ps 8 | FATA[0000] Get http:///var/run/docker.sock/v1.18/containers/json: dial unix /var/run/docker.sock: no such file or directory. Are you trying to connect to a TLS-enabled daemon without TLS? 9 | ``` 10 | 11 | You probably just need to run `eval $(docker-machine env dusty)` 12 | 13 | ### How can Dusty access my private GitHub repos? 14 | 15 | The Dusty daemon, which runs as root, manages your GitHub repos by default. 16 | To get permissions to clone from GitHub, the 17 | Daemon will make use of your unprivileged user's SSH_AUTH_SOCK. This means that the daemon's 18 | behavior using git should be the same as your user's, in terms of SSH authentication. 19 | 20 | If you want Dusty to be 21 | able to clone your private GitHub repos, you need to configure your standard user (whoever 22 | your `mac_username` is set to), to be able to clone repositories without any prompt for 23 | confirmation or password. 24 | 25 | ``` 26 | ssh-add -K 27 | ``` 28 | ssh-add adds private key identities to the authentication agent, ssh-agent. This will allow Dusty 29 | to pull repos using your `mac_username`'s ssh credentials. 30 | 31 | If you aren't using the default SSH agent but instead use something like `gnupg` as your SSH agent, 32 | you might need to adjust `SSH_AUTH_SOCK`: 33 | 34 | ``` 35 | $ launchctl asuser $(id -u) launchctl setenv SSH_AUTH_SOCK $SSH_AUTH_SOCK 36 | ``` 37 | 38 | ### My tests are hanging at this step: `Creating test...`. What do I do? 39 | Dusty tests use docker logs, which streams one line at a time. So if you are using a test library which outputs its results on a single line (like python's nosetests) this can give the illusion that your tests are hanging when they are running fine. 40 | 41 | ### Why doesn't Dusty support Linux? 42 | 43 | Dusty might be ported to Linux in the future, although we don't have concrete plans to
Many of the problems Dusty solves are specific to an OSX environment, where 45 | Docker can't run natively. 46 | 47 | ### I've read the Dusty docs, but I'd like more information! 48 | 49 | We have a public Slack org where you can hang out with the Dusty core contributors 50 | and other users. You can [register here](https://dusty-slackin.herokuapp.com/). 51 | 52 | ### Why is it called Dusty? 53 | 54 | Dusty is named after [Dusty Baker](https://en.wikipedia.org/wiki/Dusty_Baker), 55 | baseball manager and co-inventor of the [high five](https://en.wikipedia.org/wiki/High_five). 56 | -------------------------------------------------------------------------------- /tests/integration/cli/setup_test.py: -------------------------------------------------------------------------------- 1 | from mock import patch 2 | 3 | from ...testcases import DustyIntegrationTestCase 4 | 5 | from dusty import constants 6 | from dusty.config import save_config_value 7 | 8 | class TestSetupCLI(DustyIntegrationTestCase): 9 | def setUp(self): 10 | super(TestSetupCLI, self).setUp() 11 | save_config_value(constants.CONFIG_SETUP_KEY, False) 12 | save_config_value(constants.CONFIG_SPECS_REPO_KEY, '') 13 | save_config_value(constants.CONFIG_MAC_USERNAME_KEY, 'nobody') 14 | 15 | @patch('dusty.commands.setup._get_recommended_vm_size') 16 | @patch('dusty.commands.setup._get_raw_input') 17 | def test_setup_defaults(self, fake_raw_input, fake_vm_size): 18 | fake_vm_size.return_value = 2048 19 | fake_raw_input.side_effect = ['y', '', 'y'] 20 | self.run_command('setup --no-update') 21 | self.assertConfigValue(constants.CONFIG_MAC_USERNAME_KEY, self.current_user) 22 | self.assertConfigValue(constants.CONFIG_SPECS_REPO_KEY, 'github.com/gamechanger/dusty-example-specs') 23 | self.assertConfigValue(constants.CONFIG_VM_MEM_SIZE, '2048') 24 | 25 | @patch('dusty.commands.setup._get_raw_input') 26 | def test_setup_override_user(self, fake_raw_input): 27 | fake_raw_input.side_effect = ['n', self.current_user, '', 
class TestScriptCommands(DustyTestCase):
    """Unit tests for script_info_for_app and execute_script."""

    def _expected_docker_args(self, *script_args):
        # The argv exec_docker should receive for app-a's `example` script.
        base = ('exec', '-i', 'dusty_app-a_1', 'sh',
                '/command_files/dusty_command_file_app-a_script_example.sh')
        return base + script_args

    def test_script_info_for_app_nonexistent_app(self):
        with self.assertRaises(KeyError):
            script_info_for_app('some-nonexistent-app')

    def test_script_info_for_app_no_scripts(self):
        script_info_for_app('app-c')
        self.assertEqual(self.last_client_output, 'No scripts registered for app app-c')

    def test_script_info_for_app_valid_input(self):
        script_info_for_app('app-a')
        spec_line = self.last_client_output.splitlines()[3]
        self.assertIn('A script description', spec_line)
        self.assertIn('example', spec_line)

    def test_execute_script_nonexistent_app(self):
        with self.assertRaises(KeyError):
            execute_script('some-nonexistent-app', 'this arg should not matter')

    def test_execute_script_no_scripts(self):
        with self.assertRaises(KeyError):
            execute_script('app-b', 'should not matter')

    def test_execute_script_script_not_found(self):
        with self.assertRaises(KeyError):
            execute_script('app-a', 'wrong name')

    @patch('dusty.commands.utils.exec_docker')
    def test_execute_script_valid_input(self, fake_exec_docker):
        execute_script('app-a', 'example')
        fake_exec_docker.assert_called_once_with(*self._expected_docker_args())

    @patch('dusty.commands.utils.exec_docker')
    def test_execute_script_valid_input_one_arg(self, fake_exec_docker):
        execute_script('app-a', 'example', ['.'])
        fake_exec_docker.assert_called_once_with(*self._expected_docker_args('.'))

    @patch('dusty.commands.utils.exec_docker')
    def test_execute_script_valid_input_three_args(self, fake_exec_docker):
        execute_script('app-a', 'example', ['.', './', '..'])
        fake_exec_docker.assert_called_once_with(*self._expected_docker_args('.', './', '..'))

    @patch('dusty.commands.utils.exec_docker')
    def test_execute_script_valid_input_not_accept_arguments(self, fake_exec_docker):
        execute_script('app-b', 'example', ['.', './', '..'])
        fake_exec_docker.assert_has_calls([])
@patch('dusty.systems.known_hosts._get_known_hosts_path')
@patch('dusty.systems.known_hosts.check_output')
class TestKnownHostsSystem(DustyTestCase):
    """Unit tests for ensure_known_hosts' handling of the known_hosts file."""

    def setUp(self):
        super(TestKnownHostsSystem, self).setUp()
        self.temp_hosts_path = tempfile.mkstemp()[1]

    def tearDown(self):
        super(TestKnownHostsSystem, self).tearDown()
        os.remove(self.temp_hosts_path)

    def _seed_hosts_file(self, content):
        # Write the starting known_hosts content for a test.
        with open(self.temp_hosts_path, 'w') as f:
            f.write(content)

    def _read_hosts_file(self):
        with open(self.temp_hosts_path, 'r') as f:
            return f.read()

    def test_preserves_existing_content(self, fake_check_output, fake_get_known_hosts):
        fake_get_known_hosts.return_value = self.temp_hosts_path
        fake_check_output.return_value = 'dusty.host:SOMESHA'

        initial_content = 'prev.known.host.1:SOMESHA\nprev.known.host.2:SOMESHA'
        self._seed_hosts_file(initial_content)
        expected_result_content = 'prev.known.host.1:SOMESHA\nprev.known.host.2:SOMESHA\ndusty.host:SOMESHA'

        ensure_known_hosts(['dusty.host'])
        self.assertEqual(self._read_hosts_file(), expected_result_content)

    def test_not_modified(self, fake_check_output, fake_get_known_hosts):
        fake_get_known_hosts.return_value = self.temp_hosts_path
        fake_check_output.return_value = 'prev.known.host.1:SOMESHA'

        initial_content = 'prev.known.host.1:SOMESHA\nprev.known.host.2:SOMESHA'
        self._seed_hosts_file(initial_content)

        ensure_known_hosts(['prev.known.host.1'])
        # A host that is already present must not be appended again.
        self.assertEqual(self._read_hosts_file(), initial_content)

    def test_redundant_additions(self, fake_check_output, fake_get_known_hosts):
        fake_get_known_hosts.return_value = self.temp_hosts_path
        fake_check_output.return_value = 'dusty.host:SOMESHA'

        initial_content = 'prev.known.host.1:SOMESHA\nprev.known.host.2:SOMESHA'
        self._seed_hosts_file(initial_content)
        expected_result_content = 'prev.known.host.1:SOMESHA\nprev.known.host.2:SOMESHA\ndusty.host:SOMESHA'

        # Passing the same host several times should add it exactly once.
        ensure_known_hosts(['dusty.host', 'dusty.host', 'dusty.host'])
        self.assertEqual(self._read_hosts_file(), expected_result_content)
class TestDockerConfigSystem(DustyTestCase):
    """Unit tests for registry_from_image and get_authed_registries.

    Points constants.DOCKER_CONFIG_PATH at a temp file for the duration of
    each test and restores the original path afterwards.
    """

    def setUp(self):
        super(TestDockerConfigSystem, self).setUp()
        self.temp_docker_config_path = tempfile.mkstemp()[1]
        self.old_docker_config_path = constants.DOCKER_CONFIG_PATH
        constants.DOCKER_CONFIG_PATH = self.temp_docker_config_path

    def tearDown(self):
        super(TestDockerConfigSystem, self).tearDown()
        constants.DOCKER_CONFIG_PATH = self.old_docker_config_path
        if os.path.exists(self.temp_docker_config_path):
            os.remove(self.temp_docker_config_path)

    def _write_config(self, config):
        # Fix: close the file deterministically. The previous
        # json.dump(config, open(path, 'w')) leaked the handle and relied
        # on garbage collection to flush the write.
        with open(self.temp_docker_config_path, 'w') as f:
            json.dump(config, f)

    def test_authed_registries_from_empty_config(self):
        # No config file at all -> no authed registries.
        os.remove(self.temp_docker_config_path)
        self.assertEqual(get_authed_registries(), set())

    def test_authed_registries_with_no_auth_key(self):
        self._write_config({'some_stuff': 'not auth'})
        self.assertEqual(get_authed_registries(), set())

    def test_authed_registries_with_https_auth(self):
        self._write_config({'auths': {'https://index.docker.io/v1/': {'stuff': 'irrelevant'}}})
        self.assertEqual(get_authed_registries(), set(['index.docker.io']))

    def test_authed_registries_with_multiple_styles(self):
        # Both URL-style and bare-hostname auth entries should be recognized.
        self._write_config({'auths': {'https://index.docker.io/v1/': {'stuff': 'irrelevant'},
                                      'gamechanger.io': {'stuff': 'irrelevant'}}})
        self.assertEqual(get_authed_registries(), set(['index.docker.io', 'gamechanger.io']))

    def test_registry_from_image_official(self):
        self.assertEqual(registry_from_image('postgres:9.3'),
                         'index.docker.io')

    def test_registry_from_image_public(self):
        self.assertEqual(registry_from_image('library/postgres:9.3'),
                         'index.docker.io')
        self.assertEqual(registry_from_image('thieman/clojure'),
                         'index.docker.io')

    def test_registry_from_image_private(self):
        self.assertEqual(registry_from_image('gamechanger.io/clojure:1.6'),
                         'gamechanger.io')
        self.assertEqual(registry_from_image('a.b.c.com/clojure:1.6'),
                         'a.b.c.com')
def _ensure_vm_dir_exists(remote_dir):
    """Create remote_dir inside the VM if needed and hand it to the docker user."""
    check_call_on_vm('sudo mkdir -p {0}; sudo chown -R docker {0}'.format(remote_dir))

def _rsync_command(local_path, remote_path, is_dir=True, from_local=True, exclude_git=True):
    """Build the rsync argv for syncing between the local machine and the Dusty VM.

    local_path/remote_path -- endpoints of the sync
    is_dir     -- append a trailing slash to the source so rsync copies the
                  directory's contents rather than the directory itself
    from_local -- direction: True pushes local -> VM, False pulls VM -> local
    exclude_git -- skip nested */.git directories
    """
    key_format_string = '~{}/.docker/machine/machines/{}/id_rsa'
    key_path = os.path.expanduser(key_format_string.format(get_config_value(constants.CONFIG_MAC_USERNAME_KEY),
                                                           constants.VM_MACHINE_NAME))
    # Host key checking is disabled; the VM's host key changes whenever the VM
    # is recreated, which would otherwise make every sync fail.
    ssh_opts = 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -i {}'.format(key_path)
    command = ['rsync', '-e', ssh_opts, '-az', '--del', '--force', '--rsync-path', 'sudo rsync']
    if exclude_git:
        command += ['--exclude', '*/.git']
    if from_local:
        path_args = ['{}{}'.format(local_path, '/' if is_dir else ''), 'docker@{}:{}'.format(get_docker_vm_ip(), remote_path)]
    else:
        path_args = ['docker@{}:{}{}'.format(get_docker_vm_ip(), remote_path, '/' if is_dir else ''), local_path]
    command += path_args
    return command

def vm_path_is_directory(remote_path):
    """A weak check of whether a path in the Dusty VM is a directory.
    This function returns False on any process error, so False may indicate
    other failures such as the path not actually existing."""
    try:
        check_call_on_vm('test -d {}'.format(remote_path))
    except CalledProcessError:
        return False
    return True

def _log_and_run_rsync(command, demote):
    # Shared tail of both sync functions: log the exact invocation, then run it.
    logging.debug('Executing rsync command: {}'.format(' '.join(command)))
    check_and_log_output_and_error(command, demote=demote, quiet_on_success=True)

def sync_local_path_to_vm(local_path, remote_path, demote=False):
    """Push a local file or directory to remote_path inside the Dusty VM."""
    is_dir = os.path.isdir(local_path)
    # For a file sync, only the parent directory needs to exist on the VM.
    _ensure_vm_dir_exists(remote_path if is_dir else parent_dir(remote_path))
    _log_and_run_rsync(_rsync_command(local_path, remote_path, is_dir=is_dir), demote)

def sync_local_path_from_vm(local_path, remote_path, demote=False, is_dir=True):
    """Pull a file or directory from the Dusty VM down to local_path."""
    _log_and_run_rsync(_rsync_command(local_path, remote_path, is_dir=is_dir, from_local=False), demote)
def main(argv):
    """Entry point for `dusty test`: build the Payload(s) the client will run.

    Returns a list of Payloads when a suite name is given (VM init, registry
    logins, test setup, then suite execution), or a single Payload that
    prints test info when no suite name is given.
    """
    args = docopt(__doc__, argv, options_first=True)
    # NOTE(review): the docopt keys below look garbled in this copy of the
    # file -- the angle-bracket placeholder names (presumably '<app_or_lib>',
    # '<suite_name>', '<args>') appear to have been stripped, leaving
    # args[''].  Confirm against the original source before relying on them.
    if args[''] == 'all':
        payload0 = Payload(ensure_vm_initialized)
        payload1 = Payload(log_in_to_required_registries, args[''])
        payload1.run_on_daemon = False  # interactive login prompts must happen client-side
        payload2 = Payload(setup_for_test,
                           args[''],
                           pull_repos=not args['--no-pull'],
                           force_recreate=args['--recreate'])
        payload3 = Payload(run_all_suites,
                           args[''])
        payload3.run_on_daemon = False
        return [payload0, payload1, payload2, payload3]
    elif args['']:
        # A specific suite was named: validate it first, then run just that suite.
        payload0 = Payload(ensure_valid_suite_name, args[''], args[''])
        payload1 = Payload(ensure_vm_initialized)
        payload2 = Payload(log_in_to_required_registries, args[''])
        payload2.run_on_daemon = False  # interactive login prompts must happen client-side
        payload3 = Payload(setup_for_test,
                           args[''],
                           pull_repos=not args['--no-pull'],
                           force_recreate=args['--recreate'])
        payload4 = Payload(run_one_suite,
                           args[''],
                           args[''],
                           args[''])
        payload4.run_on_daemon = False
        return [payload0, payload1, payload2, payload3, payload4]

    else:
        # No suite name: just print the available test suites for the app/lib.
        return Payload(test_info_for_app_or_lib, args[''])
@daemon_command
def _fn(*args, **kwargs):
    """Registered payload function; the serialization target for these tests."""
    return args, kwargs

def _fn2(*args, **kwargs):
    """Unregistered twin of _fn, used to exercise registry-lookup failures."""
    return args, kwargs

class TestPayload(DustyTestCase):
    """Unit tests for Payload serialization, equality, and the function registry."""

    def setUp(self):
        super(TestPayload, self).setUp()
        self.test_payload = Payload(_fn, 'arg1', arg2='arg2value')
        self.serialized_payload = {'fn_key': function_key(_fn),
                                   'client_version': VERSION,
                                   'suppress_warnings': False,
                                   'args': ('arg1',),
                                   'kwargs': (('arg2', 'arg2value'),)}

    def _payload_with(self, attr, value):
        # A copy of the reference payload with a single attribute overridden.
        payload = Payload(_fn, 'arg1', arg2='arg2value')
        setattr(payload, attr, value)
        return payload

    def test_serialize(self):
        decoded = json.loads(self.test_payload.serialize().decode('string_escape'))
        self.assertItemsEqual(decoded, self.serialized_payload)

    def test_deserialize(self):
        payload = Payload.deserialize(self.test_payload.serialize())
        self.assertEqual(get_payload_function(payload['fn_key']), _fn)
        self.assertEqual(payload['client_version'], VERSION)
        self.assertEqual(set(payload['args']), set(('arg1',)))
        self.assertItemsEqual(payload['kwargs'], {'arg2': 'arg2value'})
        self.assertEqual(payload['suppress_warnings'], False)

    def test_equality_matches(self):
        self.assertEqual(self.test_payload, Payload(_fn, 'arg1', arg2='arg2value'))

    def test_equality_fails_bad_suppress(self):
        self.assertNotEqual(self.test_payload, self._payload_with('suppress_warnings', True))

    def test_equality_fails_bad_run_on_daemon(self):
        self.assertNotEqual(self.test_payload, self._payload_with('run_on_daemon', False))

    def test_equality_fails_bad_fn(self):
        self.assertNotEqual(self.test_payload, Payload(_fn2, 'arg1', arg2='arg2value'))

    def test_equality_fails_bad_args(self):
        self.assertNotEqual(self.test_payload, Payload(_fn, 'arg3', arg2='arg2value'))

    def test_equality_fails_bad_kwargs(self):
        self.assertNotEqual(self.test_payload, Payload(_fn, 'arg1', arg2='wrongvalue'))

    def test_equality_fails_wrong_class(self):
        self.assertNotEqual(self.test_payload, object())

    def test_get_payload_function_succeeds(self):
        self.assertEqual(_fn, get_payload_function(function_key(_fn)))

    def test_get_payload_function_raises(self):
        with self.assertRaises(RuntimeError):
            get_payload_function(function_key(_fn2))
dusty.constants.CONFIG_PATH = self.old_config_path 28 | dusty.constants.CONFIG_SETTINGS = self.old_config_settings 29 | 30 | def test_eligible_config_key_for_setting(self): 31 | self.assertItemsEqual(_eligible_config_keys_for_setting(), [constants.CONFIG_SPECS_REPO_KEY, 'docker_user']) 32 | 33 | def test_list_config_values(self): 34 | list_config_values() 35 | self.assertItemsEqual(json.loads(self.last_client_output.replace('\'', '\"').replace('False', 'false').replace('True', 'true')), self.expected_config) 36 | 37 | def test_save_value_changes_value(self): 38 | save_value('docker_user', '~/here') 39 | list_config_values() 40 | self.assertItemsEqual(json.loads(self.last_client_output.replace('\'', '\"').replace('False', 'false').replace('True', 'true')), 41 | {constants.CONFIG_BUNDLES_KEY: [], 42 | constants.CONFIG_ENV_KEY: {}, 43 | constants.CONFIG_REPO_OVERRIDES_KEY: {get_specs_repo(): self.temp_specs_path}, 44 | 'docker_user': '~/here', 45 | constants.CONFIG_SPECS_REPO_KEY: 'github.com/org/dusty-specs', 46 | constants.CONFIG_SETUP_KEY: False, 47 | constants.CONFIG_VM_MEM_SIZE: '2048'}) 48 | 49 | def test_save_value_no_changes(self): 50 | with self.assertRaises(KeyError): 51 | save_value(constants.CONFIG_BUNDLES_KEY, '~/here') 52 | -------------------------------------------------------------------------------- /dusty/systems/docker/config.py: -------------------------------------------------------------------------------- 1 | """Functions with operate on the Docker client's config file""" 2 | 3 | import os 4 | import json 5 | from urlparse import urlparse 6 | import getpass 7 | from subprocess import CalledProcessError 8 | 9 | from ... import constants 10 | from ...log import log_to_client 11 | from ...memoize import memoized 12 | from ...subprocess import check_call_demoted 13 | from . 
import get_docker_env

def registry_from_image(image_name):
    """Returns the Docker registry host associated with
    a given image name."""
    if '/' not in image_name: # official image
        return constants.PUBLIC_DOCKER_REGISTRY
    prefix = image_name.split('/')[0]
    if '.' not in prefix: # user image on official repository, e.g. thieman/clojure
        return constants.PUBLIC_DOCKER_REGISTRY
    return prefix # custom registry, e.g. gamechanger.io/lox

@memoized
def get_authed_registries():
    """Reads the local Docker client config for the current user
    and returns all registries to which the user may be logged in.
    This is intended to be run client-side, not by the daemon."""
    result = set()
    if not os.path.exists(constants.DOCKER_CONFIG_PATH):
        return result
    config = json.load(open(constants.DOCKER_CONFIG_PATH, 'r'))
    for registry in config.get('auths', {}).iterkeys():
        try:
            parsed = urlparse(registry)
        except Exception:
            # BUG FIX: the original called .format() on log_to_client's
            # return value (None), which raised AttributeError and never
            # interpolated the registry name; it then fell through and
            # used the unbound `parsed` below. Format the message first
            # and actually skip the registry, as the message promises.
            log_to_client('Error parsing registry {} from Docker config, will skip this registry'.format(registry))
            continue
        # This logic assumes the auth is either of the form
        # gamechanger.io (no scheme, no path after host) or
        # of the form https://index.docker.io/v1/ (scheme,
        # netloc parses correctly, additional path does not matter).
        # These are the formats I saw in my personal config file,
        # not sure what other formats it might accept.
45 | result.add(parsed.netloc) if parsed.netloc else result.add(parsed.path) 46 | return result 47 | 48 | def log_in_to_registry(registry): 49 | log_to_client('\nProcessing required login for {}'.format(registry)) 50 | 51 | while True: 52 | username = raw_input('Username: ') 53 | password = getpass.getpass('Password: ') 54 | email = raw_input('Email: ') 55 | 56 | args = ['-u', username, 57 | '-p', password, 58 | '-e', email] 59 | if registry != constants.PUBLIC_DOCKER_REGISTRY: 60 | args.append(registry) 61 | 62 | try: 63 | check_call_demoted(['docker', 'login'] + args, env=get_docker_env()) 64 | except CalledProcessError: 65 | log_to_client('\nLogin failed, please try again for {} (Ctrl-C to quit)\n'.format(registry)) 66 | else: 67 | break 68 | 69 | log_to_client('Login successful for {}'.format(registry)) 70 | -------------------------------------------------------------------------------- /docs/specs/lib-specs.md: -------------------------------------------------------------------------------- 1 | # Lib Specs 2 | 3 | Libs define additional repos which are maintained by Dusty and mounted into running 4 | app containers which depend on them. Dusty can keep your libs' repos up to date and 5 | ensure their install commands are run when necessary inside app containers to keep 6 | the container's state clean. 7 | 8 | ## repo 9 | 10 | ``` 11 | repo: github.com/my-org/my-app 12 | -or- 13 | repo: https://github.com/my-org/my-app.git 14 | -or- 15 | repo: /Users/myuser/my-app 16 | ``` 17 | 18 | `repo` specifies the repo containing the source for a lib. By default, Dusty manages this 19 | repo for you and will keep its local copy up to date. Once a repo is defined in an active spec, 20 | it can be controlled using the `dusty repos` command. 21 | 22 | Repos can be specified using either a URL or an absolute path to a Git repo on your local filesystem. 23 | If a repo URL starts with `https`, HTTPS will be used to clone the repo. 
Note that this will only work 24 | with public repositories. By default, SSH is used to clone repos. 25 | 26 | `repo` is required in lib specs. 27 | 28 | ## mount 29 | 30 | ``` 31 | mount: /my-app 32 | ``` 33 | 34 | `mount` tells Dusty where to mount the contents of the lib's repo inside any app containers 35 | which depend on it. 36 | 37 | `mount` is required in lib specs. 38 | 39 | ## assets 40 | 41 | ``` 42 | assets: 43 | - name: GITHUB_KEY 44 | path: /root/.ssh/id_rsa 45 | - name: AWS_KEY 46 | path: /root/.aws_key 47 | required: false 48 | ``` 49 | 50 | Assets are files which your containers need access to, but which you don't have in a repository. The main usecase here is for private keys. 51 | 52 | You must register a local file as an asset with the `dusty assets set` command. When you specify the file, its contents are copied to your VM and shared with your running Dusty containers. Assets are placed in your container at the absolute path specified at `path`. 53 | 54 | The `required` key defaults to `true`, and `dusty up` will not succeed if any required assets have not been registered. 55 | 56 | ## install 57 | 58 | ``` 59 | install: 60 | - python setup.py install 61 | ``` 62 | 63 | `install` specifies a list of commands that should be run to prepare the library to be used. 64 | This command is run in the container of an app which depends on the lib. 65 | 66 | Lib install commands are executed prior to the app's `once` and `always` commands during 67 | container startup. 68 | 69 | ## depends 70 | 71 | ``` 72 | depends: 73 | libs: 74 | - lib2 75 | - lib3 76 | ``` 77 | 78 | `depends` is used to specify other libs which should be installed inside any app container 79 | which depends on this lib. Only `libs` may be specified inside a lib's `depends` key. 80 | 81 | ## test 82 | 83 | ``` 84 | test: 85 | ... 86 | ``` 87 | 88 | The `test` key contains information on how to run tests for a lib. Once specified, 89 | tests may be run with the `dusty test` command. 
To find out more about the testing spec, 90 | see the [testing spec page](./test-specs.md). 91 | -------------------------------------------------------------------------------- /dusty/schemas/app_schema.py: -------------------------------------------------------------------------------- 1 | from schemer import Schema, Array 2 | from schemer.validators import one_of 3 | 4 | from .test_schema import test_schema 5 | from .asset_schema import asset_schema 6 | 7 | def image_build_isolation_validator(): 8 | def validator(document): 9 | if 'image' in document and 'build' in document: 10 | return 'Only one of image and build is allowed in app schema' 11 | elif 'image' not in document and 'build' not in document: 12 | return 'Need to have at least one of `image` or `build` in app schema' 13 | return validator 14 | 15 | def repo_mount_validator(): 16 | """If either repo or mount are provided, they must both be provided.""" 17 | def validator(document): 18 | if 'repo' in document and 'mount' in document: 19 | return 20 | elif 'repo' not in document and 'mount' not in document: 21 | return 22 | return 'If either `repo` or `mount` are provided, they must both be provided.' 
23 | return validator 24 | 25 | app_depends_schema = Schema({ 26 | 'services': {'type': Array(basestring), 'default': list}, 27 | 'apps': {'type': Array(basestring), 'default': list}, 28 | 'libs': {'type': Array(basestring), 'default': list} 29 | }) 30 | 31 | conditional_links_schema = Schema({ 32 | 'services': {'type': Array(basestring), 'default': list}, 33 | 'apps': {'type': Array(basestring), 'default': list}, 34 | }) 35 | 36 | host_forwarding_schema = Schema({ 37 | 'host_name': {'type': basestring}, 38 | 'host_port': {'type': int}, 39 | 'container_port': {'type': int}, 40 | 'type': {'type': basestring, 'default': 'http', 'validates': one_of('http', 'stream')} 41 | }) 42 | 43 | commands_schema = Schema({ 44 | 'always': {'type': Array(basestring), 'required': True, 'default': list}, 45 | 'once': {'type': Array(basestring), 'default': list} 46 | }) 47 | 48 | script_schema = Schema({ 49 | 'name': {'type': basestring, 'required': True}, 50 | 'description': {'type': basestring}, 51 | 'command': {'type': Array(basestring), 'required': True} 52 | }) 53 | 54 | dusty_app_compose_schema = Schema({ 55 | 'volumes': {'type': Array(basestring), 'default': list} 56 | }, strict=False) 57 | 58 | app_schema = Schema({ 59 | 'repo': {'type': basestring, 'default': str}, 60 | 'depends': {'type': app_depends_schema, 'default': dict}, 61 | 'conditional_links': {'type': conditional_links_schema, 'default': dict}, 62 | 'host_forwarding': {'type': Array(host_forwarding_schema), 'default': list}, 63 | 'image': {'type': basestring}, 64 | 'image_requires_login': {'type': bool, 'default': False}, 65 | 'build': {'type': basestring}, 66 | 'mount': {'type': basestring, 'default': str}, 67 | 'commands': {'type': commands_schema, 'required': True}, 68 | 'scripts': {'type': Array(script_schema), 'default': list}, 69 | 'assets': {'type': Array(asset_schema), 'default': list}, 70 | 'compose': {'type': dusty_app_compose_schema, 'default': dict}, 71 | 'test': {'type': test_schema, 'default': dict} 72 
| }, validates=[ 73 | image_build_isolation_validator(), 74 | repo_mount_validator(), 75 | ]) 76 | -------------------------------------------------------------------------------- /dusty/systems/nfs/client.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import logging 4 | from subprocess import CalledProcessError 5 | import time 6 | 7 | from ... import constants 8 | from ..virtualbox import get_host_ip, check_output_on_vm, check_call_on_vm, call_on_vm 9 | from ...log import log_to_client 10 | from ...subprocess import call_demoted, check_call_demoted, check_output_demoted, check_output_demoted 11 | from ...compiler.spec_assembler import get_all_repos 12 | 13 | def mount_active_repos(): 14 | remount_repos(get_all_repos(active_only=True, include_specs_repo=False)) 15 | 16 | def remount_repos(repos): 17 | _start_nfs_client() 18 | for i, repo in enumerate(repos): 19 | _unmount_repo(repo) 20 | _mount_repo(repo, wait_for_server=(i==0)) 21 | 22 | def unmount_all_repos(): 23 | mounts = check_output_on_vm('mount | {{ grep {} || true; }}'.format(constants.VM_REPOS_DIR)) 24 | mounted_dirs = [] 25 | for mount in mounts.splitlines(): 26 | for word in mount.split(' '): 27 | if constants.VM_REPOS_DIR in word: 28 | mounted_dirs.append(word) 29 | for mounted_dir in mounted_dirs: 30 | _unmount_vm_dir(mounted_dir) 31 | 32 | def _start_nfs_client(): 33 | check_call_on_vm('sudo /usr/local/etc/init.d/nfs-client start') 34 | 35 | def _unmount_repo(repo): 36 | _unmount_vm_dir(repo.vm_path) 37 | 38 | def _unmount_vm_dir(vm_dir): 39 | call_on_vm('sudo umount -l {}'.format(vm_dir)) 40 | 41 | def _mount_repo(repo, wait_for_server=False): 42 | """ 43 | This function will create the VM directory where a repo will be mounted, if it 44 | doesn't exist. If wait_for_server is set, it will wait up to 10 seconds for 45 | the nfs server to start, by retrying mounts that fail with 'Connection Refused'. 
46 | 47 | If wait_for_server is not set, it will attempt to run the mount command once 48 | """ 49 | check_call_on_vm('sudo mkdir -p {}'.format(repo.vm_path)) 50 | if wait_for_server: 51 | for i in range(0,10): 52 | try: 53 | _run_mount_command(repo) 54 | return 55 | except CalledProcessError as e: 56 | if 'Connection refused' in e.output: 57 | logging.info('Failed to mount repo; waiting for nfsd to restart') 58 | time.sleep(1) 59 | else: 60 | logging.info(e.output) 61 | raise e 62 | log_to_client('Failed to mount repo {}'.format(repo.short_name)) 63 | raise RuntimeError('Unable to mount repo with NFS') 64 | else: 65 | _run_mount_command(repo) 66 | 67 | def _run_mount_command(repo): 68 | # Check output is used here so that if it raises an error, the output can be parsed 69 | return check_output_on_vm('sudo mount {}'.format(_nfs_mount_args_string(repo)), redirect_stderr=True) 70 | 71 | def _nfs_mount_args_string(repo): 72 | mount_string = '-t nfs {} '.format(_nfs_options_string()) 73 | mount_string += '{}:{} '.format(get_host_ip(), repo.local_path) 74 | mount_string += repo.vm_path 75 | return mount_string 76 | 77 | def _nfs_options_string(): 78 | return '-o async,udp,noatime,nfsvers=3' 79 | -------------------------------------------------------------------------------- /dusty/compiler/compose/common.py: -------------------------------------------------------------------------------- 1 | from ...source import Repo 2 | from ...path import vm_command_files_path 3 | from ... 
import constants 4 | 5 | def get_command_files_volume_mount(app_or_lib_name, test=False): 6 | return "{}{}:{}".format(vm_command_files_path(app_or_lib_name), '/test' if test else '', constants.CONTAINER_COMMAND_FILES_DIR) 7 | 8 | def get_asset_volume_mount(app_name): 9 | return "{}:{}".format(constants.VM_ASSETS_DIR, constants.IN_CONTAINER_ASSETS_DIR) 10 | 11 | def get_volume_mounts(app_or_lib_name, assembled_specs, test=False): 12 | if app_or_lib_name in assembled_specs['apps']: 13 | return get_app_volume_mounts(app_or_lib_name, assembled_specs, test=test) 14 | elif app_or_lib_name in assembled_specs['libs']: 15 | return get_lib_volume_mounts(app_or_lib_name, assembled_specs) 16 | raise KeyError('{} is not an app or lib'.format(app_or_lib_name)) 17 | 18 | def get_app_volume_mounts(app_name, assembled_specs, test=False): 19 | """ This returns a list of formatted volume specs for an app. These mounts declared in the apps' spec 20 | and mounts declared in all lib specs the app depends on""" 21 | app_spec = assembled_specs['apps'][app_name] 22 | volumes = [get_command_files_volume_mount(app_name, test=test)] 23 | volumes.append(get_asset_volume_mount(app_name)) 24 | repo_mount = _get_app_repo_volume_mount(app_spec) 25 | if repo_mount: 26 | volumes.append(repo_mount) 27 | volumes += _get_app_libs_volume_mounts(app_name, assembled_specs) 28 | return volumes 29 | 30 | def get_lib_volume_mounts(base_lib_name, assembled_specs): 31 | """ Returns a list of the formatted volume specs for a lib""" 32 | volumes = [_get_lib_repo_volume_mount(assembled_specs['libs'][base_lib_name])] 33 | volumes.append(get_command_files_volume_mount(base_lib_name, test=True)) 34 | for lib_name in assembled_specs['libs'][base_lib_name]['depends']['libs']: 35 | lib_spec = assembled_specs['libs'][lib_name] 36 | volumes.append(_get_lib_repo_volume_mount(lib_spec)) 37 | return volumes 38 | 39 | def _get_app_repo_volume_mount(app_spec): 40 | """ This returns the formatted volume mount spec to mount the 
local code for an app in the 41 | container """ 42 | if app_spec['repo']: 43 | return "{}:{}".format(Repo(app_spec['repo']).vm_path, container_code_path(app_spec)) 44 | 45 | def _get_lib_repo_volume_mount(lib_spec): 46 | """ This returns the formatted volume mount spec to mount the local code for a lib in the 47 | container """ 48 | return "{}:{}".format(Repo(lib_spec['repo']).vm_path, container_code_path(lib_spec)) 49 | 50 | def container_code_path(spec): 51 | """ Returns the path inside the docker container that a spec (for an app or lib) says it wants 52 | to live at """ 53 | return spec['mount'] 54 | 55 | def _get_app_libs_volume_mounts(app_name, assembled_specs): 56 | """ Returns a list of the formatted volume mounts for all libs that an app uses """ 57 | volumes = [] 58 | for lib_name in assembled_specs['apps'][app_name]['depends']['libs']: 59 | lib_spec = assembled_specs['libs'][lib_name] 60 | volumes.append("{}:{}".format(Repo(lib_spec['repo']).vm_path, container_code_path(lib_spec))) 61 | return volumes 62 | -------------------------------------------------------------------------------- /tests/unit/systems/virtualbox/init_test.py: -------------------------------------------------------------------------------- 1 | from mock import patch, call 2 | 3 | from dusty.systems.virtualbox import get_host_ip 4 | from ....testcases import DustyTestCase 5 | 6 | @patch('dusty.systems.virtualbox.get_vm_hostonly_adapter') 7 | @patch('dusty.systems.virtualbox._get_hostonly_config') 8 | class TestVirtualbox(DustyTestCase): 9 | def test_get_host_ip_success(self, fake_get_config, fake_get_adapter): 10 | fake_get_config.return_value = [ 11 | 'Name: vboxnet0', 12 | 'GUID: 786f6276-656e-4074-8000-0a0027000000', 13 | 'DHCP: Disabled', 14 | 'IPAddress: 192.168.57.1', 15 | 'NetworkMask: 255.255.255.0', 16 | '', 17 | 'Name: vboxnet1', 18 | 'GUID: 786f6276-656e-4174-8000-0a0027000001', 19 | 'DHCP: Disabled', 20 | 'IPAddress: 192.168.59.3', 21 | 'NetworkMask: 255.255.255.0', 22 | 
'', 23 | 'Name: vboxnet2', 24 | 'GUID: 786f6276-656e-4174-8000-0a0027000001', 25 | 'DHCP: Disabled', 26 | 'IPAddress: 192.168.58.10', 27 | 'NetworkMask: 255.255.255.0', 28 | ] 29 | fake_get_adapter.return_value = 'vboxnet1' 30 | self.assertEqual(get_host_ip(), '192.168.59.3') 31 | 32 | def test_get_host_ip_no_network(self, fake_get_config, fake_get_adapter): 33 | fake_get_config.return_value = [ 34 | 'Name: vboxnet0', 35 | 'GUID: 786f6276-656e-4074-8000-0a0027000000', 36 | 'DHCP: Disabled', 37 | 'IPAddress: 192.168.57.1', 38 | 'NetworkMask: 255.255.255.0', 39 | '', 40 | 'Name: vboxnet2', 41 | 'GUID: 786f6276-656e-4174-8000-0a0027000001', 42 | 'DHCP: Disabled', 43 | 'IPAddress: 192.168.59.3', 44 | 'NetworkMask: 255.255.255.0', 45 | ] 46 | fake_get_adapter.return_value = 'vboxnet1' 47 | with self.assertRaises(RuntimeError): 48 | get_host_ip() 49 | 50 | def test_get_host_ip_no_ipaddress(self, fake_get_config, fake_get_adapter): 51 | fake_get_config.return_value = [ 52 | 'Name: vboxnet0', 53 | 'GUID: 786f6276-656e-4074-8000-0a0027000000', 54 | 'DHCP: Disabled', 55 | 'IPAddress: 192.168.57.1', 56 | 'NetworkMask: 255.255.255.0', 57 | '', 58 | 'Name: vboxnet1', 59 | 'GUID: 786f6276-656e-4174-8000-0a0027000001', 60 | 'DHCP: Disabled', 61 | 'NetworkMask: 255.255.255.0', 62 | '', 63 | 'Name: vboxnet2', 64 | 'GUID: 786f6276-656e-4174-8000-0a0027000001', 65 | 'DHCP: Disabled', 66 | 'IPAddress: 192.168.58.10', 67 | 'NetworkMask: 255.255.255.0', 68 | ] 69 | fake_get_adapter.return_value = 'vboxnet1' 70 | with self.assertRaises(RuntimeError): 71 | get_host_ip() 72 | -------------------------------------------------------------------------------- /dusty/log.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import logging 3 | import logging.handlers 4 | from .constants import SOCKET_PATH, SOCKET_LOGGER_NAME 5 | from threading import RLock 6 | import contextlib 7 | 8 | handler = None 9 | log_to_client_lock = RLock() 10 | 11 | 
class DustySocketHandler(logging.Handler): 12 | def __init__(self, connection_socket): 13 | super(DustySocketHandler, self).__init__() 14 | self.connection_socket = connection_socket 15 | self.append_newlines = True 16 | 17 | def emit(self, record): 18 | msg = self.format(record) 19 | if isinstance(msg, unicode): 20 | msg = msg.encode('utf-8') 21 | if self.append_newlines: 22 | msg = msg.strip() 23 | self.connection_socket.sendall("{}{}".format(msg, '\n' if self.append_newlines else '')) 24 | 25 | class DustyClientTestingSocketHandler(logging.Handler): 26 | def __init__(self): 27 | super(DustyClientTestingSocketHandler, self).__init__() 28 | self.log_to_client_output = '' 29 | self.append_newlines = True 30 | 31 | def emit(self, record): 32 | msg = self.format(record) 33 | self.log_to_client_output += '{}\n'.format(msg.encode('utf-8').strip()) 34 | 35 | client_logger = logging.getLogger(SOCKET_LOGGER_NAME) 36 | 37 | def configure_logging(): 38 | logging.basicConfig(stream=sys.stdout, 39 | level=logging.INFO, 40 | format='%(asctime)s %(levelname)s:%(name)s %(message)s', 41 | datefmt='%Y-%m-%d %H:%M:%S') 42 | logging.captureWarnings(True) 43 | 44 | def make_socket_logger(connection_socket): 45 | global handler 46 | logger = logging.getLogger(SOCKET_LOGGER_NAME) 47 | handler = DustySocketHandler(connection_socket) 48 | logger.addHandler(handler) 49 | 50 | def log_to_client(message): 51 | with log_to_client_lock: 52 | client_logger.info(message) 53 | 54 | def close_socket_logger(): 55 | global handler 56 | logger = logging.getLogger(SOCKET_LOGGER_NAME) 57 | logger.removeHandler(handler) 58 | handler = None 59 | 60 | def configure_client_logging(): 61 | client_logger.addHandler(logging.NullHandler()) 62 | logging.basicConfig(stream=sys.stdout, 63 | level=logging.INFO, 64 | format='%(message)s') 65 | 66 | @contextlib.contextmanager 67 | def streaming_to_client(): 68 | """Puts the client logger into streaming mode, which sends 69 | unbuffered input through to the socket 
one character at a time. 70 | We also disable propagation so the root logger does not 71 | receive many one-byte emissions. This context handler 72 | was originally created for streaming Compose up's 73 | terminal output through to the client and should only be 74 | used for similarly complex circumstances.""" 75 | for handler in client_logger.handlers: 76 | if hasattr(handler, 'append_newlines'): 77 | break 78 | else: 79 | handler = None 80 | old_propagate = client_logger.propagate 81 | client_logger.propagate = False 82 | if handler is not None: 83 | old_append = handler.append_newlines 84 | handler.append_newlines = False 85 | yield 86 | client_logger.propagate = old_propagate 87 | if handler is not None: 88 | handler.append_newlines = old_append 89 | -------------------------------------------------------------------------------- /dusty/subprocess.py: -------------------------------------------------------------------------------- 1 | """Module for running subprocesses. Providies features such as 2 | demotion, to execute the process as another user, log streaming 3 | to the client""" 4 | 5 | from __future__ import absolute_import 6 | 7 | import os 8 | import pwd 9 | import subprocess 10 | from copy import copy 11 | 12 | from .config import get_config_value 13 | from .log import log_to_client 14 | from . 
import constants 15 | 16 | def demote_to_user(user_name): 17 | def _demote(): 18 | pw_record = pwd.getpwnam(user_name) 19 | _set_demoted_home_dir(user_name) 20 | os.setgid(pw_record.pw_gid) 21 | os.setuid(pw_record.pw_uid) 22 | return _demote 23 | 24 | def _set_demoted_home_dir(user_name): 25 | home_dir = os.path.expanduser('~{}'.format(user_name)) 26 | os.environ['HOME'] = home_dir 27 | 28 | def run_subprocess(fn, shell_args, demote=True, env=None, **kwargs): 29 | if env: 30 | passed_env = copy(os.environ) 31 | passed_env.update(env) 32 | else: 33 | passed_env = None 34 | if demote: 35 | kwargs['preexec_fn'] = demote_to_user(get_config_value(constants.CONFIG_MAC_USERNAME_KEY)) 36 | output = fn(shell_args, env=passed_env, close_fds=True, **kwargs) 37 | return output 38 | 39 | def call_demoted(shell_args, env=None, redirect_stderr=False): 40 | kwargs = {} if not redirect_stderr else {'stderr': subprocess.STDOUT} 41 | return run_subprocess(subprocess.call, shell_args, demote=True, env=env, **kwargs) 42 | 43 | def check_call(shell_args, demote=True, env=None, redirect_stderr=False): 44 | kwargs = {} if not redirect_stderr else {'stderr': subprocess.STDOUT} 45 | return run_subprocess(subprocess.check_call, shell_args, demote=demote, env=env, **kwargs) 46 | 47 | def check_call_demoted(shell_args, env=None, redirect_stderr=False): 48 | return check_call(shell_args, demote=True, env=env, redirect_stderr=redirect_stderr) 49 | 50 | def check_output(shell_args, demote=True, env=None, redirect_stderr=False): 51 | kwargs = {} if not redirect_stderr else {'stderr': subprocess.STDOUT} 52 | return run_subprocess(subprocess.check_output, shell_args, demote=demote, env=env, **kwargs) 53 | 54 | def check_output_demoted(shell_args, env=None, redirect_stderr=False): 55 | return check_output(shell_args, env=env, redirect_stderr=redirect_stderr, demote=True) 56 | 57 | def check_and_log_output_and_error_demoted(shell_args, env=None, strip_newlines=False, quiet_on_success=False): 58 | 
return check_and_log_output_and_error(shell_args, demote=True, env=env, strip_newlines=strip_newlines, quiet_on_success=quiet_on_success) 59 | 60 | def check_and_log_output_and_error(shell_args, demote=True, env=None, strip_newlines=False, quiet_on_success=False): 61 | total_output = "" 62 | process = run_subprocess(subprocess.Popen, shell_args, demote=demote, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) 63 | for output in iter(process.stdout.readline, ''): 64 | if not strip_newlines or output.strip('\n') != '': 65 | total_output += output 66 | if not quiet_on_success: 67 | log_to_client(output.strip()) 68 | return_code = process.wait() 69 | if return_code != 0: 70 | if quiet_on_success: 71 | log_to_client(total_output) 72 | raise subprocess.CalledProcessError(return_code, ' '.join(shell_args)) 73 | return total_output 74 | --------------------------------------------------------------------------------