├── dploi_fabric ├── celery.py ├── solr.py ├── db │ ├── __init__.py │ ├── pg.py │ ├── mysql.py │ └── base.py ├── __init__.py ├── toolbox │ ├── __init__.py │ ├── logger.py │ ├── template.py │ └── datastructures.py ├── templates │ ├── supervisor │ │ ├── supervisor-group.conf │ │ ├── celerycam_command │ │ ├── gunicorn_command │ │ ├── celeryd_command │ │ ├── supervisor.conf │ │ └── supervisord.conf │ ├── redis │ │ └── redis.conf │ └── nginx │ │ └── nginx.conf ├── buildout.py ├── south.py ├── conf.py ├── virtualenv.py ├── nginx.py ├── redis.py ├── github.py ├── messages.py ├── project.py ├── git.py ├── newrelic.py ├── supervisor.py ├── django_utils.py ├── tests.py └── utils.py ├── MANIFEST.in ├── .gitignore ├── docs ├── internal │ └── utils.rst ├── public │ ├── south.rst │ ├── buildout.rst │ ├── github.rst │ ├── virtualenv.rst │ ├── project.rst │ ├── django_utils.rst │ ├── git.rst │ ├── utils.rst │ ├── nginx.rst │ ├── supervisor.rst │ └── newrelic.rst ├── general_information.rst ├── index.rst ├── Makefile ├── make.bat └── conf.py ├── metadata.py ├── setup.py └── README.rst /dploi_fabric/celery.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dploi_fabric/solr.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dploi_fabric/db/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dploi_fabric/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = '0.2.1' 2 | -------------------------------------------------------------------------------- /dploi_fabric/toolbox/__init__.py: -------------------------------------------------------------------------------- 1 | 
# -*- coding: utf-8 -*- -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include metadata.py 2 | include README.rst 3 | include MANIFEST.in 4 | recursive-include dploi_fabric/templates * 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | README.html 2 | *.pyc 3 | /dist 4 | /Django* 5 | /Fabric* 6 | /lib 7 | /include 8 | *.egg 9 | .Python 10 | docs/_build 11 | .idea/ 12 | -------------------------------------------------------------------------------- /dploi_fabric/templates/supervisor/supervisor-group.conf: -------------------------------------------------------------------------------- 1 | {% for group, processes in groups.items %} 2 | [group:{{ group }}] 3 | programs={{ processes|join:',' }} 4 | 5 | {% endfor %} 6 | -------------------------------------------------------------------------------- /docs/internal/utils.rst: -------------------------------------------------------------------------------- 1 | .. _internal/utils: 2 | 3 | ################## 4 | Internal utilities 5 | ################## 6 | 7 | .. autoclass:: dploi_fabric.utils.Configuration 8 | :members: 9 | -------------------------------------------------------------------------------- /docs/public/south.rst: -------------------------------------------------------------------------------- 1 | .. _public/south: 2 | 3 | ############### 4 | South Utilities 5 | ############### 6 | 7 | The following functions are available through the CLI, e.g. fab 8 | 9 | .. autoclass:: dploi_fabric.south.SouthMigrateTask -------------------------------------------------------------------------------- /docs/public/buildout.rst: -------------------------------------------------------------------------------- 1 | .. 
_public/buildout: 2 | 3 | ################## 4 | Buildout Utilities 5 | ################## 6 | 7 | The following functions are available through the CLI, e.g. fab 8 | 9 | .. autofunction:: dploi_fabric.buildout.run -------------------------------------------------------------------------------- /docs/public/github.rst: -------------------------------------------------------------------------------- 1 | .. _public/github: 2 | 3 | ################## 4 | Github integration 5 | ################## 6 | 7 | The following functions are available through the CLI, e.g. fab 8 | 9 | .. autofunction:: dploi_fabric.github.upload_ssh_deploy_key -------------------------------------------------------------------------------- /dploi_fabric/buildout.py: -------------------------------------------------------------------------------- 1 | from fabric.operations import run as do_run 2 | from fabric.api import task, env 3 | 4 | @task 5 | def run(): 6 | """ 7 | runs buildout 8 | """ 9 | # TODO: Refactor to use utils.config 10 | do_run('cd %(path)s;./bin/buildout -c %(buildout_cfg)s' % env) 11 | 12 | -------------------------------------------------------------------------------- /docs/public/virtualenv.rst: -------------------------------------------------------------------------------- 1 | .. _public/virtualenv: 2 | 3 | ################## 4 | Virtualenv Utilities 5 | ################## 6 | 7 | The following functions are available through the CLI, e.g. fab 8 | 9 | .. autofunction:: dploi_fabric.virtualenv.update 10 | .. 
autofunction:: dploi_fabric.virtualenv.create -------------------------------------------------------------------------------- /dploi_fabric/south.py: -------------------------------------------------------------------------------- 1 | from fabric.api import env, run 2 | from fabric.tasks import Task 3 | from .utils import config 4 | 5 | class SouthMigrateTask(Task): 6 | 7 | name = 'migrate' 8 | 9 | def run(self): 10 | config.django_manage("syncdb") 11 | config.django_manage("migrate") 12 | 13 | migrate = SouthMigrateTask() -------------------------------------------------------------------------------- /docs/public/project.rst: -------------------------------------------------------------------------------- 1 | .. _public/project: 2 | 3 | ######################## 4 | Project layout and setup 5 | ######################## 6 | 7 | The following functions are available through the CLI, e.g. fab 8 | 9 | .. autofunction:: dploi_fabric.project.init 10 | .. autofunction:: dploi_fabric.project.upload_ssl -------------------------------------------------------------------------------- /metadata.py: -------------------------------------------------------------------------------- 1 | package_name = 'dploi_fabric' 2 | name = 'dploi-fabric' 3 | author = 'Benjamin Wohlwend, Kristian Oellegaard, Stefan Foulis' 4 | author_email = 'developers@divio.ch' 5 | description = "A collection of fabric tasks" 6 | version = __import__(package_name).__version__ 7 | project_url = 'http://github.com/dploi/%s' % name 8 | license = 'TBA' 9 | -------------------------------------------------------------------------------- /docs/general_information.rst: -------------------------------------------------------------------------------- 1 | .. _general_information: 2 | 3 | ################### 4 | General Information 5 | ################### 6 | 7 | This project is designed to work together with dploi-puppet. 
Even though we prefer to make it as pluggable as possible, certain things (like directory structure) might be asserted to follow the guidelines from dploi-puppet. 8 | 9 | -------------------------------------------------------------------------------- /dploi_fabric/templates/supervisor/celerycam_command: -------------------------------------------------------------------------------- 1 | {% if version == '3.1' %} 2 | {{ cmd }} 3 | events 4 | --workdir={{ path }}src 5 | --app={{ celery_app }} 6 | --frequency={{ frequency }} 7 | --camera=djcelery.snapshot.Camera 8 | --loglevel={{ loglevel }} 9 | {% else %} 10 | {{ django_cmd }} 11 | celerycam 12 | {{ django_args }} 13 | --loglevel={{ loglevel }} 14 | {% endif %} -------------------------------------------------------------------------------- /docs/public/django_utils.rst: -------------------------------------------------------------------------------- 1 | .. _public/django_utils: 2 | 3 | ################ 4 | Django Utilities 5 | ################ 6 | 7 | The following functions are available through the CLI, e.g. fab 8 | 9 | .. autofunction:: dploi_fabric.django_utils.manage 10 | .. autofunction:: dploi_fabric.django_utils.collectstatic 11 | .. autofunction:: dploi_fabric.django_utils.append_settings -------------------------------------------------------------------------------- /docs/public/git.rst: -------------------------------------------------------------------------------- 1 | .. _public/git: 2 | 3 | ############# 4 | Git Utilities 5 | ############# 6 | 7 | The following functions are available through the CLI, e.g. fab 8 | 9 | .. autofunction:: dploi_fabric.git.update 10 | .. autofunction:: dploi_fabric.git.diff 11 | .. autofunction:: dploi_fabric.git.status 12 | .. autofunction:: dploi_fabric.git.reset 13 | .. 
autofunction:: dploi_fabric.git.incoming -------------------------------------------------------------------------------- /dploi_fabric/templates/supervisor/gunicorn_command: -------------------------------------------------------------------------------- 1 | {% if version == '19' %} 2 | {{ cmd }} 3 | wsgi 4 | -w {{ workers }} 5 | --max-requests {{ maxrequests }} 6 | {% if timeout %} --timeout {{ timeout }}{% endif %} 7 | -b {{ bind }} 8 | {% else %} 9 | {{ django_cmd }} 10 | run_gunicorn {{ django_args }} 11 | -w {{ workers }} 12 | --max-requests {{ maxrequests }} 13 | {% if timeout %}--timeout {{ timeout }}{% endif %} 14 | -b {{ bind }} 15 | {% endif %} -------------------------------------------------------------------------------- /docs/public/utils.rst: -------------------------------------------------------------------------------- 1 | .. _public/utils: 2 | 3 | ################ 4 | Remote Utilities 5 | ################ 6 | 7 | The following functions are available through the CLI, e.g. fab 8 | 9 | Remote processes 10 | ================ 11 | .. autofunction:: dploi_fabric.utils.ps 12 | 13 | Media files sync 14 | ================ 15 | .. autofunction:: dploi_fabric.utils.download_media 16 | .. 
autofunction:: dploi_fabric.utils.upload_media 17 | -------------------------------------------------------------------------------- /dploi_fabric/conf.py: -------------------------------------------------------------------------------- 1 | from fabric.api import env 2 | 3 | from deployment import settings, project_name 4 | 5 | env.project_name = project_name 6 | 7 | for key, value in settings.items(): 8 | value['identifier']=key 9 | 10 | def load_settings(identifier): 11 | if not any(settings[identifier]['hosts']): 12 | raise RuntimeError("Hosts not defined, stopping...") 13 | env.identifier = identifier 14 | for key, value in settings[identifier].items(): 15 | setattr(env, key, value) 16 | -------------------------------------------------------------------------------- /dploi_fabric/db/pg.py: -------------------------------------------------------------------------------- 1 | from fabric.tasks import Task 2 | from dploi_fabric.db.base import DumpDatabaseTask, DownloadDatabase 3 | 4 | class PostgreDumpDatabaseTask(DumpDatabaseTask, Task): 5 | """ 6 | Dump the database (PostgreSQL) 7 | """ 8 | 9 | name = 'dump' 10 | 11 | def get_command(self, env, file_name, **flags): 12 | return ('pg_dump --no-owner ' + self.get_flags_string(**flags) + ' --username="%(db_username)s" "%(db_name)s" > ' % env) + file_name 13 | 14 | dump = PostgreDumpDatabaseTask() 15 | download = DownloadDatabase(dump_task=dump) 16 | -------------------------------------------------------------------------------- /dploi_fabric/templates/redis/redis.conf: -------------------------------------------------------------------------------- 1 | daemonize no 2 | 3 | port 0 4 | 5 | unixsocket {{ socket }} 6 | 7 | timeout 300 8 | 9 | loglevel notice 10 | logfile {{ log_directory }}/{{ process_name }}.log 11 | 12 | 13 | databases 16 14 | 15 | save 900 1 16 | save 300 10 17 | save 60 10000 18 | 19 | rdbcompression yes 20 | 21 | dbfilename dump.rdb 22 | 23 | dir {{ working_directory }} 24 | 25 | 
slave-serve-stale-data yes 26 | 27 | appendonly {{ redis.appendonly }} 28 | appendfsync everysec 29 | 30 | no-appendfsync-on-rewrite no 31 | 32 | 33 | list-max-ziplist-entries 512 34 | list-max-ziplist-value 64 35 | 36 | set-max-intset-entries 512 37 | 38 | activerehashing yes 39 | 40 | -------------------------------------------------------------------------------- /dploi_fabric/db/mysql.py: -------------------------------------------------------------------------------- 1 | from fabric.tasks import Task 2 | from dploi_fabric.db.base import DumpDatabaseTask, DownloadDatabase 3 | 4 | class MysqlDumpDatabaseTask(DumpDatabaseTask, Task): 5 | """ 6 | Dump the database (MySQL) 7 | """ 8 | name = 'dump' 9 | 10 | def get_command(self, env, file_name, **flags): 11 | if hasattr(env, 'db_host'): 12 | flags['host'] = env['db_host'] 13 | return ('mysqldump ' + self.get_flags_string(**flags) + ' --user="%(db_username)s" --password="%(db_password)s" "%(db_name)s" > ' % env) + file_name 14 | 15 | dump = MysqlDumpDatabaseTask() 16 | download = DownloadDatabase(dump_task=dump, **{'lock-tables':'false'}) 17 | -------------------------------------------------------------------------------- /dploi_fabric/toolbox/logger.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | class Logger(): 3 | """ 4 | Capture print statements and write them to a variable 5 | but still allow them to be printed on the screen. 6 | You can also redirect multiple streams into one Logger. 
7 | """ 8 | 9 | def __init__(self, stream): 10 | self.stream = stream 11 | self.log = "" 12 | 13 | def __getattr__(self, name): 14 | return getattr(self.stream, name) 15 | 16 | def write(self, text): 17 | self.stream.write(text) 18 | self.log += str(text) 19 | 20 | def get_log(self): 21 | return self.log 22 | 23 | def clear(self): 24 | self.log = "" 25 | -------------------------------------------------------------------------------- /dploi_fabric/templates/supervisor/celeryd_command: -------------------------------------------------------------------------------- 1 | {% if version == '3.1' %} 2 | {{ cmd }} 3 | worker 4 | {% if extra_options %}{{ extra_options }}{% endif %} 5 | --workdir={{ path }}src 6 | --app={{ celery_app }} 7 | {% if has_cam %}--events{% endif %} 8 | {% if enable_beat %}--beat{% endif %} 9 | --concurrency={{ concurrency }} 10 | --maxtasksperchild={{ maxtasksperchild }} 11 | --loglevel={{ loglevel }} 12 | {% else %} 13 | {{ django_cmd }} celeryd {{ django_args }} 14 | {% if has_cam %} -E{% endif %} 15 | {% if enable_beat %}-B{% endif %} 16 | -c {{ concurrency }} 17 | --maxtasksperchild {{ maxtasksperchild }} 18 | --loglevel={{ loglevel }} 19 | {% if pidfile %} --pidfile={{ pidfile }}{% endif %} 20 | {% endif %} -------------------------------------------------------------------------------- /docs/public/nginx.rst: -------------------------------------------------------------------------------- 1 | .. _public/nginx: 2 | 3 | ############### 4 | Nginx utilities 5 | ############### 6 | 7 | The following functions are available through the CLI, e.g. fab 8 | 9 | Configuration files 10 | =================== 11 | .. autofunction:: dploi_fabric.nginx.update_config_file 12 | 13 | Process management 14 | ================== 15 | .. 
autofunction:: dploi_fabric.nginx.reload_nginx 16 | 17 | Settings 18 | ======== 19 | 20 | Can be set in config.ini in the ``[nginx]`` section or in ``deployment.py`` in 21 | this site ``nginx`` section: 22 | 23 | * ``client_max_body_size`` (default: ``10m``) 24 | * ``template`` (default: bundled template): path to template for nginx.conf template (relative to project root) 25 | 26 | -------------------------------------------------------------------------------- /dploi_fabric/virtualenv.py: -------------------------------------------------------------------------------- 1 | from fabric.operations import run as do_run 2 | from fabric.api import task 3 | from .utils import config 4 | 5 | @task 6 | def update(): 7 | """ 8 | updates a virtualenv (pip install requirements.txt) 9 | """ 10 | do_run('cd %(path)s; bin/pip install -r requirements.txt --upgrade --no-deps' % config.sites["main"].deployment) 11 | 12 | @task 13 | def create(): 14 | """ 15 | creates a virtualenv and calls update 16 | """ 17 | do_run('cd %(path)s; virtualenv . --system-site-packages --setuptools' % config.sites["main"].deployment) 18 | update() 19 | # this is ugly. I know. But it seems that on first run, pip does not 20 | # install the correct version of packages that are pulled directly from 21 | # git. Only the second time around it uses the correct one. 
22 | update() 23 | -------------------------------------------------------------------------------- /dploi_fabric/templates/supervisor/supervisor.conf: -------------------------------------------------------------------------------- 1 | {% autoescape off %}[program:{{ process_name }}] 2 | command={{ process_cmd }} 3 | directory={{ deployment.path }}../ 4 | user={{ deployment.user }} 5 | autostart={{ autostart }} 6 | autorestart=True 7 | redirect_stderr=True 8 | environment={% for k,v in env.iteritems %}{{ k }}="{{ v }}"{% if not forloop.last %};{% endif %}{% endfor %}, 9 | priority={{ priority }} 10 | 11 | stdout_logfile={{ deployment.logdir }}/{{ process_name }}.stdout.log 12 | stdout_logfile_backups=2 13 | stderr_logfile={{ deployment.logdir }}/{{ process_name }}.stderr.log 14 | {% if killasgroup %}killasgroup={{ killasgroup }} 15 | {% endif %}{% if stopasgroup %}stopasgroup={{ stopasgroup }} 16 | {% endif %}{% if stopwaitsecs %}stopwaitsecs={{ stopwaitsecs }} 17 | {% endif %} 18 | #################################################{% endautoescape %} 19 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. dploi-fabric documentation master file, created by 2 | sphinx-quickstart on Mon Jan 2 13:29:35 2012. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to dploi-fabric's documentation! 7 | ======================================== 8 | 9 | Usage: 10 | 11 | .. toctree:: 12 | :maxdepth: 2 13 | 14 | general_information 15 | public/buildout 16 | public/django_utils 17 | public/git 18 | public/github 19 | public/nginx 20 | public/project 21 | public/south 22 | public/supervisor 23 | public/utils 24 | public/virtualenv 25 | public/newrelic 26 | 27 | For developers: 28 | 29 | .. 
toctree:: 30 | :maxdepth: 2 31 | 32 | internal/utils 33 | 34 | 35 | Indices and tables 36 | ================== 37 | 38 | * :ref:`genindex` 39 | * :ref:`modindex` 40 | * :ref:`search` 41 | 42 | -------------------------------------------------------------------------------- /dploi_fabric/toolbox/template.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | 4 | from django.conf import settings 5 | from django.template import Template 6 | from django.template.context import Context 7 | 8 | import dploi_fabric 9 | 10 | settings.configure(DEBUG=True, TEMPLATE_DEBUG=True) 11 | 12 | 13 | def render_template(path, context, strip_newlines=False): 14 | if not isinstance(context, Context): 15 | context = Context(context) 16 | with open(path) as template_file: 17 | template_data = template_file.read() 18 | 19 | if strip_newlines: 20 | template_data = u' '.join(template_data.splitlines()) 21 | template = Template(template_data) 22 | return template.render(context).lstrip() 23 | 24 | 25 | def app_package_path(path): 26 | """ 27 | returns the abs path with the dploi_fabric package as base 28 | """ 29 | return os.path.abspath(os.path.join(os.path.dirname(dploi_fabric.__file__), path)) 30 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import metadata as m 2 | 3 | from setuptools import setup, find_packages 4 | 5 | install_requires = [ 6 | "fabric >= 1.2", 7 | "requests", 8 | "Django", 9 | ] 10 | 11 | setup( 12 | name = m.name, 13 | version = m.version, 14 | url = m.project_url, 15 | license = m.license, 16 | platforms=['OS Independent'], 17 | description = m.description, 18 | author = m.author, 19 | author_email = m.author_email, 20 | packages=find_packages(), 21 | install_requires = install_requires, 22 | include_package_data = True, #Accept all data files and directories 
matched by MANIFEST.in or found in source control. 23 | package_dir = { 24 | m.package_name:m.package_name, 25 | }, 26 | zip_safe=False, 27 | classifiers = [ 28 | 'Development Status :: 4 - Beta', 29 | 'Framework :: Django', 30 | 'Intended Audience :: Developers', 31 | 'License :: OSI Approved :: BSD License', 32 | 'Operating System :: OS Independent', 33 | 'Programming Language :: Python', 34 | 'Topic :: Internet :: WWW/HTTP', 35 | ] 36 | ) 37 | -------------------------------------------------------------------------------- /dploi_fabric/nginx.py: -------------------------------------------------------------------------------- 1 | import StringIO 2 | from fabric.decorators import task 3 | from fabric.api import run, env, put 4 | 5 | from dploi_fabric.toolbox.template import render_template 6 | from dploi_fabric.utils import config 7 | import posixpath 8 | 9 | 10 | @task(alias="reload") 11 | def reload_nginx(): 12 | run('sudo /etc/init.d/nginx reload') 13 | 14 | @task 15 | def update_config_file(dryrun=False): 16 | output = "" 17 | for site, site_config in config.sites.items(): 18 | context_dict = site_config 19 | context_dict.update({ 20 | 'domains': " ".join(site_config.deployment.get("domains")[site]), 21 | 'www_processes': [site_config.processes[x] for x in site_config.processes if site_config.processes[x]["type"] == "gunicorn"], 22 | }) 23 | template_path = context_dict['nginx']['template'] 24 | output += render_template(template_path, context_dict) 25 | path = posixpath.abspath(posixpath.join(env.path, '..', 'config', 'nginx.conf')) 26 | if dryrun: 27 | print path + ':' 28 | print output 29 | else: 30 | put(StringIO.StringIO(output), path) 31 | reload_nginx() 32 | -------------------------------------------------------------------------------- /docs/public/supervisor.rst: -------------------------------------------------------------------------------- 1 | .. 
_public/supervisor: 2 | 3 | #################### 4 | Supervisor utilities 5 | #################### 6 | 7 | The following functions are available through the CLI, e.g. fab 8 | 9 | Configuration files 10 | =================== 11 | .. autofunction:: dploi_fabric.supervisor.add 12 | .. autofunction:: dploi_fabric.supervisor.update 13 | .. autofunction:: dploi_fabric.supervisor.update_config_file 14 | 15 | Process management 16 | ================== 17 | .. autofunction:: dploi_fabric.supervisor.start 18 | .. autofunction:: dploi_fabric.supervisor.stop 19 | .. autofunction:: dploi_fabric.supervisor.restart 20 | .. autofunction:: dploi_fabric.supervisor.status 21 | 22 | 23 | Settings 24 | ======== 25 | 26 | Can be set in config.ini in the ``[supervisor]`` section or in ``deployment.py`` in 27 | this site ``supervisor`` section: 28 | 29 | * ``template`` (default: bundled ``dploi_fabric/templates/supervisor/supervisor.conf``) 30 | * ``group_template`` (default: bundled ``dploi_fabric/templates/supervisor/supervisor-group.conf``) 31 | * ``gunicorn_command_template`` (default: bundled ``dploi_fabric/templates/supervisor/gunicorn_command``) 32 | * ``celeryd_command_template`` (default: bundled ``dploi_fabric/templates/supervisor/celeryd_command``) 33 | * ``celerycam_command_template`` (default: bundled ``dploi_fabric/templates/supervisor/celerycam_command``) -------------------------------------------------------------------------------- /dploi_fabric/templates/supervisor/supervisord.conf: -------------------------------------------------------------------------------- 1 | ; supervisor config file 2 | 3 | [unix_http_server] 4 | file={{ deployment.path }}../tmp/supervisor.sock ; (the path to the socket file) 5 | chmod=0700 ; sockef file mode (default 0700) 6 | 7 | [supervisord] 8 | logfile={{ deployment.logdir }}/supervisor.log ; (main log file;default $CWD/supervisord.log) 9 | pidfile={{ deployment.path }}../tmp/supervisor.pid ; (supervisord pidfile;default supervisord.pid) 10 
| childlogdir={{ deployment.logdir }} ; ('AUTO' child log dir, default $TEMP) 11 | 12 | ; the below section must remain in the config file for RPC 13 | ; (supervisorctl/web interface) to work, additional interfaces may be 14 | ; added by defining them in separate rpcinterface: sections 15 | [rpcinterface:supervisor] 16 | supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface 17 | 18 | [supervisorctl] 19 | serverurl=unix://{{ deployment.path }}../tmp/supervisor.sock ; use a unix:// URL for a unix socket 20 | 21 | ; The [include] section can just contain the "files" setting. This 22 | ; setting can list multiple files (separated by whitespace or 23 | ; newlines). It can also contain wildcards. The filenames are 24 | ; interpreted as relative to this file. Included files *cannot* 25 | ; include files themselves. 26 | 27 | [include] 28 | files = {{ deployment.path }}../config/supervisor.conf 29 | -------------------------------------------------------------------------------- /docs/public/newrelic.rst: -------------------------------------------------------------------------------- 1 | .. _public/newrelic: 2 | 3 | ################## 4 | NewRelic utilities 5 | ################## 6 | 7 | Settings 8 | ======== 9 | 10 | Can be set in config.ini in the ``[newrelic]`` section or in ``deployment.py`` in 11 | this site ``newrelic`` section: 12 | 13 | * ``enabled`` (default: ``False``) 14 | * ``config_file`` (default: 'newrelic.ini'): path to ``newrelic.ini`` (relative to project root) 15 | * ``environment_name`` (default: ``''``): new relic environment name (for deployment specific settings in newrelic.ini) 16 | * ``license_key`` (default: ``''``): license key to override the one in ``newrelic.ini``. 
17 | * ``deployment_tracking_apikey`` (default: ``''``): new relic deployment tracking API key (this has to be set in ``deployment.py``) 18 | 19 | 20 | Deployment Tracking 21 | =================== 22 | 23 | You can track deployments and send them to New Relic's API. There are two parts to it: 24 | 25 | * ``@newrelic.register_deployment`` by using this decorator the deployment info will be sent to New Relic's API after the deployment has run through 26 | * ``newrelic.log_output()`` by using this context manager, you can specify which additional logging data should be sent 27 | 28 | Example usage 29 | ------------- 30 | 31 | :: 32 | 33 | from dploi_fabric import newrelic 34 | 35 | @task 36 | @newrelic.register_deployment 37 | def deploy(): 38 | with newrelic.log_output(): 39 | pg.dump.run() 40 | git.update() 41 | virtualenv.update() 42 | south.migrate.run() 43 | django_utils.collectstatic() 44 | django_utils.manage('compress --force') 45 | run('~/bin/gunicorn restart') 46 | 47 | -------------------------------------------------------------------------------- /dploi_fabric/db/base.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from fabric.api import env, run, get 3 | from fabric.tasks import Task 4 | import os 5 | 6 | class DumpDatabaseTask(object): 7 | def get_path(self, env, reason): 8 | mytimestamp = datetime.datetime.now().strftime('%Y-%m-%d-%H%M%S') 9 | reason = reason.replace(' ', '_') 10 | return os.path.join('%(backup_dir)s' % env, '%(db_name)s-' % env + mytimestamp + '-' + reason + '.sql') 11 | 12 | def get_command(self, env, file_name, **flags): 13 | raise NotImplementedError 14 | 15 | def run(self, reason='unknown', compress=False, **flags): 16 | file_name = self.get_path(env, reason) 17 | command = self.get_command(env, file_name, **flags) 18 | run(command) 19 | if compress: 20 | run('gzip ' + file_name) 21 | file_name += '.gz' 22 | return file_name 23 | 24 | def get_flags_string(self, **flags): 
25 | flag_list = [] 26 | for k, v in flags.iteritems(): 27 | result = ('-' if len(k) == 1 else '--') + k 28 | if v: 29 | result += (' ' if len(k) == 1 else '=') + v 30 | flag_list.append(result) 31 | return ' '.join(flag_list) 32 | 33 | 34 | class DownloadDatabase(Task): 35 | """ 36 | Download the database 37 | """ 38 | 39 | name = 'download' 40 | 41 | def __init__(self, dump_task, **flags): 42 | self.dump_task = dump_task 43 | self.flags = flags 44 | 45 | def run(self, path='tmp', **flags): 46 | flags.update(self.flags) 47 | file_name = self.dump_task.run(reason='for_download', compress=True, **flags) 48 | get(file_name, path) 49 | -------------------------------------------------------------------------------- /dploi_fabric/redis.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import StringIO 4 | import posixpath 5 | from fabric.decorators import task 6 | from fabric.api import run, env, put 7 | 8 | from dploi_fabric.toolbox.template import render_template 9 | from dploi_fabric.utils import config 10 | 11 | 12 | @task 13 | def update_config_file(dryrun=False): 14 | for site, site_config in config.sites.items(): 15 | redis_processes = [(x, site_config.processes[x]) for x in site_config.processes if site_config.processes[x]["type"] == "redis"] 16 | template_path = site_config['redis']['template'] 17 | print redis_processes 18 | for process_name, process in redis_processes: 19 | working_directoy = posixpath.normpath(posixpath.join(env.path, '..', 'data', 'redis', process_name)) 20 | log_directory = posixpath.normpath(posixpath.join(site_config['deployment']['logdir'], 'log', 'redis')) 21 | run('mkdir -p ' + working_directoy) 22 | run('mkdir -p ' + log_directory) 23 | context_dict = site_config 24 | context_dict.update({ 25 | 'site': site, 26 | 'working_directory': working_directoy, 27 | 'log_directory': log_directory, 28 | 'process_name': process_name, 29 | 'socket': process['socket'], 30 | }) 
31 | path = posixpath.abspath(posixpath.join(site_config['deployment']['path'], '..', 'config', process_name + '.conf')) 32 | output = render_template(template_path, context_dict) 33 | if dryrun: 34 | print path + ":" 35 | print output 36 | else: 37 | put(StringIO.StringIO(output), path) 38 | 39 | -------------------------------------------------------------------------------- /dploi_fabric/github.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from fabric.api import env, task, prompt, run, get 4 | from fabric.contrib import files 5 | import getpass 6 | import json 7 | import urllib2 8 | import subprocess 9 | import StringIO 10 | import requests 11 | 12 | @task 13 | def upload_ssh_deploy_key(): 14 | if not files.exists("/home/%(user)s/.ssh/id_rsa.pub" % env): 15 | if not files.exists("/home/%(user)s/.ssh/"): 16 | run("mkdir -p /home/%(user)s/.ssh/" % env) 17 | run("ssh-keygen -t rsa -f '/home/%(user)s/.ssh/id_rsa' -P ''" % env) 18 | 19 | output = StringIO.StringIO() 20 | get("/home/%(user)s/.ssh/id_rsa.pub" % env, output) 21 | output.seek(0) 22 | 23 | ssh_key = output.read() 24 | 25 | logged_in = False 26 | headers = {} 27 | while not logged_in: 28 | try: 29 | default_username = subprocess.check_output(["git", "config", "--get", "github.user"]).strip() 30 | except Exception: 31 | default_username = '' 32 | username = prompt("Please enter your GitHub username:", default=default_username) 33 | password = getpass.getpass("Please enter your GitHub password: ") 34 | 35 | repository = env.repo.rsplit(":", 1)[-1].replace(".git", "") 36 | response = requests.get("https://api.github.com/repos/%s/keys" % repository, auth=(username, password)) 37 | if response.status_code == 401 and response.headers.get('X-GitHub-OTP', '').startswith('required'): 38 | headers['X-GitHub-OTP'] = prompt('Please enter the Two-Factor-Auth code:') 39 | response = requests.get("https://api.github.com/repos/%s/keys" % repository, 
@task
def upload_ssh_deploy_key():
    """Ensure the deploy user has an SSH keypair and register its public key
    as a deploy key of the GitHub repository named in ``env.repo``.

    Prompts for GitHub credentials (with optional two-factor OTP) until a
    key listing succeeds, then POSTs the key only if it is not present yet.

    NOTE(review): password-based Basic auth against the GitHub API is
    deprecated; this will need token auth for current GitHub.
    """
    if not files.exists("/home/%(user)s/.ssh/id_rsa.pub" % env):
        # Bug fix: this check previously tested the literal, uninterpolated
        # string "/home/%(user)s/.ssh/" (the "% env" was missing), so the
        # mkdir always ran.
        if not files.exists("/home/%(user)s/.ssh/" % env):
            run("mkdir -p /home/%(user)s/.ssh/" % env)
        run("ssh-keygen -t rsa -f '/home/%(user)s/.ssh/id_rsa' -P ''" % env)

    output = StringIO.StringIO()
    get("/home/%(user)s/.ssh/id_rsa.pub" % env, output)
    output.seek(0)
    ssh_key = output.read()

    logged_in = False
    headers = {}
    while not logged_in:
        try:
            default_username = subprocess.check_output(["git", "config", "--get", "github.user"]).strip()
        except Exception:
            default_username = ''
        username = prompt("Please enter your GitHub username:", default=default_username)
        password = getpass.getpass("Please enter your GitHub password: ")

        # "git@github.com:owner/repo.git" -> "owner/repo"
        repository = env.repo.rsplit(":", 1)[-1].replace(".git", "")
        keys_url = "https://api.github.com/repos/%s/keys" % repository
        response = requests.get(keys_url, auth=(username, password))
        if response.status_code == 401 and response.headers.get('X-GitHub-OTP', '').startswith('required'):
            # Account uses two-factor auth; retry the request with the OTP.
            headers['X-GitHub-OTP'] = prompt('Please enter the Two-Factor-Auth code:')
            response = requests.get(keys_url, auth=(username, password), headers=headers)
        response = json.loads(response.content)

        if 'message' in response:
            # Error payloads are dicts with a 'message'; show it and retry.
            print(response['message'])
        else:
            logged_in = True

    # Register the key only when no existing deploy key matches ours.
    match = [x for x in response if x.get("key") in ssh_key]
    if not match:
        data = json.dumps({'key': ssh_key})
        response = requests.post(keys_url, auth=(username, password), data=data, headers=headers)
`8b d8' 88 V888 23 | Y88888P YP YP `Y88P' Y88888P 88 YP Y888888P `Y88P' VP V8P 24 | 25 | ============================================================================ 26 | """ 27 | CAUTION = """ 28 | =============================================================== 29 | .o88b. .d8b. db db d888888b d888888b .d88b. d8b db db 30 | d8P Y8 d8' `8b 88 88 `~~88~~' `88' .8P Y8. 888o 88 88 31 | 8P 88ooo88 88 88 88 88 88 88 88V8o 88 YP 32 | 8b 88~~~88 88 88 88 88 88 88 88 V8o88 33 | Y8b d8 88 88 88b d88 88 .88. `8b d8' 88 V888 db 34 | `Y88P' YP YP ~Y8888P' YP Y888888P `Y88P' VP V8P YP 35 | =============================================================== 36 | """ 37 | 38 | 39 | 40 | 41 | 42 | DOMAIN_DICT_DEPRECATION_WARNING = DEPRECATED + """ 43 | - Please use a dict to describe domains in deployments.py , e.g. 44 | 45 | 'domains': { 46 | 'main': ['domain.tld'], 47 | 'multisite1': ['domain2.tld'], 48 | } 49 | ================================================================================= 50 | """ 51 | 52 | DATABASES_DICT_DEPRECATION_WARNING = DEPRECATED + """ 53 | - Please use a dict to describe databases in deployments.py , e.g. 
@task
def init():
    """Bootstrap a fresh deployment.

    Creates the directory layout, uploads the deploy key, clones the
    repository (git or svn), writes redis/supervisor/nginx config files and
    runs the configured checkout tool (buildout or virtualenv).
    """
    if files.exists(os.path.join(env.path, 'bin')):
        print("buildout environment exists already")
        return
    upload_ssh_deploy_key()
    run('mkdir -p %(path)s' % env)
    run('mkdir -p %(logdir)s' % env)
    run('mkdir -p %(path)s../tmp' % env)
    run('mkdir -p %(path)s../config' % env)
    if env.repo.startswith('git'):
        run('cd %(path)s; git clone -b %(branch)s %(repo)s .' % env)
        git.update()
    elif env.repo.startswith('ssh+svn'):
        run('cd %(path)s; svn co %(repo)s' % env)

    if config.sites['main']['redis']['enabled']:
        redis_update_config_file()

    # Idiom fix: plain truthiness instead of "== True" (matches the redis
    # check above; the flags are booleans).
    if config.sites["main"]['supervisor']['use_global_supervisord']:
        supervisor_update_config_file()
    else:
        supervisor_update_config_file(load_config=False)
        # This can fail if the supervisor daemon is already running.
        run(config.sites["main"]['supervisor']['supervisord_command'])
        supervisor_update_config_file(load_config=True)

    if config.sites["main"]['nginx']['enabled']:
        nginx_update_config_file()

    tool = config.sites['main'].get('checkout', {}).get('tool')
    if tool == "buildout":
        run('cd %(path)s; sh init.sh -c %(buildout_cfg)s' % env)
        django_utils.append_settings()
    elif tool == "virtualenv":
        import virtualenv
        virtualenv.create()
        django_utils.append_settings()
        django_utils.manage("syncdb --all --noinput")
        django_utils.manage("migrate --fake")
    else:
        # Typo fixed in the user-facing warning ("Couldnt" -> "Couldn't").
        print("WARNING: Couldn't find [checkout] tool - please set it to either virtualenv or buildout in your config.ini")
        print("Got tool: %s" % tool)
        django_utils.append_settings()


@task
def upload_ssl():
    """
    Upload the SSL key and certificate to the directories and with the filenames
    specified in your settings.
    """
    for site, site_dict in config.sites.items():
        ssl_key_path = prompt("SSL Key path (%s):" % site)
        ssl_cert_path = prompt("SSL Certificate path (%s):" % site)
        put(ssl_key_path, site_dict.get("deployment").get("ssl_key_path"))
        put(ssl_cert_path, site_dict.get("deployment").get("ssl_cert_path"))
class EnvConfigParser(ConfigParser.SafeConfigParser):
    """A config parser that can handle "namespaced" sections.

    A section ``[base:some-env]`` overlays ``[base]`` when queried with
    ``env='some-env'``::

        [base]
        name = base

        [base:some-env]
        name = some-env
    """

    def _concat(self, parent, child):
        # "base", "env" -> "base:env"
        return '%s:%s' % (parent, child)

    def items(self, section, raw=False, vars=None, env=None):
        """Return merged (option, value) pairs; env-section values win."""
        merged = {}
        try:
            merged.update(dict(ConfigParser.SafeConfigParser.items(self, section, raw, vars)))
        except ConfigParser.NoSectionError:
            pass
        if env:
            try:
                merged.update(dict(ConfigParser.SafeConfigParser.items(self, self._concat(section, env), raw, vars)))
            except ConfigParser.NoSectionError:
                pass
        if not merged:
            raise ConfigParser.NoSectionError(self._concat(section, env) if env else section)
        return tuple(merged.items())

    def get(self, section, option, raw=False, vars=None, env=None):
        """Get an option, preferring the env-specific section when present."""
        if env and self.has_section(self._concat(section, env)):
            try:
                return ConfigParser.SafeConfigParser.get(self, self._concat(section, env), option, raw, vars)
            except ConfigParser.NoOptionError:
                # Fall through to the base section unless there is none.
                if not self.has_section(section):
                    raise
        return ConfigParser.SafeConfigParser.get(self, section, option, raw, vars)

    def _get(self, section, conv, option, env=None):
        return conv(self.get(section, option, env=env))

    def getint(self, section, option, env=None):
        return self._get(section, int, option, env)

    def getfloat(self, section, option, env=None):
        return self._get(section, float, option, env)

    def getboolean(self, section, option, env=None):
        raw_value = self.get(section, option, env=env)
        if raw_value.lower() not in self._boolean_states:
            raise ValueError('Not a boolean: %s' % raw_value)
        return self._boolean_states[raw_value.lower()]

    def has_section(self, section, env=None, strict=False):
        """True if the section exists; with env, also checks "section:env".
        strict=True requires the env-specific section itself."""
        if not env:
            return ConfigParser.SafeConfigParser.has_section(self, section)
        env_hit = ConfigParser.SafeConfigParser.has_section(self, self._concat(section, env))
        if strict:
            return env_hit
        return ConfigParser.SafeConfigParser.has_section(self, section) or env_hit

    def section_namespaces(self, section):
        """List namespaces defined for *section*; the bare section counts
        as "main"."""
        namespaces = []
        for name in self.sections():
            parts = name.split(":")
            if parts[0] != section:
                continue
            namespaces.append("main" if len(parts) == 1 else parts[1])
        return namespaces

    def _interpolate(self, section, option, rawval, vars):
        # Disable %-interpolation entirely; values are returned verbatim.
        return rawval
@task
def update():
    """Update the server checkout to env.branch, guarding against
    uncommitted changes on the server (offers to show the diff first)."""
    diff_stat = run("cd %(path)s; git --no-pager diff --stat" % env)
    if "files changed" in diff_stat:
        print(CAUTION)
        print("You have local file changes to the git repository on the server. Run 'fab %s git.reset' to remove them, "
              "or keep them by applying the diff locally with the command 'git apply filename.diff' and upload it to your git host" % env.identifier)
        print("")
        print("You now have the following options:")
        print("")
        print("[D]ownload diff")
        print("Continue and [R]eset changes")
        print("[E]xit")
        choice = prompt("What do you want to do?", default="D").lower()
        if choice == "d":
            server_diff = run(("cd %(path)s; git diff --color .") % env)
            # Pad the terminal so the diff stands out from fabric's output.
            for _ in range(1, 50):
                print("")
            print(server_diff)
            for _ in range(1, 5):
                print("")
            exit()
        elif choice == "e":
            exit()
    run("cd %(path)s; find . -iname '*.pyc' -delete" % env)
    run("cd %(path)s; git fetch origin" % env)
    run("cd %(path)s; git reset --hard" % env)
    run("cd %(path)s; git checkout %(branch)s" % env)
    run("cd %(path)s; git pull origin %(branch)s" % env)
    if exists(posixpath.join(env.path, ".gitmodules")):
        run("cd %(path)s; git submodule init" % env)
        run("cd %(path)s; git submodule update" % env)
    append_settings()
@task
def diff(what=''):
    """Show a git diff of the server checkout; *what* is appended verbatim."""
    run(("cd %(path)s; git --no-pager diff " + what) % env)


@task
def status():
    """Show git status of the server checkout."""
    run("cd %(path)s; git status" % env)


@task
def reset():
    """
    discard all non-committed changes
    """
    run("cd %(path)s; find . -iname '*.pyc' -delete" % env)
    run("cd %(path)s; git reset --hard HEAD" % env)


@task
def incoming(remote='origin', branch=None):
    """
    Displays incoming commits
    """
    branch = branch or env.branch
    pretty = "--pretty=format:'%%C(yellow)%%h%%C(reset) - %%s %%C(bold blue)<%%an>%%C(reset)'"
    command = "cd %(path)s; git fetch " + remote + " && git log --oneline " + pretty + " .." + remote + '/' + branch
    run(command % env)
def local_branch_is_dirty(ignore_untracked_files=True):
    """Return True if the *local* working copy has uncommitted changes."""
    untracked_flag = '--untracked-files=no' if ignore_untracked_files else ''
    porcelain = local('git status %s --porcelain' % untracked_flag, capture=True)
    return porcelain != ''


def local_branch_matches_remote():
    """Return True if the currently checked-out local branch equals
    the deployment target branch (env.branch)."""
    current_branch = local(
        'git rev-parse --symbolic-full-name --abbrev-ref HEAD',
        capture=True).strip()
    return current_branch == env.branch.strip()
class log_output():
    """Context manager that captures writes to stdout/stderr into the
    module-level ``logged_output`` dict.

    NOTE(review): sys.stdout/sys.stderr are swapped in __init__ (not
    __enter__) and are never restored by __exit__, so they stay wrapped for
    the rest of the process. This looks intentional (output accumulates
    across calls) -- confirm before changing.
    """

    def __init__(self):
        # Wrap the current streams and install the wrappers immediately.
        self.stdout_logger = logger.Logger(sys.stdout)
        self.stderr_logger = logger.Logger(sys.stderr)
        sys.stdout = self.stdout_logger
        sys.stderr = self.stderr_logger

    def __enter__(self):
        # Each with-block starts with empty capture buffers.
        self.stdout_logger.clear()
        self.stderr_logger.clear()

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Append whatever was captured during the block to the global log.
        logged_output['stdout'] += self.stdout_logger.get_log()
        logged_output['stderr'] += self.stderr_logger.get_log()
def register_deployment(func):
    """Decorator for deploy tasks: after the wrapped task runs, records the
    deployment (commit, author, diff URL, captured output) with New Relic's
    deployment-tracking API via curl."""
    @wraps(func)
    def with_logging(*args, **kwargs):
        # Commit deployed *before* the task runs (empty repo -> False).
        pre_hash = run("cd %(path)s; git --no-pager log -1 --oneline %(branch)s --pretty='%%h'" % env)
        if pre_hash == "":
            pre_hash = False

        result = func(*args, **kwargs)

        try:
            handle = subprocess.check_output(["git", "config", "--get", "github.user"]).strip()
        except subprocess.CalledProcessError:
            handle = prompt("Please enter your GitHub username:")

        try:
            email = subprocess.check_output(["git", "config", "--get", "user.email"]).strip()
        except subprocess.CalledProcessError:
            email = prompt("Please enter your email address:")

        user = "%s (%s)" % (handle, email)

        log_base = "cd app && git --no-pager log -1 --oneline"
        commit_hash = run(log_base + " --pretty=%h")
        commit_message = run(log_base + " --pretty=%s")
        commit_author_name = run(log_base + " --pretty=%aN")

        # "git@host:owner/repo.git" -> "host/owner/repo"
        repo_slug = run("cd app && git config --get remote.origin.url").replace(
            "git@", "", 1).replace(":", "/", 1).replace(".git", "", 1)

        if pre_hash and pre_hash != commit_hash:
            diff_url = "https://%s/compare/%s...%s" % (repo_slug, pre_hash, commit_hash)
            msg = "%s from %s (%s)" % (commit_message, commit_author_name, diff_url)
        else:
            url = "https://%s/commit/%s" % (repo_slug, commit_hash)
            if pre_hash and pre_hash == commit_hash:
                msg = "No changes in the repository. %s" % url
            else:
                msg = "No pre-pull commit hash provided. %s" % url

        log = logged_output['stdout']
        if logged_output['stderr']:
            log += "\nErrors:\n%s" % str(logged_output['stderr'])

        options = {'app_name': env['user'], 'user': user, 'description': msg, 'revision': commit_hash, 'changelog': log}

        cmd = 'curl -H "x-api-key:%s"' % env['newrelic']['deployment_tracking_apikey']
        for key, val in options.items():
            cmd += ' --data-urlencode "deployment[%s]=%s"' % (key, val)
        cmd += " https://rpm.newrelic.com/deployments.xml"
        subprocess.call(cmd, shell=True)
        return result
    return with_logging
13 | * Pick and choose the modules and import them in the ``fabfile.py``, e.g.:: 14 | 15 | from fabric.decorators import task 16 | 17 | from dploi_fabric.db import pg # if project uses mysql, import "mysql" instead 18 | from dploi_fabric import supervisor, nginx 19 | from dploi_fabric import git, utils, buildout, south, django_utils, project 20 | 21 | from dploi_fabric.conf import load_settings 22 | 23 | @task 24 | def dev(): 25 | load_settings('dev') 26 | 27 | @task 28 | def stage(): 29 | load_settings('stage') 30 | 31 | @task 32 | def live(): 33 | load_settings('live') 34 | 35 | @task 36 | def deploy(): 37 | pg.dump.run() 38 | git.update() 39 | buildout.run() 40 | south.migrate.run() 41 | django_utils.collectstatic() 42 | supervisor.restart() 43 | supervisor.status() 44 | nginx.update_config_file() 45 | 46 | * in the project root, create a file ``deployment.py`` following this template:: 47 | 48 | project_name = 'awesome_new_website' 49 | 50 | settings = { 51 | 'dev': { 52 | 'hosts': ['yourserver.com'], 53 | 'autostart': True, 54 | 'path': '/home/awesome_new_website-dev/app/', 55 | 'user': 'awesome_new_website-dev', 56 | 'buildout_cfg': 'server_dev.cfg', 57 | 'repo': 'git@github.com:youruser/awesome_new_website.git', 58 | 'branch': 'master', 59 | 'backup_dir': '/home/awesome_new_website-dev/tmp/', # Used for mysql/psql dumps 60 | 'db_name': 'awesome_new_website-dev', 61 | 'db_username': 'awesome_new_website-dev', 62 | 'domains': ['sitename-dev.agency.com', 'www.sitename.com'], 63 | 'domains_redirect': [ 64 | {'domain': 'sitename.com', 'destination_domain': 'www.sitename.com'}, 65 | ], 66 | 'ssl': True, 67 | 'ssl_key_path': '../config/ssl.key', # This must be uploaded manually, possibly by a task in the future 68 | 'ssl_cert_path': '../config/ssl.crt', # This must be uploaded manually, possibly by a task in the future 69 | 'basic_auth': False, 70 | 'basic_auth_path': '../config/htpasswd', # This must be uploaded manually, possibly by a task in the future 71 | }, 
72 | } 73 | 74 | add settings for stage/live as needed. 75 | 76 | 77 | * call ``bin/fab --list`` for a list of commands 78 | 79 | .. note:: when using these tasks, all project-specific tasks have to be decorated 80 | with the ``@task`` decorator from ``fabric.api``. 81 | 82 | Configuration file (config.ini) 83 | =============================== 84 | 85 | Remember to add config.ini, example: 86 | 87 | [static] 88 | 89 | /media/ = py_src/project/media/ 90 | 91 | and/or 92 | 93 | [static] 94 | 95 | /static/ = %(static_files)s 96 | 97 | Celery 98 | ------ 99 | 100 | The ``[celery]`` section has some special configuration parameters: 101 | 102 | * ``version`` (default: empty): set the celery version. Only the value ``3.1`` is recognized and triggers the 103 | usage of the ``celery`` command instead of the django-celery based ``manage.py celeryd``. 104 | * ``app`` (default: ``project``): the module that contains the ``celery.py`` (see the `celery documentation`_) 105 | 106 | Other options 107 | ------------- 108 | 109 | [checkout] 110 | 111 | tool = buildout (default) 112 | 113 | tool = virtualenv 114 | 115 | 116 | [celery] (if the section is present, celery is enabled) 117 | 118 | 119 | [django] 120 | 121 | base = . 122 | 123 | base = project/ 124 | 125 | base = py_src/project (doesn't work with buildout yet, as it would try to access py_src/project/bin/django) 126 | 127 | 128 | append_settings = true 129 | 130 | append_settings = false 131 | 132 | [static] 133 | 134 | (see above) 135 | 136 | /url-path/ = rel-path-filesystem/ 137 | 138 | [sendfile] 139 | /protected/media/ = ../upload/ 140 | 141 | ..
@task
def stop():
    """Stop every supervisor process group of every configured site."""
    for site, site_config in config.sites.items():
        supervisorctl = site_config['supervisor']['supervisorctl_command']
        run('%s stop %s:*' % (supervisorctl, get_group_name(site, site_config)))


@task
def start():
    """Start every supervisor process group of every configured site."""
    for site, site_config in config.sites.items():
        supervisorctl = site_config['supervisor']['supervisorctl_command']
        run('%s start %s:*' % (supervisorctl, get_group_name(site, site_config)))


@task
def restart():
    """Restart every supervisor process group of every configured site."""
    for site, site_config in config.sites.items():
        supervisorctl = site_config['supervisor']['supervisorctl_command']
        run('%s restart %s:*' % (supervisorctl, get_group_name(site, site_config)))


@task
def status():
    """
    print status of the supervisor process

    Note: "status" does not yet support the group syntax
    """
    for site, site_config in config.sites.items():
        group_name = get_group_name(site, site_config)
        supervisorctl = site_config['supervisor']['supervisorctl_command']
        for process_name in site_config.processes:
            run('%s status %s:%s' % (supervisorctl, group_name, process_name))


@task
def add():
    """Register each process with supervisor, one process at a time."""
    for site, site_config in config.sites.items():
        group_name = get_group_name(site, site_config)
        supervisorctl = site_config['supervisor']['supervisorctl_command']
        for process_name in site_config.processes:
            run('%s add %s:%s' % (supervisorctl, group_name, process_name))
def get_group_name(site, site_config):
    """Return the supervisor group name for a site: "<deploy-user>-<site>"."""
    deploy_user = site_config['deployment']['user']
    return '{0}-{1}'.format(deploy_user, site)
@task
def update_config_file(dryrun=False, update_command=update, load_config=True):
    """Render supervisor.conf (all process blocks plus the group block) and
    supervisord.conf, then upload both to ../config/.

    dryrun: print the rendered files instead of uploading.
    update_command: callable used to make supervisor reload (default: update).
    load_config: when False, only write the files without reloading.
    """
    output = ''
    groups = {}
    for site, site_config in config.sites.items():
        template_path = site_config['supervisor']['template']
        group_template_path = site_config['supervisor']['group_template']
        group_name = get_group_name(site, site_config)
        groups[group_name] = []
        for process_name, process_dict in site_config.processes.items():
            context_dict = copy(site_config)
            env_dict = {
                'HOME': site_config.deployment['home'],
                'USER': site_config.deployment['user'],
                'PYTHONPATH': ":".join([
                    site_config.deployment['path'],
                    posixpath.join(site_config.deployment['path'], site_config.get("django").get("base") + '/'),
                ]),
            }
            env_dict.update(site_config.environment)
            context_dict.update({
                'process_name': process_name,
                'process_cmd': process_dict["command"],
                'socket': process_dict["socket"],
                'env': env_dict,
                'priority': process_dict.get('priority', 200),
                'autostart': 'True' if getattr(env, 'autostart', True) else 'False',
                'killasgroup': process_dict.get('killasgroup', None),
                # Bug fix: this previously read the 'killasgroup' key, so an
                # explicit per-process 'stopasgroup' setting was silently
                # ignored. Falls back to killasgroup so existing configs
                # render exactly as before.
                'stopasgroup': process_dict.get('stopasgroup', process_dict.get('killasgroup', None)),
                'stopwaitsecs': process_dict.get('stopwaitsecs', None),
            })
            output += render_template(template_path, context_dict)
            groups[group_name].append(process_name)
    output += render_template(group_template_path, {'groups': groups})
    conf_path = posixpath.abspath(posixpath.join(config.sites["main"].deployment['path'], '..', 'config'))
    path = posixpath.join(conf_path, 'supervisor.conf')

    daemon_template_path = site_config['supervisor']['daemon_template']
    supervisord_conf_path = posixpath.join(conf_path, 'supervisord.conf')
    supervisord_conf_output = render_template(daemon_template_path, copy(site_config))

    if dryrun:
        print(path + ':')
        print(output)
        print(daemon_template_path + ':')
        print(supervisord_conf_output)
    else:
        safe_put(StringIO.StringIO(output), path)
        safe_put(StringIO.StringIO(supervisord_conf_output), supervisord_conf_path)
    if load_config:
        update_command()
14 | 15 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest 16 | 17 | help: 18 | @echo "Please use \`make ' where is one of" 19 | @echo " html to make standalone HTML files" 20 | @echo " dirhtml to make HTML files named index.html in directories" 21 | @echo " singlehtml to make a single large HTML file" 22 | @echo " pickle to make pickle files" 23 | @echo " json to make JSON files" 24 | @echo " htmlhelp to make HTML files and a HTML help project" 25 | @echo " qthelp to make HTML files and a qthelp project" 26 | @echo " devhelp to make HTML files and a Devhelp project" 27 | @echo " epub to make an epub" 28 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 29 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 30 | @echo " text to make text files" 31 | @echo " man to make manual pages" 32 | @echo " changes to make an overview of all changed/added/deprecated items" 33 | @echo " linkcheck to check all external links for integrity" 34 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 35 | 36 | clean: 37 | -rm -rf $(BUILDDIR)/* 38 | 39 | html: 40 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 41 | @echo 42 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 43 | 44 | dirhtml: 45 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 46 | @echo 47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 48 | 49 | singlehtml: 50 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 51 | @echo 52 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 53 | 54 | pickle: 55 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 56 | @echo 57 | @echo "Build finished; now you can process the pickle files." 
58 | 59 | json: 60 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 61 | @echo 62 | @echo "Build finished; now you can process the JSON files." 63 | 64 | htmlhelp: 65 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 66 | @echo 67 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 68 | ".hhp project file in $(BUILDDIR)/htmlhelp." 69 | 70 | qthelp: 71 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 72 | @echo 73 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 74 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 75 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/dploi-fabric.qhcp" 76 | @echo "To view the help file:" 77 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/dploi-fabric.qhc" 78 | 79 | devhelp: 80 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 81 | @echo 82 | @echo "Build finished." 83 | @echo "To view the help file:" 84 | @echo "# mkdir -p $$HOME/.local/share/devhelp/dploi-fabric" 85 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/dploi-fabric" 86 | @echo "# devhelp" 87 | 88 | epub: 89 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 90 | @echo 91 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 92 | 93 | latex: 94 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 95 | @echo 96 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 97 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 98 | "(use \`make latexpdf' here to do that automatically)." 99 | 100 | latexpdf: 101 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 102 | @echo "Running LaTeX files through pdflatex..." 103 | make -C $(BUILDDIR)/latex all-pdf 104 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 105 | 106 | text: 107 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 108 | @echo 109 | @echo "Build finished. 
@ECHO OFF

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
if NOT "%PAPER%" == "" (
	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
)

if "%1" == "" goto help

if "%1" == "help" (
	:help
	echo.Please use `make ^<target^>` where ^<target^> is one of
	echo.  html       to make standalone HTML files
	echo.  dirhtml    to make HTML files named index.html in directories
	echo.  singlehtml to make a single large HTML file
	echo.  pickle     to make pickle files
	echo.  json       to make JSON files
	echo.  htmlhelp   to make HTML files and a HTML help project
	echo.  qthelp     to make HTML files and a qthelp project
	echo.  devhelp    to make HTML files and a Devhelp project
	echo.  epub       to make an epub
	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
	echo.  text       to make text files
	echo.  man        to make manual pages
	echo.  changes    to make an overview over all changed/added/deprecated items
	echo.  linkcheck  to check all external links for integrity
	echo.  doctest    to run all doctests embedded in the documentation if enabled
	goto end
)

if "%1" == "clean" (
	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
	del /q /s %BUILDDIR%\*
	goto end
)

if "%1" == "html" (
	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
	goto end
)

if "%1" == "dirhtml" (
	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
	goto end
)

if "%1" == "singlehtml" (
	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
	goto end
)

if "%1" == "pickle" (
	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the pickle files.
	goto end
)

if "%1" == "json" (
	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the JSON files.
	goto end
)

if "%1" == "htmlhelp" (
	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
	goto end
)

if "%1" == "qthelp" (
	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\dploi-fabric.qhcp
	echo.To view the help file:
	REM qcollectiongenerator produces a .qhc file (was mistyped as .ghc)
	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\dploi-fabric.qhc
	goto end
)

if "%1" == "devhelp" (
	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished.
	goto end
)

if "%1" == "epub" (
	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The epub file is in %BUILDDIR%/epub.
	goto end
)

if "%1" == "latex" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
	goto end
)

if "%1" == "text" (
	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The text files are in %BUILDDIR%/text.
	goto end
)

if "%1" == "man" (
	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The manual pages are in %BUILDDIR%/man.
	goto end
)

if "%1" == "changes" (
	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
	if errorlevel 1 exit /b 1
	echo.
	echo.The overview file is in %BUILDDIR%/changes.
	goto end
)

if "%1" == "linkcheck" (
	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
	if errorlevel 1 exit /b 1
	echo.
	echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
	goto end
)

if "%1" == "doctest" (
	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
	if errorlevel 1 exit /b 1
	echo.
	echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
	goto end
)

:end
{# Django-template rendered nginx vhost. Context: deployment, domains, nginx,
   static, sendfile, www_processes (see dploi_fabric.nginx / utils.config). #}
upstream {{ domains|slugify }} {
    {% for process in www_processes %}
    server unix:{{ process.socket }} fail_timeout=0;
    {% endfor %}
}
server {
    {% if deployment.ssl %}
    listen {{ deployment.bind_ip }}:443;
    ssl on;
    ssl_certificate {{ deployment.ssl_cert_path }};
    ssl_certificate_key {{ deployment.ssl_key_path }};
    # SSLv3 is broken (POODLE) and RC4 is cryptographically weak:
    # allow TLS only and exclude RC4 from the cipher list.
    ssl_ciphers HIGH:!aNULL:!MD5:!RC4;
    ssl_prefer_server_ciphers on;
    ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
    {% else %}
    listen {{ deployment.bind_ip }}:80;
    {% endif %}
    server_name {{ domains }};

    access_log {{ deployment.logdir }}nginx/access.log;
    error_log {{ deployment.logdir }}nginx/error.log;

    location / {
        proxy_pass http://{{ domains|slugify }};
        proxy_redirect off;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        {% for key, value in nginx.location_settings.items %}
        {{ key }} {{ value }};
        {% endfor %}
        client_body_buffer_size 128k;
        proxy_connect_timeout 90;
        proxy_send_timeout 90;
        proxy_read_timeout 90;
        proxy_buffer_size 4k;
        proxy_buffers 4 32k;
        proxy_busy_buffers_size 64k;
        proxy_temp_file_write_size 64k;
        {% if deployment.ssl %}
        proxy_set_header X-Forwarded-Protocol https;
        proxy_set_header X-Forwarded-SSL on;
        {% else %}
        proxy_set_header X-Forwarded-Protocol http;
        proxy_set_header X-Forwarded-SSL off;
        {% endif %}
    }
    {# Endpoints that may receive large request bodies (uploads). #}
    {% for location, max_body_size in deployment.big_body_endpoints %}
    location {{ location }} {
        proxy_pass http://{{ domains|slugify }};
        proxy_redirect off;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        client_max_body_size {{ max_body_size }};
        client_body_buffer_size 128k;
        proxy_connect_timeout 90;
        proxy_send_timeout 90;
        proxy_read_timeout 90;
        proxy_buffer_size 4k;
        proxy_buffers 4 32k;
        proxy_busy_buffers_size 64k;
        proxy_temp_file_write_size 64k;
        {% if deployment.ssl %}
        proxy_set_header X-Forwarded-Protocol https;
        proxy_set_header X-Forwarded-SSL on;
        {% else %}
        proxy_set_header X-Forwarded-Protocol http;
        proxy_set_header X-Forwarded-SSL off;
        {% endif %}
    }
    {% endfor %}
    {# Static files served directly by nginx. #}
    {% for url, relpath in static.items %}
    location {{ url }} {
        access_log off;
        gzip_static on;
        alias {{ deployment.path }}{{ relpath }};
    }
    {% endfor %}

    {# X-Accel-Redirect protected downloads. #}
    {% for url, relpath in sendfile.items %}
    location {{ url }} {
        internal;
        alias {{ deployment.path }}{{ relpath }};
    }
    {% endfor %}

    {% for redirect in deployment.url_redirect %}
    rewrite {{ redirect.source }} {{ redirect.destination }} {{ redirect.options|default:"permanent" }};
    {% endfor %}

    {% if deployment.basic_auth %}
    auth_basic "Restricted";
    auth_basic_user_file {{ deployment.basic_auth_path }};
    {% endif %}

    {% for codes, filename, root in deployment.static_error_pages %}
    error_page {{ codes|join:' ' }} /{{ filename }};

    location = /{{ filename }} {
        root {{ root }};
        allow all;
    }
    {% endfor %}
}
{# When SSL is on, redirect plain HTTP to HTTPS. #}
{% if deployment.ssl %}
server {
    listen {{ deployment.bind_ip }}:80;
    server_name {{ domains }};
    rewrite ^(.*) https://$host$1 permanent;
}
{% endif %}
{# Redirect secondary domains to the canonical domain. #}
{% for redirect in deployment.domains_redirect %}
server {
    listen {{ deployment.bind_ip }}:80;
    server_name {{ redirect.domain }};
    rewrite ^(.*) http://{{ redirect.destination_domain }}$1 permanent;
    access_log {{ deployment.logdir }}nginx/access.log;
    error_log {{ deployment.logdir }}nginx/error.log;
}

{% if deployment.ssl %}
server {
    listen {{ deployment.bind_ip }}:443;
    ssl on;
    ssl_certificate {{ deployment.ssl_cert_path }};
    ssl_certificate_key {{ deployment.ssl_key_path }};
    # Same hardened TLS settings as the main server block above.
    ssl_ciphers HIGH:!aNULL:!MD5:!RC4;
    ssl_prefer_server_ciphers on;
    ssl_protocols TLSv1 TLSv1.1 TLSv1.2;

    server_name {{ redirect.domain }};
    rewrite ^(.*) http://{{ redirect.destination_domain }}$1 permanent;
    access_log {{ deployment.logdir }}nginx/access.log;
    error_log {{ deployment.logdir }}nginx/error.log;
}
{% endif %}
{% endfor %}
# default to current dir 18 | if exists(config_file): 19 | output = StringIO.StringIO() 20 | get(u"%s" % config_file, output) 21 | output.seek(0) 22 | config.readfp(output) 23 | 24 | try: 25 | tool = config.get("checkout", "tool") 26 | except (ConfigParser.NoSectionError, ConfigParser.NoOptionError): 27 | tool = "buildout" # default to buildout 28 | 29 | try: 30 | django_base = config.get("django", "base") 31 | except (ConfigParser.NoSectionError, ConfigParser.NoOptionError): 32 | pass 33 | if django_base == ".": 34 | django_base = "" 35 | if tool == "buildout": 36 | cmd = os.path.join(env.path, django_base, "bin/django") 37 | django_settings = os.path.join(env.path, django_base, "settings.py") 38 | else: 39 | cmd = "%s %s" % (os.path.join(env.path, "bin/python"), os.path.join(env.path, django_base, "manage.py")) 40 | django_settings = os.path.join(env.path, django_base, "settings.py") 41 | dictionary['django_exec'] = cmd 42 | dictionary['django_settings'] = django_settings 43 | dictionary['checkout_tool'] = tool 44 | return dictionary 45 | 46 | def django_settings_file(dictionary = {}): # TODO: Remove this and change dependants to use utils.config 47 | return django_exec().get("django_settings") 48 | 49 | @task 50 | def manage(*args): 51 | """ 52 | Proxy for manage.py 53 | """ 54 | config.django_manage(" ".join(args)) 55 | 56 | @task 57 | def collectstatic(staticdir='static'): # As defined in puppet config 58 | # TODO: Use utils.config 59 | run(('cd %(path)s; mkdir -p ' + staticdir) % env) 60 | manage("collectstatic", "--noinput", "--link") 61 | 62 | @task 63 | def load_fixture(file_path): 64 | remote_path = put(file_path, '~/tmp/')[0] 65 | manage('loaddata %s' % remote_path) 66 | run('rm %s' % remote_path) 67 | 68 | @task 69 | def append_settings(): 70 | # TODO: make it work with multisites! 
71 | append = config.sites["main"].get("django").get("append_settings", False) 72 | if append: 73 | site_config = config.sites["main"] 74 | settings_file_path = django_settings_file() 75 | print "Appending auto generated settings to", settings_file_path 76 | output = StringIO.StringIO() 77 | get(u"%s" % os.path.join(env.path, "../config/django.py"), output) 78 | output.seek(0) 79 | manual_settings = output.read() 80 | 81 | # START OF DIRTY DATABASE HACK 82 | 83 | 84 | additional_settings = """if "DATABASES" in locals():\n""" 85 | # DATABASES 86 | #additional_settings = "DATABASES = %s\n" % pformat(config.sites["main"].get("deployment").get("databases")) 87 | additional_settings +=" DATABASES = %s\n" % pformat(config.sites["main"].get("deployment").get("databases")) 88 | 89 | db_old_dict = config.sites["main"].get("deployment").get("databases")["default"] 90 | db_old_dict["ENGINE"] = db_old_dict["ENGINE"].replace("django.db.backends.", "") 91 | additional_settings += """else: 92 | DATABASE_ENGINE = "%(ENGINE)s" 93 | DATABASE_NAME = "%(NAME)s" 94 | DATABASE_USER = "%(USER)s" 95 | DATABASE_PASSWORD = "%(PASSWORD)s" 96 | DATABASE_HOST = "%(HOST)s" 97 | """ % db_old_dict 98 | 99 | # // END OF DIRTY DATABASE HACK 100 | 101 | # CACHES 102 | processes = config.sites["main"].get("processes") 103 | cache_dict = { 104 | 'default': { 105 | 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 106 | 'LOCATION': 'unix:%s' % [processes[x] for x in processes if processes[x]["type"] == "memcached"][0].get("socket"), 107 | } 108 | } 109 | additional_settings += "CACHES = %s\n" % pformat(cache_dict) 110 | 111 | # PATHS 112 | additional_settings += """ 113 | STATIC_ROOT = "%(static_root)s" 114 | MEDIA_ROOT = "%(media_root)s" 115 | """ % { 116 | 'static_root': posixpath.join(site_config.get("deployment").get("path"), STATIC_COLLECTED), 117 | 'media_root': posixpath.join(site_config.get("deployment").get("path"), DATA_DIRECTORY, 'media/'), 118 | } 119 | 120 | output = 
StringIO.StringIO() 121 | get(settings_file_path, output) 122 | output.seek(0) 123 | settings_file = output.read() 124 | 125 | 126 | run("mkdir -p %s" % posixpath.join(site_config.get("deployment").get("path"), "_gen/")) 127 | put(StringIO.StringIO("%s\n%s\n%s" % (settings_file, additional_settings, manual_settings)), site_config.get("deployment").get("generated_settings_path")) 128 | put(StringIO.StringIO(""), posixpath.join(site_config.get("deployment").get("path"), "_gen/__init__.py")) -------------------------------------------------------------------------------- /dploi_fabric/tests.py: -------------------------------------------------------------------------------- 1 | import ConfigParser 2 | import StringIO 3 | import unittest 4 | from dploi_fabric.utils import EnvConfigParser, Configuration, _AttributeDict, STATIC_COLLECTED 5 | 6 | class TestConfigurationTestCase(unittest.TestCase): 7 | test_config = """ 8 | [django] 9 | base = code_checkout/ 10 | append_settings = true 11 | cmd = bin/whatever 12 | 13 | [django:multisite1] 14 | base = . 
import ConfigParser
import StringIO
import unittest

from dploi_fabric.utils import EnvConfigParser, Configuration, _AttributeDict, STATIC_COLLECTED


class TestConfigurationTestCase(unittest.TestCase):
    """Tests for Configuration.load_sites() and per-site settings."""

    test_config = """
[django]
base = code_checkout/
append_settings = true
cmd = bin/whatever

[django:multisite1]
base = .
append_settings = false
cmd = bin/django

[static]
/static/ = %(static_collected)s

"""

    def setUp(self):
        self.env_dict = {
            'host_string': 'some.server.tld',
            'hosts': ['some.server.tld'],
            'path': '/home/username/app/',
            'user': 'username',
            'buildout_cfg': 'buildout.cfg',
            'repo': 'git@github.com:user/repo.git',
            'branch': 'master',
            'backup_dir': '/home/username/tmp/',
            'db_name': 'db-name',
            'db_username': 'db-name',
            'identifier': 'dev',
            'domains': {
                'main': ['main.domain.tld'],
                'multisite1': ['multisite1.domain.tld'],
            },
            'celery': {
                'concurrency': 32,
            }
        }
        self.sites = Configuration().load_sites(self.test_config, self.env_dict)

    def test_value_types(self):
        config = self.sites["main"]
        self.assertFalse(config.get("celery").get("enabled"))
        self.assertEqual(config.django.base, "code_checkout/")

        config = self.sites["multisite1"]
        self.assertEqual(config.django.base, ".")

    def test_celery(self):
        self.sites = Configuration().load_sites(self.test_config + """
[celery]
enabled=true""", self.env_dict)
        config = self.sites["main"]
        self.assertTrue(config.get("celery").get("enabled"))
        self.assertEqual(config.get("celery").get("concurrency"), 32)
        self.assertEqual(config.get("celery").get("maxtasksperchild"), 500)
        self.assertTrue("%s_%s_celeryd" % (config.deployment.get("user"), "main") in config.get("processes"))
        self.assertIn("celeryd -E -B -c 32 --maxtasksperchild 500", config.get("processes").get("%s_%s_celeryd" % (config.deployment.get("user"), "main")).get("command"))

    def test_static(self):
        self.assertEqual(self.sites["main"].get("static").get("/static/"), STATIC_COLLECTED)


class TestInheritConfigParserRead(unittest.TestCase):
    """Tests for EnvConfigParser's [section:env] inheritance semantics."""

    # Note: uses assertEqual throughout -- assertEquals is a deprecated
    # alias and the sibling test case above already uses assertEqual.
    test_config = """
[base]
name = test
type = nginx
count = 5
enable = false
threshold = 1.0

[base:dev]
host = dev.example.com
type = apache
count = 1
enable = True
threshold = 0.9

[other:dev]
foo = bar
"""

    def setUp(self):
        f = StringIO.StringIO(self.test_config)
        self.config = EnvConfigParser()
        self.config.readfp(f)

    def test_items(self):
        items = dict(self.config.items('base', env='dev'))
        self.assertIn('host', items)
        self.assertIn('name', items)

    def test_items_only_env(self):
        self.assertEqual(self.config.items('other', env='dev'), (('foo', 'bar'),))
        self.assertRaises(ConfigParser.NoSectionError, lambda: self.config.items('other'))

    def test_inherited_value(self):
        self.assertEqual(self.config.get('base', 'host', env='dev'), 'dev.example.com')

    def test_value_from_base(self):
        self.assertEqual(self.config.get('base', 'name', env='dev'), 'test')

    def test_overriden_value(self):
        self.assertEqual(self.config.get('base', 'type'), 'nginx')
        self.assertEqual(self.config.get('base', 'type', env='dev'), 'apache')

    def test_correct_exception_on_no_base(self):
        self.assertRaises(ConfigParser.NoOptionError, lambda: self.config.get('other', 'baz', env='dev'))

    def test_int(self):
        self.assertEqual(self.config.getint('base', 'count'), 5)
        self.assertEqual(self.config.getint('base', 'count', env='dev'), 1)

    def test_float(self):
        self.assertEqual(self.config.getfloat('base', 'threshold'), 1.0)
        self.assertEqual(self.config.getfloat('base', 'threshold', env='dev'), 0.9)

    def test_bool(self):
        self.assertFalse(self.config.getboolean('base', 'enable'))
        self.assertEqual(type(self.config.getboolean('base', 'enable')), bool)
        self.assertTrue(self.config.getboolean('base', 'enable', env='dev'))

    def test_has_section(self):
        self.assertTrue(self.config.has_section('base'))
        self.assertTrue(self.config.has_section('base', env='dev'))
        self.assertTrue(self.config.has_section('base', env='stage'))
        self.assertFalse(self.config.has_section('base', env='stage', strict=True))
        self.assertTrue(self.config.has_section('other', env='dev'))
        self.assertFalse(self.config.has_section('other'))

    def test_section_namespaces(self):
        self.assertEqual(self.config.section_namespaces("base"), ["main", "dev"])


if __name__ == '__main__':
    unittest.main()
28 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.viewcode'] 29 | 30 | # Add any paths that contain templates here, relative to this directory. 31 | templates_path = ['_templates'] 32 | 33 | # The suffix of source filenames. 34 | source_suffix = '.rst' 35 | 36 | # The encoding of source files. 37 | #source_encoding = 'utf-8-sig' 38 | 39 | # The master toctree document. 40 | master_doc = 'index' 41 | 42 | # General information about the project. 43 | project = u'dploi-fabric' 44 | copyright = u'2012, Benjamin Wohlwend, Kristian Oellegaard, Stefan Foulis' 45 | 46 | # The version info for the project you're documenting, acts as replacement for 47 | # |version| and |release|, also used in various other places throughout the 48 | # built documents. 49 | # 50 | # The short X.Y version. 51 | version = '0.1.10' 52 | # The full version, including alpha/beta/rc tags. 53 | release = '0.1.10' 54 | 55 | # The language for content autogenerated by Sphinx. Refer to documentation 56 | # for a list of supported languages. 57 | #language = None 58 | 59 | # There are two options for replacing |today|: either, you set today to some 60 | # non-false value, then it is used: 61 | #today = '' 62 | # Else, today_fmt is used as the format for a strftime call. 63 | #today_fmt = '%B %d, %Y' 64 | 65 | # List of patterns, relative to source directory, that match files and 66 | # directories to ignore when looking for source files. 67 | exclude_patterns = ['_build'] 68 | 69 | # The reST default role (used for this markup: `text`) to use for all documents. 70 | #default_role = None 71 | 72 | # If true, '()' will be appended to :func: etc. cross-reference text. 73 | #add_function_parentheses = True 74 | 75 | # If true, the current module name will be prepended to all description 76 | # unit titles (such as .. function::). 77 | #add_module_names = True 78 | 79 | # If true, sectionauthor and moduleauthor directives will be shown in the 80 | # output. 
They are ignored by default. 81 | #show_authors = False 82 | 83 | # The name of the Pygments (syntax highlighting) style to use. 84 | pygments_style = 'sphinx' 85 | 86 | # A list of ignored prefixes for module index sorting. 87 | #modindex_common_prefix = [] 88 | 89 | 90 | # -- Options for HTML output --------------------------------------------------- 91 | 92 | # The theme to use for HTML and HTML Help pages. See the documentation for 93 | # a list of builtin themes. 94 | html_theme = 'default' 95 | 96 | # Theme options are theme-specific and customize the look and feel of a theme 97 | # further. For a list of options available for each theme, see the 98 | # documentation. 99 | #html_theme_options = {} 100 | 101 | # Add any paths that contain custom themes here, relative to this directory. 102 | #html_theme_path = [] 103 | 104 | # The name for this set of Sphinx documents. If None, it defaults to 105 | # " v documentation". 106 | #html_title = None 107 | 108 | # A shorter title for the navigation bar. Default is the same as html_title. 109 | #html_short_title = None 110 | 111 | # The name of an image file (relative to this directory) to place at the top 112 | # of the sidebar. 113 | #html_logo = None 114 | 115 | # The name of an image file (within the static path) to use as favicon of the 116 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 117 | # pixels large. 118 | #html_favicon = None 119 | 120 | # Add any paths that contain custom static files (such as style sheets) here, 121 | # relative to this directory. They are copied after the builtin static files, 122 | # so a file named "default.css" will overwrite the builtin "default.css". 123 | html_static_path = ['_static'] 124 | 125 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 126 | # using the given strftime format. 
127 | #html_last_updated_fmt = '%b %d, %Y' 128 | 129 | # If true, SmartyPants will be used to convert quotes and dashes to 130 | # typographically correct entities. 131 | #html_use_smartypants = True 132 | 133 | # Custom sidebar templates, maps document names to template names. 134 | #html_sidebars = {} 135 | 136 | # Additional templates that should be rendered to pages, maps page names to 137 | # template names. 138 | #html_additional_pages = {} 139 | 140 | # If false, no module index is generated. 141 | #html_domain_indices = True 142 | 143 | # If false, no index is generated. 144 | #html_use_index = True 145 | 146 | # If true, the index is split into individual pages for each letter. 147 | #html_split_index = False 148 | 149 | # If true, links to the reST sources are added to the pages. 150 | #html_show_sourcelink = True 151 | 152 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 153 | #html_show_sphinx = True 154 | 155 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 156 | #html_show_copyright = True 157 | 158 | # If true, an OpenSearch description file will be output, and all pages will 159 | # contain a tag referring to it. The value of this option must be the 160 | # base URL from which the finished HTML is served. 161 | #html_use_opensearch = '' 162 | 163 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 164 | #html_file_suffix = None 165 | 166 | # Output file base name for HTML help builder. 167 | htmlhelp_basename = 'dploi-fabricdoc' 168 | 169 | 170 | # -- Options for LaTeX output -------------------------------------------------- 171 | 172 | # The paper size ('letter' or 'a4'). 173 | #latex_paper_size = 'letter' 174 | 175 | # The font size ('10pt', '11pt' or '12pt'). 176 | #latex_font_size = '10pt' 177 | 178 | # Grouping the document tree into LaTeX files. List of tuples 179 | # (source start file, target name, title, author, documentclass [howto/manual]). 
180 | latex_documents = [ 181 | ('index', 'dploi-fabric.tex', u'dploi-fabric Documentation', 182 | u'Benjamin Wohlwend, Kristian Oellegaard, Stefan Foulis', 'manual'), 183 | ] 184 | 185 | # The name of an image file (relative to this directory) to place at the top of 186 | # the title page. 187 | #latex_logo = None 188 | 189 | # For "manual" documents, if this is true, then toplevel headings are parts, 190 | # not chapters. 191 | #latex_use_parts = False 192 | 193 | # If true, show page references after internal links. 194 | #latex_show_pagerefs = False 195 | 196 | # If true, show URL addresses after external links. 197 | #latex_show_urls = False 198 | 199 | # Additional stuff for the LaTeX preamble. 200 | #latex_preamble = '' 201 | 202 | # Documents to append as an appendix to all manuals. 203 | #latex_appendices = [] 204 | 205 | # If false, no module index is generated. 206 | #latex_domain_indices = True 207 | 208 | 209 | # -- Options for manual page output -------------------------------------------- 210 | 211 | # One entry per manual page. List of tuples 212 | # (source start file, name, description, authors, manual section). 
213 | man_pages = [ 214 | ('index', 'dploi-fabric', u'dploi-fabric Documentation', 215 | [u'Benjamin Wohlwend, Kristian Oellegaard, Stefan Foulis'], 1) 216 | ] 217 | -------------------------------------------------------------------------------- /dploi_fabric/utils.py: -------------------------------------------------------------------------------- 1 | from dploi_fabric.toolbox.template import app_package_path, render_template 2 | import os 3 | import posixpath 4 | 5 | import StringIO 6 | from fabric.operations import run, local, put 7 | from fabric.api import task, env, get 8 | from fabric.contrib.files import exists 9 | from fabric.state import _AttributeDict 10 | 11 | 12 | from .toolbox.datastructures import EnvConfigParser 13 | from .messages import DOMAIN_DICT_DEPRECATION_WARNING 14 | 15 | 16 | STATIC_COLLECTED = "../static/" 17 | DATA_DIRECTORY = "../upload/" 18 | 19 | class Configuration(object): 20 | """ 21 | This class is the only correct source of information for this project. 22 | To reduce the amount of times config.ini is downloaded, it should always 23 | be used from utils.config, which is an instance of Configuration 24 | """ 25 | #: Default values for the configuration 26 | defaults = { 27 | 'django': { 28 | 'base': '.', 29 | 'append_settings': False, 30 | 'cmd': 'bin/django', 31 | 'args': [], 32 | }, 33 | 'checkout': { 34 | 'tool': 'buildout', 35 | }, 36 | 'gunicorn': { 37 | 'workers': 2, 38 | 'maxrequests': 0, 39 | 'timeout': None, 40 | 'bind': None, 41 | 'version': None, 42 | }, 43 | 'celery': { 44 | 'enabled': False, 45 | 'concurrency': 1, 46 | 'maxtasksperchild': 500, 47 | 'loglevel': 'WARNING', 48 | 'celerycam': False, 49 | 'celerycam-frequency': 1.0, 50 | 'extra_options': '', 51 | # Beat is enabled by default but only used 52 | # if celery is enabled. 
53 | 'celerybeat': True, 54 | 'version': None, 55 | 'app': 'project', 56 | }, 57 | 'static': { 58 | 59 | }, 60 | 'redis': { 61 | 'enabled': False, 62 | 'appendonly': 'no', 63 | 'template': app_package_path('templates/redis/redis.conf'), 64 | }, 65 | 'memcached': { 66 | 'enabled': True, 67 | 'size': 64, 68 | }, 69 | 'processes': { 70 | 71 | }, 72 | 'sendfile': { 73 | 74 | }, 75 | 'environment': { 76 | 77 | }, 78 | 'nginx': { 79 | 'enabled': True, 80 | 'client_max_body_size': '10m', 81 | 'template': app_package_path('templates/nginx/nginx.conf'), 82 | }, 83 | 'supervisor': { 84 | 'template': app_package_path('templates/supervisor/supervisor.conf'), 85 | 'daemon_template': app_package_path('templates/supervisor/supervisord.conf'), 86 | 'group_template': app_package_path('templates/supervisor/supervisor-group.conf'), 87 | 'gunicorn_command_template': app_package_path('templates/supervisor/gunicorn_command'), 88 | 'celeryd_command_template': app_package_path('templates/supervisor/celeryd_command'), 89 | 'celerycam_command_template': app_package_path('templates/supervisor/celerycam_command'), 90 | 'supervisorctl_command': None, 91 | 'supervisord_command': None, 92 | 'use_global_supervisord': False, 93 | }, 94 | 'newrelic': { 95 | 'enabled': False, 96 | 'config_file': 'newrelic.ini', 97 | 'environment_name': '', 98 | 'license': '', 99 | }, 100 | 'logdir': None, 101 | } 102 | def load_sites(self, config_file_content=None, env_dict=None): 103 | """ 104 | Called from self.sites and returns a dictionary with the different sites 105 | and their individual settings. 
106 | """ 107 | if not config_file_content: 108 | if env.get("use_local_config_ini", False): 109 | output = open("config.ini") 110 | else: 111 | config_file = os.path.join(env.path, "config.ini") 112 | if exists(config_file): 113 | output = StringIO.StringIO() 114 | get(u"%s" % config_file, output) 115 | output.seek(0) 116 | else: 117 | raise Exception("Missing config.ini, tried path %s" % config_file) 118 | else: 119 | output = StringIO.StringIO(config_file_content) 120 | 121 | if not env_dict: 122 | env_dict = env 123 | 124 | config = EnvConfigParser() 125 | config.readfp(output) 126 | self._sites = {} 127 | 128 | variables = { 129 | 'static_collected': STATIC_COLLECTED, 130 | 'data_directory': DATA_DIRECTORY, 131 | } 132 | 133 | for site in config.section_namespaces("django") or ["main"]: 134 | attr_dict = self.defaults.copy() 135 | for key, value in attr_dict.items(): 136 | attr_dict[key] = None if value is None else _AttributeDict(value.copy()) 137 | for section in config.sections(): 138 | section = section.split(":")[0] 139 | 140 | is_custom_process = section in (env_dict.get('custom_processes') or []) 141 | 142 | if is_custom_process: 143 | attr_dict[section] = { 144 | 'enabled': config.getboolean(section, 'enabled', env=site), 145 | 'command': config.get(section, 'command', env=site) % variables, 146 | 'django': config.getboolean(section, 'django', env=site), 147 | } 148 | continue 149 | 150 | if self.defaults.get(section) is None: 151 | print "Caution: Section %s is not supported, skipped" % section 152 | continue 153 | for option, default_value in config.items(section, env=site): 154 | setting = self.defaults.get(section).get(option) 155 | if type(setting) == bool: 156 | value = config.getboolean(section, option, env=site) 157 | elif type(setting) == int: 158 | value = config.getint(section, option, env=site) 159 | elif type(setting) == float: 160 | value = config.getfloat(section, option, env=site) 161 | else: 162 | value = config.get(section, option, 
env=site) % variables 163 | attr_dict[section][option] = value 164 | self.sites[site] = _AttributeDict(attr_dict) 165 | attr_dict.update(self.deployment(site, env_dict)) 166 | if attr_dict.get("checkout").get("tool") == "buildout": 167 | # e.g. bin/django -> /home/username/app/bin/django 168 | attr_dict["django"]["cmd"] = posixpath.join( 169 | attr_dict.get("deployment").get("path"), 170 | attr_dict.get("django").get("cmd") 171 | ) 172 | else: 173 | # e.g. manage.py -> /home/username/app/bin/python /home/username/app/manage.py 174 | new_django_cmd = [ 175 | posixpath.join( 176 | attr_dict.get("deployment").get("path"), 177 | "bin/python", 178 | ), 179 | posixpath.join( 180 | attr_dict.get("deployment").get("path"), 181 | attr_dict.get("django").get("base"), 182 | attr_dict.get("django").get("cmd") 183 | ) 184 | ] 185 | attr_dict["django"]["cmd"] = " ".join(new_django_cmd) 186 | if attr_dict["django"]["append_settings"]: 187 | attr_dict["django"]["args"].append(" --settings=%s" % ('_gen.settings', )) 188 | if attr_dict["newrelic"]["enabled"]: 189 | attr_dict["django"]["cmd"] = posixpath.join( 190 | attr_dict.get("deployment").get("path"), 191 | "bin/newrelic-admin" 192 | ) + " run-program " + attr_dict["django"]["cmd"] 193 | attr_dict.update({'processes': self.processes(site, env_dict)}) 194 | attr_dict['environment'] = self.environment(site, env_dict) 195 | attr_dict['environment'].setdefault('DEPLOYMENT_SITE', site) 196 | if attr_dict['deployment']['django_settings_module']: 197 | attr_dict['environment']['DJANGO_SETTINGS_MODULE'] = attr_dict['deployment']['django_settings_module'] 198 | attr_dict['environment_export'] = self.build_environment_export(attr_dict['environment']) 199 | attr_dict['identifier'] = env_dict.identifier 200 | self._sites[site] = _AttributeDict(attr_dict) 201 | return self._sites 202 | 203 | def build_environment_export(self, environment): 204 | """ 205 | takes a dict with environment variables and products a shell compatible export 
statement: 206 | 'export PYTHONPATH="stuff/here:more/here" USER="mysite-dev";' 207 | """ 208 | vars = " ".join([u'%s=%s' % (key, value) for key, value in environment.items()]) 209 | return u"export %s;" % vars 210 | 211 | @property 212 | def sites(self): 213 | if getattr(self, "_sites", False) == False: 214 | self.load_sites() 215 | return self._sites 216 | 217 | def processes(self, site, env_dict): 218 | """ 219 | Returns a dictionary of dictionaries each having the following keys: 220 | 221 | * command 222 | command to be run by supervisor 223 | * port 224 | port number, 225 | * socket 226 | path to unix socket 227 | * type 228 | gunicorn/memcached/celeryd 229 | """ 230 | process_dict = {} 231 | site_dict = self.sites[site] 232 | common_cmd_context = { 233 | "django_cmd": site_dict.django['cmd'], 234 | "django_args": " ".join(site_dict.get("django").get("args", [])), 235 | } 236 | socket = posixpath.normpath(posixpath.join(env_dict.get("path"), "..", "tmp", "%s_%s_gunicorn.sock" % (env_dict.get("user"), site))) # Asserts pony project layout 237 | if site_dict.gunicorn['bind']: 238 | bind = site_dict.gunicorn['bind'] 239 | else: 240 | bind = 'unix:{}'.format(socket) 241 | 242 | cmd = env_dict.get("path") if not site_dict.get("newrelic").get("enabled") else '%sbin/newrelic-admin run-program %s' % (env_dict.get("path"), env_dict.get("path")) 243 | cmd += 'bin/gunicorn' 244 | 245 | gunicorn_cmd_context = { 246 | 'cmd': cmd, 247 | "socket": socket, 248 | "bind": bind, 249 | "workers": site_dict.gunicorn['workers'], 250 | "maxrequests": site_dict.gunicorn['maxrequests'], 251 | "timeout": site_dict.gunicorn['timeout'], 252 | "version": site_dict.gunicorn['version'], 253 | } 254 | gunicorn_cmd_context.update(common_cmd_context) 255 | gunicorn_command_template_path = self.sites[site]['supervisor']['gunicorn_command_template'] 256 | gunicorn_command = render_template( 257 | gunicorn_command_template_path, 258 | gunicorn_cmd_context, 259 | strip_newlines=True, 260 | ) 261 | 
process_dict["%s_%s_gunicorn" % (env_dict.get("user"), site)] = { 262 | 'command': gunicorn_command, 263 | 'port': None, 264 | 'socket': gunicorn_cmd_context['socket'], 265 | 'type': 'gunicorn', 266 | 'priority': 100, 267 | } 268 | 269 | custom_processes = env_dict.get("custom_processes") or [] 270 | 271 | for process in custom_processes: 272 | process_config = site_dict[process] 273 | 274 | if not process_config.get("enabled"): 275 | continue 276 | 277 | custom_command = process_config['command'] 278 | 279 | if process_config.get('django'): 280 | custom_command = '%s %s' % (site_dict.django['cmd'], custom_command) 281 | 282 | process_name = "%s_%s_%s" % (env_dict.get("user"), site, process) 283 | process_dict[process_name] = { 284 | 'command': custom_command, 285 | 'type': 'custom', 286 | 'priority': 100, 287 | 'port': None, 288 | 'socket': None, 289 | } 290 | 291 | if site_dict.get("memcached").get("enabled"): 292 | memcached_socket = posixpath.normpath(posixpath.join(env_dict.get("path"), "..", "tmp", "%s_%s_memcached.sock" % (env_dict.get("user"), site))) # Asserts pony project layout 293 | process_dict["%s_%s_memcached" % (env_dict.get("user"), site)] = { 294 | 'command': "memcached -s %s -m %d" % (memcached_socket, int(site_dict.get("memcached").get("size"))), 295 | 'port': None, 296 | 'socket': memcached_socket, 297 | 'type': 'memcached', 298 | 'priority': 60, 299 | } 300 | if site_dict.get("celery").get("enabled"): 301 | conf = site_dict.get("celery") 302 | cmd = env_dict.get("path") if not site_dict.get("newrelic").get("enabled") else '%sbin/newrelic-admin run-program %s' % (env_dict.get("path"), env_dict.get("path")) 303 | cmd += 'bin/celery' 304 | celeryd_command_context = { 305 | 'concurrency': conf.get("concurrency"), 306 | 'maxtasksperchild': conf.get("maxtasksperchild"), 307 | 'loglevel': conf.get("loglevel"), 308 | 'extra_options': conf.get('extra_options'), 309 | 'path': env_dict.get("path"), 310 | 'version': conf.get("version"), 311 | 
'celery_app': conf.get("app"), 312 | 'has_cam': conf.get("celerycam"), 313 | 'enable_beat': conf.get("celerybeat"), 314 | 'cmd': cmd, 315 | 'pidfile': posixpath.normpath(posixpath.join(env_dict.get("path"), '..', 'tmp', 'celery-%s.pid' % site)), 316 | } 317 | celeryd_command_context.update(common_cmd_context) 318 | celeryd_command_template_path = self.sites[site]['supervisor']['celeryd_command_template'] 319 | celeryd_command = render_template( 320 | celeryd_command_template_path, 321 | celeryd_command_context, 322 | strip_newlines=True, 323 | ) 324 | process_dict["%s_%s_celeryd" % (env_dict.get("user"), site)] = { 325 | 'command': celeryd_command, 326 | 'port': None, 327 | 'socket': None, 328 | 'type': 'celeryd', 329 | 'priority': 40, 330 | 'stopasgroup': 'true', 331 | 'killasgroup': 'true', 332 | 'stopwaitsecs': conf.get('stopwaitsecs', None), 333 | } 334 | if conf.get("celerycam"): 335 | celerycam_command_context = { 336 | 'loglevel': conf.get("loglevel"), 337 | 'path': env_dict.get("path"), 338 | 'version': conf.get("version"), 339 | 'celery_app': conf.get("app"), 340 | 'cmd': cmd, 341 | 'frequency': conf.get('celerycam-frequency'), 342 | } 343 | celerycam_command_context.update(common_cmd_context) 344 | celerycam_command_template_path = self.sites[site]['supervisor']['celerycam_command_template'] 345 | celerycam_command = render_template( 346 | celerycam_command_template_path, 347 | celerycam_command_context, 348 | strip_newlines=True, 349 | ) 350 | process_dict["%s_%s_celerycam" % (env_dict.get("user"), site)] = { 351 | 'command': celerycam_command, 352 | 'port': None, 353 | 'socket': None, 354 | 'type': 'celerycam', 355 | 'priority': 50, 356 | } 357 | if site_dict.get("redis").get("enabled"): 358 | process_name = "%s_%s_redis" % (env_dict.get("user"), site) 359 | redis_socket = posixpath.normpath(posixpath.join(env_dict.get("path"), "..", "tmp", process_name + ".sock" )) # Asserts pony project layout 360 | process_dict[process_name] = { 361 | 'command': 
"/usr/bin/redis-server %s" % posixpath.normpath(posixpath.join(env_dict.get('path'), '..', 'config', process_name + '.conf')), 362 | 'port': None, 363 | 'socket': redis_socket, 364 | 'type': 'redis', 365 | 'priority': 20, 366 | } 367 | if site_dict.get('processes'): 368 | processes = site_dict.get('processes') 369 | for process, command in processes.iteritems(): 370 | process_name = "%s_%s_process_%s" % (env_dict.get("user"), site, process) 371 | process_dict[process_name] = { 372 | 'command': posixpath.join(env_dict.get("path"), command), 373 | 'port': None, 374 | 'socket': None, 375 | 'type': 'supervisor', 376 | 'priority': env_dict.get("priority", 200), 377 | } 378 | 379 | return process_dict 380 | 381 | def environment(self, site, env_dict): 382 | site_dict = self.sites[site] 383 | return site_dict['environment'] 384 | 385 | def deployment(self, site, env_dict): 386 | """ 387 | Here we add the information from deployments.py and merge it into our site dictionaries. 388 | Can also be used to output warnings to the user, if he is using an old deployments.py 389 | format. 
390 | """ 391 | deployment_dict = { 392 | # Old settings 393 | 'servername': env_dict.get("host_string"), 394 | 'path': env_dict.get("path"), 395 | 'backup_dir': env_dict.get("backup_dir"), 396 | 'repo': env_dict.get("repo"), 397 | 'branch': env_dict.get("branch"), 398 | 'user': env_dict.get("user"), 399 | 'buildout_cfg': env_dict.get("buildout_cfg"), 400 | 'django_settings_module': env_dict.get("django_settings_module"), 401 | 'generated_settings_path': posixpath.join(env_dict.get("path"), "_gen/settings.py"), 402 | 403 | # New settings 404 | 'domains_redirect': env_dict.get('domains_redirect'), 405 | 'url_redirect': env_dict.get('url_redirect'), 406 | 407 | 'basic_auth': env_dict.get('basic_auth', False), 408 | 'basic_auth_path': os.path.join(env_dict.get("path"), env_dict.get('basic_auth_path', None) or ""), 409 | 410 | 'ssl': env.get('ssl', False), 411 | 'ssl_cert_path': os.path.join(env_dict.get("path"), env_dict.get('ssl_cert_path', None) or ""), 412 | 'ssl_key_path': os.path.join(env_dict.get("path"), env_dict.get('ssl_key_path', None) or ""), 413 | 'bind_ip': env_dict.get('bind_ip', '*'), 414 | 'static_error_pages': env_dict.get('static_error_pages', []), 415 | 'big_body_endpoints': env_dict.get('big_body_endpoints', []), 416 | 'home': '/home/%s' % env_dict.get("user"), 417 | } 418 | deployment_dict['logdir'] = env_dict.get("logdir") or os.path.join(deployment_dict['home'], 'log') 419 | 420 | 421 | if not env_dict.get("databases"): 422 | deployment_dict["databases"] = { 423 | 'default': { 424 | 'ENGINE': env_dict.get("db_engine", "django.db.backends.postgresql_psycopg2"), 425 | 'NAME': env_dict.get("db_name"), 426 | 'USER': env_dict.get("db_username"), 427 | 'PASSWORD': env_dict.get("db_password"), 428 | 'HOST': env_dict.get("db_host", ""), 429 | } 430 | } 431 | 432 | if type(env_dict.get("domains")) == list: 433 | domains = { 434 | "main": env_dict.get("domains"), 435 | } 436 | print(DOMAIN_DICT_DEPRECATION_WARNING) 437 | elif type(env_dict.get("domains")) 
== dict: 438 | domains = env_dict.get("domains") 439 | elif env_dict.get("domains") is None: 440 | domains = { 441 | "main": [], 442 | } 443 | print("Warning: No domains supplied in settings, ignoring.") 444 | else: 445 | raise Exception("Invalid domain format") 446 | deployment_dict.update({'domains': domains}) 447 | 448 | ############### 449 | # Environment # 450 | ############### 451 | 452 | environment_dict = self.sites[site].get("environment") 453 | for key, value in env_dict.get("environment", {}).items(): 454 | environment_dict[key] = value 455 | 456 | ################# 457 | # Gunicorn dict # 458 | ################# 459 | gunicorn_dict = self.sites[site].get("gunicorn") 460 | gunicorn_dict["workers"] = env_dict.get("gunicorn", {}).get("workers", gunicorn_dict.get("workers")) 461 | gunicorn_dict["maxrequests"] = env_dict.get("gunicorn", {}).get("maxrequests", gunicorn_dict.get("maxrequests")) 462 | gunicorn_dict["timeout"] = env_dict.get("gunicorn", {}).get("timeout", gunicorn_dict.get("timeout")) 463 | gunicorn_dict["bind"] = env_dict.get("gunicorn", {}).get("bind", gunicorn_dict.get("bind")) 464 | 465 | ############### 466 | # Celery dict # 467 | ############### 468 | celery_dict = self.sites[site].get("celery") 469 | 470 | celery_dict["concurrency"] = env_dict.get("celery", {}).get("concurrency", celery_dict.get("concurrency")) 471 | celery_dict["maxtasksperchild"] = env_dict.get("celery", {}).get("maxtasksperchild", celery_dict.get("maxtasksperchild")) 472 | 473 | ############## 474 | # nginx dict # 475 | ############## 476 | 477 | nginx_dict = self.sites[site].get("nginx") 478 | nginx_dict["enabled"] = env_dict.get("nginx", {}).get("enabled", nginx_dict.get("enabled")) 479 | nginx_dict["location_settings"] = { 480 | "client_max_body_size": env_dict.get("nginx", {}).get("client_max_body_size", nginx_dict.get("client_max_body_size")), 481 | } 482 | nginx_dict["template"] = env_dict.get("nginx", {}).get("template", nginx_dict.get("template")) 483 | 484 | 
############## 485 | # redis dict # 486 | ############## 487 | 488 | redis_dict = self.sites[site].get("redis") 489 | redis_dict["template"] = env_dict.get("redis", {}).get("template", redis_dict.get("template")) 490 | 491 | ################## 492 | # memcached dict # 493 | ################## 494 | 495 | memcached_dict = self.sites[site].get("memcached") 496 | memcached_dict["enabled"] = env_dict.get("memcached", {}).get("enabled", memcached_dict.get("enabled")) 497 | memcached_dict["size"] = env_dict.get("memcached", {}).get("size", memcached_dict.get("size")) 498 | 499 | ################### 500 | # supervisor dict # 501 | ################### 502 | 503 | supervisor_dict = self.sites[site].get("supervisor") 504 | supervisor_dict["template"] = env_dict.get("supervisor", {}).get("template", supervisor_dict.get("template")) 505 | supervisor_dict["daemon_template"] = env_dict.get("supervisor", {}).get("daemon_template", supervisor_dict.get("daemon_template")) 506 | supervisor_dict["group_template"] = env_dict.get("supervisor", {}).get("group_template", supervisor_dict.get("group_template")) 507 | supervisor_dict["gunicorn_command_template"] = env_dict.get("supervisor", {}).get("gunicorn_command_template", supervisor_dict.get("gunicorn_command_template")) 508 | supervisor_dict["celeryd_command_template"] = env_dict.get("supervisor", {}).get("celeryd_command_template", supervisor_dict.get("celeryd_command_template")) 509 | supervisor_dict["celeryd_command_template"] = env_dict.get("supervisor", {}).get("celeryd_command_template", supervisor_dict.get("celeryd_command_template")) 510 | supervisor_dict["supervisorctl_command"] = env_dict.get("supervisor", {}).get("supervisorctl_command", supervisor_dict.get("supervisorctl_command")) 511 | supervisor_dict["supervisord_command"] = env_dict.get("supervisor", {}).get("supervisord_command", supervisor_dict.get("supervisord_command")) 512 | supervisor_dict["use_global_supervisord"] = env_dict.get("supervisor", 
{}).get("use_global_supervisord", supervisor_dict.get("use_global_supervisord")) 513 | if supervisor_dict["supervisorctl_command"] is None: 514 | if supervisor_dict["use_global_supervisord"]: 515 | supervisor_dict["supervisorctl_command"] = 'sudo supervisorctl' 516 | else: 517 | supervisor_dict["supervisorctl_command"] = 'supervisorctl --config={}../config/supervisord.conf'.format(deployment_dict['path']) 518 | 519 | if supervisor_dict["supervisord_command"] is None and not supervisor_dict["use_global_supervisord"]: 520 | supervisor_dict["supervisord_command"] = 'supervisord -c {}../config/supervisord.conf'.format(deployment_dict['path']) 521 | 522 | ################# 523 | # newrelic dict # 524 | ################# 525 | 526 | newrelic_dict = self.sites[site].get("newrelic") 527 | newrelic_dict["enabled"] = env_dict.get("newrelic", {}).get("enabled", newrelic_dict.get("enabled")) 528 | newrelic_dict["config_file"] = env_dict.get("newrelic", {}).get("config_file", newrelic_dict.get("config_file")) 529 | if not newrelic_dict["config_file"].startswith('/'): 530 | newrelic_dict["config_file"] = posixpath.abspath(posixpath.join( 531 | deployment_dict["path"], 532 | newrelic_dict["config_file"], 533 | )) 534 | self.sites[site]["environment"]["NEW_RELIC_CONFIG_FILE"] = newrelic_dict["config_file"] 535 | newrelic_dict["environment_name"] = env_dict.get("newrelic", {}).get("environment_name", newrelic_dict.get("environment_name")) 536 | if newrelic_dict["environment_name"]: 537 | self.sites[site]["environment"]["NEW_RELIC_ENVIRONMENT"] = newrelic_dict["environment_name"] 538 | 539 | newrelic_dict["license_key"] = env_dict.get("newrelic", {}).get("license_key", newrelic_dict.get("license_key")) 540 | if newrelic_dict["license_key"]: 541 | self.sites[site]["environment"]["NEW_RELIC_LICENSE_KEY"] = newrelic_dict["license_key"] 542 | 543 | return { 544 | 'deployment': deployment_dict, 545 | 'environment': environment_dict, 546 | 'gunicorn': gunicorn_dict, 547 | 'celery': 
celery_dict, 548 | 'nginx': nginx_dict, 549 | 'redis': redis_dict, 550 | 'memcached': memcached_dict, 551 | 'supervisor': supervisor_dict, 552 | 'newrelic': newrelic_dict, 553 | } 554 | 555 | def django_manage(self, command, site="main"): 556 | """ 557 | Wrapper around the commands to inject the correct pythonpath. 558 | 559 | Example: django_manage("migrate"), could result in 560 | 561 | export PYTONPATH=/home/app-dev/app/; /home/app-dev/app/bin/python /home/app-dev/app/manage.py migrate 562 | """ 563 | site_dict = config.sites[site] 564 | cmd = site_dict.get("django").get("cmd") 565 | django_args = " ".join(site_dict.get("django").get("args", [])) 566 | run('%s %s %s %s' % (site_dict['environment_export'], cmd, command, django_args)) 567 | 568 | if not __name__ == '__main__': 569 | #: A shared instance of configuration, always to be used 570 | config = Configuration() 571 | 572 | 573 | @task 574 | def check_config(): 575 | for section in config_ini.config_parser.sections(): 576 | print "[%s]" % section 577 | print config_ini.config_parser.items(section) 578 | 579 | @task 580 | def uname(): 581 | print env.host_string 582 | run('uname -a') 583 | 584 | @task 585 | def ls(): 586 | run('cd %(path)s;ls -lAF' % env) 587 | 588 | @task 589 | def ps(): 590 | """ 591 | show processes of this user 592 | """ 593 | run('ps -f -u %(user)s | grep -v "ps -f" | grep -v sshd' % env) 594 | 595 | @task 596 | def download_media(to_dir="./tmp/media/", from_dir="../upload/media/"): 597 | """ 598 | Downloads media from a remote folder, default ../uploads/ -> ./tmp/media/ 599 | 600 | * Example: download_media:from_dir="py_src/project/media/" 601 | """ 602 | print "Downloading media from", env.host_string 603 | env.from_dir = from_dir 604 | local('rsync -avz --no-links --progress --exclude=".svn" -e "ssh" %(user)s@%(host_string)s:"%(path)s/%(from_dir)s"' % env + " " +to_dir) 605 | 606 | @task 607 | def upload_media(from_dir="./tmp/media/", to_dir="../upload/media/"): 608 | """ 609 | 
Uploads media from a local folder, default ./tmp/media -> ../uploads/ 610 | 611 | * Example: upload_media:to_dir="py_src/project/media/" 612 | """ 613 | print "Uploading media to", env.host_string 614 | env.to_dir = to_dir 615 | local('rsync -avz --no-links --progress --exclude=".svn" '+ from_dir +' -e "ssh" %(user)s@%(host_string)s:"%(path)s/%(to_dir)s"' % env) 616 | 617 | 618 | @task 619 | def use_local_config_ini(): 620 | env.use_local_config_ini = True 621 | 622 | @task 623 | def safe_put(*args, **kwargs): 624 | """ 625 | a version of put that makes sure the directory exists first. 626 | :return: 627 | """ 628 | if len(args) >= 2: 629 | dst_path = args[1] 630 | else: 631 | dst_path = kwargs.get('remote_path', None) 632 | if dst_path: 633 | run('mkdir -p {}'.format(os.path.dirname(dst_path))) 634 | return put(*args, **kwargs) 635 | 636 | 637 | @task 638 | def gulp_deploy(css_dir='private', *args, **kwargs): 639 | # Import here to avoid circular references 640 | from .git import local_branch_is_dirty, local_branch_matches_remote 641 | 642 | if local_branch_is_dirty() or not local_branch_matches_remote(): 643 | print ("Please make sure that local branch is not dirty and " 644 | "matches the remote (deployment) branch.") 645 | else: 646 | print "Preparing files (CSS/JS)" 647 | local('compass compile {}'.format(css_dir)) 648 | # Replace compass with 'gulp' when front-end is ready 649 | upload_media('./static/css/', '../static/css/') 650 | upload_media('./static/js/', '../static/js/') 651 | --------------------------------------------------------------------------------