├── fabric
├── contrib
│ ├── __init__.py
│ ├── console.py
│ ├── django.py
│ └── project.py
├── __main__.py
├── __init__.py
├── auth.py
├── thread_handling.py
├── api.py
├── exceptions.py
├── colors.py
├── docs.py
├── task_utils.py
├── version.py
├── decorators.py
├── job_queue.py
└── io.py
├── tests
├── support
│ ├── __init__.py
│ ├── deep.py
│ ├── nested_alias.py
│ ├── nested_aliases.py
│ ├── default_tasks.py
│ ├── implicit_fabfile.py
│ ├── submodule
│ │ ├── __init__.py
│ │ └── subsubmodule
│ │ │ └── __init__.py
│ ├── tree
│ │ ├── db.py
│ │ ├── system
│ │ │ ├── debian.py
│ │ │ └── __init__.py
│ │ └── __init__.py
│ ├── aborts.py
│ ├── explicit_fabfile.py
│ ├── flat_alias.py
│ ├── module_fabtasks.py
│ ├── default_task_submodule.py
│ ├── docstring.py
│ ├── flat_aliases.py
│ ├── decorated_fabfile.py
│ ├── testserver_ssh_config
│ ├── classbased_task_fabfile.py
│ ├── decorated_fabfile_with_modules.py
│ ├── mapping.py
│ ├── ssh_config
│ ├── decorator_order.py
│ └── decorated_fabfile_with_classbased_task.py
├── client.key.pub
├── private.key
├── test_version.py
├── test_io.py
├── test_state.py
├── client.key
├── fake_filesystem.py
├── mock_streams.py
├── test_parallel.py
├── test_server.py
├── test_contrib.py
├── test_project.py
├── utils.py
├── test_decorators.py
├── test_context_managers.py
└── test_utils.py
├── sites
├── _shared_static
│ └── logo.png
├── docs
│ ├── api
│ │ ├── core
│ │ │ ├── utils.rst
│ │ │ ├── operations.rst
│ │ │ ├── network.rst
│ │ │ ├── tasks.rst
│ │ │ ├── docs.rst
│ │ │ ├── context_managers.rst
│ │ │ ├── colors.rst
│ │ │ └── decorators.rst
│ │ └── contrib
│ │ │ ├── console.rst
│ │ │ ├── project.rst
│ │ │ ├── django.rst
│ │ │ └── files.rst
│ ├── conf.py
│ ├── running_tests.rst
│ ├── index.rst
│ └── usage
│ │ ├── library.rst
│ │ ├── ssh.rst
│ │ ├── fabfiles.rst
│ │ ├── parallel.rst
│ │ ├── output_controls.rst
│ │ └── interactivity.rst
├── www
│ ├── index.rst
│ ├── conf.py
│ ├── contact.rst
│ ├── troubleshooting.rst
│ ├── development.rst
│ ├── roadmap.rst
│ └── installing.rst
└── shared_conf.py
├── INSTALL
├── CONTRIBUTING.rst
├── tasks.py
├── .gitignore
├── MANIFEST.in
├── requirements.txt
├── integration
├── utils.py
├── test_contrib.py
└── test_operations.py
├── dev-requirements.txt
├── fabfile
└── __init__.py
├── .travis.yml
├── AUTHORS
├── LICENSE
├── README.rst
└── setup.py
/fabric/contrib/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/support/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/support/deep.py:
--------------------------------------------------------------------------------
1 | import submodule
2 |
--------------------------------------------------------------------------------
/tests/support/nested_alias.py:
--------------------------------------------------------------------------------
1 | import flat_alias as nested
2 |
--------------------------------------------------------------------------------
/fabric/__main__.py:
--------------------------------------------------------------------------------
# Entry point for ``python -m fabric``: delegate straight to the CLI main().
import fabric.main
fabric.main.main()
--------------------------------------------------------------------------------
/tests/support/nested_aliases.py:
--------------------------------------------------------------------------------
1 | import flat_aliases as nested
2 |
--------------------------------------------------------------------------------
/tests/support/default_tasks.py:
--------------------------------------------------------------------------------
1 | import default_task_submodule as mymodule
2 |
--------------------------------------------------------------------------------
/fabric/__init__.py:
--------------------------------------------------------------------------------
"""
See `fabric.api` for the publicly importable API.
"""
--------------------------------------------------------------------------------
/tests/support/implicit_fabfile.py:
--------------------------------------------------------------------------------
1 | def foo():
2 | pass
3 |
4 | def bar():
5 | pass
6 |
--------------------------------------------------------------------------------
/tests/support/submodule/__init__.py:
--------------------------------------------------------------------------------
1 | import subsubmodule
2 |
3 | def classic_task():
4 | pass
5 |
--------------------------------------------------------------------------------
/sites/_shared_static/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/crash/fabric/master/sites/_shared_static/logo.png
--------------------------------------------------------------------------------
/tests/support/tree/db.py:
--------------------------------------------------------------------------------
1 | from fabric.api import task
2 |
3 |
4 | @task
5 | def migrate():
6 | pass
7 |
--------------------------------------------------------------------------------
/sites/docs/api/core/utils.rst:
--------------------------------------------------------------------------------
1 | =====
2 | Utils
3 | =====
4 |
5 | .. automodule:: fabric.utils
6 | :members:
7 |
--------------------------------------------------------------------------------
/tests/support/aborts.py:
--------------------------------------------------------------------------------
1 | from fabric.api import task, abort
2 |
3 | @task
4 | def kaboom():
5 | abort("It burns!")
6 |
--------------------------------------------------------------------------------
/tests/support/explicit_fabfile.py:
--------------------------------------------------------------------------------
1 | __all__ = ['foo']
2 |
3 | def foo():
4 | pass
5 |
6 | def bar():
7 | pass
8 |
--------------------------------------------------------------------------------
/tests/support/flat_alias.py:
--------------------------------------------------------------------------------
1 | from fabric.api import task
2 |
3 | @task(alias="foo_aliased")
4 | def foo():
5 | pass
6 |
--------------------------------------------------------------------------------
/tests/support/module_fabtasks.py:
--------------------------------------------------------------------------------
1 | def hello():
2 | print("hello")
3 |
4 |
5 | def world():
6 | print("world")
7 |
--------------------------------------------------------------------------------
/tests/support/tree/system/debian.py:
--------------------------------------------------------------------------------
1 | from fabric.api import task
2 |
3 |
4 | @task
5 | def update_apt():
6 | pass
7 |
--------------------------------------------------------------------------------
/INSTALL:
--------------------------------------------------------------------------------
1 | For installation help, please see http://fabfile.org/ or (if using a source
2 | checkout) sites/www/installing.rst.
3 |
--------------------------------------------------------------------------------
/tests/support/submodule/subsubmodule/__init__.py:
--------------------------------------------------------------------------------
1 | from fabric.api import task
2 |
3 | @task
4 | def deeptask():
5 | pass
6 |
--------------------------------------------------------------------------------
/sites/docs/api/core/operations.rst:
--------------------------------------------------------------------------------
1 | ==========
2 | Operations
3 | ==========
4 |
5 | .. automodule:: fabric.operations
6 | :members:
7 |
--------------------------------------------------------------------------------
/tests/support/default_task_submodule.py:
--------------------------------------------------------------------------------
1 | from fabric.api import task
2 |
3 | @task(default=True)
4 | def long_task_name():
5 | pass
6 |
--------------------------------------------------------------------------------
/tests/support/docstring.py:
--------------------------------------------------------------------------------
1 | from fabric.decorators import task
2 |
3 | @task
4 | def foo():
5 | """
6 | Foos!
7 | """
8 | pass
9 |
--------------------------------------------------------------------------------
/tests/support/flat_aliases.py:
--------------------------------------------------------------------------------
1 | from fabric.api import task
2 |
3 | @task(aliases=["foo_aliased", "foo_aliased_two"])
4 | def foo():
5 | pass
6 |
--------------------------------------------------------------------------------
/tests/support/tree/system/__init__.py:
--------------------------------------------------------------------------------
1 | from fabric.api import task
2 |
3 | import debian
4 |
5 | @task
6 | def install_package():
7 | pass
8 |
--------------------------------------------------------------------------------
/sites/docs/api/core/network.rst:
--------------------------------------------------------------------------------
1 | =======
2 | Network
3 | =======
4 |
5 | .. automodule:: fabric.network
6 |
7 | .. autofunction:: disconnect_all
8 |
--------------------------------------------------------------------------------
/sites/docs/api/core/tasks.rst:
--------------------------------------------------------------------------------
1 | =====
2 | Tasks
3 | =====
4 |
5 | .. automodule:: fabric.tasks
6 | :members: Task, WrappedCallableTask, execute
7 |
--------------------------------------------------------------------------------
/tests/support/decorated_fabfile.py:
--------------------------------------------------------------------------------
1 | from fabric.decorators import task
2 |
3 | @task
4 | def foo():
5 | pass
6 |
7 | def bar():
8 | pass
9 |
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | Please see `contribution-guide.org <http://contribution-guide.org>`_ for
2 | details on what we expect from contributors. Thanks!
3 |
--------------------------------------------------------------------------------
/sites/docs/api/contrib/console.rst:
--------------------------------------------------------------------------------
1 | Console Output Utilities
2 | ========================
3 |
4 | .. automodule:: fabric.contrib.console
5 | :members:
6 |
--------------------------------------------------------------------------------
/sites/docs/api/contrib/project.rst:
--------------------------------------------------------------------------------
1 | =============
2 | Project Tools
3 | =============
4 |
5 | .. automodule:: fabric.contrib.project
6 | :members:
7 |
--------------------------------------------------------------------------------
/sites/docs/api/core/docs.rst:
--------------------------------------------------------------------------------
1 | =====================
2 | Documentation helpers
3 | =====================
4 |
5 | .. automodule:: fabric.docs
6 | :members:
7 |
--------------------------------------------------------------------------------
/sites/docs/api/contrib/django.rst:
--------------------------------------------------------------------------------
1 | ==================
2 | Django Integration
3 | ==================
4 |
5 | .. automodule:: fabric.contrib.django
6 | :members:
7 |
--------------------------------------------------------------------------------
/sites/docs/api/core/context_managers.rst:
--------------------------------------------------------------------------------
1 | ================
2 | Context Managers
3 | ================
4 |
5 | .. automodule:: fabric.context_managers
6 | :members:
7 |
--------------------------------------------------------------------------------
/tests/support/testserver_ssh_config:
--------------------------------------------------------------------------------
1 | Host testserver
2 | # TODO: get these pulling from server.py. Meh.
3 | HostName 127.0.0.1
4 | Port 2200
5 | User username
6 |
--------------------------------------------------------------------------------
/tasks.py:
--------------------------------------------------------------------------------
1 | from invocations.docs import docs, www
2 | from invocations import packaging
3 |
4 | from invoke import Collection
5 |
6 |
7 | ns = Collection(docs, www, release=packaging)
8 |
--------------------------------------------------------------------------------
/sites/docs/api/core/colors.rst:
--------------------------------------------------------------------------------
1 | ======================
2 | Color output functions
3 | ======================
4 |
5 | .. automodule:: fabric.colors
6 | :members:
7 | :undoc-members:
8 |
--------------------------------------------------------------------------------
/sites/docs/api/contrib/files.rst:
--------------------------------------------------------------------------------
1 | =============================
2 | File and Directory Management
3 | =============================
4 |
5 | .. automodule:: fabric.contrib.files
6 | :members:
7 |
--------------------------------------------------------------------------------
/sites/docs/api/core/decorators.rst:
--------------------------------------------------------------------------------
1 | ==========
2 | Decorators
3 | ==========
4 |
5 | .. automodule:: fabric.decorators
6 | :members: hosts, roles, runs_once, serial, parallel, task, with_settings
7 |
--------------------------------------------------------------------------------
/tests/support/classbased_task_fabfile.py:
--------------------------------------------------------------------------------
1 | from fabric import tasks
2 |
3 | class ClassBasedTask(tasks.Task):
4 | def run(self, *args, **kwargs):
5 | pass
6 |
7 | foo = ClassBasedTask()
8 |
--------------------------------------------------------------------------------
/tests/support/tree/__init__.py:
--------------------------------------------------------------------------------
1 | from fabric.api import task
2 |
3 | import system, db
4 |
5 |
6 | @task
7 | def deploy():
8 | pass
9 |
10 | @task
11 | def build_docs():
12 | pass
13 |
--------------------------------------------------------------------------------
/tests/support/decorated_fabfile_with_modules.py:
--------------------------------------------------------------------------------
1 | from fabric.decorators import task
2 | import module_fabtasks as tasks
3 |
4 | @task
5 | def foo():
6 | pass
7 |
8 | def bar():
9 | pass
10 |
--------------------------------------------------------------------------------
/tests/support/mapping.py:
--------------------------------------------------------------------------------
from fabric.tasks import Task

class MappingTask(dict, Task):
    # Fixture: a Task that is simultaneously a mapping, used to verify
    # task discovery handles dict-like objects.
    def run(self):
        pass

mapping_task = MappingTask()
mapping_task.name = "mapping_task"
9 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *~
2 | *.pyc
3 | *.pyo
4 | *.pyt
5 | *.pytc
6 | *.egg
7 | .DS_Store
8 | .*.swp
9 | Fabric.egg-info
10 | .coverage
11 | docs/_build
12 | dist
13 | build/
14 | tags
15 | TAGS
16 | .tox
17 | tox.ini
18 | .idea/
19 | sites/*/_build
20 |
--------------------------------------------------------------------------------
/tests/support/ssh_config:
--------------------------------------------------------------------------------
1 | Host myhost
2 | User neighbor
3 | Port 664
4 | IdentityFile neighbor.pub
5 |
6 | Host myalias
7 | HostName otherhost
8 |
9 | Host *
10 | User satan
11 | Port 666
12 | IdentityFile foobar.pub
13 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include AUTHORS
2 | include INSTALL
3 | include LICENSE
4 | include README.rst
5 | recursive-include sites *
6 | recursive-exclude sites/docs/_build *
7 | recursive-exclude sites/www/_build *
8 | include requirements.txt
9 | recursive-include tests *
10 | recursive-exclude tests *.pyc *.pyo
11 |
--------------------------------------------------------------------------------
/tests/support/decorator_order.py:
--------------------------------------------------------------------------------
1 | from fabric.api import task, hosts, roles
2 |
3 |
4 | @hosts('whatever')
5 | @task
6 | def foo():
7 | pass
8 |
9 | # There must be at least one unmolested new-style task for the decorator order
10 | # problem to appear.
11 | @task
12 | def caller():
13 | pass
14 |
--------------------------------------------------------------------------------
/tests/support/decorated_fabfile_with_classbased_task.py:
--------------------------------------------------------------------------------
1 | from fabric import tasks
2 | from fabric.decorators import task
3 |
4 | class ClassBasedTask(tasks.Task):
5 | def __init__(self):
6 | self.name = "foo"
7 | self.use_decorated = True
8 |
9 | def run(self, *args, **kwargs):
10 | pass
11 |
12 | foo = ClassBasedTask()
13 |
--------------------------------------------------------------------------------
/tests/client.key.pub:
--------------------------------------------------------------------------------
1 | ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA2FxgXlTZGk/JZMacwgMPC6LEd3efYgIdgK0RXGRMNs06aSyeEUwTKqmelNnElsRsUW68Ybosox0LoHGfTUj0gtSOqG+pb0QJQ5yslPBwBlL+WUC65HDzHdBrUf/bFR+rc02i2Ciraan4elvuLW07UfO5ceCOeJSYyNmrhN/vboHr3Pcv2QG717sEy/9pSAVzrriCqYFd6IFg9o6UhuSB7hvW4bzKXDHtz6OeXrC6U/FWxx3rYZg3h9K2SBGXLavqiJSkFgeSzn3geSbyAjTgowaZ8kNq4+Mc1hsAMtLZBKMBZUTuMjHpQR31nWloUUfuz5QhaORk1pJBmE90MqShiw== jforcier@ytram
2 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | # These requirements are for DEVELOPMENT ONLY!
2 | # You do not need e.g. Sphinx or Fudge just to run the 'fab' tool.
3 | # Instead, these are necessary for executing the test suite or developing the
4 | # cutting edge (which may have different requirements from released versions.)
5 |
6 | # Development version of Paramiko, just in case we're in one of those phases.
7 | -e git+https://github.com/paramiko/paramiko#egg=paramiko
8 | # Pull in actual "you already have local installed checkouts of Fabric +
9 | # Paramiko" dev deps.
10 | -r dev-requirements.txt
11 |
--------------------------------------------------------------------------------
/integration/utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | # Pull in regular tests' utilities
5 | mod = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'tests'))
6 | sys.path.insert(0, mod)
7 | from mock_streams import mock_streams
8 | #from utils import FabricTest
9 | # Clean up
10 | del sys.path[0]
11 |
12 |
class Integration(object):
    """
    Base class for integration tests.

    Both hooks are deliberate no-ops so subclasses may call ``super()``
    unconditionally without worrying whether a parent implements them.
    """

    def setup(self):
        """No-op setup hook."""

    def teardown(self):
        """No-op teardown hook."""
21 |
--------------------------------------------------------------------------------
/fabric/auth.py:
--------------------------------------------------------------------------------
1 | """
2 | Common authentication subroutines. Primarily for internal use.
3 | """
4 |
5 |
def get_password(user, host, port):
    """
    Return the password cached for ``user@host:port``.

    Falls back to the global ``env.password`` when no per-host entry
    exists in ``env.passwords``.
    """
    # Deferred imports: avoids circular imports at module load time.
    from fabric.network import join_host_strings
    from fabric.state import env

    key = join_host_strings(user, host, port)
    return env.passwords.get(key, env.password)
11 |
12 |
def set_password(user, host, port, password):
    """
    Record ``password`` for ``user@host:port``.

    Updates both the global ``env.password`` fallback and the per-host
    ``env.passwords`` cache (same effect as the original chained
    assignment, in left-to-right order).
    """
    # Deferred imports: avoids circular imports at module load time.
    from fabric.network import join_host_strings
    from fabric.state import env

    key = join_host_strings(user, host, port)
    env.password = password
    env.passwords[key] = password
18 |
--------------------------------------------------------------------------------
/dev-requirements.txt:
--------------------------------------------------------------------------------
1 | # You should already have the dev version of Paramiko and your local Fabric
2 | # checkout installed! Stable Paramiko may not be sufficient!
3 |
4 | # Test runner/testing utils
5 | nose
6 | # Rudolf adds color to the output of 'fab test'. This is a custom fork
7 | # addressing Python 2.7 and Nose's 'skip' plugin compatibility issues.
8 | -e git+https://github.com/bitprophet/rudolf#egg=rudolf
9 | # Mocking library
10 | Fudge<1.0
11 | # Documentation generation
12 | Sphinx>=1.2
13 | releases==0.6.1
14 | invoke==0.10.1
15 | invocations>=0.10,<0.11
16 | alabaster>=0.6.1
17 | semantic_version==2.4
18 | wheel==0.24
19 |
--------------------------------------------------------------------------------
/sites/www/index.rst:
--------------------------------------------------------------------------------
1 | Welcome to Fabric!
2 | ==================
3 |
4 | .. include:: ../../README.rst
5 |
6 | ----
7 |
8 | This website covers project information for Fabric such as the changelog,
9 | contribution guidelines, development roadmap, news/blog, and so forth.
10 | Detailed usage and API documentation can be found at our code documentation
11 | site, `docs.fabfile.org <http://docs.fabfile.org>`_.
12 |
13 | Please see the navigation sidebar to the left to begin.
14 |
15 | .. toctree::
16 | :hidden:
17 |
18 | changelog
19 | FAQs
20 | installing
21 | troubleshooting
22 | development
23 | Roadmap
24 | contact
25 |
--------------------------------------------------------------------------------
/fabfile/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Fabric's own fabfile.
3 | """
4 |
5 | from __future__ import with_statement
6 |
7 | import nose
8 |
9 | from fabric.api import abort, local, task
10 |
11 |
@task(default=True)
def test(args=None):
    """
    Run all unit tests and doctests.

    Specify string argument ``args`` for additional args to ``nosetests``.
    """
    # Explicitly target the 'tests' folder only when the caller is not
    # overriding the arguments.
    if args:
        target = ""
        extra = " " + args
    else:
        target = " tests"
        extra = ""
    base = "-sv --with-doctest --nologcapture --with-color %s" % target
    nose.core.run_exit(argv=[''] + (base + extra).split())
25 |
--------------------------------------------------------------------------------
/fabric/thread_handling.py:
--------------------------------------------------------------------------------
1 | import threading
2 | import sys
3 |
4 |
class ThreadHandler(object):
    # Wraps a daemonized worker thread and captures any exception the
    # worker raises, so the spawning thread can surface it later via
    # raise_if_needed().
    def __init__(self, name, callable, *args, **kwargs):
        # Set up exception handling
        self.exception = None

        def wrapper(*args, **kwargs):
            try:
                callable(*args, **kwargs)
            except BaseException:
                # Save the full (type, value, traceback) triple for
                # re-raising in the parent thread.
                self.exception = sys.exc_info()
        # Kick off thread
        thread = threading.Thread(None, wrapper, name, args, kwargs)
        thread.setDaemon(True)
        thread.start()
        # Make thread available to instantiator
        self.thread = thread

    def raise_if_needed(self):
        # Re-raise any captured worker exception. Uses the Python 2
        # three-argument raise form to preserve the original traceback.
        if self.exception:
            e = self.exception
            raise e[0], e[1], e[2]
26 |
--------------------------------------------------------------------------------
/fabric/api.py:
--------------------------------------------------------------------------------
1 | """
2 | Non-init module for doing convenient * imports from.
3 |
4 | Necessary because if we did this in __init__, one would be unable to import
5 | anything else inside the package -- like, say, the version number used in
6 | setup.py -- without triggering loads of most of the code. Which doesn't work so
7 | well when you're using setup.py to install e.g. ssh!
8 | """
9 | from fabric.context_managers import (cd, hide, settings, show, path, prefix,
10 | lcd, quiet, warn_only, remote_tunnel, shell_env)
11 | from fabric.decorators import (hosts, roles, runs_once, with_settings, task,
12 | serial, parallel)
13 | from fabric.operations import (require, prompt, put, get, run, sudo, local,
14 | reboot, open_shell)
15 | from fabric.state import env, output
16 | from fabric.utils import abort, warn, puts, fastprint
17 | from fabric.tasks import execute
18 |
--------------------------------------------------------------------------------
/sites/www/conf.py:
--------------------------------------------------------------------------------
1 | # Obtain shared config values
2 | import sys
3 | import os
4 | from os.path import abspath, join, dirname
5 |
6 | sys.path.append(abspath(join(dirname(__file__), '..')))
7 | from shared_conf import *
8 |
9 |
10 | # Releases changelog extension
11 | extensions.append('releases')
12 | releases_github_path = "fabric/fabric"
13 |
14 | # Intersphinx for referencing API/usage docs
15 | extensions.append('sphinx.ext.intersphinx')
16 | # Default is 'local' building, but reference the public docs site when building
17 | # under RTD.
18 | target = join(dirname(__file__), '..', 'docs', '_build')
19 | if os.environ.get('READTHEDOCS') == 'True':
20 | target = 'http://docs.fabfile.org/en/latest/'
21 | intersphinx_mapping = {
22 | 'docs': (target, None),
23 | }
24 |
25 | # Sister-site links to API docs
26 | html_theme_options['extra_nav_links'] = {
27 | "API Docs": 'http://docs.fabfile.org',
28 | }
29 |
--------------------------------------------------------------------------------
/tests/private.key:
--------------------------------------------------------------------------------
1 | -----BEGIN RSA PRIVATE KEY-----
2 | MIICWgIBAAKBgQDTj1bqB4WmayWNPB+8jVSYpZYk80Ujvj680pOTh2bORBjbIAyz
3 | oWGW+GUjzKxTiiPvVmxFgx5wdsFvF03v34lEVVhMpouqPAYQ15N37K/ir5XY+9m/
4 | d8ufMCkjeXsQkKqFbAlQcnWMCRnOoPHS3I4vi6hmnDDeeYTSRvfLbW0fhwIBIwKB
5 | gBIiOqZYaoqbeD9OS9z2K9KR2atlTxGxOJPXiP4ESqP3NVScWNwyZ3NXHpyrJLa0
6 | EbVtzsQhLn6rF+TzXnOlcipFvjsem3iYzCpuChfGQ6SovTcOjHV9z+hnpXvQ/fon
7 | soVRZY65wKnF7IAoUwTmJS9opqgrN6kRgCd3DASAMd1bAkEA96SBVWFt/fJBNJ9H
8 | tYnBKZGw0VeHOYmVYbvMSstssn8un+pQpUm9vlG/bp7Oxd/m+b9KWEh2xPfv6zqU
9 | avNwHwJBANqzGZa/EpzF4J8pGti7oIAPUIDGMtfIcmqNXVMckrmzQ2vTfqtkEZsA
10 | 4rE1IERRyiJQx6EJsz21wJmGV9WJQ5kCQQDwkS0uXqVdFzgHO6S++tjmjYcxwr3g
11 | H0CoFYSgbddOT6miqRskOQF3DZVkJT3kyuBgU2zKygz52ukQZMqxCb1fAkASvuTv
12 | qfpH87Qq5kQhNKdbbwbmd2NxlNabazPijWuphGTdW0VfJdWfklyS2Kr+iqrs/5wV
13 | HhathJt636Eg7oIjAkA8ht3MQ+XSl9yIJIS8gVpbPxSw5OMfw0PjVE7tBdQruiSc
14 | nvuQES5C9BMHjF39LZiGH1iLQy7FgdHyoP+eodI7
15 | -----END RSA PRIVATE KEY-----
16 |
--------------------------------------------------------------------------------
/tests/test_version.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests covering Fabric's version number pretty-print functionality.
3 | """
4 |
5 | from nose.tools import eq_
6 |
7 | import fabric.version
8 |
9 |
def test_get_version():
    # Nose test generator: exercises get_version() at all three verbosity
    # levels over a sample of VERSION tuples (final, alpha, beta, release
    # candidate, and the zero-serial "pre-alpha" special case).
    get_version = fabric.version.get_version
    for tup, short, normal, verbose in [
        ((0, 9, 0, 'final', 0), '0.9.0', '0.9', '0.9 final'),
        ((0, 9, 1, 'final', 0), '0.9.1', '0.9.1', '0.9.1 final'),
        ((0, 9, 0, 'alpha', 1), '0.9a1', '0.9 alpha 1', '0.9 alpha 1'),
        ((0, 9, 1, 'beta', 1), '0.9.1b1', '0.9.1 beta 1', '0.9.1 beta 1'),
        ((0, 9, 0, 'release candidate', 1),
         '0.9rc1', '0.9 release candidate 1', '0.9 release candidate 1'),
        ((1, 0, 0, 'alpha', 0), '1.0a', '1.0 pre-alpha', '1.0 pre-alpha'),
    ]:
        # Mutate the module-level VERSION; the yielded eq_ arguments are
        # evaluated eagerly here, so each comparison sees this tuple.
        # NOTE(review): VERSION is never restored — anything running after
        # this generator sees the last tuple. Confirm no test depends on it.
        fabric.version.VERSION = tup
        yield eq_, get_version('short'), short
        yield eq_, get_version('normal'), normal
        yield eq_, get_version('verbose'), verbose
25 |
--------------------------------------------------------------------------------
/sites/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Obtain shared config values
2 | import os, sys
3 | from os.path import abspath, join, dirname
4 | sys.path.append(abspath(join(dirname(__file__), '..')))
5 | sys.path.append(abspath(join(dirname(__file__), '..', '..')))
6 | from shared_conf import *
7 |
8 | # Enable autodoc, intersphinx
9 | extensions.extend(['sphinx.ext.autodoc', 'sphinx.ext.intersphinx'])
10 |
11 | # Autodoc settings
12 | autodoc_default_flags = ['members', 'special-members']
13 |
14 | # Default is 'local' building, but reference the public WWW site when building
15 | # under RTD.
16 | target = join(dirname(__file__), '..', 'www', '_build')
17 | if os.environ.get('READTHEDOCS') == 'True':
18 | target = 'http://www.fabfile.org/'
19 | # Intersphinx connection to stdlib + www site
20 | intersphinx_mapping = {
21 | 'python': ('http://docs.python.org/2.6', None),
22 | 'www': (target, None),
23 | }
24 |
25 | # Sister-site links to WWW
26 | html_theme_options['extra_nav_links'] = {
27 | "Main website": 'http://www.fabfile.org',
28 | }
29 |
--------------------------------------------------------------------------------
/tests/test_io.py:
--------------------------------------------------------------------------------
1 | from __future__ import with_statement
2 |
3 | from nose.tools import eq_
4 |
5 | from fabric.io import OutputLooper
6 | from fabric.context_managers import settings
7 |
8 |
def test_request_prompts():
    """
    Test valid responses from prompts
    """
    def run(txt, prompts):
        # Run _get_prompt_response() against `txt` using the given
        # prompt->response mapping installed via settings().
        with settings(prompts=prompts):
            # try to fulfil the OutputLooper interface, only want to test
            # _get_prompt_response. (str has a method upper)
            ol = OutputLooper(str, 'upper', None, list(txt), None)
            return ol._get_prompt_response()

    prompts = {"prompt2": "response2",
               "prompt1": "response1",
               "prompt": "response"
               }

    # Matching text maps to its configured (prompt, response) pair.
    eq_(run("this is a prompt for prompt1", prompts), ("prompt1", "response1"))
    eq_(run("this is a prompt for prompt2", prompts), ("prompt2", "response2"))
    # Text matching no configured prompt yields (None, None).
    eq_(run("this is a prompt for promptx:", prompts), (None, None))
    eq_(run("prompt for promp", prompts), (None, None))
29 |
--------------------------------------------------------------------------------
/fabric/exceptions.py:
--------------------------------------------------------------------------------
1 | """
2 | Custom Fabric exception classes.
3 |
4 | Most are simply distinct Exception subclasses for purposes of message-passing
5 | (though typically still in actual error situations.)
6 | """
7 |
8 |
class NetworkError(Exception):
    """
    Connection-level failure, optionally wrapping a lower-level exception.

    Must remain constructible with zero arguments: pickle re-creates
    exceptions that way when they travel through a multiprocessing.Queue.
    """

    def __init__(self, message=None, wrapped=None):
        self.message = message
        self.wrapped = wrapped

    def __str__(self):
        if self.message:
            return self.message
        return ""

    def __repr__(self):
        template = "%s(%s) => %r"
        return template % (type(self).__name__, self.message, self.wrapped)
24 |
25 |
class CommandTimeout(Exception):
    """
    Raised when a command exceeds its allotted runtime.

    ``timeout`` is the number of seconds the command was allowed to run;
    it is stored on the instance and echoed in ``message``.
    """

    def __init__(self, timeout):
        self.timeout = timeout
        msg = 'Command failed to finish in %s seconds' % (timeout,)
        self.message = msg
        super(CommandTimeout, self).__init__(msg)
33 |
--------------------------------------------------------------------------------
/sites/shared_conf.py:
--------------------------------------------------------------------------------
# Sphinx configuration shared by both documentation sites ('docs' and 'www');
# each site's conf.py execfile()s or imports this module and may override.
from os.path import join
from datetime import datetime

import alabaster


# Alabaster theme + mini-extension
html_theme_path = [alabaster.get_path()]
extensions = ['alabaster']
# Paths relative to invoking conf.py - not this shared file
html_static_path = [join('..', '_shared_static')]
html_theme = 'alabaster'
html_theme_options = {
    'logo': 'logo.png',
    'logo_name': True,
    'logo_text_align': 'center',
    'description': "Pythonic remote execution",
    'github_user': 'fabric',
    'github_repo': 'fabric',
    'travis_button': True,
    'analytics_id': 'UA-18486793-1',
    # Link colors (same hue for rest/hover)
    'link': '#3782BE',
    'link_hover': '#3782BE',
}
html_sidebars = {
    '**': [
        'about.html',
        'navigation.html',
        'searchbox.html',
        'donate.html',
    ]
}

# Regular settings
project = 'Fabric'
# Recompute so the copyright year never goes stale.
year = datetime.now().year
copyright = '%d Jeff Forcier' % year
master_doc = 'index'
templates_path = ['_templates']
exclude_trees = ['_build']
source_suffix = '.rst'
# Bare `name` references resolve as Python objects by default.
default_role = 'obj'
44 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
# Travis CI build configuration.
language: python
python:
  - "2.6"
  - "2.7"
install:
  # Build/test dependencies
  - pip install -r requirements.txt
  # Get fab to test fab
  - pip install -e .
  # Deal with issue on Travis builders re: multiprocessing.Queue :(
  - "sudo rm -rf /dev/shm && sudo ln -s /run/shm /dev/shm"
  # Jinja2 is used by parts of the test suite
  - "pip install jinja2"
before_script:
  # Allow us to SSH passwordless to localhost
  - ssh-keygen -f ~/.ssh/id_rsa -N ""
  - cp ~/.ssh/{id_rsa.pub,authorized_keys}
  # Creation of an SSH agent for testing forwarding
  - eval $(ssh-agent)
  - ssh-add
script:
  # Normal tests
  - fab test
  # Integration tests
  - fab -H localhost test:"--tests\=integration"
  # Build docs; www first without warnings so its intersphinx objects file
  # generates. Then docs (with warnings->errors), then www again (also w/
  # warnings on.) FUN TIMES WITH CIRCULAR DEPENDENCIES.
  - invoke www
  - invoke docs -o -W
  - invoke www -c -o -W
notifications:
  irc:
    channels: "irc.freenode.org#fabric"
    template:
      - "%{repository}@%{branch}: %{message} (%{build_url})"
    on_success: change
    on_failure: change
  email: false
39 |
--------------------------------------------------------------------------------
/fabric/colors.py:
--------------------------------------------------------------------------------
1 | """
2 | .. versionadded:: 0.9.2
3 |
4 | Functions for wrapping strings in ANSI color codes.
5 |
6 | Each function within this module returns the input string ``text``, wrapped
7 | with ANSI color codes for the appropriate color.
8 |
9 | For example, to print some text as green on supporting terminals::
10 |
11 | from fabric.colors import green
12 |
13 | print(green("This text is green!"))
14 |
15 | Because these functions simply return modified strings, you can nest them::
16 |
17 | from fabric.colors import red, green
18 |
19 | print(red("This sentence is red, except for " + \
20 | green("these words, which are green") + "."))
21 |
22 | If ``bold`` is set to ``True``, the ANSI flag for bolding will be flipped on
23 | for that particular invocation, which usually shows up as a bold or brighter
24 | version of the original color on most terminals.
25 | """
26 |
27 |
def _wrap_with(code):
    """
    Return a colorizer function for the given ANSI color ``code``.

    The returned function wraps its ``text`` argument in the escape sequence
    for that color (optionally bolded) and resets attributes afterwards.
    """
    def inner(text, bold=False):
        # Prepending "1;" to the code switches on the ANSI bold attribute.
        prefix = ("1;%s" % code) if bold else code
        return "\033[%sm%s\033[0m" % (prefix, text)
    return inner

red = _wrap_with('31')
green = _wrap_with('32')
yellow = _wrap_with('33')
blue = _wrap_with('34')
magenta = _wrap_with('35')
cyan = _wrap_with('36')
white = _wrap_with('37')
44 |
--------------------------------------------------------------------------------
/fabric/contrib/console.py:
--------------------------------------------------------------------------------
1 | """
2 | Console/terminal user interface functionality.
3 | """
4 |
5 | from fabric.api import prompt
6 |
7 |
def confirm(question, default=True):
    """
    Ask user a yes/no question and return their response as True or False.

    ``question`` should be a simple, grammatically complete question such as
    "Do you wish to continue?", and will have a string similar to " [Y/n] "
    appended automatically. This function will *not* append a question mark for
    you.

    By default, when the user presses Enter without typing anything, "yes" is
    assumed. This can be changed by specifying ``default=False``.
    """
    # The capitalized option in the suffix hints at the default answer.
    suffix = "Y/n" if default else "y/N"
    # Keep re-prompting until we get an empty, affirmative or negative answer.
    while True:
        answer = prompt("%s [%s] " % (question, suffix)).lower()
        if not answer:
            return default
        if answer in ('y', 'yes'):
            return True
        if answer in ('n', 'no'):
            return False
        print("I didn't understand you. Please specify '(y)es' or '(n)o'.")
39 |
--------------------------------------------------------------------------------
/tests/test_state.py:
--------------------------------------------------------------------------------
1 | from nose.tools import eq_
2 |
3 | from fabric.state import _AliasDict
4 |
5 |
def test_dict_aliasing():
    """
    Assigning values to aliases updates aliased keys
    """
    aliased = _AliasDict(
        {'bar': False, 'biz': True, 'baz': False},
        aliases={'foo': ['bar', 'biz', 'baz']}
    )
    # Sanity-check starting values.
    for key, expected in (('bar', False), ('biz', True), ('baz', False)):
        eq_(aliased[key], expected)
    # Writing through the alias...
    aliased['foo'] = True
    # ...should have flipped every target key to True.
    for key in ('bar', 'biz', 'baz'):
        eq_(aliased[key], True)
24 |
25 |
def test_nested_dict_aliasing():
    """
    Aliases can be nested
    """
    aliased = _AliasDict(
        {'bar': False, 'biz': True},
        aliases={'foo': ['bar', 'nested'], 'nested': ['biz']}
    )
    # Starting state.
    eq_(aliased['bar'], False)
    eq_(aliased['biz'], True)
    # Writing the top-level alias should chain through 'nested' to 'biz'.
    aliased['foo'] = True
    eq_(aliased['bar'], True)
    eq_(aliased['biz'], True)
42 |
43 |
def test_dict_alias_expansion():
    """
    Alias expansion
    """
    aliased = _AliasDict(
        {'bar': False, 'biz': True},
        aliases={'foo': ['bar', 'nested'], 'nested': ['biz']}
    )
    # Nested aliases flatten out to their concrete keys, in order.
    eq_(aliased.expand_aliases(['foo']), ['bar', 'biz'])
53 |
--------------------------------------------------------------------------------
/AUTHORS:
--------------------------------------------------------------------------------
1 | The following list contains individuals who contributed nontrivial code to
2 | Fabric's codebase, ordered by date of first contribution. Individuals who
3 | submitted bug reports or trivial one-line "you forgot to do X" patches are
4 | generally credited in the commit log only.
5 |
6 | IMPORTANT: as of 2012, this file is historical only and we'll probably stop
7 | updating it. The changelog and/or Git history is the canonical source for
8 | thanks, credits etc.
9 |
10 | Christian Vest Hansen
11 | Rob Cowie
12 | Jeff Forcier
13 | Travis Cline
14 | Niklas Lindström
15 | Kevin Horn
16 | Max Battcher
17 | Alexander Artemenko
18 | Dennis Schoen
19 | Erick Dennis
20 | Sverre Johansen
21 | Michael Stephens
22 | Armin Ronacher
23 | Curt Micol
24 | Patrick McNerthney
25 | Steve Steiner
26 | Ali Saifee
27 | Jorge Vargas
28 | Peter Ellis
29 | Brian Rosner
30 | Xinan Wu
31 | Alex Koshelev
32 | Mich Matuson
33 | Morgan Goose
34 | Carl Meyer
35 | Erich Heine
36 | Travis Swicegood
37 | Paul Smith
38 | Alex Koshelev
39 | Stephen Goss
40 | James Murty
41 | Thomas Ballinger
42 | Rick Harding
43 | Kirill Pinchuk
44 | Ales Zoulek
45 | Casey Banner
46 | Roman Imankulov
47 | Rodrigue Alcazar
48 | Jeremy Avnet
49 | Matt Chisholm
50 | Mark Merritt
51 | Max Arnold
52 | Szymon Reichmann
53 | David Wolever
54 | Jason Coombs
55 | Ben Davis
56 | Neilen Marais
57 | Rory Geoghegan
58 | Alexey Diyan
59 | Kamil Kisiel
60 | Jonas Lundberg
61 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2009-2016 Jeffrey E. Forcier
2 | Copyright (c) 2008-2009 Christian Vest Hansen
3 | All rights reserved.
4 |
5 | Redistribution and use in source and binary forms, with or without
6 | modification, are permitted provided that the following conditions are met:
7 |
8 | * Redistributions of source code must retain the above copyright notice,
9 | this list of conditions and the following disclaimer.
10 | * Redistributions in binary form must reproduce the above copyright notice,
11 | this list of conditions and the following disclaimer in the documentation
12 | and/or other materials provided with the distribution.
13 |
14 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
15 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
16 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
17 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
18 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
19 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
20 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
21 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
22 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | Fabric is a Python (2.5-2.7) library and command-line tool for
2 | streamlining the use of SSH for application deployment or systems
3 | administration tasks.
4 |
5 | It provides a basic suite of operations for executing local or remote shell
6 | commands (normally or via ``sudo``) and uploading/downloading files, as well as
7 | auxiliary functionality such as prompting the running user for input, or
8 | aborting execution.
9 |
10 | Typical use involves creating a Python module containing one or more functions,
11 | then executing them via the ``fab`` command-line tool. Below is a small but
12 | complete "fabfile" containing a single task:
13 |
14 | .. code-block:: python
15 |
16 | from fabric.api import run
17 |
18 | def host_type():
19 | run('uname -s')
20 |
21 | If you save the above as ``fabfile.py`` (the default module that
22 | ``fab`` loads), you can run the tasks defined in it on one or more
23 | servers, like so::
24 |
25 | $ fab -H localhost,linuxbox host_type
26 | [localhost] run: uname -s
27 | [localhost] out: Darwin
28 | [linuxbox] run: uname -s
29 | [linuxbox] out: Linux
30 |
31 | Done.
32 | Disconnecting from localhost... done.
33 | Disconnecting from linuxbox... done.
34 |
35 | In addition to use via the ``fab`` tool, Fabric's components may be imported
36 | into other Python code, providing a Pythonic interface to the SSH protocol
37 | suite at a higher level than that provided by e.g. the ``Paramiko`` library
38 | (which Fabric itself uses.)
39 |
--------------------------------------------------------------------------------
/sites/docs/running_tests.rst:
--------------------------------------------------------------------------------
1 | ======================
2 | Running Fabric's Tests
3 | ======================
4 |
5 | Fabric is maintained with 100% passing tests. Where possible, patches should
6 | include tests covering the changes, making things far easier to verify & merge.
7 |
8 | When developing on Fabric, it works best to establish a `virtualenv`_ to install
9 | the dependencies in isolation for running tests.
10 |
11 | .. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
12 |
13 | .. _first-time-setup:
14 |
15 | First-time Setup
16 | ================
17 |
18 | * Fork the `repository`_ on GitHub
* Clone your new fork (e.g.
  ``git clone git@github.com:<your_username>/fabric.git``)
21 | * ``cd fabric``
22 | * ``virtualenv env``
23 | * ``. env/bin/activate``
24 | * ``pip install -r requirements.txt``
25 | * ``python setup.py develop``
26 |
27 | .. _`repository`: https://github.com/fabric/fabric
28 |
29 | .. _running-tests:
30 |
31 | Running Tests
32 | =============
33 |
34 | Once your virtualenv is activated (``. env/bin/activate``) & you have the latest
35 | requirements, running tests is just::
36 |
37 | nosetests tests/
38 |
39 | You should **always** run tests on ``master`` (or the release branch you're
40 | working with) to ensure they're passing before working on your own
41 | changes/tests.
42 |
43 | Alternatively, if you've run ``python setup.py develop`` on your Fabric clone,
44 | you can also run::
45 |
46 | fab test
47 |
48 | This adds additional flags which enable running doctests & adds nice coloration.
49 |
--------------------------------------------------------------------------------
/sites/www/contact.rst:
--------------------------------------------------------------------------------
1 | =======
2 | Contact
3 | =======
4 |
If you've scoured the :ref:`prose <usage-docs>` and :ref:`API <api_docs>`
6 | documentation and still can't find an answer to your question, below are
7 | various support resources that should help. We do request that you do at least
8 | skim the documentation before posting tickets or mailing list questions,
9 | however!
10 |
11 | Mailing list
12 | ------------
13 |
The best way to get help with using Fabric is via the `fab-user mailing list
<https://lists.nongnu.org/mailman/listinfo/fab-user>`_ (currently hosted at
16 | ``nongnu.org``.) The Fabric developers do their best to reply promptly, and the
17 | list contains an active community of other Fabric users and contributors as
18 | well.
19 |
20 | Twitter
21 | -------
22 |
Fabric has an official Twitter account, `@pyfabric
<https://twitter.com/pyfabric>`_, which is used for announcements and occasional
25 | related news tidbits (e.g. "Hey, check out this neat article on Fabric!").
26 |
27 | .. _bugs:
28 |
29 | Bugs/ticket tracker
30 | -------------------
31 |
To file new bugs or search existing ones, you may visit Fabric's `Github Issues
<https://github.com/fabric/fabric/issues>`_ page. This does require a (free, easy to set up) Github account.
34 |
35 | .. _irc:
36 |
37 | IRC
38 | ---
39 |
40 | We maintain a semi-official IRC channel at ``#fabric`` on Freenode
41 | (``irc://irc.freenode.net``) where the developers and other users may be found.
42 | As always with IRC, we can't promise immediate responses, but some folks keep
43 | logs of the channel and will try to get back to you when they can.
44 |
--------------------------------------------------------------------------------
/tests/client.key:
--------------------------------------------------------------------------------
1 | -----BEGIN RSA PRIVATE KEY-----
2 | Proc-Type: 4,ENCRYPTED
3 | DEK-Info: DES-EDE3-CBC,F1AFE040F412E6D1
4 |
5 | cIBbwu1/PD9vjtyFn+xbpc2X9Uv9sllCRooLwkOv9rkBxDRItT8D5UiGHGIGIAvj
6 | eq9sUze8bXQeXs9zpJwMRH1kjdmCmnmRX0iXcsxSgnioL3aEGLTbXqxkUOnSgj4Y
7 | cJ1trT51XVRSBGlRHYPmF1IhYYW/RPZlFUPMJDE5s1moROU29DfnaboTREf8shJ9
8 | A/jHvKoivn4GgM1U6VcwwtijvmgrrB5KzqpRfTLf6Rxe6St3e4WjQusYWVP4BOmz
9 | ImQyaATcPwn5iMWPfvXohPQR/ajuoU9jzMM3DqzcrH7Q4VmpSTrmkdG7Ra5GfSE1
10 | O5WEiqNwUkfjAYIjbxo11gVtIH8ddsMuF5odsh2LVXYocHeZzRlZvsip2AePKiKX
11 | xMkZItP4xqFBfi0jnqCVkQGUdtRYhHomDUO8U0JtB3BFNT/L+LC+dsrj8G/FaQiD
12 | n8an2sDf1CrYXqfz3V3rGzuPDq/CKwPD8HeTpjZUT7bPUNsTNMVx58LiYShRV2uB
13 | zUn83diKX12xS+gyS5PfuujwQP93ZQXOP9agKSa2UlY2ojUxtpc1vxiEzcFcU9Zg
14 | 2uLEbsRKW1qe2jLDTmRyty14rJmi7ocbjPUuEuw9Aj1v46jzhBXBPE7cWHGm1o2/
15 | /e0lGfLTtm3Q2SponTLTcHTrBvrDBRlDAN5sChhbaoEoUCHjTKo8aj6whDKfAw4Q
16 | KNHrOkkXyDyvd90c1loen5u5iaol+l5W+7LG3Sr5uRHMHAsF0MH9cZd/RQXMSY/U
17 | sQLWumskx/iSrbjFztW0La0bBCB6vHBYLervC3lrrmvnhfYrNBrZM8eH1hTSZUsT
18 | VFeKgm+KVkwEG/uXoI/XOge01b1oOHzKNKGT7Q5ogbV6w67LtOrSeTH0FCjHsN8z
19 | 2LCQHWuII4h3b1U/Pg8N5Pz59+qraSrMZAHOROYc19r0HSS5gg7m1yD3IPXO73fI
20 | gLO0/44f/KYqVP2+FKgQo9enUSLI5GuMAfhWaTpeOpJNd10egSOB3SaJ7nn20/Pm
21 | vSBSL0KsSeXY4/Df43MuHu46PvYzRwKvZB7GJJJPi2XjdFqCxuoCuEqfaZxf1lnI
22 | ZhZFmsZE1rd7kgBYyn0VXn1AvrLjaLuvmsOKaFdO4TAbQpE3Pps6AdQ8EpJ62Gei
23 | 0yZlXgh2+zZp5lRMfO5JFtr7/pVpIqnRKfaDk1XawWP7i1/0PnVXsR2G6yu6kbEg
24 | R/v2LKnp49TUldfNmVW8QHElw/LrCBW08iA+44vlGYdCU8nAW9Sy+y4plW+X32z8
25 | Viw82ISUcoJSHmRfzXOWaj24AftbSOzo2bRmCO+xkBkXFrhTI83Aqbu7TN/yejB8
26 | hDb04AVxzEkBTw/B0pLkJUt5lpcr9fZMvACHsL0gTRc5OPb4/zhG7y9npWgq5Snb
27 | ZnUAOi+ndnW8IL4y9YI6U7LBSyMvE7L7+QCnLJxVnO2NxjDCJVDDe6fLR9pRBCCC
28 | Sh3X/FNsu1YQzNIOvf75ri1zzqKmv4x6ETmmgs+vMGRl62s8SQcgWFEGAVrAP+uR
29 | ocx0chW3BWEQalRat2vBWpj1gyH2aHd8tgamb8XXFLK35iTk2/oCqQ==
30 | -----END RSA PRIVATE KEY-----
31 |
--------------------------------------------------------------------------------
/tests/fake_filesystem.py:
--------------------------------------------------------------------------------
1 | import os
2 | import stat
3 | from StringIO import StringIO
4 | from types import StringTypes
5 |
6 | from fabric.network import ssh
7 |
8 |
class FakeFile(StringIO):
    """
    In-memory stand-in for an SFTP file, exposing fake ``SFTPAttributes``.

    A ``value`` of None marks a directory; any string is file contents.
    """

    def __init__(self, value=None, path=None):
        if value is None:
            StringIO.__init__(self, "")
            ftype, size = 'dir', 4096
        else:
            StringIO.__init__(self, value)
            ftype, size = 'file', len(value)
        attributes = ssh.SFTPAttributes()
        attributes.st_mode = stat.S_IFDIR if ftype == 'dir' else stat.S_IFREG
        attributes.st_size = size
        attributes.filename = os.path.basename(path)
        self.attributes = attributes

    def __str__(self):
        return self.getvalue()

    def write(self, value):
        # Keep the fake stat size in sync with the buffer contents.
        StringIO.write(self, value)
        self.attributes.st_size = len(self.getvalue())

    def close(self):
        """
        Always hold fake files open.
        """
        pass

    def __cmp__(self, other):
        # Compare as a plain string when the other side is one.
        me = str(self) if isinstance(other, StringTypes) else self
        return cmp(me, other)
43 |
44 |
class FakeFilesystem(dict):
    """
    Dict of path -> ``FakeFile`` objects, with path normalization on lookup.
    """
    def __init__(self, d=None):
        # Funnel the seed dict through __setitem__ so raw values get wrapped.
        for key, value in (d or {}).iteritems():
            self[key] = value

    def __setitem__(self, key, value):
        # Wrap raw strings (and None, meaning "directory") in FakeFile.
        if value is None or isinstance(value, StringTypes):
            value = FakeFile(value, key)
        super(FakeFilesystem, self).__setitem__(key, value)

    def normalize(self, path):
        """
        Normalize relative paths.

        In our case, the "home" directory is just the root, /.

        I expect real servers do this as well but with the user's home
        directory.
        """
        if not path.startswith(os.path.sep):
            path = os.path.join(os.path.sep, path)
        return path

    def __getitem__(self, key):
        # Every lookup goes through normalization first.
        return super(FakeFilesystem, self).__getitem__(self.normalize(key))
72 |
--------------------------------------------------------------------------------
/sites/docs/index.rst:
--------------------------------------------------------------------------------
1 | ==================================
2 | Welcome to Fabric's documentation!
3 | ==================================
4 |
5 | This site covers Fabric's usage & API documentation. For basic info on what
6 | Fabric is, including its public changelog & how the project is maintained,
please see `the main project website <http://fabfile.org>`_.
8 |
9 |
10 | Tutorial
11 | --------
12 |
13 | For new users, and/or for an overview of Fabric's basic functionality, please
14 | see the :doc:`tutorial`. The rest of the documentation will assume you're
15 | at least passingly familiar with the material contained within.
16 |
17 | .. toctree::
18 | :hidden:
19 |
20 | tutorial
21 |
22 |
23 | .. _usage-docs:
24 |
25 | Usage documentation
26 | -------------------
27 |
28 | The following list contains all major sections of Fabric's prose (non-API)
29 | documentation, which expands upon the concepts outlined in the
30 | :doc:`tutorial` and also covers advanced topics.
31 |
32 | .. toctree::
33 | :maxdepth: 2
34 | :glob:
35 |
36 | usage/*
37 |
38 |
39 | .. _api_docs:
40 |
41 | API documentation
42 | -----------------
43 |
44 | Fabric maintains two sets of API documentation, autogenerated from the source
45 | code's docstrings (which are typically very thorough.)
46 |
47 | .. _core-api:
48 |
49 | Core API
50 | ~~~~~~~~
51 |
52 | The **core** API is loosely defined as those functions, classes and methods
53 | which form the basic building blocks of Fabric (such as
54 | `~fabric.operations.run` and `~fabric.operations.sudo`) upon which everything
55 | else (the below "contrib" section, and user fabfiles) builds.
56 |
57 | .. toctree::
58 | :maxdepth: 1
59 | :glob:
60 |
61 | api/core/*
62 |
63 | .. _contrib-api:
64 |
65 | Contrib API
66 | ~~~~~~~~~~~
67 |
68 | Fabric's **contrib** package contains commonly useful tools (often merged in
69 | from user fabfiles) for tasks such as user I/O, modifying remote files, and so
70 | forth. While the core API is likely to remain small and relatively unchanged
71 | over time, this contrib section will grow and evolve (while trying to remain
72 | backwards-compatible) as more use-cases are solved and added.
73 |
74 | .. toctree::
75 | :maxdepth: 1
76 | :glob:
77 |
78 | api/contrib/*
79 |
80 |
81 | Contributing & Running Tests
82 | ----------------------------
83 |
84 | For advanced users & developers looking to help fix bugs or add new features.
85 |
86 | .. toctree::
87 | :hidden:
88 |
89 | running_tests
90 |
--------------------------------------------------------------------------------
/sites/www/troubleshooting.rst:
--------------------------------------------------------------------------------
1 | ===============
2 | Troubleshooting
3 | ===============
4 |
5 | Stuck? Having a problem? Here are the steps to try before you submit a bug
6 | report.
7 |
8 | * **Make sure you're on the latest version.** If you're not on the most recent
9 | version, your problem may have been solved already! Upgrading is always the
10 | best first step.
11 | * **Try older versions.** If you're already *on* the latest Fabric, try rolling
12 | back a few minor versions (e.g. if on 1.7, try Fabric 1.5 or 1.6) and see if
13 | the problem goes away. This will help the devs narrow down when the problem
14 | first arose in the commit log.
15 | * **Try switching up your Paramiko.** Fabric relies heavily on the Paramiko
16 | library for its SSH functionality, so try applying the above two steps to
17 | your Paramiko install as well.
18 |
19 | .. note::
20 | Fabric versions sometimes have different Paramiko dependencies - so to
21 | try older Paramikos you may need to downgrade Fabric as well.
22 |
23 | * **Make sure Fabric is really the problem.** If your problem is in the
24 | behavior or output of a remote command, try recreating it without Fabric
25 | involved:
26 |
27 | * Run Fabric with ``--show=debug`` and look for the ``run:`` or ``sudo:``
28 | line about the command in question. Try running that exact command,
29 | including any ``/bin/bash`` wrapper, remotely and see what happens. This
30 | may find problems related to the bash or sudo wrappers.
31 | * Execute the command (both the normal version, and the 'unwrapped' version
32 | seen via ``--show=debug``) from your local workstation using ``ssh``,
33 | e.g.::
34 |
35 | $ ssh -t mytarget "my command"
36 |
37 | The ``-t`` flag matches Fabric's default behavior of enabling a PTY
38 | remotely. This helps identify apps that behave poorly when run in a
39 | non-shell-spawned PTY.
40 |
41 | * **Enable Paramiko-level debug logging.** If your issue is in the lower level
42 | Paramiko library, it can help us to see the debug output Paramiko prints. At
43 | top level in your fabfile, add the following::
44 |
45 | import logging
46 | logging.basicConfig(level=logging.DEBUG)
47 |
48 | This should start printing Paramiko's debug statements to your standard error
49 | stream. (Feel free to add more logging kwargs to ``basicConfig()`` such as
50 | ``filename='/path/to/a/file'`` if you like.)
51 |
52 | Then submit this info to anybody helping you on IRC or in your bug report.
53 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from __future__ import with_statement
4 |
5 | import sys
6 |
7 | from setuptools import setup, find_packages
8 |
9 | from fabric.version import get_version
10 |
11 |
12 | with open('README.rst') as f:
13 | readme = f.read()
14 |
15 | long_description = """
16 | To find out what's new in this version of Fabric, please see `the changelog
17 | `_.
18 |
19 | You can also install the `in-development version
20 | `_ using
21 | pip, with `pip install fabric==dev`.
22 |
23 | ----
24 |
25 | %s
26 |
27 | ----
28 |
29 | For more information, please see the Fabric website or execute ``fab --help``.
30 | """ % (readme)
31 |
32 | if sys.version_info[:2] < (2, 6):
33 | install_requires=['paramiko>=1.10,<1.13']
34 | else:
35 | install_requires=['paramiko>=1.10']
36 |
37 |
38 | setup(
39 | name='Fabric',
40 | version=get_version('short'),
41 | description='Fabric is a simple, Pythonic tool for remote execution and deployment.',
42 | long_description=long_description,
43 | author='Jeff Forcier',
44 | author_email='jeff@bitprophet.org',
45 | url='http://fabfile.org',
46 | packages=find_packages(),
47 | test_suite='nose.collector',
48 | tests_require=['nose', 'fudge<1.0', 'jinja2'],
49 | install_requires=install_requires,
50 | entry_points={
51 | 'console_scripts': [
52 | 'fab = fabric.main:main',
53 | ]
54 | },
55 | classifiers=[
56 | 'Development Status :: 5 - Production/Stable',
57 | 'Environment :: Console',
58 | 'Intended Audience :: Developers',
59 | 'Intended Audience :: System Administrators',
60 | 'License :: OSI Approved :: BSD License',
61 | 'Operating System :: MacOS :: MacOS X',
62 | 'Operating System :: Unix',
63 | 'Operating System :: POSIX',
64 | 'Programming Language :: Python',
65 | 'Programming Language :: Python :: 2.5',
66 | 'Programming Language :: Python :: 2.6',
67 | 'Programming Language :: Python :: 2.7',
68 | 'Topic :: Software Development',
69 | 'Topic :: Software Development :: Build Tools',
70 | 'Topic :: Software Development :: Libraries',
71 | 'Topic :: Software Development :: Libraries :: Python Modules',
72 | 'Topic :: System :: Clustering',
73 | 'Topic :: System :: Software Distribution',
74 | 'Topic :: System :: Systems Administration',
75 | ],
76 | )
77 |
--------------------------------------------------------------------------------
/fabric/docs.py:
--------------------------------------------------------------------------------
1 | from fabric.tasks import WrappedCallableTask
2 |
3 |
def unwrap_tasks(module, hide_nontasks=False):
    """
    Replace task objects on ``module`` with their wrapped functions instead.

    Specifically, look for instances of `~fabric.tasks.WrappedCallableTask` and
    replace them with their ``.wrapped`` attribute (the original decorated
    function.)

    This is intended for use with the Sphinx autodoc tool, to be run near the
    bottom of a project's ``conf.py``. It ensures that the autodoc extension
    will have full access to the "real" function, in terms of function
    signature and so forth. Without use of ``unwrap_tasks``, autodoc is unable
    to access the function signature (though it is able to see e.g.
    ``__doc__``.)

    For example, at the bottom of your ``conf.py``::

        from fabric.docs import unwrap_tasks
        import my_package.my_fabfile
        unwrap_tasks(my_package.my_fabfile)

    You can go above and beyond, and explicitly **hide** all non-task
    functions, by saying ``hide_nontasks=True``. This renames all objects
    failing the "is it a task?" check so they appear to be private, which will
    then cause autodoc to skip over them.

    ``hide_nontasks`` is thus useful when you have a fabfile mixing in
    subroutines with real tasks and want to document *just* the real tasks.

    If you run this within an actual Fabric-code-using session (instead of
    within a Sphinx ``conf.py``), please seek immediate medical attention.

    .. versionadded:: 1.5

    .. seealso:: `~fabric.tasks.WrappedCallableTask`, `~fabric.decorators.task`
    """
    # Task names already unwrapped; consulted below so a later, non-task
    # binding of the same name doesn't get hidden again.
    set_tasks = []
    for name, obj in vars(module).items():
        if isinstance(obj, WrappedCallableTask):
            # Rebind under the task's public name (may differ from `name`).
            setattr(module, obj.name, obj.wrapped)
            # Handle situation where a task's real name shadows a builtin.
            # If the builtin comes after the task in vars().items(), the object
            # we just setattr'd above will get re-hidden :(
            set_tasks.append(obj.name)
            # In the same vein, "privately" named wrapped functions whose task
            # name is public, needs to get renamed so autodoc picks it up.
            # (``func_name`` is the Python 2 spelling of ``__name__``.)
            obj.wrapped.func_name = obj.name
        else:
            if name in set_tasks:
                continue
            # Only docstring-bearing, public-looking objects get hidden;
            # undocumented ones are skipped by autodoc anyway.
            has_docstring = getattr(obj, '__doc__', False)
            if hide_nontasks and has_docstring and not name.startswith('_'):
                setattr(module, '_%s' % name, obj)
                delattr(module, name)
58 |
--------------------------------------------------------------------------------
/tests/mock_streams.py:
--------------------------------------------------------------------------------
1 | """
2 | Stand-alone stream mocking decorator for easier imports.
3 | """
4 | from functools import wraps
5 | import sys
6 | from StringIO import StringIO # No need for cStringIO at this time
7 |
8 |
class CarbonCopy(StringIO):
    """
    A StringIO capable of multiplexing its writes to other buffer objects.
    """

    def __init__(self, buffer='', cc=None):
        """
        If ``cc`` is given and is a file-like object or an iterable of same,
        it/they will be written to whenever this StringIO instance is written
        to.
        """
        StringIO.__init__(self, buffer)
        # Normalize cc into a list of writer objects: none, one, or many.
        if cc is None:
            self.cc = []
        elif hasattr(cc, 'write'):
            self.cc = [cc]
        else:
            self.cc = cc

    def write(self, s):
        StringIO.write(self, s)
        for sink in self.cc:
            sink.write(s)
31 |
32 |
def mock_streams(which):
    """
    Replaces a stream with a ``StringIO`` during the test, then restores after.

    Must specify which stream (stdout, stderr, etc) via string args, e.g.::

        @mock_streams('stdout')
        def func():
            pass

        @mock_streams('stderr')
        def func():
            pass

        @mock_streams('both')
        def func()
            pass

    If ``'both'`` is specified, not only will both streams be replaced with
    StringIOs, but a new combined-streams output (another StringIO) will appear
    at ``sys.stdall``. This StringIO will resemble what a user sees at a
    terminal, i.e. both streams intermingled.

    The decorated function's return value is propagated to the caller.
    """
    both = (which == 'both')
    stdout = (which == 'stdout') or both
    stderr = (which == 'stderr') or both

    def mocked_streams_decorator(func):
        @wraps(func)
        def inner_wrapper(*args, **kwargs):
            if both:
                # Mirror each fake stream into stdall so output interleaves.
                sys.stdall = StringIO()
                fake_stdout = CarbonCopy(cc=sys.stdall)
                fake_stderr = CarbonCopy(cc=sys.stdall)
            else:
                fake_stdout, fake_stderr = StringIO(), StringIO()
            if stdout:
                my_stdout, sys.stdout = sys.stdout, fake_stdout
            if stderr:
                my_stderr, sys.stderr = sys.stderr, fake_stderr
            try:
                ret = func(*args, **kwargs)
            finally:
                # Always restore the real streams, even on exceptions.
                if stdout:
                    sys.stdout = my_stdout
                if stderr:
                    sys.stderr = my_stderr
                if both:
                    del sys.stdall
            # Bug fix: previously the wrapper silently discarded the wrapped
            # function's return value.
            return ret
        return inner_wrapper
    return mocked_streams_decorator
84 |
85 |
86 |
--------------------------------------------------------------------------------
/tests/test_parallel.py:
--------------------------------------------------------------------------------
1 | from __future__ import with_statement
2 |
3 | from fabric.api import run, parallel, env, hide, execute, settings
4 |
5 | from utils import FabricTest, eq_, aborts, mock_streams
6 | from server import server, RESPONSES, USER, HOST, PORT
7 |
8 | # TODO: move this into test_tasks? meh.
9 |
10 | class OhNoesException(Exception): pass
11 |
12 |
class TestParallel(FabricTest):
    @server()
    @parallel
    def test_parallel(self):
        """
        Want to do a simple call and respond
        """
        env.pool_size = 10
        cmd = "ls /simple"
        with hide('everything'):
            # The fake server maps known command strings to canned responses.
            eq_(run(cmd), RESPONSES[cmd])

    @server(port=2200)
    @server(port=2201)
    def test_env_host_no_user_or_port(self):
        """
        Ensure env.host doesn't get user/port parts when parallel
        """
        @parallel
        def _task():
            run("ls /simple")
            # env.host should hold only the hostname; user/port must have
            # been split off into their own env vars.
            assert USER not in env.host
            assert str(PORT) not in env.host

        # 'user@host:%s' template; the port is interpolated per-host below.
        host_string = '%s@%s:%%s' % (USER, HOST)
        with hide('everything'):
            execute(_task, hosts=[host_string % 2200, host_string % 2201])

    @server(port=2200)
    @server(port=2201)
    @aborts
    def test_parallel_failures_abort(self):
        # Without warn_only, a failure on any one host should abort the whole
        # run (asserted via the @aborts decorator).
        with hide('everything'):
            host1 = '127.0.0.1:2200'
            host2 = '127.0.0.1:2201'

            @parallel
            def mytask():
                run("ls /")
                # Fail on exactly one of the two hosts.
                if env.host_string == host2:
                    raise OhNoesException

            execute(mytask, hosts=[host1, host2])

    @server(port=2200)
    @server(port=2201)
    @mock_streams('stderr') # To hide the traceback for now
    def test_parallel_failures_honor_warn_only(self):
        # With warn_only set, per-host failures are collected into the result
        # mapping instead of aborting the run.
        with hide('everything'):
            host1 = '127.0.0.1:2200'
            host2 = '127.0.0.1:2201'

            @parallel
            def mytask():
                run("ls /")
                if env.host_string == host2:
                    raise OhNoesException

            with settings(warn_only=True):
                result = execute(mytask, hosts=[host1, host2])
            # Successful host yields None; failed host yields the exception
            # object itself.
            eq_(result[host1], None)
            assert isinstance(result[host2], OhNoesException)


    @server(port=2200)
    @server(port=2201)
    def test_parallel_implies_linewise(self):
        host1 = '127.0.0.1:2200'
        host2 = '127.0.0.1:2201'

        # Precondition: linewise output is off by default.
        assert not env.linewise

        @parallel
        def mytask():
            run("ls /")
            # Report what linewise looked like *inside* the parallel task.
            return env.linewise

        with hide('everything'):
            result = execute(mytask, hosts=[host1, host2])
            # Parallel execution should have forced linewise on for each host.
            eq_(result[host1], True)
            eq_(result[host2], True)
94 |
--------------------------------------------------------------------------------
/sites/docs/usage/library.rst:
--------------------------------------------------------------------------------
1 | ===========
2 | Library Use
3 | ===========
4 |
5 | Fabric's primary use case is via fabfiles and the :doc:`fab <fab>` tool,
6 | and this is reflected in much of the documentation. However, Fabric's internals
7 | are written in such a manner as to be easily used without ``fab`` or fabfiles
8 | at all -- this document will show you how.
9 |
10 | There's really only a couple of considerations one must keep in mind, when
11 | compared to writing a fabfile and using ``fab`` to run it: how connections are
12 | really made, and how disconnections occur.
13 |
14 | Connections
15 | ===========
16 |
17 | We've documented how Fabric really connects to its hosts before, but it's
18 | currently somewhat buried in the middle of the overall :doc:`execution docs
19 | <execution>`. Specifically, you'll want to skip over to the
20 | :ref:`connections` section and read it real quick. (You should really give that
21 | entire document a once-over, but it's not absolutely required.)
22 |
23 | As that section mentions, the key is simply that `~fabric.operations.run`,
24 | `~fabric.operations.sudo` and the other operations only look in one place when
25 | connecting: :ref:`env.host_string `. All of the other mechanisms
26 | for setting hosts are interpreted by the ``fab`` tool when it runs, and don't
27 | matter when running as a library.
28 |
29 | That said, most use cases where you want to marry a given task ``X`` and a given list of hosts ``Y`` can, as of Fabric 1.3, be handled with the `~fabric.tasks.execute` function via ``execute(X, hosts=Y)``. Please see `~fabric.tasks.execute`'s documentation for details -- manual host string manipulation should be rarely necessary.
30 |
31 | Disconnecting
32 | =============
33 |
34 | The other main thing that ``fab`` does for you is to disconnect from all hosts
35 | at the end of a session; otherwise, Python will sit around forever waiting for
36 | those network resources to be released.
37 |
38 | Fabric 0.9.4 and newer have a function you can use to do this easily:
39 | `~fabric.network.disconnect_all`. Simply make sure your code calls this when it
40 | terminates (typically in the ``finally`` clause of an outer ``try: finally``
41 | statement -- lest errors in your code prevent disconnections from happening!)
42 | and things ought to work pretty well.
43 |
44 | If you're on Fabric 0.9.3 or older, you can simply do this (``disconnect_all``
45 | just adds a bit of nice output to this logic)::
46 |
47 | from fabric.state import connections
48 |
49 | for key in connections.keys():
50 | connections[key].close()
51 | del connections[key]
52 |
53 |
54 | Final note
55 | ==========
56 |
57 | This document is an early draft, and may not cover absolutely every difference
58 | between ``fab`` use and library use. However, the above should highlight the
59 | largest stumbling blocks. When in doubt, note that in the Fabric source code,
60 | ``fabric/main.py`` contains the bulk of the extra work done by ``fab``, and may
61 | serve as a useful reference.
62 |
--------------------------------------------------------------------------------
/sites/www/development.rst:
--------------------------------------------------------------------------------
1 | ===========
2 | Development
3 | ===========
4 |
5 | The Fabric development team is headed by `Jeff Forcier
6 | `_, aka ``bitprophet``. However, dozens of other
7 | developers pitch in by submitting patches and ideas via `GitHub issues and pull
8 | requests `_, :ref:`IRC ` or the `mailing
9 | list `_.
10 |
11 | Get the code
12 | ============
13 |
14 | Please see the :ref:`source-code-checkouts` section of the :doc:`installing`
15 | page for details on how to obtain Fabric's source code.
16 |
17 | Contributing
18 | ============
19 |
20 | There are a number of ways to get involved with Fabric:
21 |
22 | * **Use Fabric and send us feedback!** This is both the easiest and arguably
23 | the most important way to improve the project -- let us know how you
24 | currently use Fabric and how you want to use it. (Please do try to search the
25 | `ticket tracker`_ first, though,
26 | when submitting feature ideas.)
27 | * **Report bugs or submit feature requests.** We follow `contribution-guide.org`_'s guidelines, so please check them out before
28 | visiting the `ticket tracker`_.
29 | * **Fix bugs or implement features!** Again, follow `contribution-guide.org`_
30 | for details on this process. Regarding the changelog step, our changelog is
31 | stored in ``sites/www/changelog.rst``.
32 |
33 | .. _contribution-guide.org: http://contribution-guide.org
34 | .. _ticket tracker: https://github.com/fabric/fabric/issues
35 |
36 | While we may not always reply promptly, we do try to make time eventually to
37 | inspect all contributions and either incorporate them or explain why we don't
38 | feel the change is a good fit.
39 |
40 |
41 | Support of older releases
42 | =========================
43 |
44 | Major and minor releases do not mark the end of the previous line or lines of
45 | development:
46 |
47 | * The two most recent minor release branches will continue to receive critical
48 | bugfixes. For example, if 1.1 were the latest minor release, it and 1.0 would
49 | get bugfixes, but not 0.9 or earlier; and once 1.2 came out, this window
50 | would then only extend back to 1.1.
51 | * Depending on the nature of bugs found and the difficulty in backporting them,
52 | older release lines may also continue to get bugfixes -- but there's no
53 | longer a guarantee of any kind. Thus, if a bug were found in 1.1 that
54 | affected 0.9 and could be easily applied, a new 0.9.x version *might* be
55 | released.
56 | * This policy may change in the future to accommodate more branches, depending
57 | on development speed.
58 |
59 | We hope that this policy will allow us to have a rapid minor release cycle (and
60 | thus keep new features coming out frequently) without causing users to feel too
61 | much pressure to upgrade right away. At the same time, the backwards
62 | compatibility guarantee means that users should still feel comfortable
63 | upgrading to the next minor release in order to stay within this sliding
64 | support window.
65 |
--------------------------------------------------------------------------------
/fabric/task_utils.py:
--------------------------------------------------------------------------------
1 | from fabric.utils import abort, indent
2 | from fabric import state
3 |
4 |
# For attribute tomfoolery
class _Dict(dict):
    """
    dict subclass whose instances can carry arbitrary attributes (plain
    ``dict`` instances cannot), e.g. the ``default`` attribute that
    ``crawl`` inspects for task modules.
    """
    pass
8 |
9 |
10 | def _crawl(name, mapping):
11 | """
12 | ``name`` of ``'a.b.c'`` => ``mapping['a']['b']['c']``
13 | """
14 | key, _, rest = name.partition('.')
15 | value = mapping[key]
16 | if not rest:
17 | return value
18 | return _crawl(rest, value)
19 |
20 |
def crawl(name, mapping):
    """
    Resolve dotted ``name`` within ``mapping``; return None on any miss.

    Task-module containers (``_Dict``) resolve to their ``default``
    attribute when one is present and truthy; modules lacking a default
    are treated as bad lookup targets (None).
    """
    try:
        found = _crawl(name, mapping)
    except (KeyError, TypeError):
        return None
    if isinstance(found, _Dict):
        default = getattr(found, 'default', False)
        found = default if default else None
    return found
34 |
35 |
def merge(hosts, roles, exclude, roledefs):
    """
    Merge given host and role lists into one list of deduped hosts.

    :param hosts: iterable of host strings, or a single host string.
    :param roles: iterable of role names; each must exist in ``roledefs``.
    :param exclude: iterable of host strings to always omit from the result.
    :param roledefs: mapping of role name to a host list, to a dict with a
        ``'hosts'`` key, or to a callable returning a host list.
    :returns: combined host list, order-preserving; deduped unless
        ``env.dedupe_hosts`` is false.
    """
    # Abort if any roles don't exist
    bad_roles = [x for x in roles if x not in roledefs]
    if bad_roles:
        abort("The following specified roles do not exist:\n%s" % (
            indent(bad_roles)
        ))

    # Coerce strings to one-item lists
    if isinstance(hosts, basestring):
        hosts = [hosts]

    # Look up roles, turn into flat list of hosts
    role_hosts = []
    for role in roles:
        value = roledefs[role]
        # Handle dict style roledefs
        if isinstance(value, dict):
            value = value['hosts']
        # Handle "lazy" roles (callables)
        if callable(value):
            value = value()
        role_hosts += value

    # Strip whitespace from host strings.
    cleaned_hosts = [x.strip() for x in list(hosts) + list(role_hosts)]
    # Dedupe the combined host list, preserving order within it (vs using
    # set(), which may lose ordering) -- unless the user has disabled
    # deduping via env.dedupe_hosts.
    if state.env.dedupe_hosts:
        all_hosts = []
        for host in cleaned_hosts:
            if host not in all_hosts and host not in exclude:
                all_hosts.append(host)
    else:
        # Bugfix: exclusions must still apply when deduping is disabled;
        # previously ``exclude`` was silently ignored on this path.
        all_hosts = [host for host in cleaned_hosts if host not in exclude]
    return all_hosts
77 |
78 |
def parse_kwargs(kwargs):
    """
    Pop Fabric's host/role routing arguments out of task ``kwargs``.

    :param kwargs: keyword arguments given to a task invocation.
    :returns: 4-tuple ``(new_kwargs, hosts, roles, exclude_hosts)`` where
        ``new_kwargs`` holds everything not consumed here. ``host``/``role``
        (singular) are coerced to one-item lists.
    """
    new_kwargs = {}
    hosts = []
    roles = []
    exclude_hosts = []
    # .items() instead of the Python 2-only .iteritems(): identical
    # semantics on Python 2, and keeps the module importable on Python 3.
    for key, value in kwargs.items():
        if key == 'host':
            hosts = [value]
        elif key == 'hosts':
            hosts = value
        elif key == 'role':
            roles = [value]
        elif key == 'roles':
            roles = value
        elif key == 'exclude_hosts':
            exclude_hosts = value
        else:
            new_kwargs[key] = value
    return new_kwargs, hosts, roles, exclude_hosts
98 |
--------------------------------------------------------------------------------
/sites/www/roadmap.rst:
--------------------------------------------------------------------------------
1 | ===================
2 | Development roadmap
3 | ===================
4 |
5 | This document outlines Fabric's intended development path. Please make sure
6 | you're reading `the latest version `_ of this
7 | document!
8 |
9 | .. warning::
10 | This information is subject to change without warning, and should not be
11 | used as a basis for any life- or career-altering decisions!
12 |
13 | Fabric 1.x
14 | ==========
15 |
16 | Fabric 1.x, while not quite yet end-of-life'd, has reached a tipping point
17 | regarding internal tech debt & ability to make improvements without harming
18 | backwards compatibility.
19 |
20 | As such, future 1.x releases (**1.6** onwards) will emphasize small-to-medium
21 | features (new features not requiring major overhauls of the internals) and
22 | bugfixes.
23 |
24 | Invoke, Fabric 2.x and Patchwork
25 | ================================
26 |
27 | While 1.x moves on as above, we are working on a reimagined 2.x version of the
28 | tool, and plan to:
29 |
30 | * Finish and release `the Invoke tool/library
31 | `_ (see also :issue:`565` and `this
32 | Invoke FAQ
33 | `_),
34 | which is a revamped and standalone version of Fabric's task running
35 | components.
36 |
37 | * As of early 2015, Invoke is already reasonably mature and has a handful of
38 | features lacking in Fabric itself, including but not limited to:
39 |
40 | * a more explicit and powerful namespacing implementation
41 | * "regular" style CLI flags, including powerful tab completion
42 | * before/after hooks
43 | * explicit context management (no shared state)
44 | * significantly more powerful configuration mechanisms
45 |
46 | * Invoke is already Python 3 compatible, due to being a new codebase with
47 | few dependencies.
48 | * As Fabric 2 is developed, Invoke will approach a 1.0 release, and will
49 | continue to grow & change to suit Fabric's needs while remaining a high
50 | quality standalone task runner.
51 |
52 | * Release Fabric 2.0, a mostly-rewritten Fabric core:
53 |
54 | * Leverage Invoke for task running, leaving Fabric itself much more library
55 | oriented.
56 | * Implement object-oriented hosts/host lists and all the fun stuff that
57 | provides (no more hacky host string and unintuitive env var
58 | manipulation.)
59 | * No more shared state by default (thanks to Invoke's context design.)
60 | * Any other core overhauls difficult to do in a backwards compatible
61 | fashion.
62 | * Test-driven development (Invoke does this as well.)
63 |
64 | * Spin off ``fabric.contrib.*`` into a standalone "super-Fabric" (as in, "above
65 | Fabric") library, `Patchwork `_.
66 |
67 | * This lets core "execute commands on hosts" functionality iterate
68 | separately from "commonly useful shortcuts using Fabric core".
69 | * Lots of preliminary work & prior-art scanning has been done in
70 | :issue:`461`.
71 | * A public-but-alpha codebase for Patchwork exists as we think about the
72 | API, and is currently based on Fabric 1.x. It will likely be Fabric 2.x
73 | based by the time it is stable.
74 |
--------------------------------------------------------------------------------
/tests/test_server.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests for the test server itself.
3 |
4 | Not intended to be run by the greater test suite, only by specifically
5 | targeting it on the command-line. Rationale: not really testing Fabric itself,
6 | no need to pollute Fab's own test suite. (Yes, if these tests fail, it's likely
7 | that the Fabric tests using the test server may also have issues, but still.)
8 | """
9 | __test__ = False
10 |
11 | from nose.tools import eq_, ok_
12 |
13 | from fabric.network import ssh
14 |
15 | from server import FakeSFTPServer
16 |
17 |
class AttrHolder(object):
    """Bare object used to stub out server instances via attribute assignment."""
    pass
20 |
21 |
def test_list_folder():
    # (description, fake filesystem map, list_folder() argument, expected names)
    cases = (
        (
            "Single file",
            {'file.txt': 'contents'},
            '',
            ['file.txt']
        ),
        (
            "Single absolute file",
            {'/file.txt': 'contents'},
            '/',
            ['file.txt']
        ),
        (
            "Multiple files",
            {'file1.txt': 'contents', 'file2.txt': 'contents2'},
            '',
            ['file1.txt', 'file2.txt']
        ),
        (
            "Single empty folder",
            {'folder': None},
            '',
            ['folder']
        ),
        (
            "Empty subfolders",
            {'folder': None, 'folder/subfolder': None},
            '',
            ['folder']
        ),
        (
            "Non-empty sub-subfolder",
            {'folder/subfolder/subfolder2/file.txt': 'contents'},
            "folder/subfolder/subfolder2",
            ['file.txt']
        ),
        (
            "Mixed files, folders empty and non-empty, in homedir",
            {
                'file.txt': 'contents',
                'file2.txt': 'contents2',
                'folder/file3.txt': 'contents3',
                'empty_folder': None
            },
            '',
            ['file.txt', 'file2.txt', 'folder', 'empty_folder']
        ),
        (
            "Mixed files, folders empty and non-empty, in subdir",
            {
                'file.txt': 'contents',
                'file2.txt': 'contents2',
                'folder/file3.txt': 'contents3',
                'folder/subfolder/file4.txt': 'contents4',
                'empty_folder': None
            },
            "folder",
            ['file3.txt', 'subfolder']
        ),
    )
    for desc, file_map, arg, expected in cases:
        # FakeSFTPServer implements the 'ssh' server interface, so hand it a
        # minimal attribute-bag stand-in rather than a real server object.
        fake_server = AttrHolder()
        fake_server.files = file_map
        listing = FakeSFTPServer(fake_server).list_folder(arg)
        # Every case above expects an actual file list, never "no such file".
        ok_(listing != ssh.SFTP_NO_SUCH_FILE)
        # Pull filenames out of the returned SFTPAttribute objects.
        names = [attrs.filename for attrs in listing]
        # Yield a per-case test, labeled for nose's generator-test output.
        eq_.description = "list_folder: %s" % desc
        yield eq_, set(expected), set(names)
        del eq_.description
99 |
--------------------------------------------------------------------------------
/fabric/version.py:
--------------------------------------------------------------------------------
1 | """
2 | Current Fabric version constant plus version pretty-print method.
3 |
4 | This functionality is contained in its own module to prevent circular import
5 | problems with ``__init__.py`` (which is loaded by setup.py during installation,
6 | which in turn needs access to this version information.)
7 | """
8 | from subprocess import Popen, PIPE
9 | from os.path import abspath, dirname
10 |
11 |
12 | VERSION = (1, 10, 2, 'final', 0)
13 |
14 |
def git_sha():
    """
    Return the abbreviated SHA of this checkout's most recent Git commit.

    Output is whatever ``git log -1 --format=format:%h`` prints (empty if
    the package directory is not inside a Git checkout). Returns ``None``
    when the subprocess cannot be started at all, e.g. no ``git`` binary on
    PATH or platforms lacking normal subprocess support.
    """
    loc = abspath(dirname(__file__))
    try:
        # Argument-list form with cwd= instead of a shell string: no shell
        # quoting issues if the install path contains spaces/metacharacters.
        p = Popen(
            ["git", "log", "-1", "--format=format:%h"],
            cwd=loc,
            stdout=PIPE,
            stderr=PIPE
        )
        return p.communicate()[0]
    # OSError: missing git binary, or platforms where subprocess spawning
    # fails outright (previously: missing /bin/sh, e.g. Android).
    except OSError:
        return None
29 |
30 |
def get_version(form='short'):
    """
    Render `VERSION` as a string in one of several formats.

    :param form: which rendering to return:

        * ``branch``: major + minor only, e.g. "0.9", "1.0".
        * ``short`` (default): compact, e.g. "0.9rc1", "0.9.0" -- suited to
          package filenames or SCM tag identifiers.
        * ``normal``: human readable, e.g. "0.9", "0.9.1", "0.9 beta 1".
        * ``verbose``: like ``normal`` but fully explicit, e.g. "0.9 final",
          removing ambiguity between a branch and its first final release.
        * ``all``: every one of the above, as a dict keyed by form name.

    :raises TypeError: on an unrecognized ``form`` value.
    """
    major, minor, tertiary, type_, type_num = VERSION
    branch = "%s.%s" % (major, minor)
    final = (type_ == "final")
    # Initial letter of each word in the release type, e.g.
    # "alpha" -> "a", "release candidate" -> "rc".
    firsts = "".join(word[0] for word in type_.split())

    versions = {'branch': branch}

    # Compact form: always carries the tertiary digit for final releases.
    short = branch
    if tertiary or final:
        short += "." + str(tertiary)
    if not final:
        short += firsts
        if type_num:
            short += str(type_num)
    versions['short'] = short

    # Human-readable form: spells the release type out in full.
    normal = branch
    if tertiary:
        normal += "." + str(tertiary)
    if not final:
        if type_num:
            normal += " " + type_ + " " + str(type_num)
        else:
            normal += " pre-" + type_
    versions['normal'] = normal

    # Verbose form only differs from 'normal' for final releases.
    versions['verbose'] = normal + " final" if final else normal

    if form == 'all':
        return versions
    try:
        return versions[form]
    except KeyError:
        raise TypeError('"%s" is not a valid form specifier.' % form)
100 |
# Canonical short version string, e.g. "1.10.2" (importable as
# fabric.version.__version__).
__version__ = get_version('short')

if __name__ == "__main__":
    # Run directly (e.g. by release tooling): dump every version form.
    print(get_version('all'))
105 |
--------------------------------------------------------------------------------
/sites/docs/usage/ssh.rst:
--------------------------------------------------------------------------------
1 | ============
2 | SSH behavior
3 | ============
4 |
5 | Fabric currently makes use of a pure-Python SSH re-implementation for managing
6 | connections, meaning that there are occasionally spots where it is limited by
7 | that library's capabilities. Below are areas of note where Fabric will exhibit
8 | behavior that isn't consistent with, or as flexible as, the behavior of the
9 | ``ssh`` command-line program.
10 |
11 |
12 | Unknown hosts
13 | =============
14 |
15 | SSH's host key tracking mechanism keeps tabs on all the hosts you attempt to
16 | connect to, and maintains a ``~/.ssh/known_hosts`` file with mappings between
17 | identifiers (IP address, sometimes with a hostname as well) and SSH keys. (For
18 | details on how this works, please see the `OpenSSH documentation
19 | `_.)
20 |
21 | The ``paramiko`` library is capable of loading up your ``known_hosts`` file,
22 | and will then compare any host it connects to, with that mapping. Settings are
23 | available to determine what happens when an unknown host (a host whose hostname
24 | or IP is not found in ``known_hosts``) is seen:
25 |
26 | * **Reject**: the host key is rejected and the connection is not made. This
27 | results in a Python exception, which will terminate your Fabric session with a
28 | message that the host is unknown.
29 | * **Add**: the new host key is added to the in-memory list of known hosts, the
30 | connection is made, and things continue normally. Note that this does **not**
31 | modify your on-disk ``known_hosts`` file!
32 | * **Ask**: not yet implemented at the Fabric level, this is a ``paramiko``
33 | library option which would result in the user being prompted about the
34 | unknown key and whether to accept it.
35 |
36 | Whether to reject or add hosts, as above, is controlled in Fabric via the
37 | :ref:`env.reject_unknown_hosts ` option, which is False
38 | by default for convenience's sake. We feel this is a valid tradeoff between
39 | convenience and security; anyone who feels otherwise can easily modify their
40 | fabfiles at module level to set ``env.reject_unknown_hosts = True``.
41 |
42 |
43 | Known hosts with changed keys
44 | =============================
45 |
46 | The point of SSH's key/fingerprint tracking is so that man-in-the-middle
47 | attacks can be detected: if an attacker redirects your SSH traffic to a
48 | computer under his control, and pretends to be your original destination
49 | server, the host keys will not match. Thus, the default behavior of SSH (and
50 | its Python implementation) is to immediately abort the connection when a host
51 | previously recorded in ``known_hosts`` suddenly starts sending us a different
52 | host key.
53 |
54 | In some edge cases such as some EC2 deployments, you may want to ignore this
55 | potential problem. Our SSH layer, at the time of writing, doesn't give us
56 | control over this exact behavior, but we can sidestep it by simply skipping the
57 | loading of ``known_hosts`` -- if the host list being compared to is empty, then
58 | there's no problem. Set :ref:`env.disable_known_hosts ` to
59 | True when you want this behavior; it is False by default, in order to preserve
60 | default SSH behavior.
61 |
62 | .. warning::
63 | Enabling :ref:`env.disable_known_hosts ` will leave
64 | you wide open to man-in-the-middle attacks! Please use with caution.
65 |
--------------------------------------------------------------------------------
/fabric/contrib/django.py:
--------------------------------------------------------------------------------
1 | """
2 | .. versionadded:: 0.9.2
3 |
4 | These functions streamline the process of initializing Django's settings module
5 | environment variable. Once this is done, your fabfile may import from your
6 | Django project, or Django itself, without requiring the use of ``manage.py``
7 | plugins or having to set the environment variable yourself every time you use
8 | your fabfile.
9 |
10 | Currently, these functions only allow Fabric to interact with
11 | local-to-your-fabfile Django installations. This is not as limiting as it
12 | sounds; for example, you can use Fabric as a remote "build" tool as well as
13 | using it locally. Imagine the following fabfile::
14 |
15 | from fabric.api import run, local, hosts, cd
16 | from fabric.contrib import django
17 |
18 | django.project('myproject')
19 | from myproject.myapp.models import MyModel
20 |
21 | def print_instances():
22 | for instance in MyModel.objects.all():
23 | print(instance)
24 |
25 | @hosts('production-server')
26 | def print_production_instances():
27 | with cd('/path/to/myproject'):
28 | run('fab print_instances')
29 |
30 | With Fabric installed on both ends, you could execute
31 | ``print_production_instances`` locally, which would trigger ``print_instances``
32 | on the production server -- which would then be interacting with your
33 | production Django database.
34 |
35 | As another example, if your local and remote settings are similar, you can use
36 | it to obtain e.g. your database settings, and then use those when executing a
37 | remote (non-Fabric) command. This would allow you some degree of freedom even
38 | if Fabric is only installed locally::
39 |
40 | from fabric.api import run
41 | from fabric.contrib import django
42 |
43 | django.settings_module('myproject.settings')
44 | from django.conf import settings
45 |
46 | def dump_production_database():
47 | run('mysqldump -u %s -p=%s %s > /tmp/prod-db.sql' % (
48 | settings.DATABASE_USER,
49 | settings.DATABASE_PASSWORD,
50 | settings.DATABASE_NAME
51 | ))
52 |
53 | The above snippet will work if run from a local, development environment, again
54 | provided your local ``settings.py`` mirrors your remote one in terms of
55 | database connection info.
56 | """
57 |
58 | import os
59 |
60 |
def settings_module(module):
    """
    Set ``DJANGO_SETTINGS_MODULE`` shell environment variable to ``module``.

    Due to how Django works, imports from Django or a Django project will fail
    unless the shell environment variable ``DJANGO_SETTINGS_MODULE`` is
    correctly set (see `the Django settings docs
    <https://docs.djangoproject.com/en/dev/topics/settings/>`_.)

    This function provides a shortcut for doing so; call it near the top of
    your fabfile or Fabric-using code, after which point any Django imports
    should work correctly.

    .. note::

        This function sets a **shell** environment variable (via
        ``os.environ``) and is unrelated to Fabric's own internal "env"
        variables.
    """
    os.environ['DJANGO_SETTINGS_MODULE'] = module
81 |
82 |
def project(name):
    """
    Sets ``DJANGO_SETTINGS_MODULE`` to ``'<name>.settings'``.

    This function provides a handy shortcut for the common case where one is
    using the Django default naming convention for their settings file and
    location.

    Uses `settings_module` -- see its documentation for details on why and how
    to use this functionality.
    """
    settings_module('%s.settings' % name)
95 |
--------------------------------------------------------------------------------
/sites/docs/usage/fabfiles.rst:
--------------------------------------------------------------------------------
1 | ============================
2 | Fabfile construction and use
3 | ============================
4 |
5 | This document contains miscellaneous sections about fabfiles, both how to best
6 | write them, and how to use them once written.
7 |
8 | .. _fabfile-discovery:
9 |
10 | Fabfile discovery
11 | =================
12 |
13 | Fabric is capable of loading Python modules (e.g. ``fabfile.py``) or packages
14 | (e.g. a ``fabfile/`` directory containing an ``__init__.py``). By default, it
15 | looks for something named (to Python's import machinery) ``fabfile`` - so
16 | either ``fabfile/`` or ``fabfile.py``.
17 |
18 | The fabfile discovery algorithm searches in the invoking user's current working
19 | directory or any parent directories. Thus, it is oriented around "project" use,
20 | where one keeps e.g. a ``fabfile.py`` at the root of a source code tree. Such a
21 | fabfile will then be discovered no matter where in the tree the user invokes
22 | ``fab``.
23 |
24 | The specific name to be searched for may be overridden on the command-line with
25 | the :option:`-f` option, or by adding a :ref:`fabricrc ` line which
26 | sets the value of ``fabfile``. For example, if you wanted to name your fabfile
27 | ``fab_tasks.py``, you could create such a file and then call ``fab -f
28 | fab_tasks.py <task names>``, or add ``fabfile = fab_tasks.py`` to
29 | ``~/.fabricrc``.
30 |
31 | If the given fabfile name contains path elements other than a filename (e.g.
32 | ``../fabfile.py`` or ``/dir1/dir2/custom_fabfile``) it will be treated as a
33 | file path and directly checked for existence without any sort of searching.
34 | When in this mode, tilde-expansion will be applied, so one may refer to e.g.
35 | ``~/personal_fabfile.py``.
36 |
37 | .. note::
38 |
39 | Fabric does a normal ``import`` (actually an ``__import__``) of your
40 | fabfile in order to access its contents -- it does not do any ``eval``-ing
41 | or similar. In order for this to work, Fabric temporarily adds the found
42 | fabfile's containing folder to the Python load path (and removes it
43 | immediately afterwards.)
44 |
45 | .. versionchanged:: 0.9.2
46 | The ability to load package fabfiles.
47 |
48 |
49 | .. _importing-the-api:
50 |
51 | Importing Fabric
52 | ================
53 |
54 | Because Fabric is just Python, you *can* import its components any way you
55 | want. However, for the purposes of encapsulation and convenience (and to make
56 | life easier for Fabric's packaging script) Fabric's public API is maintained in
57 | the ``fabric.api`` module.
58 |
59 | All of Fabric's :doc:`../api/core/operations`,
60 | :doc:`../api/core/context_managers`, :doc:`../api/core/decorators` and
61 | :doc:`../api/core/utils` are included in this module as a single, flat
62 | namespace. This enables a very simple and consistent interface to Fabric within
63 | your fabfiles::
64 |
65 | from fabric.api import *
66 |
67 | # call run(), sudo(), etc etc
68 |
69 | This is not technically best practices (for `a
70 | number of reasons`_) and if you're only using a couple of
71 | Fab API calls, it *is* probably a good idea to explicitly ``from fabric.api
72 | import env, run`` or similar. However, in most nontrivial fabfiles, you'll be
73 | using all or most of the API, and the star import::
74 |
75 | from fabric.api import *
76 |
77 | will be a lot easier to write and read than::
78 |
79 | from fabric.api import abort, cd, env, get, hide, hosts, local, prompt, \
80 | put, require, roles, run, runs_once, settings, show, sudo, warn
81 |
82 | so in this case we feel pragmatism overrides best practices.
83 |
84 | .. _a number of reasons: http://python.net/~goodger/projects/pycon/2007/idiomatic/handout.html#importing
85 |
86 |
87 | Defining tasks and importing callables
88 | ======================================
89 |
90 | For important information on what exactly Fabric will consider as a task when
91 | it loads your fabfile, as well as notes on how best to import other code,
92 | please see :doc:`/usage/tasks` in the :doc:`execution` documentation.
93 |
--------------------------------------------------------------------------------
/integration/test_contrib.py:
--------------------------------------------------------------------------------
1 | import os
2 | import types
3 | import re
4 | import sys
5 |
6 | from fabric.api import run, local
7 | from fabric.contrib import files, project
8 |
9 | from utils import Integration
10 |
11 |
def tildify(path):
    # Ask the remote shell what ~ expands to, then substitute it in.
    remote_home = run("echo ~", quiet=True).stdout.strip()
    return path.replace('~', remote_home)
15 |
def expect(path):
    # Assert the tilde-expanded path exists on the remote host.
    expanded = tildify(path)
    assert files.exists(expanded)
18 |
def expect_contains(path, value):
    # Assert the tilde-expanded remote file contains ``value``.
    expanded = tildify(path)
    assert files.contains(expanded, value)
21 |
def escape(path):
    # Backslash-escape spaces so the path survives shell word splitting.
    return r'\ '.join(path.split(' '))
24 |
25 |
class FileCleaner(Integration):
    """Integration base which removes files registered during a test."""
    def setup(self):
        # Tests append created paths here; teardown() deletes them all.
        self.local = []
        self.remote = []

    def teardown(self):
        super(FileCleaner, self).teardown()
        for path in self.local:
            os.unlink(path)
        for path in self.remote:
            run("rm %s" % escape(path))
37 |
38 |
class TestTildeExpansion(FileCleaner):
    """Ensure contrib.files helpers expand ``~`` the way the remote shell does."""
    def test_append(self):
        for remote_path in ('~/append_test', '~/append_test with spaces'):
            self.remote.append(remote_path)
            files.append(remote_path, ['line'])
            expect(remote_path)

    def test_exists(self):
        for remote_path in ('~/exists_test', '~/exists test with space'):
            self.remote.append(remote_path)
            run("touch %s" % escape(remote_path))
            expect(remote_path)

    def test_sed(self):
        for remote_path in ('~/sed_test', '~/sed test with space'):
            self.remote.append(remote_path)
            run("echo 'before' > %s" % escape(remote_path))
            files.sed(remote_path, 'before', 'after')
            expect_contains(remote_path, 'after')

    def test_upload_template(self):
        targets = (
            '~/upload_template_test',
            '~/upload template test with space'
        )
        for index, remote_path in enumerate(targets):
            local_path = "source%s" % index
            local("touch %s" % local_path)
            self.local.append(local_path)
            self.remote.append(remote_path)
            files.upload_template(local_path, remote_path)
            expect(remote_path)
70 |
71 |
class TestIsLink(FileCleaner):
    # TODO: add more of these. meh.
    def test_is_link_is_true_on_symlink(self):
        real, link = '/tmp/foo', '/tmp/bar'
        self.remote.extend([real, link])
        run("touch %s" % real)
        run("ln -s %s %s" % (real, link))
        assert files.is_link(link)

    def test_is_link_is_false_on_non_link(self):
        plain = '/tmp/biz'
        self.remote.append(plain)
        run("touch %s" % plain)
        assert not files.is_link(plain)
84 |
85 |
# Paths expected to show up in rsync's verbose (-v) output when the
# integration/ directory is uploaded; used by TestRsync below.
rsync_sources = (
    'integration/',
    'integration/test_contrib.py',
    'integration/test_operations.py',
    'integration/utils.py'
)
92 |
class TestRsync(Integration):
    # NOTE: requires a local rsync binary and a reachable test host; syncs
    # this repo's integration/ directory into /tmp on the remote end.
    def rsync(self, id_, **kwargs):
        # Helper: sync to a unique /tmp target (wiped first if present) and
        # return the captured rsync output for assertions.
        remote = '/tmp/rsync-test-%s/' % id_
        if files.exists(remote):
            run("rm -rf %s" % remote)
        return project.rsync_project(
            remote_dir=remote,
            local_dir='integration',
            ssh_opts='-o StrictHostKeyChecking=no',
            capture=True,
            **kwargs
        )

    def test_existing_default_args(self):
        """
        Rsync uses -v by default
        """
        r = self.rsync(1)
        for x in rsync_sources:
            assert re.search(r'^%s$' % x, r.stdout, re.M), "'%s' was not found in '%s'" % (x, r.stdout)

    def test_overriding_default_args(self):
        """
        Use of default_opts kwarg can be used to nuke e.g. -v
        """
        r = self.rsync(2, default_opts='-pthrz')
        for x in rsync_sources:
            assert not re.search(r'^%s$' % x, r.stdout, re.M), "'%s' was found in '%s'" % (x, r.stdout)
121 |
122 |
class TestUploadTemplate(FileCleaner):
    def test_allows_pty_disable(self):
        # Just make sure it doesn't asplode. meh.
        src, dest = "source_file", "remote_file"
        local("touch %s" % src)
        self.local.append(src)
        self.remote.append(dest)
        files.upload_template(src, dest, pty=False)
        expect(dest)
133 |
--------------------------------------------------------------------------------
/tests/test_contrib.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from __future__ import with_statement
3 | from fabric.operations import local
4 | import os
5 |
6 | from fabric.api import hide, get, show
7 | from fabric.contrib.files import upload_template, contains
8 | from fabric.context_managers import lcd
9 |
10 | from utils import FabricTest, eq_contents
11 | from server import server
12 |
13 |
class TestContrib(FabricTest):
    # Make sure it knows / is a directory.
    # This is in lieu of starting down the "actual honest to god fake operating
    # system" road...:(
    @server(responses={'test -d "$(echo /)"': ""})
    def test_upload_template_uses_correct_remote_filename(self):
        """
        upload_template() shouldn't munge final remote filename
        """
        template = self.mkfile('template.txt', 'text')
        with hide('everything'):
            upload_template(template, '/')
        assert self.exists_remotely('/template.txt')

    @server()
    def test_upload_template_handles_file_destination(self):
        """
        upload_template() should work OK with file and directory destinations
        """
        template = self.mkfile('template.txt', '%(varname)s')
        local = self.path('result.txt')
        remote = '/configfile.txt'
        var = 'foobar'
        with hide('everything'):
            upload_template(template, remote, {'varname': var})
            get(remote, local)
        eq_contents(local, var)

    @server()
    def test_upload_template_handles_template_dir(self):
        """
        upload_template() should work OK with template dir
        """
        template = self.mkfile('template.txt', '%(varname)s')
        template_dir = os.path.dirname(template)
        local = self.path('result.txt')
        remote = '/configfile.txt'
        var = 'foobar'
        with hide('everything'):
            upload_template(
                'template.txt', remote, {'varname': var},
                template_dir=template_dir
            )
            get(remote, local)
        eq_contents(local, var)


    # Mocked grep response: nonzero exit status but non-empty stdout (the
    # sudo banner), to prove contains() keys off .succeeded only.
    @server(responses={
        'egrep "text" "/file.txt"': (
            "sudo: unable to resolve host fabric",
            "",
            1
        )}
    )
    def test_contains_checks_only_succeeded_flag(self):
        """
        contains() should return False on bad grep even if stdout isn't empty
        """
        with hide('everything'):
            result = contains('/file.txt', 'text', use_sudo=True)
        assert result == False

    @server()
    def test_upload_template_handles_jinja_template(self):
        """
        upload_template() should work OK with Jinja2 template
        """
        template = self.mkfile('template_jinja2.txt', '{{ first_name }}')
        template_name = os.path.basename(template)
        template_dir = os.path.dirname(template)
        local = self.path('result.txt')
        remote = '/configfile.txt'
        # Non-ASCII value exercises the UTF-8 round trip through Jinja2.
        first_name = u'S\u00E9bastien'
        with hide('everything'):
            upload_template(template_name, remote, {'first_name': first_name},
                use_jinja=True, template_dir=template_dir)
            get(remote, local)
        eq_contents(local, first_name.encode('utf-8'))

    @server()
    def test_upload_template_jinja_and_no_template_dir(self):
        # Crummy doesn't-die test
        fname = "foo.tpl"
        try:
            with hide('everything'):
                with open(fname, 'w+') as fd:
                    fd.write('whatever')
                upload_template(fname, '/configfile.txt', {}, use_jinja=True)
        finally:
            os.remove(fname)


    def test_upload_template_obeys_lcd(self):
        # Drive the real (decorated) helper below over the 2x2 matrix of
        # jinja/mirror flags.
        for jinja in (True, False):
            for mirror in (True, False):
                self._upload_template_obeys_lcd(jinja=jinja, mirror=mirror)

    @server()
    def _upload_template_obeys_lcd(self, jinja, mirror):
        # Template body differs per engine: Jinja2 vs %-interpolation syntax.
        template_content = {True: '{{ varname }}s', False: '%(varname)s'}

        template_dir = 'template_dir'
        template_name = 'template.txt'
        if not self.exists_locally(self.path(template_dir)):
            os.mkdir(self.path(template_dir))

        self.mkfile(
            os.path.join(template_dir, template_name), template_content[jinja]
        )

        remote = '/configfile.txt'
        var = 'foobar'
        with hide('everything'):
            with lcd(self.path(template_dir)):
                upload_template(
                    template_name, remote, {'varname': var},
                    mirror_local_mode=mirror
                )
132 |
--------------------------------------------------------------------------------
/tests/test_project.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import os
3 |
4 | import fudge
5 | from fudge.inspector import arg
6 |
7 | from fabric.contrib import project
8 |
9 |
class UploadProjectTestCase(unittest.TestCase):
    """Test case for :func: `fabric.contrib.project.upload_project`."""

    # Stand-in for the real mkdtemp() result so no temp dirs get created.
    fake_tmp = "testtempfolder"


    def setUp(self):
        fudge.clear_expectations()

        # We need to mock out run, local, and put

        self.fake_run = fudge.Fake('project.run', callable=True)
        self.patched_run = fudge.patch_object(
            project,
            'run',
            self.fake_run
        )

        self.fake_local = fudge.Fake('local', callable=True)
        self.patched_local = fudge.patch_object(
            project,
            'local',
            self.fake_local
        )

        self.fake_put = fudge.Fake('put', callable=True)
        self.patched_put = fudge.patch_object(
            project,
            'put',
            self.fake_put
        )

        # We don't want to create temp folders
        self.fake_mkdtemp = fudge.Fake(
            'mkdtemp',
            expect_call=True
        ).returns(self.fake_tmp)
        self.patched_mkdtemp = fudge.patch_object(
            project,
            'mkdtemp',
            self.fake_mkdtemp
        )


    def tearDown(self):
        self.patched_run.restore()
        self.patched_local.restore()
        self.patched_put.restore()
        # BUG FIX: mkdtemp was patched in setUp() but never restored, leaking
        # the fake into everything that ran after this test case.
        self.patched_mkdtemp.restore()

        fudge.clear_expectations()


    @fudge.with_fakes
    def test_temp_folder_is_used(self):
        """A unique temp folder is used for creating the archive to upload."""

        # Exercise
        project.upload_project()


    @fudge.with_fakes
    def test_project_is_archived_locally(self):
        """The project should be archived locally before being uploaded."""

        # local() is called more than once so we need an extra next_call()
        # otherwise fudge compares the args to the last call to local()
        self.fake_local.with_args(arg.startswith("tar -czf")).next_call()

        # Exercise
        project.upload_project()


    @fudge.with_fakes
    def test_current_directory_is_uploaded_by_default(self):
        """By default the project uploaded is the current working directory."""

        cwd_path, cwd_name = os.path.split(os.getcwd())

        # local() is called more than once so we need an extra next_call()
        # otherwise fudge compares the args to the last call to local()
        self.fake_local.with_args(
            arg.endswith("-C %s %s" % (cwd_path, cwd_name))
        ).next_call()

        # Exercise
        project.upload_project()


    @fudge.with_fakes
    def test_path_to_local_project_can_be_specified(self):
        """It should be possible to specify which local folder to upload."""

        project_path = "path/to/my/project"

        # local() is called more than once so we need an extra next_call()
        # otherwise fudge compares the args to the last call to local()
        self.fake_local.with_args(
            arg.endswith("-C %s %s" % os.path.split(project_path))
        ).next_call()

        # Exercise
        project.upload_project(local_dir=project_path)


    @fudge.with_fakes
    def test_path_to_local_project_can_end_in_separator(self):
        """A local path ending in a separator should be handled correctly."""

        project_path = "path/to/my"
        base = "project"

        # local() is called more than once so we need an extra next_call()
        # otherwise fudge compares the args to the last call to local()
        self.fake_local.with_args(
            arg.endswith("-C %s %s" % (project_path, base))
        ).next_call()

        # Exercise
        project.upload_project(local_dir="%s/%s/" % (project_path, base))


    @fudge.with_fakes
    def test_default_remote_folder_is_home(self):
        """Project is uploaded to remote home by default."""

        local_dir = "folder"

        # put() is called more than once so we need an extra next_call()
        # otherwise fudge compares the args to the last call to put()
        self.fake_put.with_args(
            "%s/folder.tar.gz" % self.fake_tmp, "folder.tar.gz", use_sudo=False
        ).next_call()

        # Exercise
        project.upload_project(local_dir=local_dir)

    @fudge.with_fakes
    def test_path_to_remote_folder_can_be_specified(self):
        """It should be possible to specify which local folder to upload to."""

        local_dir = "folder"
        remote_path = "path/to/remote/folder"

        # put() is called more than once so we need an extra next_call()
        # otherwise fudge compares the args to the last call to put()
        self.fake_put.with_args(
            "%s/folder.tar.gz" % self.fake_tmp, "%s/folder.tar.gz" % remote_path, use_sudo=False
        ).next_call()

        # Exercise
        project.upload_project(local_dir=local_dir, remote_dir=remote_path)
161 |
162 |
--------------------------------------------------------------------------------
/sites/docs/usage/parallel.rst:
--------------------------------------------------------------------------------
1 | ==================
2 | Parallel execution
3 | ==================
4 |
5 | .. _parallel-execution:
6 |
7 | .. versionadded:: 1.3
8 |
9 | By default, Fabric executes all specified tasks **serially** (see
10 | :ref:`execution-strategy` for details.) This document describes Fabric's
11 | options for running tasks on multiple hosts in **parallel**, via per-task
12 | decorators and/or global command-line switches.
13 |
14 |
15 | What it does
16 | ============
17 |
18 | Because Fabric 1.x is not fully threadsafe (and because in general use, task
19 | functions do not typically interact with one another) this functionality is
implemented via the Python `multiprocessing
<http://docs.python.org/library/multiprocessing.html>`_ module. It creates one
22 | new process for each host and task combination, optionally using a
23 | (configurable) sliding window to prevent too many processes from running at the
24 | same time.
25 |
26 | For example, imagine a scenario where you want to update Web application code
27 | on a number of Web servers, and then reload the servers once the code has been
28 | distributed everywhere (to allow for easier rollback if code updates fail.) One
29 | could implement this with the following fabfile::
30 |
31 | from fabric.api import *
32 |
33 | def update():
34 | with cd("/srv/django/myapp"):
35 | run("git pull")
36 |
37 | def reload():
38 | sudo("service apache2 reload")
39 |
40 | and execute it on a set of 3 servers, in serial, like so::
41 |
42 | $ fab -H web1,web2,web3 update reload
43 |
44 | Normally, without any parallel execution options activated, Fabric would run
45 | in order:
46 |
47 | #. ``update`` on ``web1``
48 | #. ``update`` on ``web2``
49 | #. ``update`` on ``web3``
50 | #. ``reload`` on ``web1``
51 | #. ``reload`` on ``web2``
52 | #. ``reload`` on ``web3``
53 |
54 | With parallel execution activated (via :option:`-P` -- see below for details),
55 | this turns into:
56 |
57 | #. ``update`` on ``web1``, ``web2``, and ``web3``
58 | #. ``reload`` on ``web1``, ``web2``, and ``web3``
59 |
60 | Hopefully the benefits of this are obvious -- if ``update`` took 5 seconds to
61 | run and ``reload`` took 2 seconds, serial execution takes (5+2)*3 = 21 seconds
62 | to run, while parallel execution takes only a third of the time, (5+2) = 7
63 | seconds on average.
64 |
65 |
66 | How to use it
67 | =============
68 |
69 | Decorators
70 | ----------
71 |
72 | Since the minimum "unit" that parallel execution affects is a task, the
73 | functionality may be enabled or disabled on a task-by-task basis using the
74 | `~fabric.decorators.parallel` and `~fabric.decorators.serial` decorators. For
75 | example, this fabfile::
76 |
77 | from fabric.api import *
78 |
79 | @parallel
80 | def runs_in_parallel():
81 | pass
82 |
83 | def runs_serially():
84 | pass
85 |
86 | when run in this manner::
87 |
88 | $ fab -H host1,host2,host3 runs_in_parallel runs_serially
89 |
90 | will result in the following execution sequence:
91 |
92 | #. ``runs_in_parallel`` on ``host1``, ``host2``, and ``host3``
93 | #. ``runs_serially`` on ``host1``
94 | #. ``runs_serially`` on ``host2``
95 | #. ``runs_serially`` on ``host3``
96 |
97 | Command-line flags
98 | ------------------
99 |
100 | One may also force all tasks to run in parallel by using the command-line flag
:option:`-P` or the env variable :ref:`env.parallel <env-parallel>`. However,
102 | any task specifically wrapped with `~fabric.decorators.serial` will ignore this
103 | setting and continue to run serially.
104 |
105 | For example, the following fabfile will result in the same execution sequence
106 | as the one above::
107 |
108 | from fabric.api import *
109 |
110 | def runs_in_parallel():
111 | pass
112 |
113 | @serial
114 | def runs_serially():
115 | pass
116 |
117 | when invoked like so::
118 |
119 | $ fab -H host1,host2,host3 -P runs_in_parallel runs_serially
120 |
121 | As before, ``runs_in_parallel`` will run in parallel, and ``runs_serially`` in
122 | sequence.
123 |
124 |
125 | Bubble size
126 | ===========
127 |
128 | With large host lists, a user's local machine can get overwhelmed by running
129 | too many concurrent Fabric processes. Because of this, you may opt to use a
130 | moving bubble approach that limits Fabric to a specific number of concurrently
131 | active processes.
132 |
133 | By default, no bubble is used and all hosts are run in one concurrent pool. You
134 | can override this on a per-task level by specifying the ``pool_size`` keyword
135 | argument to `~fabric.decorators.parallel`, or globally via :option:`-z`.
136 |
137 | For example, to run on 5 hosts at a time::
138 |
139 | from fabric.api import *
140 |
141 | @parallel(pool_size=5)
142 | def heavy_task():
143 | # lots of heavy local lifting or lots of IO here
144 |
145 | Or skip the ``pool_size`` kwarg and instead::
146 |
147 | $ fab -P -z 5 heavy_task
148 |
149 | .. _linewise-output:
150 |
151 | Linewise vs bytewise output
152 | ===========================
153 |
154 | Fabric's default mode of printing to the terminal is byte-by-byte, in order to
155 | support :doc:`/usage/interactivity`. This often gives poor results when running
156 | in parallel mode, as the multiple processes may write to your terminal's
157 | standard out stream simultaneously.
158 |
159 | To help offset this problem, Fabric's option for linewise output is
160 | automatically enabled whenever parallelism is active. This will cause you to
lose most of the benefits outlined in the above link to Fabric's remote
162 | interactivity features, but as those do not map well to parallel invocations,
163 | it's typically a fair trade.
164 |
165 | There's no way to avoid the multiple processes mixing up on a line-by-line
166 | basis, but you will at least be able to tell them apart by the host-string line
167 | prefix.
168 |
169 | .. note::
170 | Future versions will add improved logging support to make troubleshooting
171 | parallel runs easier.
172 |
--------------------------------------------------------------------------------
/tests/utils.py:
--------------------------------------------------------------------------------
1 | from __future__ import with_statement
2 |
3 | from contextlib import contextmanager
4 | from copy import deepcopy
5 | from fudge.patcher import with_patched_object
6 | from functools import partial
7 | from types import StringTypes
8 | import copy
9 | import getpass
10 | import os
11 | import re
12 | import shutil
13 | import sys
14 | import tempfile
15 |
16 | from fudge import Fake, patched_context, clear_expectations, with_patched_object
17 | from nose.tools import raises
18 | from nose import SkipTest
19 |
20 | from fabric.context_managers import settings
21 | from fabric.state import env, output
22 | from fabric.sftp import SFTP
23 | import fabric.network
24 | from fabric.network import normalize, to_dict
25 |
26 | from server import PORT, PASSWORDS, USER, HOST
27 | from mock_streams import mock_streams
28 |
29 |
class FabricTest(object):
    """
    Nose-oriented test runner which wipes state.env and provides file helpers.
    """
    def setup(self):
        # Reset any mock expectations left over from a previous test.
        clear_expectations()
        # Snapshot env/output so teardown() can restore them afterwards.
        self.previous_env = deepcopy(env)
        # AliasDicts don't deepcopy cleanly, but output is only one level
        # deep anyway, so an item list is enough to restore it.
        self.previous_output = output.items()
        # Subclass hook for setting env vars (so they get purged correctly
        # in teardown()).
        self.env_setup()
        # Scratch directory for local file fixtures.
        self.tmpdir = tempfile.mkdtemp()

    def set_network(self):
        env.update(to_dict('%s@%s:%s' % (USER, HOST, PORT)))

    def env_setup(self):
        # Point Fabric at the local test server; skip known_hosts checks.
        env.disable_known_hosts = True
        self.set_network()
        env.password = PASSWORDS[USER]
        # Command response mocking is easier without having to account for
        # shell wrapping everywhere.
        env.use_shell = False

    def teardown(self):
        # Drop any keys the test added, then restore the snapshots.
        env.clear()
        env.update(self.previous_env)
        output.update(self.previous_output)
        shutil.rmtree(self.tmpdir)
        # Reset mock expectations once more on the way out.
        clear_expectations()

    def path(self, *path_parts):
        return os.path.join(self.tmpdir, *path_parts)

    def mkfile(self, path, contents):
        destination = self.path(path)
        with open(destination, 'w') as fd:
            fd.write(contents)
        return destination

    def exists_remotely(self, path):
        return SFTP(env.host_string).exists(path)

    def exists_locally(self, path):
        return os.path.exists(path)
82 |
83 |
def password_response(password, times_called=None, silent=True):
    """
    Context manager which patches ``getpass.getpass`` to return ``password``.

    ``password`` may be a single string or an iterable of strings:

    * If single string, given password is returned every time ``getpass`` is
      called.
    * If iterable, iterated over for each call to ``getpass``, after which
      ``getpass`` will error.

    If ``times_called`` is given, it is used to add a ``Fake.times_called``
    clause to the mock object, e.g. ``.times_called(1)``. Specifying
    ``times_called`` alongside an iterable ``password`` list is unsupported
    (see Fudge docs on ``Fake.next_call``).

    If ``silent`` is True, no prompt will be printed to ``sys.stderr``.
    """
    # Normalize to a mutable list so we can pop responses off the front.
    if isinstance(password, StringTypes):
        queue = [password]
    else:
        queue = list(password)

    def echo(prompt, stream):
        # Mimic getpass's prompt echo; also emit a newline unless the prompt
        # is a "passthrough" space from the server (which sends its own).
        stream.write(prompt + ("\n" if prompt != " " else ""))

    # First (possibly only) password is returned straight away.
    fake = Fake('getpass', callable=True).returns(queue.pop(0))
    if not silent:
        fake = fake.calls(echo)
    # Any remaining passwords are queued up as subsequent calls.
    for next_password in queue:
        fake = fake.next_call().returns(next_password)
        if not silent:
            fake = fake.calls(echo)
    # Passthrough times_called
    if times_called:
        fake = fake.times_called(times_called)
    return patched_context(getpass, 'getpass', fake)
125 |
126 |
127 | def _assert_contains(needle, haystack, invert):
128 | matched = re.search(needle, haystack, re.M)
129 | if (invert and matched) or (not invert and not matched):
130 | raise AssertionError("r'%s' %sfound in '%s'" % (
131 | needle,
132 | "" if invert else "not ",
133 | haystack
134 | ))
135 |
136 | assert_contains = partial(_assert_contains, invert=False)
137 | assert_not_contains = partial(_assert_contains, invert=True)
138 |
139 |
def line_prefix(prefix, string):
    """
    Return ``string`` with all lines prefixed by ``prefix``.
    """
    prefixed = [prefix + line for line in string.splitlines()]
    return "\n".join(prefixed)
145 |
146 |
def eq_(result, expected, msg=None):
    """
    Shadow of the Nose builtin which presents easier to read multiline output.
    """
    values = {'expected': expected, 'result': result}
    aka = """

--------------------------------- aka -----------------------------------------

Expected:
%(expected)r

Got:
%(result)r
""" % values
    default_msg = """
Expected:
%(expected)s

Got:
%(result)s
""" % values
    # Tack on the repr() rendering whenever it differs from str(), since the
    # plain version may mask differences (e.g. trailing whitespace).
    reprs_differ = (
        repr(result) != str(result) or repr(expected) != str(expected)
    )
    if reprs_differ:
        default_msg += aka
    assert result == expected, msg or default_msg
172 |
173 |
def eq_contents(path, text):
    """Assert that the file at ``path`` holds exactly ``text``."""
    with open(path) as fd:
        actual = fd.read()
    eq_(text, actual)
177 |
178 |
def support(path):
    """Return the absolute path of ``path`` inside the tests' support/ dir."""
    here = os.path.dirname(__file__)
    return os.path.join(here, 'support', path)

# Fabfile fixtures live in the same support directory; alias for readability.
fabfile = support
183 |
184 |
@contextmanager
def path_prefix(module):
    """
    Temporarily prepend ``module``'s directory to ``sys.path``.

    The entry is removed when the block exits, even on exceptions (the
    previous version leaked the sys.path entry if the body raised).
    """
    i = 0
    sys.path.insert(i, os.path.dirname(module))
    try:
        yield
    finally:
        # Always pop so a raising block can't pollute sys.path for later tests.
        sys.path.pop(i)
191 |
192 |
def aborts(func):
    """
    Decorator: run ``func`` with stderr mocked out and expect a SystemExit.
    """
    stderr_mocked = mock_streams('stderr')(func)
    return raises(SystemExit)(stderr_mocked)
195 |
196 |
def _patched_input(func, fake):
    # Swap the builtin ``raw_input`` for ``fake`` via the given fudge patcher.
    builtin_module = sys.modules['__builtin__']
    return func(builtin_module, 'raw_input', fake)
# Context-manager and decorator flavors of the same patch.
patched_input = partial(_patched_input, patched_context)
with_patched_input = partial(_patched_input, with_patched_object)
201 |
--------------------------------------------------------------------------------
/sites/docs/usage/output_controls.rst:
--------------------------------------------------------------------------------
1 | ===============
2 | Managing output
3 | ===============
4 |
5 | The ``fab`` tool is very verbose by default and prints out almost everything it
6 | can, including the remote end's stderr and stdout streams, the command strings
7 | being executed, and so forth. While this is necessary in many cases in order to
8 | know just what's going on, any nontrivial Fabric task will quickly become
9 | difficult to follow as it runs.
10 |
11 |
12 | Output levels
13 | =============
14 |
15 | To aid in organizing task output, Fabric output is grouped into a number of
16 | non-overlapping levels or groups, each of which may be turned on or off
17 | independently. This provides flexible control over what is displayed to the
18 | user.
19 |
20 | .. note::
21 |
22 | All levels, save for ``debug`` and ``exceptions``, are on by default.
23 |
24 | Standard output levels
25 | ----------------------
26 |
27 | The standard, atomic output levels/groups are as follows:
28 |
29 | * **status**: Status messages, i.e. noting when Fabric is done running, if
30 | the user used a keyboard interrupt, or when servers are disconnected from.
31 | These messages are almost always relevant and rarely verbose.
32 |
33 | * **aborts**: Abort messages. Like status messages, these should really only be
34 | turned off when using Fabric as a library, and possibly not even then. Note
35 | that even if this output group is turned off, aborts will still occur --
36 | there just won't be any output about why Fabric aborted!
37 |
38 | * **warnings**: Warning messages. These are often turned off when one expects a
39 | given operation to fail, such as when using ``grep`` to test existence of
40 | text in a file. If paired with setting ``env.warn_only`` to True, this
41 | can result in fully silent warnings when remote programs fail. As with
42 | ``aborts``, this setting does not control actual warning behavior, only
43 | whether warning messages are printed or hidden.
44 |
45 | * **running**: Printouts of commands being executed or files transferred, e.g.
46 | ``[myserver] run: ls /var/www``. Also controls printing of tasks being run,
47 | e.g. ``[myserver] Executing task 'foo'``.
48 |
49 | * **stdout**: Local, or remote, stdout, i.e. non-error output from commands.
50 |
51 | * **stderr**: Local, or remote, stderr, i.e. error-related output from commands.
52 |
53 | * **user**: User-generated output, i.e. local output printed by fabfile code
54 | via use of the `~fabric.utils.fastprint` or `~fabric.utils.puts` functions.
55 |
56 | .. versionchanged:: 0.9.2
57 | Added "Executing task" lines to the ``running`` output level.
58 |
59 | .. versionchanged:: 0.9.2
60 | Added the ``user`` output level.
61 |
62 | Debug output
63 | ------------
64 |
65 | There are two more atomic output levels for use when troubleshooting:
66 | ``debug``, which behaves slightly differently from the rest, and
67 | ``exceptions``, whose behavior is included in ``debug`` but may be enabled
68 | separately.
69 |
70 | * **debug**: Turn on debugging (which is off by default.) Currently, this is
71 | largely used to view the "full" commands being run; take for example this
72 | `~fabric.operations.run` call::
73 |
74 | run('ls "/home/username/Folder Name With Spaces/"')
75 |
76 | Normally, the ``running`` line will show exactly what is passed into
77 | `~fabric.operations.run`, like so::
78 |
79 | [hostname] run: ls "/home/username/Folder Name With Spaces/"
80 |
81 | With ``debug`` on, and assuming you've left :ref:`shell` set to ``True``, you
82 | will see the literal, full string as passed to the remote server::
83 |
84 | [hostname] run: /bin/bash -l -c "ls \"/home/username/Folder Name With Spaces\""
85 |
86 | Enabling ``debug`` output will also display full Python tracebacks during
87 | aborts (as if ``exceptions`` output was enabled).
88 |
89 | .. note::
90 |
91 | Where modifying other pieces of output (such as in the above example
92 | where it modifies the 'running' line to show the shell and any escape
93 | characters), this setting takes precedence over the others; so if
94 | ``running`` is False but ``debug`` is True, you will still be shown the
95 | 'running' line in its debugging form.
96 |
97 | * **exceptions**: Enables display of tracebacks when exceptions occur; intended
98 | for use when ``debug`` is set to ``False`` but one is still interested in
99 | detailed error info.
100 |
101 | .. versionchanged:: 1.0
102 | Debug output now includes full Python tracebacks during aborts.
103 |
104 | .. versionchanged:: 1.11
105 | Added the ``exceptions`` output level.
106 |
107 | .. _output-aliases:
108 |
109 | Output level aliases
110 | --------------------
111 |
112 | In addition to the atomic/standalone levels above, Fabric also provides a
113 | couple of convenience aliases which map to multiple other levels. These may be
114 | referenced anywhere the other levels are referenced, and will effectively
115 | toggle all of the levels they are mapped to.
116 |
117 | * **output**: Maps to both ``stdout`` and ``stderr``. Useful for when you only
118 | care to see the 'running' lines and your own print statements (and warnings).
119 |
120 | * **everything**: Includes ``warnings``, ``running``, ``user`` and ``output``
121 | (see above.) Thus, when turning off ``everything``, you will only see a bare
122 | minimum of output (just ``status`` and ``debug`` if it's on), along with your
123 | own print statements.
124 |
125 | * **commands**: Includes ``stdout`` and ``running``. Good for hiding
126 | non-erroring commands entirely, while still displaying any stderr output.
127 |
128 | .. versionchanged:: 1.4
129 | Added the ``commands`` output alias.
130 |
131 |
132 | Hiding and/or showing output levels
133 | ===================================
134 |
135 | You may toggle any of Fabric's output levels in a number of ways; for examples,
136 | please see the API docs linked in each bullet point:
137 |
138 | * **Direct modification of fabric.state.output**: `fabric.state.output` is a
dictionary subclass (similar to :doc:`env <env>`) whose keys are the output
140 | level names, and whose values are either True (show that particular type of
141 | output) or False (hide it.)
142 |
143 | `fabric.state.output` is the lowest-level implementation of output levels and
144 | is what Fabric's internals reference when deciding whether or not to print
145 | their output.
146 |
147 | * **Context managers**: `~fabric.context_managers.hide` and
148 | `~fabric.context_managers.show` are twin context managers that take one or
149 | more output level names as strings, and either hide or show them within the
150 | wrapped block. As with Fabric's other context managers, the prior values are
151 | restored when the block exits.
152 |
153 | .. seealso::
154 |
155 | `~fabric.context_managers.settings`, which can nest calls to
156 | `~fabric.context_managers.hide` and/or `~fabric.context_managers.show`
157 | inside itself.
158 |
159 | * **Command-line arguments**: You may use the :option:`--hide` and/or
160 | :option:`--show` arguments to :doc:`fab`, which behave exactly like the
161 | context managers of the same names (but are, naturally, globally applied) and
162 | take comma-separated strings as input.
163 |
--------------------------------------------------------------------------------
/sites/docs/usage/interactivity.rst:
--------------------------------------------------------------------------------
1 | ================================
2 | Interaction with remote programs
3 | ================================
4 |
5 | Fabric's primary operations, `~fabric.operations.run` and
6 | `~fabric.operations.sudo`, are capable of sending local input to the remote
7 | end, in a manner nearly identical to the ``ssh`` program. For example, programs
8 | which display password prompts (e.g. a database dump utility, or changing a
9 | user's password) will behave just as if you were interacting with them
10 | directly.
11 |
12 | However, as with ``ssh`` itself, Fabric's implementation of this feature is
13 | subject to a handful of limitations which are not always intuitive. This
14 | document discusses such issues in detail.
15 |
16 | .. note::
17 | Readers unfamiliar with the basics of Unix stdout and stderr pipes, and/or
terminal devices, may wish to visit the Wikipedia pages for `Unix pipelines
<http://en.wikipedia.org/wiki/Pipeline_(Unix)>`_ and `Pseudo terminals
<http://en.wikipedia.org/wiki/Pseudo_terminal>`_ respectively.
21 |
22 |
23 | .. _combine_streams:
24 |
25 | Combining stdout and stderr
26 | ===========================
27 |
28 | The first issue to be aware of is that of the stdout and stderr streams, and
29 | why they are separated or combined as needed.
30 |
31 | Buffering
32 | ---------
33 |
34 | Fabric 0.9.x and earlier, and Python itself, buffer output on a line-by-line
35 | basis: text is not printed to the user until a newline character is found.
36 | This works fine in most situations but becomes problematic when one needs to
37 | deal with partial-line output such as prompts.
38 |
39 | .. note::
40 | Line-buffered output can make programs appear to halt or freeze for no
41 | reason, as prompts print out text without a newline, waiting for the user
42 | to enter their input and press Return.
43 |
44 | Newer Fabric versions buffer both input and output on a character-by-character
45 | basis in order to make interaction with prompts possible. This has the
46 | convenient side effect of enabling interaction with complex programs utilizing
47 | the "curses" libraries or which otherwise redraw the screen (think ``top``).
48 |
49 | Crossing the streams
50 | --------------------
51 |
52 | Unfortunately, printing to stderr and stdout simultaneously (as many programs
53 | do) means that when the two streams are printed independently one byte at a
54 | time, they can become garbled or meshed together. While this can sometimes be
55 | mitigated by line-buffering one of the streams and not the other, it's still a
56 | serious issue.
57 |
58 | To solve this problem, Fabric uses a setting in our SSH layer which merges the
59 | two streams at a low level and causes output to appear more naturally. This
60 | setting is represented in Fabric as the :ref:`combine-stderr` env var and
61 | keyword argument, and is ``True`` by default.
62 |
63 | Due to this default setting, output will appear correctly, but at the
64 | cost of an empty ``.stderr`` attribute on the return values of
65 | `~fabric.operations.run`/`~fabric.operations.sudo`, as all output will appear
66 | to be stdout.
67 |
68 | Conversely, users requiring a distinct stderr stream at the Python level and
69 | who aren't bothered by garbled user-facing output (or who are hiding stdout and
70 | stderr from the command in question) may opt to set this to ``False`` as
71 | needed.
72 |
73 |
74 | .. _pseudottys:
75 |
76 | Pseudo-terminals
77 | ================
78 |
79 | The other main issue to consider when presenting interactive prompts to users
80 | is that of echoing the user's own input.
81 |
82 | Echoes
83 | ------
84 |
85 | Typical terminal applications or bona fide text terminals (e.g. when using a
86 | Unix system without a running GUI) present programs with a terminal device
87 | called a tty or pty (for pseudo-terminal). These automatically echo all text
88 | typed into them back out to the user (via stdout), as interaction without
89 | seeing what you had just typed would be difficult. Terminal devices are also
90 | able to conditionally turn off echoing, allowing secure password prompts.
91 |
92 | However, it's possible for programs to be run without a tty or pty present at
93 | all (consider cron jobs, for example) and in this situation, any stdin data
94 | being fed to the program won't be echoed. This is desirable for programs being
95 | run without any humans around, and it's also Fabric's old default mode of
96 | operation.
97 |
98 | Fabric's approach
99 | -----------------
100 |
101 | Unfortunately, in the context of executing commands via Fabric, when no pty is
102 | present to echo a user's stdin, Fabric must echo it for them. This is
103 | sufficient for many applications, but it presents problems for password
104 | prompts, which become insecure.
105 |
106 | In the interests of security and meeting the principle of least surprise
107 | (insofar as users are typically expecting things to behave as they would when
108 | run in a terminal emulator), Fabric 1.0 and greater force a pty by default.
109 | With a pty enabled, Fabric simply allows the remote end to handle echoing or
110 | hiding of stdin and does not echo anything itself.
111 |
112 | .. note::
113 | In addition to allowing normal echo behavior, a pty also means programs
114 | that behave differently when attached to a terminal device will then do so.
115 | For example, programs that colorize output on terminals but not when run in
116 | the background will print colored output. Be wary of this if you inspect
117 | the return value of `~fabric.operations.run` or `~fabric.operations.sudo`!
118 |
119 | For situations requiring the pty behavior turned off, the :option:`--no-pty`
120 | command-line argument and :ref:`always-use-pty` env var may be used.
121 |
122 |
123 | Combining the two
124 | =================
125 |
126 | As a final note, keep in mind that use of pseudo-terminals effectively implies
combining stdout and stderr -- in much the same way as the
:ref:`combine_stderr <combine-stderr>` setting does. This is because a terminal device naturally
129 | sends both stdout and stderr to the same place -- the user's display -- thus
130 | making it impossible to differentiate between them.
131 |
132 | However, at the Fabric level, the two groups of settings are distinct from one
133 | another and may be combined in various ways. The default is for both to be set
134 | to ``True``; the other combinations are as follows:
135 |
136 | * ``run("cmd", pty=False, combine_stderr=True)``: will cause Fabric to echo all
137 | stdin itself, including passwords, as well as potentially altering ``cmd``'s
138 | behavior. Useful if ``cmd`` behaves undesirably when run under a pty and
139 | you're not concerned about password prompts.
140 | * ``run("cmd", pty=False, combine_stderr=False)``: with both settings
141 | ``False``, Fabric will echo stdin and won't issue a pty -- and this is highly
142 | likely to result in undesired behavior for all but the simplest commands.
143 | However, it is also the only way to access a distinct stderr stream, which is
144 | occasionally useful.
145 | * ``run("cmd", pty=True, combine_stderr=False)``: valid, but won't really make
146 | much of a difference, as ``pty=True`` will still result in merged streams.
147 | May be useful for avoiding any edge case problems in ``combine_stderr`` (none
148 | are presently known).
149 |
--------------------------------------------------------------------------------
/integration/test_operations.py:
--------------------------------------------------------------------------------
1 | from __future__ import with_statement
2 |
3 | from StringIO import StringIO
4 | import os
5 | import posixpath
6 | import shutil
7 |
8 | from fabric.api import (
9 | run, path, put, sudo, abort, warn_only, env, cd, local, settings, get
10 | )
11 | from fabric.contrib.files import exists
12 |
13 | from utils import Integration
14 |
15 |
def assert_mode(path, mode):
    """
    Fail unless the remote file at ``path`` reports octal mode string ``mode``.

    Runs ``stat`` on the remote host, so an SSH connection must be active.
    """
    observed = run("stat -c \"%%a\" \"%s\"" % path).stdout
    assert observed == mode, "remote %r != expected %r" % (observed, mode)
19 |
20 |
class TestOperations(Integration):
    """
    Integration tests for core operations (`run`/`sudo`/`put`/`get`) against a
    live SSH target, focusing on ``use_sudo`` transfers and permission modes.
    """
    # Remote scratch locations; created per-test in setup(), removed in
    # teardown() to prevent state bleeding between tests.
    filepath = "/tmp/whocares"
    dirpath = "/tmp/whatever/bin"
    not_owned = "/tmp/notmine"

    def setup(self):
        # Create scratch directories on the remote end.
        super(TestOperations, self).setup()
        run("mkdir -p %s" % " ".join([self.dirpath, self.not_owned]))

    def teardown(self):
        super(TestOperations, self).teardown()
        # Revert any chown crap from put sudo tests
        sudo("chown %s ." % env.user)
        # Nuke to prevent bleed
        sudo("rm -rf %s" % " ".join([self.dirpath, self.filepath]))
        sudo("rm -rf %s" % self.not_owned)

    def test_no_trailing_space_in_shell_path_in_run(self):
        # A trailing space appended to PATH would break lookup of 'myapp'.
        put(StringIO("#!/bin/bash\necho hi"), "%s/myapp" % self.dirpath, mode="0755")
        with path(self.dirpath):
            assert run('myapp').stdout == 'hi'

    def test_string_put_mode_arg_doesnt_error(self):
        # mode given as an octal *string*.
        put(StringIO("#!/bin/bash\necho hi"), self.filepath, mode="0755")
        assert_mode(self.filepath, "755")

    def test_int_put_mode_works_ok_too(self):
        # mode given as an octal *integer* literal (Python 2 syntax).
        put(StringIO("#!/bin/bash\necho hi"), self.filepath, mode=0755)
        assert_mode(self.filepath, "755")

    def _chown(self, target):
        # Make ``target`` root-owned so non-sudo writes to it will fail.
        sudo("chown root %s" % target)

    def _put_via_sudo(self, source=None, target_suffix='myfile', **kwargs):
        """
        Upload ``source`` (default: a small StringIO) into the root-owned
        directory via ``put(use_sudo=True)``; returns put()'s result list.
        """
        # Ensure target dir prefix is not owned by our user (so we fail unless
        # the sudo part of things is working)
        self._chown(self.not_owned)
        source = source if source else StringIO("whatever")
        # Drop temp file into that dir, via use_sudo, + any kwargs
        return put(
            source,
            self.not_owned + '/' + target_suffix,
            use_sudo=True,
            **kwargs
        )

    def test_put_with_use_sudo(self):
        # Smoke test: raises if the sudo-mediated upload fails.
        self._put_via_sudo()

    def test_put_with_dir_and_use_sudo(self):
        # Test cwd should be root of fabric source tree. Use our own folder as
        # the source, meh.
        self._put_via_sudo(source='integration', target_suffix='')

    def test_put_with_use_sudo_and_custom_temp_dir(self):
        # TODO: allow dependency injection in sftp.put or w/e, test it in
        # isolation instead.
        # For now, just half-ass it by ensuring $HOME isn't writable
        # temporarily.
        self._chown('.')
        self._put_via_sudo(temp_dir='/tmp')

    def test_put_with_use_sudo_dir_and_custom_temp_dir(self):
        # Directory upload variant of the custom-temp-dir test above.
        self._chown('.')
        self._put_via_sudo(source='integration', target_suffix='', temp_dir='/tmp')

    def test_put_use_sudo_and_explicit_mode(self):
        # Setup
        target_dir = posixpath.join(self.filepath, 'blah')
        subdir = "inner"
        subdir_abs = posixpath.join(target_dir, subdir)
        filename = "whatever.txt"
        target_file = posixpath.join(subdir_abs, filename)
        run("mkdir -p %s" % subdir_abs)
        self._chown(subdir_abs)
        local_path = os.path.join('/tmp', filename)
        with open(local_path, 'w+') as fd:
            fd.write('stuff\n')
        # Upload + assert
        with cd(target_dir):
            put(local_path, subdir, use_sudo=True, mode='777')
        assert_mode(target_file, '777')

    def test_put_file_to_dir_with_use_sudo_and_mirror_mode(self):
        # Ensure mode of local file, umask varies on eg travis vs various
        # localhosts
        source = 'whatever.txt'
        try:
            local("touch %s" % source)
            local("chmod 644 %s" % source)
            # Target for _put_via_sudo is a directory by default
            uploaded = self._put_via_sudo(
                source=source, mirror_local_mode=True
            )
            assert_mode(uploaded[0], '644')
        finally:
            local("rm -f %s" % source)

    def test_put_directory_use_sudo_and_spaces(self):
        # Spaces in paths must survive the sudo-mediated shell round trip.
        localdir = 'I have spaces'
        localfile = os.path.join(localdir, 'file.txt')
        os.mkdir(localdir)
        with open(localfile, 'w') as fd:
            fd.write('stuff\n')
        try:
            uploaded = self._put_via_sudo(localdir, target_suffix='')
            # Kinda dumb, put() would've died if it couldn't do it, but.
            assert exists(uploaded[0])
            assert exists(posixpath.dirname(uploaded[0]))
        finally:
            shutil.rmtree(localdir)

    def test_agent_forwarding_functions(self):
        # When paramiko #399 is present this will hang indefinitely
        with settings(forward_agent=True):
            run('ssh-add -L')

    def test_get_with_use_sudo_unowned_file(self):
        # Ensure target is not normally readable by us
        target = self.filepath
        sudo("echo 'nope' > %s" % target)
        sudo("chown root:root %s" % target)
        sudo("chmod 0440 %s" % target)
        # Pull down with use_sudo, confirm contents
        local_ = StringIO()
        result = get(
            local_path=local_,
            remote_path=target,
            use_sudo=True,
        )
        assert local_.getvalue() == "nope\n"

    def test_get_with_use_sudo_groupowned_file(self):
        # Issue #1226: file gotten w/ use_sudo, file normally readable via
        # group perms (yes - so use_sudo not required - full use case involves
        # full-directory get() where use_sudo *is* required). Prior to fix,
        # temp file is chmod 404 which seems to cause perm denied due to group
        # membership (despite 'other' readability).
        target = self.filepath
        sudo("echo 'nope' > %s" % target)
        # Same group as connected user
        gid = run("id -g")
        sudo("chown root:%s %s" % (gid, target))
        # Same perms as bug use case (only really need group read)
        sudo("chmod 0640 %s" % target)
        # Do eet
        local_ = StringIO()
        result = get(
            local_path=local_,
            remote_path=target,
            use_sudo=True,
        )
        assert local_.getvalue() == "nope\n"

    def test_get_from_unreadable_dir(self):
        # Put file in dir as normal user
        remotepath = "%s/myfile.txt" % self.dirpath
        run("echo 'foo' > %s" % remotepath)
        # Make dir unreadable (but still executable - impossible to obtain
        # file if dir is both unreadable and unexecutable)
        sudo("chown root:root %s" % self.dirpath)
        sudo("chmod 711 %s" % self.dirpath)
        # Try gettin' it
        local_ = StringIO()
        get(local_path=local_, remote_path=remotepath)
        assert local_.getvalue() == 'foo\n'
187 |
--------------------------------------------------------------------------------
/sites/www/installing.rst:
--------------------------------------------------------------------------------
1 | ==========
2 | Installing
3 | ==========
4 |
Fabric is best installed via `pip <http://pypi.python.org/pypi/pip>`_ (highly
recommended) or `easy_install
<http://wiki.python.org/moin/CheeseShopTutorial>`_ (older, but still works
fine), e.g.::
9 |
10 | $ pip install fabric
11 |
12 | You may also opt to use your operating system's package manager; the package is
13 | typically called ``fabric`` or ``python-fabric``. E.g.::
14 |
15 | $ sudo apt-get install fabric
16 |
17 | Advanced users wanting to install a development version may use ``pip`` to grab
18 | the latest master branch (as well as the dev version of the Paramiko
19 | dependency)::
20 |
21 | $ pip install paramiko==dev
22 | $ pip install fabric==dev
23 |
24 | Or, to install an editable version for debugging/hacking, execute ``pip
25 | install -e .`` (or ``python setup.py develop``) inside a :ref:`downloaded
26 | ` or :ref:`cloned ` copy of the source code.
27 |
28 | .. warning::
29 |
30 | Any development installs of Fabric (whether via ``==dev`` or ``install
31 | -e``) require the development version of Paramiko to be installed
32 | beforehand, or Fabric's installation may fail.
33 |
34 |
35 | Dependencies
36 | ============
37 |
38 | In order for Fabric's installation to succeed, you will need four primary pieces of software:
39 |
40 | * the Python programming language;
41 | * the ``setuptools`` packaging/installation library;
* the Python `Paramiko <http://www.paramiko.org>`_ SSH library;
43 | * and Paramiko's dependency, the PyCrypto cryptography library.
44 |
45 | and, if using the :ref:`parallel execution mode `:
46 |
47 | * the `multiprocessing`_ library.
48 |
49 | If you're using Paramiko 1.12 or above, you will also need an additional
50 | dependency for Paramiko:
51 |
* the `ecdsa <https://pypi.python.org/pypi/ecdsa/>`_ library
53 |
54 | Please read on for important details on these -- there are a few gotchas.
55 |
56 | Python
57 | ------
58 |
Fabric requires `Python <http://python.org>`_ version 2.5 - 2.7. Some caveats
60 | and notes about other Python versions:
61 |
62 | * We are not planning on supporting **Python 2.4** given its age and the number
63 | of useful tools in Python 2.5 such as context managers and new modules.
64 | That said, the actual amount of 2.5-specific functionality is not
65 | prohibitively large, and we would link to -- but not support -- a third-party
66 | 2.4-compatible fork. (No such fork exists at this time, to our knowledge.)
67 | * Fabric has not yet been tested on **Python 3.x** and is thus likely to be
68 | incompatible with that line of development. However, we try to be at least
69 | somewhat forward-looking (e.g. using ``print()`` instead of ``print``) and
70 | will definitely be porting to 3.x in the future once our dependencies do.
71 |
72 | setuptools
73 | ----------
74 |
75 | `Setuptools`_ comes with some Python installations by default; if yours doesn't,
76 | you'll need to grab it. In such situations it's typically packaged as
77 | ``python-setuptools``, ``py25-setuptools`` or similar. Fabric may drop its
78 | setuptools dependency in the future, or include alternative support for the
79 | `Distribute`_ project, but for now setuptools is required for installation.
80 |
81 | .. _setuptools: http://pypi.python.org/pypi/setuptools
82 | .. _Distribute: http://pypi.python.org/pypi/distribute
83 |
84 | ``multiprocessing``
85 | -------------------
86 |
87 | An optional dependency, the ``multiprocessing`` library is included in Python's
88 | standard library in version 2.6 and higher. If you're using Python 2.5 and want
89 | to make use of Fabric's :ref:`parallel execution features `
90 | you'll need to install it manually; the recommended route, as usual, is via
91 | ``pip``. Please see the `multiprocessing PyPI page
92 | `_ for details.
93 |
94 |
95 | .. warning::
96 | Early versions of Python 2.6 (in our testing, 2.6.0 through 2.6.2) ship
97 | with a buggy ``multiprocessing`` module that appears to cause Fabric to
98 | hang at the end of sessions involving large numbers of concurrent hosts.
99 | If you encounter this problem, either use :ref:`env.pool_size / -z
100 | ` to limit the amount of concurrency, or upgrade to Python
101 | >=2.6.3.
102 |
103 | Python 2.5 is unaffected, as it requires the PyPI version of
104 | ``multiprocessing``, which is newer than that shipped with Python <2.6.3.
105 |
106 | Development dependencies
107 | ------------------------
108 |
109 | If you are interested in doing development work on Fabric (or even just running
110 | the test suite), you may also need to install some or all of the following
111 | packages:
112 |
* `git <http://git-scm.com>`_ and `Mercurial`_, in order to obtain some of the
other dependencies below;
* `Nose <https://github.com/nose-devs/nose>`_
* `Coverage <http://nedbatchelder.com/code/modules/coverage.html>`_
* `PyLint <http://www.logilab.org/857>`_
* `Fudge <http://farmdev.com/projects/fudge/>`_
* `Sphinx <http://sphinx.pocoo.org/>`_
120 |
121 | For an up-to-date list of exact testing/development requirements, including
122 | version numbers, please see the ``requirements.txt`` file included with the
123 | source distribution. This file is intended to be used with ``pip``, e.g. ``pip
124 | install -r requirements.txt``.
125 |
126 | .. _Mercurial: http://mercurial.selenic.com/wiki/
127 |
128 |
129 | .. _downloads:
130 |
131 | Downloads
132 | =========
133 |
To obtain a tar.gz or zip archive of the Fabric source code, you may visit
`Fabric's PyPI page <http://pypi.python.org/pypi/Fabric>`_, which offers manual
136 | downloads in addition to being the entry point for ``pip`` and
137 | ``easy-install``.
138 |
139 |
140 | .. _source-code-checkouts:
141 |
142 | Source code checkouts
143 | =====================
144 |
The Fabric developers manage the project's source code with the `Git
<http://git-scm.com>`_ DVCS. To follow Fabric's development via Git instead of
147 | downloading official releases, you have the following options:
148 |
* Clone the canonical repository straight from `the Fabric organization's
repository on Github <https://github.com/fabric/fabric>`_,
``git://github.com/fabric/fabric.git``
152 | * Make your own fork of the Github repository by making a Github account,
visiting `fabric/fabric <https://github.com/fabric/fabric>`_ and clicking the
154 | "fork" button.
155 |
156 | .. note::
157 |
158 | If you've obtained the Fabric source via source control and plan on
159 | updating your checkout in the future, we highly suggest using ``python
160 | setup.py develop`` instead -- it will use symbolic links instead of file
161 | copies, ensuring that imports of the library or use of the command-line
162 | tool will always refer to your checkout.
163 |
164 | For information on the hows and whys of Fabric development, including which
165 | branches may be of interest and how you can help out, please see the
166 | :doc:`development` page.
167 |
168 |
169 | .. _pypm:
170 |
171 | ActivePython and PyPM
172 | =====================
173 |
Windows users who already have ActiveState's `ActivePython
<http://www.activestate.com/activepython/downloads>`_ distribution installed
may find Fabric is best installed with `its package manager, PyPM
<http://code.activestate.com/pypm/>`_. Below is example output from an
178 | installation of Fabric via ``pypm``::
179 |
180 | C:\> pypm install fabric
181 | The following packages will be installed into "%APPDATA%\Python" (2.7):
182 | paramiko-1.7.8 pycrypto-2.4 fabric-1.3.0
183 | Get: [pypm-free.activestate.com] fabric 1.3.0
184 | Get: [pypm-free.activestate.com] paramiko 1.7.8
185 | Get: [pypm-free.activestate.com] pycrypto 2.4
186 | Installing paramiko-1.7.8
187 | Installing pycrypto-2.4
188 | Installing fabric-1.3.0
189 | Fixing script %APPDATA%\Python\Scripts\fab-script.py
190 | C:\>
191 |
--------------------------------------------------------------------------------
/tests/test_decorators.py:
--------------------------------------------------------------------------------
1 | from __future__ import with_statement
2 |
3 | import random
4 | import sys
5 |
6 | from nose.tools import eq_, ok_, assert_true, assert_false, assert_equal
7 | import fudge
8 | from fudge import Fake, with_fakes, patched_context
9 |
10 | from fabric import decorators, tasks
11 | from fabric.state import env
12 | import fabric # for patching fabric.state.xxx
13 | from fabric.tasks import _parallel_tasks, requires_parallel, execute
14 | from fabric.context_managers import lcd, settings, hide
15 |
16 | from utils import mock_streams
17 |
18 |
19 | #
20 | # Support
21 | #
22 |
def fake_function(*args, **kwargs):
    """
    Build a ``fudge.Fake`` that quacks like a plain function.

    All positional and keyword arguments are forwarded to the ``fudge.Fake``
    constructor; unless the caller supplies ``callable`` or ``expect_call``,
    ``callable=True`` is assumed so the fake may be invoked directly.
    """
    if not ('callable' in kwargs or 'expect_call' in kwargs):
        kwargs['callable'] = True
    # A __name__ attribute keeps the fake compatible with function-wrapping
    # machinery such as @wraps().
    return Fake(*args, **kwargs).has_attr(__name__='fake')
36 |
37 |
38 |
39 | #
40 | # @task
41 | #
42 |
def test_task_returns_an_instance_of_wrappedfunctask_object():
    """Bare @task wraps the function in a WrappedCallableTask."""
    def mytask():
        pass
    wrapped = decorators.task(mytask)
    ok_(isinstance(wrapped, tasks.WrappedCallableTask))
48 |
49 |
def test_task_will_invoke_provided_class():
    """@task honors a custom task_class argument."""
    def foo(): pass
    stub = Fake()
    stub.expects("__init__").with_args(foo)
    fudge.clear_calls()
    fudge.clear_expectations()

    foo = decorators.task(foo, task_class=stub)

    fudge.verify()
60 |
61 |
def test_task_passes_args_to_the_task_class():
    """Extra positional args to @task are forwarded to the task class."""
    extra = ("some text", random.randint(100, 200))
    def foo(): pass

    stub = Fake()
    stub.expects("__init__").with_args(foo, *extra)
    fudge.clear_calls()
    fudge.clear_expectations()

    foo = decorators.task(foo, task_class=stub, *extra)
    fudge.verify()
73 |
74 |
def test_passes_kwargs_to_the_task_class():
    """Extra keyword args to @task are forwarded to the task class."""
    extra = {
        "msg": "some text",
        "number": random.randint(100, 200),
    }
    def foo(): pass

    stub = Fake()
    stub.expects("__init__").with_args(foo, **extra)
    fudge.clear_calls()
    fudge.clear_expectations()

    foo = decorators.task(foo, task_class=stub, **extra)
    fudge.verify()
89 |
90 |
def test_integration_tests_for_invoked_decorator_with_no_args():
    """@task() with empty parens still yields a working task."""
    expected = random.randint(100, 200)

    @decorators.task()
    def foo():
        return expected

    eq_(foo(), expected)
98 |
99 |
def test_integration_tests_for_decorator():
    """@task(task_class=...) yields a task that invokes the original body."""
    expected = random.randint(100, 200)

    @decorators.task(task_class=tasks.WrappedCallableTask)
    def foo():
        return expected

    eq_(foo(), expected)
107 |
108 |
def test_original_non_invoked_style_task():
    """Bare @task (no parens) behaves like the invoked form."""
    expected = random.randint(100, 200)

    @decorators.task
    def foo():
        return expected

    eq_(foo(), expected)
116 |
117 |
118 |
119 | #
120 | # @runs_once
121 | #
122 |
@with_fakes
def test_runs_once_runs_only_once():
    """
    @runs_once prevents decorated func from running >1 time
    """
    wrapped = decorators.runs_once(fake_function(expect_call=True).times_called(1))
    wrapped()
    wrapped()  # would trip times_called(1) if the call reached the fake again
132 |
133 |
def test_runs_once_returns_same_value_each_run():
    """
    @runs_once memoizes return value of decorated func
    """
    expected = "foo"
    memoized = decorators.runs_once(fake_function().returns(expected))
    eq_(memoized(), expected)
    eq_(memoized(), expected)  # second call replays the memoized value
142 |
143 |
@decorators.runs_once
def single_run():
    pass

def test_runs_once():
    """@runs_once stores its result on the wrapper's return_value attr."""
    # Before the first call, no memoized value exists yet.
    ok_(not hasattr(single_run, 'return_value'))
    single_run()
    # The first call records the (None) result...
    ok_(hasattr(single_run, 'return_value'))
    # ...and subsequent calls simply replay it.
    eq_(single_run(), None)
153 |
154 |
155 |
156 | #
157 | # @serial / @parallel
158 | #
159 |
160 |
# Fixture tasks covering every @serial/@parallel combination; bodies are
# irrelevant -- only the attributes the decorators attach matter.

@decorators.serial
def serial():
    pass

# @parallel applied first, then @serial stacked on top.
@decorators.serial
@decorators.parallel
def serial2():
    pass

# @serial applied first, then @parallel stacked on top.
@decorators.parallel
@decorators.serial
def serial3():
    pass

@decorators.parallel
def parallel():
    pass

# @parallel invoked with an explicit pool size argument.
@decorators.parallel(pool_size=20)
def parallel2():
    pass

# Name -> task mapping patched in for fabric.state.commands during tests.
fake_tasks = {
    'serial': serial,
    'serial2': serial2,
    'serial3': serial3,
    'parallel': parallel,
    'parallel2': parallel2,
}
190 |
def parallel_task_helper(actual_tasks, expected):
    """Shared assertion: _parallel_tasks() verdict for the named tasks."""
    commands_to_run = [[name] for name in actual_tasks]
    with patched_context(fabric.state, 'commands', fake_tasks):
        eq_(_parallel_tasks(commands_to_run), expected)
195 |
def test_parallel_tasks():
    """
    Nose generator: one _parallel_tasks() check per task-name combination.
    """
    # Fix: third description previously read "paralellism" (typo shown in
    # test output).
    for desc, task_names, expected in (
        ("One @serial-decorated task == no parallelism",
            ['serial'], False),
        ("One @parallel-decorated task == parallelism",
            ['parallel'], True),
        ("One @parallel-decorated and one @serial-decorated task == parallelism",
            ['parallel', 'serial'], True),
        ("Tasks decorated with both @serial and @parallel count as @parallel",
            ['serial2', 'serial3'], True)
    ):
        # description is what nose prints for the yielded check.
        parallel_task_helper.description = desc
        yield parallel_task_helper, task_names, expected
        del parallel_task_helper.description
210 |
def test_parallel_wins_vs_serial():
    """
    @parallel takes precedence over @serial when both are used on one task
    """
    for doubly_decorated in (serial2, serial3):
        ok_(requires_parallel(doubly_decorated))
217 |
@mock_streams('stdout')
def test_global_parallel_honors_runs_once():
    """
    fab -P (or env.parallel) should honor @runs_once
    """
    @decorators.runs_once
    def mytask():
        print("yolo") # 'Carpe diem' for stupid people!
    with settings(hide('everything'), parallel=True):
        execute(mytask, hosts=['localhost', '127.0.0.1'])
    captured = sys.stdout.getvalue()
    # Exactly one print despite two hosts.
    eq_(captured, "yolo\n")
    ok_(captured != "yolo\nyolo\n")
231 |
232 |
233 | #
234 | # @roles
235 | #
236 |
@decorators.roles('test')
def use_roles():
    pass

def test_roles():
    """@roles attaches a 'roles' list attribute to the function."""
    ok_(hasattr(use_roles, 'roles'))
    eq_(use_roles.roles, ['test'])
244 |
245 |
246 |
247 | #
248 | # @hosts
249 | #
250 |
@decorators.hosts('test')
def use_hosts():
    pass

def test_hosts():
    """@hosts attaches a 'hosts' list attribute to the function."""
    ok_(hasattr(use_hosts, 'hosts'))
    eq_(use_hosts.hosts, ['test'])
258 |
259 |
260 |
261 | #
262 | # @with_settings
263 | #
264 |
def test_with_settings_passes_env_vars_into_decorated_function():
    """with_settings() overrides env vars for the duration of the call."""
    env.value = True
    expected = random.randint(1000, 2000)
    def some_task():
        return env.value
    wrapped = decorators.with_settings(value=expected)(some_task)
    ok_(some_task(), msg="sanity check")
    eq_(wrapped(), expected)
273 |
def test_with_settings_with_other_context_managers():
    """
    with_settings() should take other context managers, and use them with other
    overridden key/value pairs.
    """
    env.testval1 = "outer 1"
    prev_lcwd = env.lcwd

    def some_task():
        eq_(env.testval1, "inner 1")
        ok_(env.lcwd.endswith("here")) # Should be the side-effect of adding cd to settings

    decorated_task = decorators.with_settings(
        lcd("here"),
        testval1="inner 1"
    )(some_task)
    decorated_task()

    # Fix: this was ok_(env.testval1, "outer 1"), which asserts truthiness
    # with "outer 1" as the failure *message* -- it would pass for ANY
    # non-empty value and never actually verified restoration.
    eq_(env.testval1, "outer 1")
    eq_(env.lcwd, prev_lcwd)
294 |
--------------------------------------------------------------------------------
/fabric/decorators.py:
--------------------------------------------------------------------------------
1 | """
2 | Convenience decorators for use in fabfiles.
3 | """
4 | from __future__ import with_statement
5 |
6 | import types
7 | from functools import wraps
8 |
9 | from Crypto import Random
10 |
11 | from fabric import tasks
12 | from .context_managers import settings
13 |
14 |
def task(*args, **kwargs):
    """
    Decorator declaring the wrapped function to be a new-style task.

    Usable bare (``@task``) or invoked with arguments customizing behavior
    (e.g. ``@task(alias='myalias')``); see the new-style task documentation
    for details on the available arguments.

    .. versionchanged:: 1.2
        Added the ``alias``, ``aliases``, ``task_class`` and ``default``
        keyword arguments. See :ref:`task-decorator-arguments` for details.
    .. versionchanged:: 1.5
        Added the ``name`` keyword argument.

    .. seealso:: `~fabric.docs.unwrap_tasks`, `~fabric.tasks.WrappedCallableTask`
    """
    # "Invoked" means @task(...) rather than bare @task: no positional args,
    # or at least one keyword arg. NOTE: must be computed *before* popping
    # task_class below, since that pop may empty kwargs.
    was_invoked = bool(not args or kwargs)
    klass = kwargs.pop("task_class", tasks.WrappedCallableTask)

    def wrapper(func):
        return klass(func, *args, **kwargs)

    if was_invoked:
        # @task(...): hand back the real decorator.
        return wrapper
    # Bare @task: args == (func,); clear args so none leak into the class.
    func, args = args[0], ()
    return wrapper(func)
42 |
def _wrap_as_new(original, new):
    """
    Return ``new``, re-wrapped as a WrappedCallableTask when ``original`` was
    itself a Task — preserving "task-ness" across decoration.
    """
    if not isinstance(original, tasks.Task):
        return new
    return tasks.WrappedCallableTask(new)
47 |
48 |
def _list_annotating_decorator(attribute, *values):
    """
    Build a decorator that tags its target with ``attribute`` = list of values.

    ``values`` may arrive either as ``*args`` or as a single iterable argument.
    """
    def attach_list(func):
        @wraps(func)
        def inner_decorator(*args, **kwargs):
            return func(*args, **kwargs)
        # Allow for single iterable argument as well as *args
        if len(values) == 1 and not isinstance(values[0], basestring):
            value_list = list(values[0])
        else:
            value_list = list(values)
        setattr(inner_decorator, attribute, value_list)
        # Don't replace @task new-style task objects with inner_decorator by
        # itself -- wrap in a new Task object first.
        return _wrap_as_new(func, inner_decorator)
    return attach_list
64 |
65 |
def hosts(*host_list):
    """
    Decorator defining which host or hosts to execute the wrapped function on.

    For example, the following will ensure that, barring an override on the
    command line, ``my_func`` will be run on ``host1``, ``host2`` and
    ``host3``, and with specific users on ``host1`` and ``host3``::

        @hosts('user1@host1', 'host2', 'user2@host3')
        def my_func():
            pass

    `~fabric.decorators.hosts` may be invoked with either an argument list
    (``@hosts('host1')``, ``@hosts('host1', 'host2')``) or a single, iterable
    argument (``@hosts(['host1', 'host2'])``).

    Note that this decorator actually just sets the function's ``.hosts``
    attribute, which is then read prior to executing the function.

    .. versionchanged:: 0.9.2
        Allow a single, iterable argument (``@hosts(iterable)``) to be used
        instead of requiring ``@hosts(*iterable)``.
    """
    # Delegate to the generic list-annotating helper: sets func.hosts.
    return _list_annotating_decorator('hosts', *host_list)
90 |
91 |
def roles(*role_list):
    """
    Decorator defining a list of role names, used to look up host lists.

    A role is simply defined as a key in `env` whose value is a list of one or
    more host connection strings. For example, the following will ensure that,
    barring an override on the command line, ``my_func`` will be executed
    against the hosts listed in the ``webserver`` and ``dbserver`` roles::

        env.roledefs.update({
            'webserver': ['www1', 'www2'],
            'dbserver': ['db1']
        })

        @roles('webserver', 'dbserver')
        def my_func():
            pass

    As with `~fabric.decorators.hosts`, `~fabric.decorators.roles` may be
    invoked with either an argument list or a single, iterable argument.
    Similarly, this decorator uses the same mechanism as
    `~fabric.decorators.hosts` and simply sets ``.roles``.

    .. versionchanged:: 0.9.2
        Allow a single, iterable argument to be used (same as
        `~fabric.decorators.hosts`).
    """
    # Delegate to the generic list-annotating helper: sets func.roles.
    return _list_annotating_decorator('roles', *role_list)
120 |
121 |
def runs_once(func):
    """
    Decorator preventing wrapped function from running more than once.

    By keeping internal state, this decorator allows you to mark a function
    such that it will only run once per Python interpreter session, which in
    typical use means "once per invocation of the ``fab`` program".

    Any function wrapped with this decorator will silently fail to execute the
    2nd, 3rd, ..., Nth time it is called, and will return the value of the
    original run.

    .. note:: ``runs_once`` does not work with parallel task execution.
    """
    @wraps(func)
    def decorated(*args, **kwargs):
        # NOTE: `decorated` is rebound to the _wrap_as_new() result below, so
        # this closure caches return_value on the *final* wrapper object, not
        # on this inner function. Do not "simplify" the rebinding away.
        if not hasattr(decorated, 'return_value'):
            decorated.return_value = func(*args, **kwargs)
        return decorated.return_value
    decorated = _wrap_as_new(func, decorated)
    # Mark as serial (disables parallelism) and return
    return serial(decorated)
144 |
145 |
def serial(func):
    """
    Forces the wrapped function to always run sequentially, never in parallel.

    This decorator takes precedence over the global value of :ref:`env.parallel
    `. However, if a task is decorated with both
    `~fabric.decorators.serial` *and* `~fabric.decorators.parallel`,
    `~fabric.decorators.parallel` wins.

    .. versionadded:: 1.3
    """
    # Only flag as serial when @parallel hasn't already marked the task —
    # this check is what gives @parallel precedence over @serial.
    if not getattr(func, 'parallel', False):
        func.serial = True
    return _wrap_as_new(func, func)
160 |
161 |
def parallel(pool_size=None):
    """
    Forces the wrapped function to run in parallel, instead of sequentially.

    This decorator takes precedence over the global value of :ref:`env.parallel
    `. It also takes precedence over `~fabric.decorators.serial`
    if a task is decorated with both.

    .. versionadded:: 1.3
    """
    # Bare @parallel usage passes the decorated function itself in as
    # ``pool_size``; factory usage (@parallel(...)) passes an int or None.
    # isinstance() is the idiomatic type test (was ``type(x) == ...``);
    # FunctionType cannot be subclassed, so behavior is unchanged.
    called_without_args = isinstance(pool_size, types.FunctionType)

    def real_decorator(func):
        @wraps(func)
        def inner(*args, **kwargs):
            # Required for ssh/PyCrypto to be happy in multiprocessing
            # (as far as we can tell, this is needed even with the extra such
            # calls in newer versions of paramiko.)
            Random.atfork()
            return func(*args, **kwargs)
        inner.parallel = True
        inner.serial = False
        # Bare usage can't supply a pool size; leave it unbounded (None).
        inner.pool_size = None if called_without_args else pool_size
        return _wrap_as_new(func, inner)

    # Allow non-factory-style decorator use (@decorator vs @decorator())
    if called_without_args:
        return real_decorator(pool_size)

    return real_decorator
192 |
193 |
def with_settings(*arg_settings, **kw_settings):
    """
    Decorator equivalent of ``fabric.context_managers.settings``.

    Allows you to wrap an entire function as if it was called inside a block
    with the ``settings`` context manager. This may be useful if you know you
    want a given setting applied to an entire function body, or wish to
    retrofit old code without indenting everything.

    For example, to turn aborts into warnings for an entire task function::

        @with_settings(warn_only=True)
        def foo():
            ...

    .. seealso:: `~fabric.context_managers.settings`
    .. versionadded:: 1.1
    """
    def outer(func):
        # Run the original function body inside a settings() block built from
        # the decorator's own arguments.
        @wraps(func)
        def inner(*args, **kwargs):
            with settings(*arg_settings, **kw_settings):
                return func(*args, **kwargs)
        # Preserve task-ness of already-wrapped @task objects.
        return _wrap_as_new(func, inner)
    return outer
219 |
--------------------------------------------------------------------------------
/fabric/job_queue.py:
--------------------------------------------------------------------------------
1 | """
2 | Sliding-window-based job/task queue class (& example of use.)
3 |
4 | May use ``multiprocessing.Process`` or ``threading.Thread`` objects as queue
5 | items, though within Fabric itself only ``Process`` objects are used/supported.
6 | """
7 |
8 | from __future__ import with_statement
9 | import time
10 | import Queue
11 | from multiprocessing import Process
12 |
13 | from fabric.state import env
14 | from fabric.network import ssh
15 | from fabric.context_managers import settings
16 |
17 |
class JobQueue(object):
    """
    The goal of this class is to make a queue of processes to run, and go
    through them running X number at any given time.

    So if the bubble is 5 start with 5 running and move the bubble of running
    procs along the queue looking something like this:

        Start
        ...........................
        [~~~~~]....................
        ___[~~~~~].................
        _________[~~~~~]...........
        __________________[~~~~~]..
        ____________________[~~~~~]
        ___________________________
                                End
    """
    def __init__(self, max_running, comms_queue):
        """
        Setup the class to reasonable defaults.

        ``max_running`` is the size of the "bubble" of concurrently running
        jobs; ``comms_queue`` is the queue object jobs report results through
        (drained by `_fill_results`).
        """
        self._queued = []
        self._running = []
        self._completed = []
        self._num_of_jobs = 0
        self._max = max_running
        self._comms_queue = comms_queue
        self._finished = False
        self._closed = False
        self._debug = False

    def _all_alive(self):
        """
        Simply states if all procs are alive or not. Needed to determine when
        to stop looping, and pop dead procs off and add live ones.
        """
        if self._running:
            return all([x.is_alive() for x in self._running])
        else:
            return False

    def __len__(self):
        """
        Just going to use number of jobs as the JobQueue length.
        """
        return self._num_of_jobs

    def close(self):
        """
        A sanity check, so that the need to care about new jobs being added in
        the last throws of the job_queue's run are negated.
        """
        if self._debug:
            print("job queue closed.")

        self._closed = True

    def append(self, process):
        """
        Add the Process() to the queue, so that later it can be checked up on.
        That is if the JobQueue is still open.

        If the queue is closed, this will just silently do nothing.

        To get data back out of this process, give ``process`` access to a
        ``multiprocessing.Queue`` object, and give it here as ``queue``. Then
        ``JobQueue.run`` will include the queue's contents in its return value.
        """
        if not self._closed:
            self._queued.append(process)
            self._num_of_jobs += 1
            if self._debug:
                print("job queue appended %s." % process.name)

    def run(self):
        """
        This is the workhorse. It will take the intial jobs from the _queue,
        start them, add them to _running, and then go into the main running
        loop.

        This loop will check for done procs, if found, move them out of
        _running into _completed. It also checks for a _running queue with open
        spots, which it will then fill as discovered.

        To end the loop, there have to be no running procs, and no more procs
        to be run in the queue.

        This function returns an iterable of all its children's exit codes.
        """
        def _advance_the_queue():
            """
            Helper function to do the job of poping a new proc off the queue
            start it, then add it to the running queue. This will eventually
            depleate the _queue, which is a condition of stopping the running
            while loop.

            It also sets the env.host_string from the job.name, so that fabric
            knows that this is the host to be making connections on.
            """
            job = self._queued.pop()
            if self._debug:
                print("Popping '%s' off the queue and starting it" % job.name)
            with settings(clean_revert=True, host_string=job.name, host=job.name):
                job.start()
            self._running.append(job)

        # Prep return value so we can start filling it during main loop
        results = {}
        for job in self._queued:
            results[job.name] = dict.fromkeys(('exit_code', 'results'))

        if not self._closed:
            raise Exception("Need to close() before starting.")

        if self._debug:
            print("Job queue starting.")

        # Initial fill of the bubble. BUGFIX: guard on self._queued —
        # previously, appending fewer jobs than max_running caused
        # _advance_the_queue() to pop() from an empty list (IndexError).
        while len(self._running) < self._max and self._queued:
            _advance_the_queue()

        # Main loop!
        while not self._finished:
            while len(self._running) < self._max and self._queued:
                _advance_the_queue()

            if not self._all_alive():
                # BUGFIX: partition into alive/done instead of popping by
                # index while enumerating — each pop() shifted later indices,
                # so with two or more dead jobs a *live* job could be moved
                # into _completed (and later join()ed prematurely).
                still_running = []
                for job in self._running:
                    if job.is_alive():
                        still_running.append(job)
                    else:
                        if self._debug:
                            print("Job queue found finished proc: %s." %
                                  job.name)
                        self._completed.append(job)
                self._running = still_running

                if self._debug:
                    print("Job queue has %d running." % len(self._running))

            if not (self._queued or self._running):
                if self._debug:
                    print("Job queue finished.")

                for job in self._completed:
                    job.join()

                self._finished = True

            # Each loop pass, try pulling results off the queue to keep its
            # size down. At this point, we don't actually care if any results
            # have arrived yet; they will be picked up after the main loop.
            self._fill_results(results)

            time.sleep(ssh.io_sleep)

        # Consume anything left in the results queue. Note that there is no
        # need to block here, as the main loop ensures that all workers will
        # already have finished.
        self._fill_results(results)

        # Attach exit codes now that we're all done & have joined all jobs
        for job in self._completed:
            if isinstance(job, Process):
                results[job.name]['exit_code'] = job.exitcode

        return results

    def _fill_results(self, results):
        """
        Attempt to pull data off self._comms_queue and add to 'results' dict.
        If no data is available (i.e. the queue is empty), bail immediately.
        """
        while True:
            try:
                datum = self._comms_queue.get_nowait()
                results[datum['name']]['results'] = datum['result']
            except Queue.Empty:
                break
195 |
196 |
197 | #### Sample
198 |
def try_using(parallel_type):
    """
    This will run the queue through its paces, and show a simple way of using
    the job queue.
    """
    def print_number(number):
        """
        Simple function to give a simple task to execute.
        """
        print(number)

    # Choose the worker flavor: real processes or plain threads.
    if parallel_type == "multiprocessing":
        from multiprocessing import Process as Bucket
    elif parallel_type == "threading":
        from threading import Thread as Bucket

    # Make a job_queue with a bubble of len 5, and have it print verbosely
    jobs = JobQueue(5, Queue.Queue())
    jobs._debug = True

    # Queue up 20 workers.
    for number in range(20):
        jobs.append(Bucket(
            target=print_number,
            args=[number],
            kwargs={},
        ))

    # No further appends allowed; run everything to completion.
    jobs.close()
    jobs.run()
233 |
234 |
if __name__ == '__main__':
    # Demo entry point: exercise the queue with both process-based and
    # thread-based workers.
    try_using("multiprocessing")
    try_using("threading")
238 |
--------------------------------------------------------------------------------
/fabric/contrib/project.py:
--------------------------------------------------------------------------------
1 | """
2 | Useful non-core functionality, e.g. functions composing multiple operations.
3 | """
4 | from __future__ import with_statement
5 |
6 | from os import getcwd, sep
7 | import os.path
8 | from datetime import datetime
9 | from tempfile import mkdtemp
10 |
11 | from fabric.network import needs_host, key_filenames, normalize
12 | from fabric.operations import local, run, sudo, put
13 | from fabric.state import env, output
14 | from fabric.context_managers import cd
15 |
16 | __all__ = ['rsync_project', 'upload_project']
17 |
@needs_host
def rsync_project(
    remote_dir,
    local_dir=None,
    exclude=(),
    delete=False,
    extra_opts='',
    ssh_opts='',
    capture=False,
    upload=True,
    default_opts='-pthrvz'
):
    """
    Synchronize a remote directory with the current project directory via rsync.

    Where ``upload_project()`` makes use of ``scp`` to copy one's entire
    project every time it is invoked, ``rsync_project()`` uses the ``rsync``
    command-line utility, which only transfers files newer than those on the
    remote end.

    ``rsync_project()`` is thus a simple wrapper around ``rsync``; for
    details on how ``rsync`` works, please see its manpage. ``rsync`` must be
    installed on both your local and remote systems in order for this operation
    to work correctly.

    This function makes use of Fabric's ``local()`` operation, and returns the
    output of that function call; thus it will return the stdout, if any, of
    the resultant ``rsync`` call.

    ``rsync_project()`` uses the current Fabric connection parameters (user,
    host, port) by default, adding them to rsync's ssh options (then mixing in
    ``ssh_opts``, if given -- see below.)

    ``rsync_project()`` takes the following parameters:

    * ``remote_dir``: the only required parameter, this is the path to the
      directory on the remote server. Due to how ``rsync`` is implemented, the
      exact behavior depends on the value of ``local_dir``:

        * If ``local_dir`` ends with a trailing slash, the files will be
          dropped inside of ``remote_dir``. E.g.
          ``rsync_project("/home/username/project/", "foldername/")`` will drop
          the contents of ``foldername`` inside of ``/home/username/project``.
        * If ``local_dir`` does **not** end with a trailing slash (and this
          includes the default scenario, when ``local_dir`` is not specified),
          ``remote_dir`` is effectively the "parent" directory, and a new
          directory named after ``local_dir`` will be created inside of it. So
          ``rsync_project("/home/username", "foldername")`` would create a new
          directory ``/home/username/foldername`` (if needed) and place the
          files there.

    * ``local_dir``: by default, ``rsync_project`` uses your current working
      directory as the source directory. This may be overridden by specifying
      ``local_dir``, which is a string passed verbatim to ``rsync``, and thus
      may be a single directory (``"my_directory"``) or multiple directories
      (``"dir1 dir2"``). See the ``rsync`` documentation for details.
    * ``exclude``: optional, may be a single string, or an iterable of strings,
      and is used to pass one or more ``--exclude`` options to ``rsync``.
    * ``delete``: a boolean controlling whether ``rsync``'s ``--delete`` option
      is used. If True, instructs ``rsync`` to remove remote files that no
      longer exist locally. Defaults to False.
    * ``extra_opts``: an optional, arbitrary string which you may use to pass
      custom arguments or options to ``rsync``.
    * ``ssh_opts``: Like ``extra_opts`` but specifically for the SSH options
      string (rsync's ``--rsh`` flag.)
    * ``capture``: Sent directly into an inner `~fabric.operations.local` call.
    * ``upload``: a boolean controlling whether file synchronization is
      performed up or downstream. Upstream by default.
    * ``default_opts``: the default rsync options ``-pthrvz``, override if
      desired (e.g. to remove verbosity, etc).

    Furthermore, this function transparently honors Fabric's port and SSH key
    settings. Calling this function when the current host string contains a
    nonstandard port, or when ``env.key_filename`` is non-empty, will use the
    specified port and/or SSH key filename(s).

    For reference, the approximate ``rsync`` command-line call that is
    constructed by this function is the following::

        rsync [--delete] [--exclude exclude[0][, --exclude[1][, ...]]] \\
            [default_opts] [extra_opts] <local_dir> <user>@<host>:<remote_dir>

    .. versionadded:: 1.4.0
        The ``ssh_opts`` keyword argument.
    .. versionadded:: 1.4.1
        The ``capture`` keyword argument.
    .. versionadded:: 1.8.0
        The ``default_opts`` keyword argument.
    """
    # Turn single-string exclude into a one-item list for consistency
    # (Python 2 strings have no __iter__, so this catches bare strings).
    if not hasattr(exclude, '__iter__'):
        exclude = (exclude,)
    # Create --exclude options from exclude list
    exclude_opts = ' --exclude "%s"' * len(exclude)
    # Double-backslash-escape: each pattern is interpolated inside double
    # quotes above, so embedded double quotes must survive shell quoting.
    exclusions = tuple([str(s).replace('"', '\\\\"') for s in exclude])
    # Honor SSH key(s)
    key_string = ""
    keys = key_filenames()
    if keys:
        key_string = "-i " + " -i ".join(keys)
    # Port
    user, host, port = normalize(env.host_string)
    port_string = "-p %s" % port
    # RSH: only emit --rsh when there is at least one non-empty component.
    rsh_string = ""
    rsh_parts = [key_string, port_string, ssh_opts]
    if any(rsh_parts):
        rsh_string = "--rsh='ssh %s'" % " ".join(rsh_parts)
    # Set up options part of string
    options_map = {
        'delete': '--delete' if delete else '',
        'exclude': exclude_opts % exclusions,
        'rsh': rsh_string,
        'default': default_opts,
        'extra': extra_opts,
    }
    options = "%(delete)s%(exclude)s %(default)s %(extra)s %(rsh)s" % options_map
    # Get local directory: default to "../<name of cwd>".
    if local_dir is None:
        local_dir = '../' + getcwd().split(sep)[-1]
    # Create and run final command string
    if host.count(':') > 1:
        # Square brackets are mandatory for IPv6 rsync address,
        # even if port number is not specified
        remote_prefix = "[%s@%s]" % (user, host)
    else:
        remote_prefix = "%s@%s" % (user, host)
    # Direction of sync decides which side is source vs. destination.
    if upload:
        cmd = "rsync %s %s %s:%s" % (options, local_dir, remote_prefix, remote_dir)
    else:
        cmd = "rsync %s %s:%s %s" % (options, remote_prefix, remote_dir, local_dir)

    # Echo the final command when 'running' output is enabled.
    if output.running:
        print("[%s] rsync_project: %s" % (env.host_string, cmd))
    return local(cmd, capture=capture)
154 |
155 |
def upload_project(local_dir=None, remote_dir="", use_sudo=False):
    """
    Upload the current project to a remote system via ``tar``/``gzip``.

    ``local_dir`` specifies the local project directory to upload, and defaults
    to the current working directory.

    ``remote_dir`` specifies the target directory to upload into (meaning that
    a copy of ``local_dir`` will appear as a subdirectory of ``remote_dir``)
    and defaults to the remote user's home directory.

    ``use_sudo`` specifies which method should be used when executing commands
    remotely. ``sudo`` will be used if use_sudo is True, otherwise ``run`` will
    be used.

    This function makes use of the ``tar`` and ``gzip`` programs/libraries,
    thus it will not work too well on Win32 systems unless one is using Cygwin
    or something similar. It will attempt to clean up the local and remote
    tarfiles when it finishes executing, even in the event of a failure.

    .. versionchanged:: 1.1
        Added the ``local_dir`` and ``remote_dir`` kwargs.

    .. versionchanged:: 1.7
        Added the ``use_sudo`` kwarg.
    """
    # Pick the remote executor up front.
    runner = sudo if use_sudo else run

    if not local_dir:
        local_dir = os.getcwd()

    # Strip any trailing separator so os.path.split() yields the dir name.
    local_dir = local_dir.rstrip(os.sep)
    local_path, local_name = os.path.split(local_dir)
    tar_file = "%s.tar.gz" % local_name
    target_tar = os.path.join(remote_dir, tar_file)
    staging_dir = mkdtemp()

    try:
        tar_path = os.path.join(staging_dir, tar_file)
        local("tar -czf %s -C %s %s" % (tar_path, local_path, local_name))
        put(tar_path, target_tar, use_sudo=use_sudo)
        with cd(remote_dir):
            try:
                runner("tar -xzf %s" % tar_file)
            finally:
                # Remove the remote tarball even if extraction failed.
                runner("rm -f %s" % tar_file)
    finally:
        # Always remove the local staging directory.
        local("rm -rf %s" % staging_dir)
205 |
--------------------------------------------------------------------------------
/tests/test_context_managers.py:
--------------------------------------------------------------------------------
1 | from __future__ import with_statement
2 |
3 | import os
4 | import sys
5 | from StringIO import StringIO
6 |
7 | from nose.tools import eq_, ok_
8 |
9 | from fabric.state import env, output
10 | from fabric.context_managers import (cd, settings, lcd, hide, shell_env, quiet,
11 | warn_only, prefix, path)
12 | from fabric.operations import run, local, _prefix_commands
13 | from utils import mock_streams, FabricTest
14 | from server import server
15 |
16 |
17 | #
18 | # cd()
19 | #
20 |
def test_error_handling():
    """
    cd cleans up after itself even in case of an exception
    """
    class Boom(Exception):
        pass

    try:
        with cd('somewhere'):
            raise Boom('Houston, we have a problem.')
    except Boom:
        pass
    finally:
        # After unwinding, a fresh cd() must start from a clean slate.
        with cd('else'):
            eq_(env.cwd, 'else')
37 |
38 |
def test_cwd_with_absolute_paths():
    """
    cd() should append arg if non-absolute or overwrite otherwise
    """
    base = '/some/existing/path'
    with settings(cwd=base):
        # Absolute arguments replace the current cwd wholesale...
        with cd('/absolute/path'):
            eq_(env.cwd, '/absolute/path')
        # ...while relative arguments are appended to it.
        with cd('another'):
            eq_(env.cwd, base + '/' + 'another')
51 |
52 |
def test_cd_home_dir():
    """
    cd() should work with home directories
    """
    # A ~-prefixed path is preserved verbatim in env.cwd.
    with cd("~/somepath"):
        eq_(env.cwd, "~/somepath")
60 |
61 |
def test_cd_nested_home_abs_dirs():
    """
    cd() should work with nested user homedir (starting with ~) paths.

    It should always take the last path if the new path begins with `/` or `~`
    """
    home = "~/somepath"
    absolute = "/some/random/path"
    relative = "some/random/path"

    # Homedir path nested inside another homedir path.
    with cd(home):
        eq_(env.cwd, home)
        deeper = home + "/another/path"
        with cd(deeper):
            eq_(env.cwd, deeper)

    # Homedir path entered from an absolute path: homedir wins.
    with cd(absolute):
        eq_(env.cwd, absolute)
        with cd(home):
            eq_(env.cwd, home)

    # Homedir path entered from a relative path: homedir wins.
    with cd(relative):
        eq_(env.cwd, relative)
        with cd(home):
            eq_(env.cwd, home)

    # Relative path entered from a homedir path: appended as usual.
    with cd(home):
        eq_(env.cwd, home)
        with cd(relative):
            eq_(env.cwd, home + "/" + relative)
97 |
98 |
99 | #
100 | # prefix
101 | #
102 |
def test_nested_prefix():
    """
    prefix context managers can be created outside of the with block and nested
    """
    outer_cm = prefix('1')
    inner_cm = prefix('2')
    with outer_cm:
        with inner_cm:
            # Prefixes accumulate in entry order.
            eq_(env.command_prefixes, ['1', '2'])
112 |
113 | #
114 | # cd prefix with dev/null
115 | #
116 |
def test_cd_prefix():
    """
    cd prefix should direct output to /dev/null in case of CDPATH
    """
    target = "~/somepath"
    with cd(target):
        # CDPATH can make `cd` echo the directory; that noise must be
        # redirected away before the actual command runs.
        eq_(_prefix_commands('foo', "remote"),
            'cd %s >/dev/null && foo' % target)
126 |
127 |
128 | # def test_cd_prefix_on_win32():
129 | # """
130 | # cd prefix should NOT direct output to /dev/null on win32
131 | # """
132 | # some_path = "~/somepath"
133 |
134 | # import fabric
135 | # try:
136 | # fabric.state.win32 = True
137 | # with cd(some_path):
138 | # command_out = _prefix_commands('foo', "remote")
139 | # eq_(command_out, 'cd %s && foo' % some_path)
140 | # finally:
141 | # fabric.state.win32 = False
142 |
143 | #
144 | # hide/show
145 | #
146 |
def test_hide_show_exception_handling():
    """
    hide()/show() should clean up OK if exceptions are raised
    """
    try:
        with hide('stderr'):
            # Inside the block, stderr display is toggled off...
            eq_(output.stderr, False)
            raise Exception
    except Exception:
        # ...and must be restored to the default (True) despite the raise.
        eq_(output.stderr, True)
160 |
161 |
162 | #
163 | # settings()
164 | #
165 |
def test_setting_new_env_dict_key_should_work():
    """
    Using settings() with a previously nonexistent key should work correctly
    """
    key = 'thisshouldnevereverexistseriouslynow'
    with settings(**{key: 'a winner is you'}):
        ok_(key in env)
    # Brand-new keys are deleted again on exit, not merely reset.
    ok_(key not in env)
175 |
176 |
def test_settings():
    """
    settings() should temporarily override env dict with given key/value pair
    """
    env.testval = "outer value"
    # Inside the block the override is visible...
    with settings(testval="inner value"):
        eq_(env.testval, "inner value")
    # ...and the original value is restored on exit.
    eq_(env.testval, "outer value")
185 |
186 |
def test_settings_with_multiple_kwargs():
    """
    settings() should temporarily override env dict with given key/value pairS
    """
    env.testval2 = "outer 2"
    env.testval1 = "outer 1"
    with settings(testval1="inner 1", testval2="inner 2"):
        eq_(env.testval1, "inner 1")
        eq_(env.testval2, "inner 2")
    # Both keys revert together on exit.
    eq_(env.testval1, "outer 1")
    eq_(env.testval2, "outer 2")
198 |
199 |
def test_settings_with_other_context_managers():
    """
    settings() should take other context managers, and use them with other overrided
    key/value pairs.
    """
    env.testval1 = "outer 1"
    prev_lcwd = env.lcwd

    with settings(lcd("here"), testval1="inner 1"):
        eq_(env.testval1, "inner 1")
        ok_(env.lcwd.endswith("here"))  # Should be the side-effect of adding cd to settings

    # BUGFIX: was ok_(env.testval1, "outer 1") — ok_'s second argument is a
    # failure *message*, so that assertion could never fail. Use eq_ to
    # actually verify the value reverted.
    eq_(env.testval1, "outer 1")
    eq_(env.lcwd, prev_lcwd)
214 |
215 |
def test_settings_clean_revert():
    """
    settings(clean_revert=True) should only revert values matching input values
    """
    env.modified = "outer"
    env.notmodified = "outer"
    overrides = dict(
        modified="inner",
        notmodified="inner",
        inner_only="only",
        clean_revert=True,
    )
    with settings(**overrides):
        eq_(env.modified, "inner")
        eq_(env.notmodified, "inner")
        eq_(env.inner_only, "only")
        # Mutate one key while inside the block...
        env.modified = "modified internally"
    # ...clean_revert keeps the internal modification instead of reverting it.
    eq_(env.modified, "modified internally")
    ok_("inner_only" not in env)
234 |
235 |
236 | #
237 | # shell_env()
238 | #
239 |
def test_shell_env():
    """
    shell_env() sets the shell_env attribute in the env dict
    """
    with shell_env(KEY="value"):
        eq_(env.shell_env['KEY'], 'value')
    # Reverts to an empty mapping afterwards.
    eq_(env.shell_env, {})
248 |
249 |
class TestQuietAndWarnOnly(FabricTest):
    # These tests run against the fake SSH server provided by @server and
    # capture stdout/stderr via @mock_streams (both from the test support
    # modules), so assertions can inspect what run() printed.
    @server()
    @mock_streams('both')
    def test_quiet_hides_all_output(self):
        # Sanity test - normally this is not empty
        run("ls /simple")
        ok_(sys.stdout.getvalue())
        # Reset
        sys.stdout = StringIO()
        # Real test
        with quiet():
            run("ls /simple")
        # Empty output
        ok_(not sys.stdout.getvalue())
        # Reset
        sys.stdout = StringIO()
        # Kwarg test
        run("ls /simple", quiet=True)
        ok_(not sys.stdout.getvalue())

    @server(responses={'barf': [
        "this is my stdout",
        "this is my stderr",
        1
    ]})
    def test_quiet_sets_warn_only_to_true(self):
        # Sanity test to ensure environment
        with settings(warn_only=False):
            with quiet():
                # Nonzero exit must not abort, proving quiet() set warn_only.
                eq_(run("barf").return_code, 1)
            # Kwarg test
            eq_(run("barf", quiet=True).return_code, 1)

    @server(responses={'hrm': ["", "", 1]})
    @mock_streams('both')
    def test_warn_only_is_same_as_settings_warn_only(self):
        with warn_only():
            # .failed set instead of aborting == warn_only semantics.
            eq_(run("hrm").failed, True)

    @server()
    @mock_streams('both')
    def test_warn_only_does_not_imply_hide_everything(self):
        with warn_only():
            run("ls /simple")
        # Output is still printed: warn_only is not quiet().
        assert sys.stdout.getvalue().strip() != ""
295 |
296 |
297 | # path() (distinct from shell_env)
298 |
class TestPathManager(FabricTest):
    # Exercises the path() context manager against a real local shell.
    def setup(self):
        super(TestPathManager, self).setup()
        # Snapshot the genuine $PATH for comparison in the tests below.
        self.real = os.environ.get('PATH')

    def via_local(self):
        # Report the $PATH a locally-spawned shell actually sees.
        with hide('everything'):
            return local("echo $PATH", capture=True)

    def test_lack_of_path_has_default_local_path(self):
        """
        No use of 'with path' == default local $PATH
        """
        eq_(self.real, self.via_local())

    def test_use_of_path_appends_by_default(self):
        """
        'with path' appends by default
        """
        with path('foo'):
            eq_(self.via_local(), self.real + ":foo")
320 |
--------------------------------------------------------------------------------
/fabric/io.py:
--------------------------------------------------------------------------------
1 | from __future__ import with_statement
2 |
3 | import sys
4 | import time
5 | import re
6 | import socket
7 | from select import select
8 |
9 | from fabric.state import env, output, win32
10 | from fabric.auth import get_password, set_password
11 | import fabric.network
12 | from fabric.network import ssh, normalize
13 | from fabric.utils import RingBuffer
14 | from fabric.exceptions import CommandTimeout
15 |
16 |
17 | if win32:
18 | import msvcrt
19 |
20 |
21 | def _endswith(char_list, substring):
22 | tail = char_list[-1 * len(substring):]
23 | substring = list(substring)
24 | return tail == substring
25 |
26 |
27 | def _has_newline(bytelist):
28 | return '\r' in bytelist or '\n' in bytelist
29 |
30 |
def output_loop(*args, **kwargs):
    """Convenience wrapper: construct an OutputLooper and run its loop."""
    looper = OutputLooper(*args, **kwargs)
    looper.loop()
33 |
34 |
class OutputLooper(object):
    """
    Read all output from one remote channel stream and dispatch it.

    One looper handles a single stream (stdout via ``recv``, stderr via any
    other attr such as ``recv_stderr``): it mirrors bytes to the local
    ``stream``, records them in the ``capture`` buffer, adds per-host line
    prefixes, and answers password / ``env.prompts`` prompts detected in the
    output.
    """
    def __init__(self, chan, attr, stream, capture, timeout):
        # Channel to read from and to send prompt responses down.
        self.chan = chan
        # Local file-like object output is mirrored to.
        self.stream = stream
        # List-of-chars capture buffer; None signals open_shell() mode.
        self.capture = capture
        # Seconds of consecutive socket timeouts tolerated before raising
        # CommandTimeout; None disables the limit.
        self.timeout = timeout
        # attr is the channel read method name ('recv' == stdout).
        self.read_func = getattr(chan, attr)
        self.prefix = "[%s] %s: " % (
            env.host_string,
            "out" if attr == 'recv' else "err"
        )
        # Whether this stream's output level is currently being shown.
        self.printing = getattr(output, 'stdout' if (attr == 'recv') else 'stderr')
        # Parallel mode forces line-by-line writes to reduce interleaving.
        self.linewise = (env.linewise or env.parallel)
        # Set when a password attempt failed and the user must be re-asked.
        self.reprompt = False
        self.read_size = 4096
        # Remembers the most recently written chars (exactly one prefix
        # worth) so loop() can tell if a bare prefix was printed last.
        self.write_buffer = RingBuffer([], maxlen=len(self.prefix))

    def _flush(self, text):
        """Write ``text`` to the stream and record it in write_buffer."""
        self.stream.write(text)
        # Actually only flush if not in linewise mode.
        # When linewise is set (e.g. in parallel mode) flushing makes
        # doubling-up of line prefixes, and other mixed output, more likely.
        if not env.linewise:
            self.stream.flush()
        self.write_buffer.extend(text)

    def loop(self):
        """
        Loop, reading via ``self.read_func()``, writing to ``self.stream``
        and buffering into ``self.capture``.

        Will raise `~fabric.exceptions.CommandTimeout` if network timeouts
        continue to be seen past the defined ``self.timeout`` threshold.
        (Timeouts before then are considered part of normal short-timeout fast
        network reading; see Fabric issue #733 for background.)
        """
        # Internal capture-buffer-like buffer, used solely for state keeping.
        # Unlike 'capture', nothing is ever purged from this.
        _buffer = []

        # Initialize loop variables
        initial_prefix_printed = False
        seen_cr = False
        line = []

        # Allow prefix to be turned off.
        if not env.output_prefix:
            self.prefix = ""

        start = time.time()
        while True:
            # Handle actual read
            try:
                bytelist = self.read_func(self.read_size)
            except socket.timeout:
                # Individual short socket timeouts are expected; only give
                # up once the cumulative wait exceeds self.timeout.
                elapsed = time.time() - start
                if self.timeout is not None and elapsed > self.timeout:
                    raise CommandTimeout(timeout=self.timeout)
                continue
            # Empty byte == EOS
            if bytelist == '':
                # If linewise, ensure we flush any leftovers in the buffer.
                if self.linewise and line:
                    self._flush(self.prefix)
                    self._flush("".join(line))
                break
            # A None capture variable implies that we're in open_shell()
            if self.capture is None:
                # Just print directly -- no prefixes, no capturing, nada
                # And since we know we're using a pty in this mode, just go
                # straight to stdout.
                self._flush(bytelist)
            # Otherwise, we're in run/sudo and need to handle capturing and
            # prompts.
            else:
                # Print to user
                if self.printing:
                    printable_bytes = bytelist
                    # Small state machine to eat \n after \r
                    if printable_bytes[-1] == "\r":
                        seen_cr = True
                    if printable_bytes[0] == "\n" and seen_cr:
                        printable_bytes = printable_bytes[1:]
                        seen_cr = False

                    # Emit one physical line per iteration, prefixing each.
                    while _has_newline(printable_bytes) and printable_bytes != "":
                        # at most 1 split !
                        cr = re.search("(\r\n|\r|\n)", printable_bytes)
                        if cr is None:
                            break
                        end_of_line = printable_bytes[:cr.start(0)]
                        printable_bytes = printable_bytes[cr.end(0):]

                        if not initial_prefix_printed:
                            self._flush(self.prefix)

                        if _has_newline(end_of_line):
                            end_of_line = ''

                        if self.linewise:
                            # Linewise: emit the whole accumulated line at once.
                            self._flush("".join(line) + end_of_line + "\n")
                            line = []
                        else:
                            self._flush(end_of_line + "\n")
                        initial_prefix_printed = False

                    if self.linewise:
                        # Keep the trailing (newline-less) remainder for later.
                        line += [printable_bytes]
                    else:
                        if not initial_prefix_printed:
                            self._flush(self.prefix)
                            initial_prefix_printed = True
                        self._flush(printable_bytes)

                # Now we have handled printing, handle interactivity
                read_lines = re.split(r"(\r|\n|\r\n)", bytelist)
                for fragment in read_lines:
                    # Store in capture buffer
                    # (capture is a list of chars, so += extends char-by-char)
                    self.capture += fragment
                    # Store in internal buffer
                    _buffer += fragment
                    # Handle prompts
                    expected, response = self._get_prompt_response()
                    if expected:
                        # Strip the matched prompt text from the capture,
                        # then answer it.
                        del self.capture[-1 * len(expected):]
                        self.chan.sendall(str(response) + '\n')
                    else:
                        prompt = _endswith(self.capture, env.sudo_prompt)
                        try_again = (_endswith(self.capture, env.again_prompt + '\n')
                            or _endswith(self.capture, env.again_prompt + '\r\n'))
                        if prompt:
                            self.prompt()
                        elif try_again:
                            self.try_again()

        # Print trailing new line if the last thing we printed was our line
        # prefix.
        if self.prefix and "".join(self.write_buffer) == self.prefix:
            self._flush('\n')

    def prompt(self):
        """
        Handle a detected sudo password prompt, reusing or requesting a
        password and sending it down the channel.
        """
        # Obtain cached password, if any
        password = get_password(*normalize(env.host_string))
        # Remove the prompt itself from the capture buffer. This is
        # backwards compatible with Fabric 0.9.x behavior; the user
        # will still see the prompt on their screen (no way to avoid
        # this) but at least it won't clutter up the captured text.
        del self.capture[-1 * len(env.sudo_prompt):]
        # If the password we just tried was bad, prompt the user again.
        if (not password) or self.reprompt:
            # Print the prompt and/or the "try again" notice if
            # output is being hidden. In other words, since we need
            # the user's input, they need to see why we're
            # prompting them.
            if not self.printing:
                self._flush(self.prefix)
                if self.reprompt:
                    self._flush(env.again_prompt + '\n' + self.prefix)
                self._flush(env.sudo_prompt)
            # Prompt for, and store, password. Give empty prompt so the
            # initial display "hides" just after the actually-displayed
            # prompt from the remote end.
            # Disable input forwarding so input_loop() doesn't steal the
            # user's keystrokes while we read the password locally.
            self.chan.input_enabled = False
            password = fabric.network.prompt_for_password(
                prompt=" ", no_colon=True, stream=self.stream
            )
            self.chan.input_enabled = True
            # Update env.password, env.passwords if necessary
            user, host, port = normalize(env.host_string)
            set_password(user, host, port, password)
        # Reset reprompt flag
        self.reprompt = False
        # Send current password down the pipe
        self.chan.sendall(password + '\n')

    def try_again(self):
        # Remove text from capture buffer
        # NOTE(review): this slice keeps only the FIRST len(env.again_prompt)
        # characters rather than stripping the prompt off the END of the
        # buffer (contrast prompt(), which uses a negative slice). Looks
        # like it was meant to be `del self.capture[-1 * len(...):]` --
        # confirm intent before changing.
        self.capture = self.capture[:len(env.again_prompt)]
        # Set state so we re-prompt the user at the next prompt.
        self.reprompt = True

    def _get_prompt_response(self):
        """
        Iterate through the request prompts dict and return the response and
        original request if we find a match
        """
        # NOTE: dict.iteritems() ties this module to Python 2.
        for tup in env.prompts.iteritems():
            if _endswith(self.capture, tup[0]):
                return tup
        return None, None
224 |
225 |
def input_loop(chan, using_pty):
    """
    Poll local stdin and forward any typed bytes to the remote channel.

    Runs until the remote command's exit status becomes available. When the
    remote side is not using a pty and ``env.echo_stdin`` is set, each
    forwarded byte is also echoed locally.
    """
    while not chan.exit_status_ready():
        # Detect pending keyboard input without blocking.
        if win32:
            pending = msvcrt.kbhit()
        else:
            readable = select([sys.stdin], [], [], 0.0)[0]
            pending = readable and readable[0] == sys.stdin
        if pending and chan.input_enabled:
            # Forward a single byte of local stdin to the remote stdin.
            char = msvcrt.getch() if win32 else sys.stdin.read(1)
            chan.sendall(char)
            if not using_pty and env.echo_stdin:
                # Deliberately bypass fastprint(): it emits at the 'user'
                # output level and could therefore be accidentally hidden.
                sys.stdout.write(char)
                sys.stdout.flush()
        time.sleep(ssh.io_sleep)
244 |
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | from __future__ import with_statement
2 |
3 | import sys
4 | import traceback
5 | from unittest import TestCase
6 |
7 | from fudge import Fake, patched_context, with_fakes
8 | from fudge.patcher import with_patched_object
9 | from nose.tools import eq_, raises
10 |
11 | from fabric.state import output, env
12 | from fabric.utils import warn, indent, abort, puts, fastprint, error, RingBuffer
13 | from fabric import utils # For patching
14 | from fabric.api import local, quiet
15 | from fabric.context_managers import settings, hide
16 | from fabric.colors import magenta, red
17 | from utils import mock_streams, aborts, FabricTest, assert_contains, \
18 | assert_not_contains
19 |
20 |
@mock_streams('stderr')
@with_patched_object(output, 'warnings', True)
def test_warn():
    """
    warn() should print 'Warning' plus given text
    """
    warn("Test")
    expected = "\nWarning: Test\n\n"
    eq_(expected, sys.stderr.getvalue())
29 |
30 |
def test_indent():
    # Each case is (description, value to indent, expected result).
    # Loop variables are named to avoid shadowing the builtin `input`
    # and the `output` object imported from fabric.state.
    for description, source, expected in (
        ("Sanity check: 1 line string",
            'Test', ' Test'),
        ("List of strings turns in to strings joined by \\n",
            ["Test", "Test"], ' Test\n Test'),
    ):
        eq_.description = "indent(): %s" % description
        yield eq_, indent(source), expected
        del eq_.description
41 |
42 |
def test_indent_with_strip():
    # Each case is (description, actual indent() result, expected string).
    # Loop variables are named to avoid shadowing the builtin `input`
    # and the `output` object imported from fabric.state.
    for description, result, expected in (
        ("Sanity check: 1 line string",
            indent('Test', strip=True), ' Test'),
        ("Check list of strings",
            indent(["Test", "Test"], strip=True), ' Test\n Test'),
        ("Check list of strings",
            indent([" Test", " Test"], strip=True),
            ' Test\n Test'),
    ):
        eq_.description = "indent(strip=True): %s" % description
        yield eq_, result, expected
        del eq_.description
56 |
57 |
@aborts
def test_abort():
    """
    abort() should raise SystemExit
    """
    # The @aborts decorator asserts that the wrapped call raises SystemExit.
    abort("Test")
64 |
class TestException(Exception):
    # Marker exception type used to verify env.abort_exception handling.
    pass
67 |
@raises(TestException)
def test_abort_with_exception():
    """
    abort() should raise a provided exception
    """
    # Setting env.abort_exception swaps SystemExit for the given class.
    with settings(abort_exception=TestException):
        abort("Test")
75 |
@mock_streams('stderr')
@with_patched_object(output, 'aborts', True)
def test_abort_message():
    """
    abort() should print 'Fatal error' plus exception value
    """
    try:
        abort("Test")
    except SystemExit:
        # Expected; we only care about what landed on stderr.
        pass
    eq_("\nFatal error: Test\n\nAborting.\n", sys.stderr.getvalue())
88 |
def test_abort_message_only_printed_once():
    """
    abort()'s SystemExit should not cause a reprint of the error message
    """
    # The implicit stderr print performed when SystemExit bubbles all the
    # way to the top can't be observed in-process, so run a fabfile in a
    # subprocess and inspect its captured stderr instead.
    with quiet():
        result = local("fab -f tests/support/aborts.py kaboom", capture=True)
    # Under bug #1318, stderr ends with an extra "It burns!".
    eq_(result.stderr, "Fatal error: It burns!\n\nAborting.")
101 |
@mock_streams('stderr')
@with_patched_object(output, 'aborts', True)
def test_abort_exception_contains_separate_message_and_code():
    """
    abort()'s SystemExit contains distinct .code/.message attributes.
    """
    # Re #1318 / #1213
    try:
        abort("Test")
    except SystemExit as e:
        eq_(e.message, "Test")
        eq_(e.code, 1)
    else:
        # Previously this test passed vacuously when abort() did not raise;
        # fail explicitly in that case.
        raise AssertionError("abort() did not raise SystemExit")
114 |
@mock_streams('stdout')
def test_puts_with_user_output_on():
    """
    puts() should print input to sys.stdout if "user" output level is on
    """
    output.user = True
    text = "string!"
    puts(text, show_prefix=False)
    eq_(sys.stdout.getvalue(), text + "\n")
124 |
@mock_streams('stdout')
def test_puts_with_unicode_output():
    """
    puts() should print unicode input
    """
    output.user = True
    text = u"string!"
    puts(text, show_prefix=False)
    eq_(sys.stdout.getvalue(), text + "\n")
134 |
135 |
@mock_streams('stdout')
def test_puts_with_encoding_type_none_output():
    """
    puts() should print unicode output without a stream encoding
    """
    output.user = True
    # Simulate a stream that advertises no encoding at all.
    sys.stdout.encoding = None
    text = u"string!"
    puts(text, show_prefix=False)
    eq_(sys.stdout.getvalue(), text + "\n")
146 |
@mock_streams('stdout')
def test_puts_with_user_output_off():
    """
    puts() shouldn't print input to sys.stdout if "user" output level is off
    """
    output.user = False
    puts("You aren't reading this.")
    # Nothing at all should have been written.
    eq_(sys.stdout.getvalue(), "")
155 |
156 |
@mock_streams('stdout')
def test_puts_with_prefix():
    """
    puts() should prefix output with env.host_string if non-empty
    """
    message = "my output"
    host = "localhost"
    with settings(host_string=host):
        puts(message)
    eq_(sys.stdout.getvalue(), "[%s] %s" % (host, message + "\n"))
167 |
168 |
@mock_streams('stdout')
def test_puts_without_prefix():
    """
    puts() shouldn't prefix output with env.host_string if show_prefix is False
    """
    # Fixed: removed the unused local `h` and the no-op "%s" formatting.
    s = "my output"
    puts(s, show_prefix=False)
    # No "[host]" prefix expected -- just the message plus a newline.
    eq_(sys.stdout.getvalue(), s + "\n")
178 |
@with_fakes
def test_fastprint_calls_puts():
    """
    fastprint() is just an alias to puts()
    """
    message = "Some output"
    # Expect exactly one puts() call: prefixing off, no newline, flushed.
    fake_puts = Fake('puts', expect_call=True).with_args(
        text=message, show_prefix=False, end="", flush=True
    )
    with patched_context(utils, 'puts', fake_puts):
        fastprint(message)
190 |
191 |
class TestErrorHandling(FabricTest):
    """Tests for fabric.utils.error() dispatch, hidden-output replay and
    colorization."""

    # Sentinel returned by the faked format_exc below.
    dummy_string = 'test1234!'

    @with_patched_object(utils, 'warn', Fake('warn', callable=True,
        expect_call=True))
    def test_error_warns_if_warn_only_True_and_func_None(self):
        """
        warn_only=True, error(func=None) => calls warn()
        """
        with settings(warn_only=True):
            error('foo')

    @with_patched_object(utils, 'abort', Fake('abort', callable=True,
        expect_call=True))
    def test_error_aborts_if_warn_only_False_and_func_None(self):
        """
        warn_only=False, error(func=None) => calls abort()
        """
        with settings(warn_only=False):
            error('foo')

    def test_error_calls_given_func_if_func_not_None(self):
        """
        error(func=callable) => calls callable()
        """
        error('foo', func=Fake(callable=True, expect_call=True))

    @mock_streams('stdout')
    @with_patched_object(utils, 'abort', Fake('abort', callable=True,
        expect_call=True).calls(lambda x: sys.stdout.write(x + "\n")))
    def test_error_includes_stdout_if_given_and_hidden(self):
        """
        error() correctly prints stdout if it was previously hidden
        """
        # Mostly to catch regression bug(s)
        stdout = "this is my stdout"
        with hide('stdout'):
            error("error message", func=utils.abort, stdout=stdout)
        assert_contains(stdout, sys.stdout.getvalue())

    @mock_streams('stdout')
    @with_patched_object(utils, 'abort', Fake('abort', callable=True,
        expect_call=True).calls(lambda x: sys.stdout.write(x + "\n")))
    @with_patched_object(output, 'exceptions', True)
    @with_patched_object(utils, 'format_exc', Fake('format_exc', callable=True,
        expect_call=True).returns(dummy_string))
    def test_includes_traceback_if_exceptions_logging_is_on(self):
        """
        error() includes traceback in message if exceptions logging is on
        """
        # NOTE(review): `stdout=error` passes the error function itself as
        # the stdout argument; presumably only truthiness matters here --
        # confirm this was not meant to be a string.
        error("error message", func=utils.abort, stdout=error)
        assert_contains(self.dummy_string, sys.stdout.getvalue())

    @mock_streams('stdout')
    @with_patched_object(utils, 'abort', Fake('abort', callable=True,
        expect_call=True).calls(lambda x: sys.stdout.write(x + "\n")))
    @with_patched_object(output, 'debug', True)
    @with_patched_object(utils, 'format_exc', Fake('format_exc', callable=True,
        expect_call=True).returns(dummy_string))
    def test_includes_traceback_if_debug_logging_is_on(self):
        """
        error() includes traceback in message if debug logging is on (backwards compatibility)
        """
        error("error message", func=utils.abort, stdout=error)
        assert_contains(self.dummy_string, sys.stdout.getvalue())

    @mock_streams('stdout')
    @with_patched_object(utils, 'abort', Fake('abort', callable=True,
        expect_call=True).calls(lambda x: sys.stdout.write(x + "\n")))
    @with_patched_object(output, 'exceptions', True)
    @with_patched_object(utils, 'format_exc', Fake('format_exc', callable=True,
        expect_call=True).returns(None))
    def test_doesnt_print_None_when_no_traceback_present(self):
        """
        error() doesn't include None in message if there is no traceback
        """
        error("error message", func=utils.abort, stdout=error)
        assert_not_contains('None', sys.stdout.getvalue())

    @mock_streams('stderr')
    @with_patched_object(utils, 'abort', Fake('abort', callable=True,
        expect_call=True).calls(lambda x: sys.stderr.write(x + "\n")))
    def test_error_includes_stderr_if_given_and_hidden(self):
        """
        error() correctly prints stderr if it was previously hidden
        """
        # Mostly to catch regression bug(s)
        stderr = "this is my stderr"
        with hide('stderr'):
            error("error message", func=utils.abort, stderr=stderr)
        assert_contains(stderr, sys.stderr.getvalue())

    @mock_streams('stderr')
    def test_warnings_print_magenta_if_colorize_on(self):
        with settings(colorize_errors=True):
            error("oh god", func=utils.warn, stderr="oops")
        # can't use assert_contains as ANSI codes contain regex specialchars
        eq_(magenta("\nWarning: oh god\n\n"), sys.stderr.getvalue())

    @mock_streams('stderr')
    @raises(SystemExit)
    def test_errors_print_red_if_colorize_on(self):
        with settings(colorize_errors=True):
            error("oh god", func=utils.abort, stderr="oops")
        # can't use assert_contains as ANSI codes contain regex specialchars
        # Fixed: the expected string previously began with the invalid
        # escape "\E" instead of "\nE".
        # NOTE(review): utils.abort raises SystemExit inside the settings
        # block (hence @raises), so this eq_ is never actually reached.
        eq_(red("\nError: oh god\n\n"), sys.stderr.getvalue())
298 |
299 |
class TestRingBuffer(TestCase):
    """Behavioral checks for fabric.utils.RingBuffer with maxlen=5."""

    def setUp(self):
        self.buf = RingBuffer([], maxlen=5)

    def test_append_empty(self):
        # Appending below capacity simply appends.
        self.buf.append('x')
        eq_(self.buf, ['x'])

    def test_append_full(self):
        # Appending to a full buffer evicts the oldest item.
        self.buf.extend("abcde")
        self.buf.append('f')
        eq_(self.buf, ['b', 'c', 'd', 'e', 'f'])

    def test_extend_empty(self):
        self.buf.extend("abc")
        eq_(self.buf, ['a', 'b', 'c'])

    def test_extend_overrun(self):
        # Extending past capacity keeps only the newest five items.
        self.buf.extend("abc")
        self.buf.extend("defg")
        eq_(self.buf, ['c', 'd', 'e', 'f', 'g'])

    def test_extend_full(self):
        self.buf.extend("abcde")
        self.buf.extend("fgh")
        eq_(self.buf, ['d', 'e', 'f', 'g', 'h'])
326 |
--------------------------------------------------------------------------------