├── .editorconfig
├── .gitignore
├── README.md
├── app
│   ├── Dockerfile
│   ├── apps
│   │   └── auth
│   │       ├── __init__.py
│   │       ├── bp.py
│   │       └── models.py
│   ├── auto.py
│   ├── config.py
│   ├── extensions.py
│   ├── main.py
│   ├── migrations
│   │   ├── README
│   │   ├── alembic.ini
│   │   ├── env.py
│   │   ├── script.py.mako
│   │   └── versions
│   │       └── 2018-02-001-auth_user_and_role.py
│   ├── tox.ini
│   └── utils.py
├── docker-compose-dev.yml
├── docker-compose.yml
├── envfile
├── envfile-dev
└── fabfile.py
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | indent_size = 2
5 | indent_style = space
6 | insert_final_newline = true
7 |
8 | [secrets/*]
9 | insert_final_newline = false
10 |
11 | [*.py]
12 | indent_size = 4
13 |
14 | [{*.html,*.css,*.yml,Dockerfile}]
15 | indent_size = 2
16 |
17 | [Makefile]
18 | indent_style = tab
19 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .vscode
2 | .bash_history
3 | *.pyc
4 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Flask Empty API
2 |
3 | Docker-powered Flask boilerplate for super fast prototyping.
4 | Get your Flask REST, token-authenticated, websocket-ready project
5 | running with a single command.
6 |
7 | ## Getting Started
8 |
9 | * Make sure docker, docker-compose and fabric are installed
10 | * Clone the repo with any name you like
11 | * Go to the project folder and run: `fab env:dev up` (project is now running)
12 | * In another terminal, create the database with: `fab env:dev on:app run:"flask db upgrade"`
13 |
14 | ## Available Endpoints (out-of-the-box)
15 |
16 | * /login
17 | * /logout
18 | * / # index
19 |
20 | ## Useful
21 |
22 | * `fab env:dev on:app run:"flask shell"` # bring up flask shell
23 | * `fab env:dev on:app run:"flask db migrate --rev-id 001 -m message"` # create revision
24 | * `fab env:dev attach:containerID` # attach to tty; logs and pdb
25 |
26 | ## Deployment
27 |
28 | * configure your swarm secrets
29 | * tune flask configuration for security (SSL, MAILING, etc)
30 | * make sure envfile variables are production ready
31 | * open the champagne
32 |
--------------------------------------------------------------------------------
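Note (illustration, not part of the repo): a minimal sketch of exercising the /login endpoint listed above from the host, assuming Flask-Security's default JSON login behaviour and its default Authentication-Token header; the credentials and the exact response shape are assumptions and may differ between Flask-Security releases.

import json
import urllib.request

BASE = 'http://dv:5000'  # dev host/port from envfile-dev

def get_token(email, password):
    # POST credentials as JSON; Flask-Security answers with JSON when asked
    body = json.dumps({'email': email, 'password': password}).encode()
    req = urllib.request.Request(
        BASE + '/login', data=body,
        headers={'Content-Type': 'application/json'})
    with urllib.request.urlopen(req) as resp:
        data = json.load(resp)
    # Assumed response shape: {"response": {"user": {"authentication_token": ...}}}
    return data['response']['user']['authentication_token']

token = get_token('admin@example.com', 'passw0rd')  # hypothetical credentials
# Subsequent requests authenticate via the token header
req = urllib.request.Request(BASE + '/', headers={'Authentication-Token': token})
print(urllib.request.urlopen(req).read().decode())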
/app/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.6
2 |
3 | ENV USR nonroot
4 | ENV HOME /home/$USR
5 | ENV PYTHONUNBUFFERED 1
6 |
7 | EXPOSE 5000
8 |
9 | RUN groupadd -g 1000 -r $USR && \
10 | useradd -u 1000 -d $HOME -m -r -g $USR $USR
11 |
12 | WORKDIR $HOME
13 |
14 | RUN pip install --no-cache-dir \
15 | empty==0.4.3\
16 | eventlet==0.22.0\
17 | flask-marshmallow==0.8.0\
18 | flask-migrate==2.1.1\
19 | flask-restful==0.3.6\
20 | flask-security==3.0.0\
21 | flask-socketio==2.9.3\
22 | flask-sqlalchemy==2.3.2\
23 | flask-rq2==17.2\
24 | marshmallow-sqlalchemy==0.13.2\
25 | psycopg2==2.7.3.2\
26 | redis==2.10.6
27 |
28 | COPY --chown=1000:1000 . .
29 |
30 | USER $USR
31 | CMD ["flask", "run", "-h", "0.0.0.0"]
--------------------------------------------------------------------------------
/app/apps/auth/__init__.py:
--------------------------------------------------------------------------------
1 | from .bp import app
--------------------------------------------------------------------------------
/app/apps/auth/bp.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint
2 |
3 | app = Blueprint('auth', __name__, template_folder=None)
4 |
--------------------------------------------------------------------------------
/app/apps/auth/models.py:
--------------------------------------------------------------------------------
1 | from extensions import db
2 | from flask_security import SQLAlchemyUserDatastore
3 | from flask_security import UserMixin, RoleMixin
4 |
5 | # Define models
6 | roles_users = db.Table(
7 | 'roles_users',
8 | db.Column('user_id', db.Integer(), db.ForeignKey('user.id')),
9 | db.Column('role_id', db.Integer(), db.ForeignKey('role.id')))
10 |
11 |
12 | class Role(db.Model, RoleMixin):
13 | id = db.Column(db.Integer(), primary_key=True)
14 | name = db.Column(db.String(80), unique=True)
15 | description = db.Column(db.String(255))
16 |
17 |
18 | class User(db.Model, UserMixin):
19 | id = db.Column(db.Integer, primary_key=True)
20 | email = db.Column(db.String(255), unique=True)
21 | password = db.Column(db.String(255))
22 | active = db.Column(db.Boolean())
23 | confirmed_at = db.Column(db.DateTime())
24 | roles = db.relationship(
25 | 'Role',
26 | secondary=roles_users,
27 | backref=db.backref('users', lazy='dynamic'))
28 |
29 |
30 | # Setup Flask-Security
31 | user_datastore = SQLAlchemyUserDatastore(db, User, Role)
32 |
--------------------------------------------------------------------------------
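Note (illustration, not part of the repo): once the database is migrated, a first user can be created from `flask shell` (see the README's Useful section) using the user_datastore defined above. This sketch assumes flask_security.utils.hash_password is available in the pinned Flask-Security release and that your SECURITY_* settings (e.g. SECURITY_PASSWORD_SALT) are configured for the chosen hash scheme.

# run inside: fab env:dev on:app run:"flask shell"
from flask_security.utils import hash_password
from auth.models import user_datastore   # same import path used in extensions.py
from extensions import db

user_datastore.create_user(
    email='admin@example.com',            # hypothetical credentials
    password=hash_password('passw0rd'),
    active=True)
db.session.commit()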
/app/auto.py:
--------------------------------------------------------------------------------
1 | import eventlet
2 | eventlet.monkey_patch()
3 |
4 | from empty import app_factory
5 | from main import App
6 | import config
7 | import traceback
8 | import logging
9 |
10 | try:
11 | # SPA setup; template_folder ignored;
12 | app = app_factory(
13 | 'app', config,
14 | template_folder=None,
15 | base_application=App)
16 | except Exception as e:
17 | logging.error(traceback.format_exc())
18 | raise e
19 |
--------------------------------------------------------------------------------
/app/config.py:
--------------------------------------------------------------------------------
1 | import os
2 | from utils import load_env
3 |
4 | DEBUG = os.getenv('FLASK_DEBUG') == '1'
5 | SECRET_KEY = load_env('FLASK_SECRET_KEY')
6 |
7 | BLUEPRINTS = ['auth']
8 | EXTENSIONS = list(map(lambda e: 'extensions.' + e, [
9 | 'io',
10 | 'db',
11 | 'migrate',
12 | 'ma',
13 | 'security'
14 | ]))
15 |
16 | PSYCOPG2_URI = 'postgresql+psycopg2://{user}:{passwd}@{host}/{name}'
17 |
18 | RQ_REDIS_URL = 'redis://broker:6379/0'
19 |
20 | # Make sure SERVER_NAME contains the access port for
21 | # the http server if it is not a default port (ex: dv:8080)
22 | # Also, add "127.0.0.1 dv" to your /etc/hosts during development
23 | SERVER_NAME = os.getenv('SERVER_NAME') + os.getenv('SERVER_NAME_EXTRA', '')
24 |
25 | SQLALCHEMY_DATABASE_URI = PSYCOPG2_URI.format(
26 | user=load_env('POSTGRES_USER'),
27 | passwd=load_env('POSTGRES_PASSWORD'),
28 | host='db',
29 | name=load_env('POSTGRES_DB')
30 | )
31 | SQLALCHEMY_TRACK_MODIFICATIONS = False
32 |
--------------------------------------------------------------------------------
/app/extensions.py:
--------------------------------------------------------------------------------
1 | from flask_socketio import SocketIO
2 | from flask_sqlalchemy import SQLAlchemy
3 | from flask_migrate import Migrate
4 | from flask_marshmallow import Marshmallow
5 | from flask_security import Security
6 |
7 | io = SocketIO()
8 | db = SQLAlchemy()
9 | migrate = Migrate(db=db)
10 | ma = Marshmallow()
11 | security = Security()
12 |
13 |
14 | def security_init_kwargs():
15 | from auth.models import user_datastore
16 | return dict(datastore=user_datastore)
17 |
18 |
19 | def io_init_kwargs():
20 | return dict(logger=True, engineio_logger=True)
21 |
--------------------------------------------------------------------------------
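Note (illustration, not part of the repo): the EXTENSIONS entries in config.py are dotted paths to the instances above, and the optional `<name>_init_kwargs` functions supply extra arguments at init time. As a sketch of that convention only (not the empty package's actual code), an app factory could consume them roughly like this:

import importlib

def init_extensions(app, dotted_paths):
    for path in dotted_paths:                      # e.g. 'extensions.db'
        module_name, name = path.rsplit('.', 1)
        module = importlib.import_module(module_name)
        extension = getattr(module, name)
        # optional hook, e.g. security_init_kwargs() above
        kwargs_fn = getattr(module, '%s_init_kwargs' % name, dict)
        extension.init_app(app, **kwargs_fn())     # standard Flask extension setup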
/app/main.py:
--------------------------------------------------------------------------------
1 | from empty import Empty
2 |
3 |
4 | class App(Empty):
5 | def configure_views(self):
6 | @self.route('/')
7 | def index():
8 | """Use this to make sure your web app is reachable"""
9 | return """
10 | Hello. This boilerplate was made to help those interested in the
11 | Flask web framework to rapidly start coding robust API's. Most
12 | tools are setup with sensible defaults.
13 |
14 | This boilerplate is websocket ready (flask-socketio), can handle
15 | async tasks (flask-rq2), JSON requests and responses with
16 | serialization, token authentication, has ORM support
17 | (sqlalchemy+alembic) and so on.
18 |
19 | The app service supports pdb debugging (just run the project in
20 | daemon mode and attach to app), you have a few nice helpful
21 | commands available through fabric out-of-the-box and a neat
22 | project structure.
23 |
24 | For deployment, just remember to setup your swarm secrets and
25 | you should be ready to go. If you wish to help, create an issue
26 | for any bugs that you find and leave a star. PRs are welcome.
27 | """
28 |
29 | def configure_error_handlers(self):
30 | """SPA"""
31 | pass
32 |
33 |
34 | if __name__ == '__main__':
35 | from auto import app
36 | from extensions import io
37 |
38 | io.run(app)
39 |
--------------------------------------------------------------------------------
/app/migrations/README:
--------------------------------------------------------------------------------
1 | Generic single-database configuration.
--------------------------------------------------------------------------------
/app/migrations/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 |
3 | [alembic]
4 | # template used to generate migration files
5 | file_template = %%(year)d-%%(month).2d-%%(rev)s-%%(slug)s
6 |
7 | # set to 'true' to run the environment during
8 | # the 'revision' command, regardless of autogenerate
9 | # revision_environment = false
10 |
11 |
12 | # Logging configuration
13 | [loggers]
14 | keys = root,sqlalchemy,alembic
15 |
16 | [handlers]
17 | keys = console
18 |
19 | [formatters]
20 | keys = generic
21 |
22 | [logger_root]
23 | level = WARN
24 | handlers = console
25 | qualname =
26 |
27 | [logger_sqlalchemy]
28 | level = WARN
29 | handlers =
30 | qualname = sqlalchemy.engine
31 |
32 | [logger_alembic]
33 | level = INFO
34 | handlers =
35 | qualname = alembic
36 |
37 | [handler_console]
38 | class = StreamHandler
39 | args = (sys.stderr,)
40 | level = NOTSET
41 | formatter = generic
42 |
43 | [formatter_generic]
44 | format = %(levelname)-5.5s [%(name)s] %(message)s
45 | datefmt = %H:%M:%S
46 |
--------------------------------------------------------------------------------
/app/migrations/env.py:
--------------------------------------------------------------------------------
1 | from __future__ import with_statement
2 | from alembic import context
3 | from sqlalchemy import engine_from_config, pool
4 | from logging.config import fileConfig
5 | import logging
6 |
7 | # this is the Alembic Config object, which provides
8 | # access to the values within the .ini file in use.
9 | config = context.config
10 |
11 | # Interpret the config file for Python logging.
12 | # This line sets up loggers basically.
13 | fileConfig(config.config_file_name)
14 | logger = logging.getLogger('alembic.env')
15 |
16 | # add your model's MetaData object here
17 | # for 'autogenerate' support
18 | # from myapp import mymodel
19 | # target_metadata = mymodel.Base.metadata
20 | from flask import current_app
21 | config.set_main_option('sqlalchemy.url',
22 | current_app.config.get('SQLALCHEMY_DATABASE_URI'))
23 | target_metadata = current_app.extensions['migrate'].db.metadata
24 |
25 | # other values from the config, defined by the needs of env.py,
26 | # can be acquired:
27 | # my_important_option = config.get_main_option("my_important_option")
28 | # ... etc.
29 |
30 |
31 | def run_migrations_offline():
32 | """Run migrations in 'offline' mode.
33 |
34 | This configures the context with just a URL
35 | and not an Engine, though an Engine is acceptable
36 | here as well. By skipping the Engine creation
37 | we don't even need a DBAPI to be available.
38 |
39 | Calls to context.execute() here emit the given string to the
40 | script output.
41 |
42 | """
43 | url = config.get_main_option("sqlalchemy.url")
44 | context.configure(url=url)
45 |
46 | with context.begin_transaction():
47 | context.run_migrations()
48 |
49 |
50 | def run_migrations_online():
51 | """Run migrations in 'online' mode.
52 |
53 | In this scenario we need to create an Engine
54 | and associate a connection with the context.
55 |
56 | """
57 |
58 | # this callback is used to prevent an auto-migration from being generated
59 | # when there are no changes to the schema
60 | # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
61 | def process_revision_directives(context, revision, directives):
62 | if getattr(config.cmd_opts, 'autogenerate', False):
63 | script = directives[0]
64 | if script.upgrade_ops.is_empty():
65 | directives[:] = []
66 | logger.info('No changes in schema detected.')
67 |
68 | engine = engine_from_config(config.get_section(config.config_ini_section),
69 | prefix='sqlalchemy.',
70 | poolclass=pool.NullPool)
71 |
72 | connection = engine.connect()
73 | context.configure(connection=connection,
74 | target_metadata=target_metadata,
75 | process_revision_directives=process_revision_directives,
76 | **current_app.extensions['migrate'].configure_args)
77 |
78 | try:
79 | with context.begin_transaction():
80 | context.run_migrations()
81 | finally:
82 | connection.close()
83 |
84 | if context.is_offline_mode():
85 | run_migrations_offline()
86 | else:
87 | run_migrations_online()
88 |
--------------------------------------------------------------------------------
/app/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | ${imports if imports else ""}
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)}
14 | down_revision = ${repr(down_revision)}
15 | branch_labels = ${repr(branch_labels)}
16 | depends_on = ${repr(depends_on)}
17 |
18 |
19 | def upgrade():
20 | ${upgrades if upgrades else "pass"}
21 |
22 |
23 | def downgrade():
24 | ${downgrades if downgrades else "pass"}
25 |
--------------------------------------------------------------------------------
/app/migrations/versions/2018-02-001-auth_user_and_role.py:
--------------------------------------------------------------------------------
1 | """initial
2 |
3 | Revision ID: 001
4 | Revises:
5 | Create Date: 2018-02-05 01:08:24.820088
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '001'
14 | down_revision = None
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | op.create_table('role',
22 | sa.Column('id', sa.Integer(), nullable=False),
23 | sa.Column('name', sa.String(length=80), nullable=True),
24 | sa.Column('description', sa.String(length=255), nullable=True),
25 | sa.PrimaryKeyConstraint('id'),
26 | sa.UniqueConstraint('name')
27 | )
28 | op.create_table('user',
29 | sa.Column('id', sa.Integer(), nullable=False),
30 | sa.Column('email', sa.String(length=255), nullable=True),
31 | sa.Column('password', sa.String(length=255), nullable=True),
32 | sa.Column('active', sa.Boolean(), nullable=True),
33 | sa.Column('confirmed_at', sa.DateTime(), nullable=True),
34 | sa.PrimaryKeyConstraint('id'),
35 | sa.UniqueConstraint('email')
36 | )
37 | op.create_table('roles_users',
38 | sa.Column('user_id', sa.Integer(), nullable=True),
39 | sa.Column('role_id', sa.Integer(), nullable=True),
40 | sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
41 | sa.ForeignKeyConstraint(['user_id'], ['user.id'], )
42 | )
43 | # ### end Alembic commands ###
44 |
45 |
46 | def downgrade():
47 | # ### commands auto generated by Alembic - please adjust! ###
48 | op.drop_table('roles_users')
49 | op.drop_table('user')
50 | op.drop_table('role')
51 | # ### end Alembic commands ###
52 |
--------------------------------------------------------------------------------
/app/tox.ini:
--------------------------------------------------------------------------------
1 | [flake8]
2 | ignore=E402
--------------------------------------------------------------------------------
/app/utils.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 |
4 | def load_env(name):
5 | """
6 | Tries to load `name` from the environment; if it is not
7 | set, appends "_FILE" to `name` and reads the value from
8 | the file at that path. This is quite useful for loading
9 | docker secrets.
10 | """
11 | value = os.getenv(name)
12 | if value is None:
13 | value = os.getenv('%s_FILE' % name)
14 | if value is not None and os.path.exists(value):
15 | with open(value) as fs:
16 | return fs.read()
17 | return value
18 |
--------------------------------------------------------------------------------
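Note (illustration, not part of the repo): a small sketch of the fallback behaviour above, using the secret path already referenced in envfile:

import os
from utils import load_env

# With POSTGRES_PASSWORD unset, load_env falls back to POSTGRES_PASSWORD_FILE.
os.environ.pop('POSTGRES_PASSWORD', None)
os.environ['POSTGRES_PASSWORD_FILE'] = '/run/secrets/postgres_passwd'

# Returns the secret file's contents if the path exists,
# otherwise the path string itself.
passwd = load_env('POSTGRES_PASSWORD')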
/docker-compose-dev.yml:
--------------------------------------------------------------------------------
1 | version: '3.3'
2 |
3 | services:
4 | db:
5 | env_file:
6 | - ./envfile-dev
7 | stdin_open: true
8 | tty: true
9 | broker:
10 | env_file:
11 | - ./envfile-dev
12 | stdin_open: true
13 | tty: true
14 | app:
15 | env_file:
16 | - ./envfile-dev
17 | ports:
18 | - 5000:5000
19 | stdin_open: true
20 | tty: true
21 | volumes:
22 | - type: bind
23 | source: ./app
24 | target: /home/nonroot
25 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.3'
2 |
3 | volumes:
4 | db-data:
5 | driver: local
6 |
7 | networks:
8 | backend:
9 | frontend:
10 |
11 | services:
12 | db:
13 | command: ["postgres"]
14 | entrypoint: ["docker-entrypoint.sh"]
15 | image: postgres:10.1-alpine
16 | env_file:
17 | - ./envfile
18 | networks:
19 | - backend
20 | volumes:
21 | - db-data:/var/lib/postgresql/data/
22 | broker:
23 | image: redis:4.0-alpine
24 | networks:
25 | - backend
26 | app:
27 | build: ./app
28 | env_file:
29 | - ./envfile
30 | depends_on:
31 | - db
32 | - broker
33 | networks:
34 | - backend
35 | - frontend
36 |
--------------------------------------------------------------------------------
/envfile:
--------------------------------------------------------------------------------
1 | # replace "app" with your app name
2 | # do not keep this in vcs
3 | PGDATA=/var/lib/postgresql/data/app
4 | POSTGRES_DB=app
5 | POSTGRES_USER=app_user
6 | POSTGRES_PASSWORD_FILE=/run/secrets/postgres_passwd
7 |
8 | FLASK_APP=auto.py
9 | FLASK_DEBUG=0
10 | # generate your secret with the command below
11 | # python -c "import os; print(os.urandom(20))"
12 | FLASK_SECRET_KEY_FILE=/run/secrets/flask_secret_key
13 | FLASK_LOGGER_NAME=app_logger
14 |
15 | # SERVER_NAME should be the full domain
16 | # serving your application
17 | SERVER_NAME=
18 |
--------------------------------------------------------------------------------
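Note (illustration, not part of the repo): the *_FILE variables above point at /run/secrets/*, so a production stack needs matching docker swarm secrets. A minimal sketch of a compose override that could provide them (the file itself is an assumption, not shipped with the repo; secret names follow the paths above):

version: '3.3'

secrets:
  postgres_passwd:
    external: true   # created beforehand with `docker secret create`
  flask_secret_key:
    external: true

services:
  db:
    secrets:
      - postgres_passwd
  app:
    secrets:
      - postgres_passwd
      - flask_secret_key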
/envfile-dev:
--------------------------------------------------------------------------------
1 | PGDATA=/var/lib/postgresql/data/app-dev
2 | POSTGRES_DB=app
3 | POSTGRES_USER=app_user
4 | POSTGRES_PASSWORD=passw0rd
5 | POSTGRES_PASSWORD_FILE=
6 |
7 | FLASK_DEBUG=1
8 | FLASK_SECRET_KEY=secret
9 | FLASK_LOGGER_NAME=app_logger
10 |
11 | SERVER_NAME=dv
12 | SERVER_NAME_EXTRA=:5000
13 |
--------------------------------------------------------------------------------
/fabfile.py:
--------------------------------------------------------------------------------
1 | from fabric.api import env
2 | from fabric.api import local
3 | from fabric.api import run
4 | from fabric.api import task
5 | from fabric.context_managers import cd, lcd
6 |
7 | import os
8 | import json
9 |
10 | env.forward_agent = True
11 | env.user = 'root'
12 | env.hosts = ['your production host']
13 |
14 | project_dst = 'project-name'
15 |
16 | compose_cmd = [
17 | 'docker-compose',
18 | '-f', 'docker-compose.yml',
19 | '-f',
20 | ]
21 |
22 | # service to run commands against
23 | service_name = None
24 | renv = 'dev' # dev by default
25 | opts = []
26 | CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
27 | STYLES_DIR = os.path.join(CURRENT_DIR, 'styles')
28 | UX_DIR = os.path.join(CURRENT_DIR, 'ux')
29 |
30 |
31 | def get_compose_cmd():
32 | return compose_cmd + ['docker-compose-%s.yml' % renv]
33 |
34 |
35 | def get_fn():
36 | """
37 | Returns the correct function call for the environment.
38 | """
39 | return run if renv == 'prd' else local
40 |
41 |
42 | def get_cmd_exists(cmd):
43 | def tell_on(arg, rs):
44 | if rs:
45 | print('"%s" found in path.' % arg)
46 | else:
47 | print('"%s" not found in path. Please, install it to continue.' % arg) # noqa
48 | return rs
49 |
50 | fn = get_fn()
51 | rs = fn('which %s' % cmd, capture=True)
52 | return tell_on(cmd, ('not found' not in rs))
53 |
54 |
55 | @task(alias='setup')
56 | def do_setup():
57 | """
58 | Helps you setup your environment. Call it once per project.
59 | """
60 | msg = "Command not found. Please, install %s"
61 | assert get_cmd_exists('npm'), msg % "npm"
62 | assert get_cmd_exists('vue'), msg % "vue-cli"
63 | assert get_cmd_exists('fab'), msg % "fabric3"
64 | assert get_cmd_exists('docker'), msg % "docker"
65 | assert get_cmd_exists('docker-compose'), msg % "docker-compose"
66 |
67 | print("Setting up VueJS (just accept defaults)")
68 | local('vue init webpack ux', shell='/bin/bash')
69 |
70 | print("Setting up SemanticUI (just accept defaults)")
71 | with lcd(STYLES_DIR):
72 | local('npm install semantic-ui', shell='/bin/bash')
73 |
74 | semantic_settings = os.path.join(STYLES_DIR, 'semantic.json')
75 | with open(semantic_settings, 'r') as fs:
76 | data = json.load(fs)
77 |
78 | data['autoInstall'] = True
79 | with open(semantic_settings, 'w') as fs:
80 | json.dump(data, fs)
81 |
82 | print(
83 | "IMPORTANT: run the following command:\n"
84 | "sudo echo \"127.0.0.1 dv\" >> /etc/hosts")
85 |
86 | print(
87 | "IMPORTANT: make sure to update your envfile file with "
88 | "your project production configuration.")
89 | print(
90 | "IMPORTANT: make sure to update your fabfile "
91 | "hosts with your production host.")
92 | print("")
93 | print("Now you're ready to go:")
94 | print(' fab env:dev up # for development mode')
95 | print(' fab env:prd up # for production mode')
96 | print(' fab env:tst up # to simulate production mode')
97 | print('Locally, your project will be available at http://dv:8080')
98 |
99 |
100 | @task(alias='env')
101 | def set_renv(local_renv):
102 | "Sets docker-compose environment"
103 | global renv
104 | assert local_renv in ('dev', 'prd')
105 | renv = local_renv
106 |
107 |
108 | @task(alias='dae')
109 | def set_daemon():
110 | opts.append('-d')
111 |
112 |
113 | @task(alias='up')
114 | def compose_up(name=None):
115 | """
116 | Calls docker compose up using the correct environment.
117 | """
118 | with cd(project_dst):
119 | local_cmd = get_compose_cmd() + ['up']
120 | local_cmd += opts
121 | local_cmd += [name] if name else []
122 | get_fn()(' '.join(local_cmd))
123 |
124 |
125 | @task(alias='build')
126 | def compose_build(name=None):
127 | """
128 | Calls docker compose build using the correct environment.
129 | """
130 | with cd(project_dst):
131 | local_cmd = get_compose_cmd() + ['build']
132 | local_cmd += [name] if name else []
133 |
134 | get_fn()(' '.join(local_cmd))
135 |
136 |
137 | @task(alias='on')
138 | def on_service(name):
139 | """
140 | Define service where command should run
141 | """
142 | global service_name
143 | service_name = name
144 |
145 |
146 | @task(alias='run')
147 | def compose_run(cmd):
148 | """
149 | Calls docker compose run using the correct environment.
150 |
151 | :param cmd: run command, including container name.
152 | """
153 | opts.append('--rm')
154 |
155 | if service_name is None:
156 | print("please, provide service name")
157 | exit()
158 |
159 | with cd(project_dst):
160 | local_cmd = get_compose_cmd() + ['run']
161 | local_cmd += opts
162 | local_cmd += [service_name]
163 | local_cmd += cmd.split()
164 | get_fn()(' '.join(local_cmd))
165 |
166 |
167 | @task(alias='logs')
168 | def docker_logs(name):
169 | """
170 | Get docker container logs.
171 | """
172 | get_fn()('docker logs %s' % name)
173 |
174 |
175 | @task(alias='attach')
176 | def docker_attach(cid):
177 | local('docker attach --sig-proxy=false --detach-keys="ctrl-c" %s' % cid)
178 |
179 |
180 | @task(alias='ps')
181 | def docker_ps():
182 | get_fn()('docker ps --format "{{.Names}}\t{{.ID}}"')
183 |
--------------------------------------------------------------------------------