├── .env ├── .gitignore ├── Dockerfile ├── Pipfile ├── README.md ├── alembic.ini ├── app ├── __init__.py ├── controllers │ ├── __init__.py │ └── controller │ │ ├── __init__.py │ │ ├── controller.py │ │ └── schemas.py ├── data │ ├── __init__.py │ └── models.py ├── main.py └── utils │ ├── __init__.py │ └── helpers.py ├── core ├── __init__.py ├── dbsetup.py ├── extensions.py ├── factories.py └── settings │ ├── __init__.py │ ├── devsettings.py │ ├── prodsettings.py │ └── settings.py ├── gunicorn_conf.py ├── migrations ├── README ├── env.py └── script.py.mako ├── prestart.sh ├── requirements.txt └── start.sh /.env: -------------------------------------------------------------------------------- 1 | export settings=dev 2 | export DB_NAME=db_name 3 | export DB_USER=db_username 4 | export DB_PASSWORD=db_password 5 | export DB_HOST=127.0.0.1 6 | export DB_PORT=5432 7 | 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | ### JetBrains template 3 | .idea/ 4 | 5 | ### Python template 6 | # Byte-compiled / optimized / DLL files 7 | __pycache__/ 8 | *.py[cod] 9 | *$py.class 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | .Python 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | downloads/ 20 | eggs/ 21 | .eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | *.cover 52 | .hypothesis/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | .static_storage/ 61 | .media/ 62 | local_settings.py 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # pyenv 81 | .python-version 82 | 83 | # celery beat schedule file 84 | celerybeat-schedule 85 | 86 | # SageMath parsed files 87 | *.sage.py 88 | 89 | # Environments 90 | # .env 91 | .venv 92 | env/ 93 | venv/ 94 | ENV/ 95 | env.bak/ 96 | venv.bak/ 97 | 98 | # Spyder project settings 99 | .spyderproject 100 | .spyproject 101 | 102 | # Rope project settings 103 | .ropeproject 104 | 105 | # mkdocs documentation 106 | /site 107 | 108 | # mypy 109 | .mypy_cache/ 110 | .pytest_cache/ 111 | 112 | # VSCode 113 | .vscode/ 114 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM tiangolo/uvicorn-gunicorn-fastapi:python3.7 2 | 3 | COPY . 
/app 4 | WORKDIR /app 5 | 6 | ENV settings=prod 7 | # ENV WORKERS_PER_CORE=2 8 | 9 | 10 | 11 | RUN apt-get update -y && pip install --upgrade pip && \ 12 | pip install -r requirements.txt && \ 13 | apt-get install -y postgresql-client 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | asyncpg = "==0.22.0" 8 | autopep8 = "==1.5.7" 9 | click = "==7.1.2" 10 | fastapi = "==0.65.1" 11 | gino = "==1.0.1" 12 | gino-starlette = "==0.1.1" 13 | greenlet = "==1.1.0" 14 | h11 = "==0.12.0" 15 | prometheus-client = "==0.7.1" 16 | pycodestyle = "==2.7.0" 17 | pydantic = "==1.8.2" 18 | six = "==1.16.0" 19 | starlette = "<=0.14.0" 20 | starlette-prometheus = "==0.7.0" 21 | toml = "==0.10.2" 22 | typing-extensions = "==3.10.0.0" 23 | uvicorn = "==0.13.4" 24 | SQLAlchemy = "==1.3.24" 25 | SQLAlchemy-Utils = "==0.37.3" 26 | 27 | [dev-packages] 28 | autopep8 = "*" 29 | 30 | [requires] 31 | python_version = "3.9" 32 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # FastApi BoilerPlate 2 | This repo is useful for moving quickly from development to production with FastAPI. FastAPI is one of the asynchronous web frameworks for Python; for documentation, please refer to [FastAPI](https://fastapi.tiangolo.com/). 3 | 4 | ## Notes 5 | This boilerplate supports Python 3.6 and above (the Pipfile targets Python 3.9). 6 | This boilerplate uses [Gino-ORM](https://python-gino.org/) for database connections and queries. 7 | 8 | To use this boilerplate for development, we suggest the following: 9 | ### 🕹 Guide 10 | 11 | ##### You can use either pipenv or pip itself 12 | 13 | For development with pipenv: 14 | ```sh 15 | pipenv shell 16 | 17 | pipenv install 18 | 19 | ``` 20 | 21 | For development with pip: 22 | ```sh 23 | python3 -m venv .venv 24 | 25 | source .venv/bin/activate 26 | 27 | pip install -r requirements.txt 28 | ``` 29 | 30 | Check the .env file, modify it accordingly, and then source it: 31 | 32 | ```sh 33 | 34 | source .env 35 | 36 | ``` 37 | 38 | For database migrations: 39 | ``` 40 | alembic revision --autogenerate 41 | 42 | alembic upgrade head 43 | ``` 44 | Last but not least, do the following and you are ready to go: 45 | ``` 46 | uvicorn app.main:app --reload 47 | 48 | # add .env to .gitignore 49 | 50 | rm -rf .git 51 | ``` 52 | ## Other FastAPI project templates 53 | 54 | [full-stack-fastapi-postgresql](https://github.com/tiangolo/full-stack-fastapi-postgresql) 55 | 56 | [full-stack-fastapi-couchbase](https://github.com/tiangolo/full-stack-fastapi-couchbase) 57 | 58 | [cookiecutter-spacy-fastapi](https://github.com/microsoft/cookiecutter-spacy-fastapi) 59 | 60 | [fast-api-project-template](https://github.com/bergran/fast-api-project-template) 61 | 62 | [startapp](https://github.com/sabuhish/startapp) 63 | 64 | [fastapi-nano](https://github.com/rednafi/fastapi-nano) 65 | 66 | 67 | ## Contributing 68 | Feel free to open issues and send pull requests. 69 | 70 | ## Supported OS 71 | Linux, macOS 72 | -------------------------------------------------------------------------------- /alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration.
2 | 3 | [alembic] 4 | # path to migration scripts 5 | script_location = migrations 6 | 7 | # template used to generate migration files 8 | # file_template = %%(rev)s_%%(slug)s 9 | 10 | # timezone to use when rendering the date 11 | # within the migration file as well as the filename. 12 | # string value is passed to dateutil.tz.gettz() 13 | # leave blank for localtime 14 | # timezone = 15 | 16 | # max length of characters to apply to the 17 | # "slug" field 18 | # truncate_slug_length = 40 19 | 20 | # set to 'true' to run the environment during 21 | # the 'revision' command, regardless of autogenerate 22 | # revision_environment = false 23 | 24 | # set to 'true' to allow .pyc and .pyo files without 25 | # a source .py file to be detected as revisions in the 26 | # versions/ directory 27 | # sourceless = false 28 | 29 | # version location specification; this defaults 30 | # to migrations/versions. When using multiple version 31 | # directories, initial revisions must be specified with --version-path 32 | # version_locations = %(here)s/bar %(here)s/bat migrations/versions 33 | 34 | # the output encoding used when revision files 35 | # are written from script.py.mako 36 | # output_encoding = utf-8 37 | 38 | sqlalchemy.url = postgresql://%(DB_USER)s:%(DB_PASSWORD)s@%(DB_HOST)s:%(DB_PORT)s/%(DB_NAME)s 39 | 40 | [post_write_hooks] 41 | # post_write_hooks defines scripts or Python functions that are run 42 | # on newly generated revision scripts. See the documentation for further 43 | # detail and examples 44 | 45 | # format using "black" - use the console_scripts runner, against the "black" entrypoint 46 | # hooks=black 47 | # black.type=console_scripts 48 | # black.entrypoint=black 49 | # black.options=-l 79 50 | 51 | # Logging configuration 52 | [loggers] 53 | keys = root,sqlalchemy,alembic 54 | 55 | [handlers] 56 | keys = console 57 | 58 | [formatters] 59 | keys = generic 60 | 61 | [logger_root] 62 | level = WARN 63 | handlers = console 64 | qualname = 65 | 66 | [logger_sqlalchemy] 67 | level = WARN 68 | handlers = 69 | qualname = sqlalchemy.engine 70 | 71 | [logger_alembic] 72 | level = INFO 73 | handlers = 74 | qualname = alembic 75 | 76 | [handler_console] 77 | class = StreamHandler 78 | args = (sys.stderr,) 79 | level = NOTSET 80 | formatter = generic 81 | 82 | [formatter_generic] 83 | format = %(levelname)-5.5s [%(name)s] %(message)s 84 | datefmt = %H:%M:%S 85 | -------------------------------------------------------------------------------- /app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Turall/FastApi-boilerplate/63845a81972abf110a648a11d4c8db046742181a/app/__init__.py -------------------------------------------------------------------------------- /app/controllers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Turall/FastApi-boilerplate/63845a81972abf110a648a11d4c8db046742181a/app/controllers/__init__.py -------------------------------------------------------------------------------- /app/controllers/controller/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Turall/FastApi-boilerplate/63845a81972abf110a648a11d4c8db046742181a/app/controllers/controller/__init__.py -------------------------------------------------------------------------------- /app/controllers/controller/controller.py: 
-------------------------------------------------------------------------------- 1 | from fastapi import APIRouter, Body, Query 2 | from starlette.responses import JSONResponse 3 | from core.factories import settings 4 | from app.controllers.controller.schemas import TestErrorSchema, TestSchema 5 | 6 | router = APIRouter() 7 | 8 | 9 | @router.get("/", 10 | tags=["Test"], 11 | response_description="test", 12 | description="test", 13 | include_in_schema=settings.INCLUDE_SCHEMA, 14 | response_model=TestSchema, 15 | responses={ 16 | 404: {"model": TestErrorSchema} 17 | } 18 | ) 19 | async def test_test(test: dict = Body(None, example={"test": "mytest"})) -> JSONResponse: 20 | if test: 21 | return JSONResponse({"result": test.get("test")}) 22 | 23 | return JSONResponse({"result": True}) 24 | 25 | 26 | @router.get("/test", 27 | tags=["Test"], 28 | response_description="test", 29 | description="test", 30 | include_in_schema=settings.INCLUDE_SCHEMA, 31 | response_model=TestSchema, 32 | responses={ 33 | 404: {"model": TestErrorSchema} 34 | } 35 | ) 36 | async def test_2(test: str = Query(None, alias="token", title="token", description="Send token in the query")) -> JSONResponse: 37 | 38 | return JSONResponse({"result": test or True}) 39 | -------------------------------------------------------------------------------- /app/controllers/controller/schemas.py: -------------------------------------------------------------------------------- 1 | # write your schemas in this files. Use pydantic 2 | 3 | from pydantic import BaseModel,constr,validator,ValidationError,EmailStr 4 | from uuid import UUID 5 | from typing import Optional,List,Union 6 | import pydantic.json 7 | import asyncpg.pgproto.pgproto 8 | pydantic.json.ENCODERS_BY_TYPE[asyncpg.pgproto.pgproto.UUID] = str 9 | 10 | 11 | # Write your pydantic models here 12 | 13 | class TestSchema(BaseModel): 14 | test: str 15 | status_code: int 16 | class Config: 17 | schema_extra = { 18 | 'example': { 19 | 'test': "Test", 20 | "status_code": 200 21 | 22 | } 23 | } 24 | 25 | class TestErrorSchema(BaseModel): 26 | test: str 27 | status_code: int 28 | class Config: 29 | schema_extra = { 30 | 'example': { 31 | 'test': "Test Error", 32 | "status_code": 404 33 | 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /app/data/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Turall/FastApi-boilerplate/63845a81972abf110a648a11d4c8db046742181a/app/data/__init__.py -------------------------------------------------------------------------------- /app/data/models.py: -------------------------------------------------------------------------------- 1 | # write you database models in this file 2 | 3 | from core.dbsetup import db, Model 4 | 5 | 6 | class Example(Model): 7 | 8 | __tablename__ = "example" 9 | 10 | name = db.Column(db.String(), nullable=False) 11 | email = db.Column(db.String(), nullable=False, index=True) 12 | -------------------------------------------------------------------------------- /app/main.py: -------------------------------------------------------------------------------- 1 | import string 2 | import random 3 | import time 4 | import logging 5 | 6 | from fastapi import FastAPI 7 | from starlette_prometheus import metrics, PrometheusMiddleware 8 | from starlette.middleware.cors import CORSMiddleware 9 | from starlette.requests import Request 10 | 11 | from core.extensions import db 12 | from core.factories import settings 13 
| from app.controllers.controller.controller import router 14 | 15 | 16 | app = FastAPI() 17 | db.init_app(app) 18 | log = logging.getLogger(__name__) 19 | 20 | 21 | @app.middleware("http") 22 | async def log_requests(request: Request, call_next): 23 | """log_requests. 24 | 25 | :param request: 26 | :type request: Request 27 | :param call_next: 28 | """ 29 | idem = ''.join(random.choices(string.ascii_uppercase + string.digits, k=6)) 30 | log.info( 31 | f"RID={idem} REGION={request.headers.get('cf-ipcountry')} CLIENT_IP={request.headers.get('cf-connecting-ip')} START REQUEST PATH={request.url.path} METHOD={request.method} " 32 | ) 33 | start_time = time.time() 34 | response = await call_next(request) 35 | 36 | process_time = (time.time() - start_time) * 1000 37 | formatted_process_time = '{0:.2f}'.format(process_time) 38 | log.info( 39 | f"RID={idem} COMPLETED={formatted_process_time}ms REQUEST={request.method.upper()} {request.url.path} STATUS_CODE={response.status_code}" 40 | ) 41 | 42 | return response 43 | 44 | 45 | @app.on_event("startup") 46 | async def startup(): 47 | print("app started") 48 | 49 | 50 | @app.on_event("shutdown") 51 | async def shutdown(): 52 | print("SHUTDOWN") 53 | 54 | 55 | cors_origins = [i.strip() for i in settings.CORS_ORIGINS.split(",")] 56 | app.add_middleware( 57 | CORSMiddleware, 58 | allow_origins=cors_origins, 59 | allow_credentials=True, 60 | allow_methods=["*"], 61 | allow_headers=["*"], 62 | ) 63 | 64 | 65 | app.add_middleware(PrometheusMiddleware) 66 | app.add_route("/metrics/", metrics) 67 | app.include_router(router) 68 | -------------------------------------------------------------------------------- /app/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Turall/FastApi-boilerplate/63845a81972abf110a648a11d4c8db046742181a/app/utils/__init__.py -------------------------------------------------------------------------------- /app/utils/helpers.py: -------------------------------------------------------------------------------- 1 | # write your helper function here 2 | 3 | def clean_dict(data: dict) -> dict: 4 | return {key: val for (key, val) in data.items() if val is not None} -------------------------------------------------------------------------------- /core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Turall/FastApi-boilerplate/63845a81972abf110a648a11d4c8db046742181a/core/__init__.py -------------------------------------------------------------------------------- /core/dbsetup.py: -------------------------------------------------------------------------------- 1 | from uuid import uuid4 2 | from sqlalchemy_utils import UUIDType, Timestamp # add created,updated columns to model 3 | from core.extensions import db 4 | 5 | 6 | class SurrogatePK(object): 7 | """A mixin that adds a surrogate UUID 'primary key' column named ``id`` to 8 | any declarative-mapped class.""" 9 | 10 | __table_args__ = {"extend_existing": True} 11 | 12 | id = db.Column(UUIDType(binary=False), primary_key=True) 13 | 14 | 15 | class Model(Timestamp,SurrogatePK,db.Model): 16 | __abstract__ = True 17 | 18 | @classmethod 19 | async def create(cls, **kwargs): 20 | if issubclass(cls, SurrogatePK): 21 | unique_id = uuid4() 22 | if not kwargs.get("id"): 23 | kwargs["id"] = unique_id 24 | return await cls(**kwargs)._create() 25 | 26 | -------------------------------------------------------------------------------- 
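The `Model` base class in `core/dbsetup.py` combines Gino's model machinery with a UUID surrogate primary key (`SurrogatePK`) and `created`/`updated` timestamp columns, and its `create` classmethod fills in a `uuid4` id when none is supplied. A minimal sketch of how an application model such as `Example` from `app/data/models.py` could be persisted through it (the route path and payload keys below are illustrative, not part of the boilerplate):

```python
# Hypothetical endpoint sketch: persists an Example row via Model.create,
# which generates a uuid4 primary key when no id is passed in.
from fastapi import APIRouter, Body
from starlette.responses import JSONResponse

from app.data.models import Example

router = APIRouter()


@router.post("/examples", tags=["Example"])  # illustrative path
async def create_example(payload: dict = Body(...)) -> JSONResponse:
    example = await Example.create(name=payload["name"], email=payload["email"])
    return JSONResponse({"id": str(example.id), "name": example.name})
```

Like the existing router in `app/controllers/controller/controller.py`, such a router would still need to be registered on the app with `app.include_router(...)` in `app/main.py`.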
/core/extensions.py: -------------------------------------------------------------------------------- 1 | from core.factories import settings 2 | from ssl import create_default_context 3 | from gino.ext.starlette import Gino 4 | 5 | 6 | if not settings.DEBUG: 7 | ssl_object = create_default_context(cafile=settings.SSL_CERT_FILE) 8 | 9 | db: Gino = Gino( 10 | dsn=settings.DATABASE_URL, 11 | echo=False, 12 | ssl=ssl_object, 13 | pool_min_size=3, 14 | pool_max_size=20, 15 | retry_limit=1, 16 | retry_interval=1, 17 | ) 18 | else: 19 | 20 | db: Gino = Gino( 21 | dsn=settings.DATABASE_URL) 22 | -------------------------------------------------------------------------------- /core/factories.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | 4 | envsettings = os.getenv("settings") 5 | 6 | if envsettings in ["dev", "default"]: 7 | from core.settings.devsettings import DevSettings 8 | settings = DevSettings() 9 | 10 | elif envsettings == "prod": 11 | from core.settings.prodsettings import ProdSettings 12 | settings = ProdSettings() 13 | else: 14 | raise SystemExit( 15 | "settings for app not exported. example: ```export settings=dev```") 16 | -------------------------------------------------------------------------------- /core/settings/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Turall/FastApi-boilerplate/63845a81972abf110a648a11d4c8db046742181a/core/settings/__init__.py -------------------------------------------------------------------------------- /core/settings/devsettings.py: -------------------------------------------------------------------------------- 1 | from starlette.config import Config 2 | from starlette.datastructures import Secret 3 | from core.settings.settings import BaseConfig 4 | 5 | 6 | class DevSettings(BaseConfig): 7 | 8 | """ Configuration class for site development environment """ 9 | 10 | config = Config() 11 | 12 | DEBUG = config("DEBUG", cast=bool, default=True) 13 | DB_USER = config("DB_USER", cast=str, default="postgres") 14 | DB_PASSWORD = config("DB_PASSWORD", cast=Secret, default="postgres") 15 | DB_HOST = config("DB_HOST", cast=str, default="db") 16 | DB_PORT = config("DB_PORT", cast=str, default="5432") 17 | DB_NAME = config("DB_NAME", cast=str, default="postgres") 18 | INCLUDE_SCHEMA = config("INCLUDE_SCHEMA", cast=bool, default=True) 19 | 20 | DATABASE_URL = config( 21 | "DATABASE_URL", 22 | default=f"asyncpg://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}", 23 | ) 24 | -------------------------------------------------------------------------------- /core/settings/prodsettings.py: -------------------------------------------------------------------------------- 1 | from starlette.config import Config 2 | from starlette.datastructures import Secret 3 | from core.settings.settings import BaseConfig 4 | 5 | 6 | class ProdSettings(BaseConfig): 7 | 8 | """ Configuration class for site production environment """ 9 | 10 | config = Config() 11 | 12 | DB_USER = config("DB_USER", cast=str) 13 | DB_PASSWORD = config("DB_PASSWORD", cast=Secret) 14 | DB_HOST = config("DB_HOST", cast=str) 15 | DB_PORT = config("DB_PORT", cast=str) 16 | DB_NAME = config("DB_NAME", cast=str) 17 | INCLUDE_SCHEMA = config("INCLUDE_SCHEMA", cast=bool) 18 | SSL_CERT_FILE = config("SSL_CERT_FILE") 19 | 20 | DATABASE_URL = config( 21 | "DATABASE_URL", 22 | default=f"asyncpg://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}", 23 | ) 24 | 
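Unlike `DevSettings`, `ProdSettings` above defines no defaults, so every variable it reads must be exported before the app starts with `settings=prod`; that includes `INCLUDE_SCHEMA` and the `SSL_CERT_FILE` CA file that `core/extensions.py` passes to `create_default_context` when `DEBUG` is off. A sketch of the extra exports a production environment would need on top of the database variables in `.env` (paths and values are placeholders):

```sh
# Placeholder values -- adjust for your deployment.
export settings=prod
export INCLUDE_SCHEMA=false
export SSL_CERT_FILE=/etc/ssl/certs/db-ca.pem
```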
-------------------------------------------------------------------------------- /core/settings/settings.py: -------------------------------------------------------------------------------- 1 | import os 2 | from starlette.config import Config 3 | 4 | project_name = "test" 5 | 6 | 7 | class BaseConfig: 8 | 9 | """ 10 | Base configuration class. Subclasses should include configurations for 11 | testing, development and production environments 12 | """ 13 | config = Config() 14 | 15 | INCLUDE_SCHEMA = config("INCLUDE_SCHEMA", cast=bool, default=True) 16 | 17 | SECRET_KEY = config("SECRET_KEY", default=os.urandom(32)) 18 | SQLALCHEMY_ECHO = config("SQLALCHEMY_ECHO", cast=bool, default=False) 19 | SQLALCHEMY_TRACK_MODIFICATIONS = config( 20 | "SQLALCHEMY_TRACK_MODIFICATIONS", cast=bool, default=False) 21 | 22 | LOGGER_NAME = "%s_log" % project_name 23 | LOG_FILENAME = "/var/tmp/app.%s.log" % project_name 24 | 25 | CORS_ORIGINS = config("CORS_HOSTS", default="*") 26 | 27 | DEBUG = config("DEBUG", cast=bool, default=False) 28 | TESTING = config("TESTING", cast=bool, default=False) 29 | -------------------------------------------------------------------------------- /gunicorn_conf.py: -------------------------------------------------------------------------------- 1 | import json 2 | import multiprocessing 3 | import os 4 | 5 | workers_per_core_str = os.getenv("WORKERS_PER_CORE", "1") 6 | web_concurrency_str = os.getenv("WEB_CONCURRENCY", None) 7 | host = os.getenv("HOST", "0.0.0.0") 8 | port = os.getenv("PORT", "80") 9 | bind_env = os.getenv("BIND", None) 10 | use_loglevel = os.getenv("LOG_LEVEL", "info") 11 | use_bind = bind_env or f"{host}:{port}" 12 | cores = multiprocessing.cpu_count() 13 | workers_per_core = float(workers_per_core_str) 14 | default_web_concurrency = workers_per_core * cores 15 | if web_concurrency_str: 16 | web_concurrency = int(web_concurrency_str) 17 | assert web_concurrency > 0 18 | else: 19 | web_concurrency = max(int(default_web_concurrency), 2) 20 | 21 | # Gunicorn config variables 22 | loglevel = use_loglevel 23 | workers = web_concurrency 24 | bind = use_bind 25 | keepalive = 120 26 | errorlog = "-" 27 | 28 | # For debugging and testing 29 | log_data = { 30 | "loglevel": loglevel, 31 | "workers": workers, 32 | "bind": '%s:%s' % (host, port), 33 | # "accesslog": "-", 34 | # "errorlog": "-", 35 | "worker_class": "uvicorn.workers.UvicornWorker", 36 | # Additional, non-gunicorn variables 37 | "workers_per_core": workers_per_core, 38 | "host": host, 39 | "port": port, 40 | } 41 | print(json.dumps(log_data)) 42 | -------------------------------------------------------------------------------- /migrations/README: -------------------------------------------------------------------------------- 1 | Generic single-database configuration. -------------------------------------------------------------------------------- /migrations/env.py: -------------------------------------------------------------------------------- 1 | from logging.config import fileConfig 2 | 3 | from sqlalchemy import engine_from_config 4 | from sqlalchemy import pool 5 | 6 | from alembic import context 7 | import sys, os 8 | # this is the Alembic Config object, which provides 9 | # access to the values within the .ini file in use. 
10 | config = context.config 11 | section = config.config_ini_section 12 | config.set_section_option(section, "DB_USER", os.environ.get("DB_USER")) 13 | config.set_section_option(section, "DB_PASSWORD", os.environ.get("DB_PASSWORD")) 14 | config.set_section_option(section, "DB_HOST", os.environ.get("DB_HOST")) 15 | config.set_section_option(section, "DB_PORT", os.environ.get("DB_PORT")) 16 | config.set_section_option(section, "DB_NAME", os.environ.get("DB_NAME")) 17 | 18 | # Interpret the config file for Python logging. 19 | # This line sets up loggers basically. 20 | fileConfig(config.config_file_name) 21 | 22 | # add your model's MetaData object here 23 | # for 'autogenerate' support 24 | # from myapp import mymodel 25 | # target_metadata = mymodel.Base.metadata 26 | 27 | sys.path.insert(0, os.path.dirname(os.path.dirname(__file__))) 28 | from app.main import db 29 | 30 | target_metadata = db 31 | 32 | 33 | # other values from the config, defined by the needs of env.py, 34 | # can be acquired: 35 | # my_important_option = config.get_main_option("my_important_option") 36 | # ... etc. 37 | 38 | 39 | def run_migrations_offline(): 40 | """Run migrations in 'offline' mode. 41 | 42 | This configures the context with just a URL 43 | and not an Engine, though an Engine is acceptable 44 | here as well. By skipping the Engine creation 45 | we don't even need a DBAPI to be available. 46 | 47 | Calls to context.execute() here emit the given string to the 48 | script output. 49 | 50 | """ 51 | 52 | url = config.get_main_option("sqlalchemy.url") 53 | context.configure( 54 | url=url, 55 | target_metadata=target_metadata, 56 | literal_binds=True, 57 | dialect_opts={"paramstyle": "named"}, 58 | ) 59 | 60 | with context.begin_transaction(): 61 | context.run_migrations() 62 | 63 | 64 | def run_migrations_online(): 65 | """Run migrations in 'online' mode. 66 | 67 | In this scenario we need to create an Engine 68 | and associate a connection with the context. 69 | 70 | """ 71 | connectable = engine_from_config( 72 | config.get_section(config.config_ini_section), 73 | prefix="sqlalchemy.", 74 | poolclass=pool.NullPool, 75 | ) 76 | 77 | with connectable.connect() as connection: 78 | context.configure( 79 | connection=connection, target_metadata=target_metadata 80 | ) 81 | 82 | with context.begin_transaction(): 83 | context.run_migrations() 84 | 85 | 86 | if context.is_offline_mode(): 87 | run_migrations_offline() 88 | else: 89 | run_migrations_online() 90 | -------------------------------------------------------------------------------- /migrations/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade(): 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade(): 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /prestart.sh: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env bash 2 | 3 | # Let the DB start 4 | echo "postgresql://${DB_USER}:${DB_PASSWORD}@${DB_HOST}:${DB_PORT}/${DB_NAME}?sslmode=require" 5 | until psql "postgresql://${DB_USER}:${DB_PASSWORD}@${DB_HOST}:${DB_PORT}/${DB_NAME}?sslmode=require" -c '\q'; do 6 | >&2 echo "Postgres is unavailable - sleeping" 7 | sleep 1 8 | done 9 | 10 | 11 | # Run migrations 12 | alembic upgrade head 13 | 14 | 15 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | asyncpg==0.22.0 2 | autopep8==1.5.7 3 | click==7.1.2 4 | fastapi==0.65.1 5 | gino==1.0.1 6 | gino-starlette==0.1.1 7 | greenlet==1.1.0 8 | h11==0.12.0 9 | prometheus-client==0.7.1 10 | pycodestyle==2.7.0 11 | pydantic==1.8.2 12 | six==1.16.0 13 | SQLAlchemy==1.3.24 14 | SQLAlchemy-Utils==0.37.3 15 | starlette<=0.14.0 16 | starlette-prometheus==0.7.0 17 | toml==0.10.2 18 | typing-extensions==3.10.0.0 19 | uvicorn==0.13.4 20 | -------------------------------------------------------------------------------- /start.sh: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env sh 2 | set -e 3 | 4 | if [ -f /app/app/main.py ]; then 5 | DEFAULT_MODULE_NAME=app.main 6 | elif [ -f /app/main.py ]; then 7 | DEFAULT_MODULE_NAME=main 8 | fi 9 | MODULE_NAME=${MODULE_NAME:-$DEFAULT_MODULE_NAME} 10 | VARIABLE_NAME=${VARIABLE_NAME:-app} 11 | export APP_MODULE=${APP_MODULE:-"$MODULE_NAME:$VARIABLE_NAME"} 12 | 13 | if [ -f /app/gunicorn_conf.py ]; then 14 | DEFAULT_GUNICORN_CONF=/app/gunicorn_conf.py 15 | elif [ -f /app/app/gunicorn_conf.py ]; then 16 | DEFAULT_GUNICORN_CONF=/app/app/gunicorn_conf.py 17 | else 18 | DEFAULT_GUNICORN_CONF=/gunicorn_conf.py 19 | fi 20 | export GUNICORN_CONF=${GUNICORN_CONF:-$DEFAULT_GUNICORN_CONF} 21 | 22 | # If there's a prestart.sh script in the /app directory or other path specified, run it before starting 23 | PRE_START_PATH=${PRE_START_PATH:-/app/prestart.sh} 24 | echo "Checking for script in $PRE_START_PATH" 25 | if [ -f $PRE_START_PATH ] ; then 26 | echo "Running script $PRE_START_PATH" 27 | . "$PRE_START_PATH" 28 | else 29 | echo "There is no script $PRE_START_PATH" 30 | fi 31 | 32 | # Start Gunicorn 33 | exec gunicorn -k uvicorn.workers.UvicornWorker -c "$GUNICORN_CONF" "$APP_MODULE" --log-file "app.log" --access-logfile "access.log" 34 | 35 | --------------------------------------------------------------------------------
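The Dockerfile builds on `tiangolo/uvicorn-gunicorn-fastapi`, whose default start script picks up the project's `gunicorn_conf.py` and runs `prestart.sh` (wait for Postgres, apply Alembic migrations) before launching Gunicorn with Uvicorn workers; the image already sets `settings=prod` and serves on port 80 inside the container. A minimal sketch of building and running it (the image name, host port, and credential values are placeholders):

```sh
# Placeholder image name and credentials -- adjust for your environment.
docker build -t fastapi-boilerplate .

docker run --rm -p 8080:80 \
  -e DB_USER=db_username \
  -e DB_PASSWORD=db_password \
  -e DB_HOST=db.example.internal \
  -e DB_PORT=5432 \
  -e DB_NAME=db_name \
  -e INCLUDE_SCHEMA=false \
  -e SSL_CERT_FILE=/etc/ssl/certs/db-ca.pem \
  fastapi-boilerplate
```

Note that `prestart.sh` connects with `sslmode=require` and `core/extensions.py` expects the CA file at `SSL_CERT_FILE`, so the target Postgres instance must accept SSL connections.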