├── docker-airflow-demo
│   ├── airflow
│   │   ├── data
│   │   │   └── .keep
│   │   ├── logs
│   │   │   └── .keep
│   │   ├── plugins
│   │   │   └── .keep
│   │   ├── requirements.txt
│   │   ├── Dockerfile
│   │   ├── dags
│   │   │   └── workshop4.py
│   │   └── docker-compose.yml
│   └── airflow-env-vars
│       ├── data
│       │   └── .keep
│       ├── logs
│       │   └── .keep
│       ├── plugins
│       │   └── .keep
│       ├── requirements.txt
│       ├── Dockerfile
│       ├── dags
│       │   └── workshop4.py
│       └── docker-compose.yml
├── .gitignore
├── docker-compose-demo
│   └── docker-compose.yml
└── README.md

/docker-airflow-demo/airflow/data/.keep:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/docker-airflow-demo/airflow/logs/.keep:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/docker-airflow-demo/airflow/plugins/.keep:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/docker-airflow-demo/airflow-env-vars/data/.keep:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/docker-airflow-demo/airflow-env-vars/logs/.keep:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
.env
*.log
*.csv
__pycache__/
--------------------------------------------------------------------------------
/docker-airflow-demo/airflow-env-vars/plugins/.keep:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/docker-airflow-demo/airflow/requirements.txt:
--------------------------------------------------------------------------------
pandas>=1.1.5
pymysql>=1.0.2
requests>=2.25.1
--------------------------------------------------------------------------------
/docker-airflow-demo/airflow-env-vars/requirements.txt:
--------------------------------------------------------------------------------
pandas>=1.1.5
pymysql>=1.0.2
requests>=2.25.1
--------------------------------------------------------------------------------
/docker-airflow-demo/airflow/Dockerfile:
--------------------------------------------------------------------------------
FROM apache/airflow:2.1.0
USER airflow
COPY requirements.txt /tmp/requirements.txt
RUN pip install --no-cache-dir --user -r /tmp/requirements.txt
--------------------------------------------------------------------------------
/docker-airflow-demo/airflow-env-vars/Dockerfile:
--------------------------------------------------------------------------------
FROM apache/airflow:2.1.0
USER airflow
COPY requirements.txt /tmp/requirements.txt
RUN pip install --no-cache-dir --user -r /tmp/requirements.txt
--------------------------------------------------------------------------------
/docker-compose-demo/docker-compose.yml:
--------------------------------------------------------------------------------
version: "3.3"

services:
  db:
    image: mysql:5.7
    restart: always
    environment:
      MYSQL_ROOT_PASSWORD: dontdothis
      MYSQL_DATABASE: dontdothis

  web:
    depends_on:
      - db
    image: nginxdemos/hello
    ports:
      - "80:80"
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Docker Workshop Demo

A Docker Compose demo for R2DE.

## How to run this project
1. Clone this repository
2. Run `docker-compose up -d` in the `docker-compose-demo/` directory
3. Run `docker ps` to check that the `db` and `web` containers are running
4. Open http://localhost. You should see the NGINX demo hello page.

## Ideas to play around
Try modifying `docker-compose.yml` and running `docker-compose up -d` again to see the changes applied.

# Airflow Demo

Follow the official instructions in [Running Airflow in Docker](https://airflow.apache.org/docs/apache-airflow/stable/start/docker.html), including creating the `dags`, `logs`, `plugins`, and `data` folders.

The resources are in the `airflow/` directory, including the additional requirements to install.

To build the container image with the specified requirements:
```
docker-compose build
```

On the first run, initialize Airflow:
```
docker-compose up airflow-init
```

Then, after initialization, start all containers:
```
docker-compose up -d
```

The Airflow web UI will then be available at http://localhost:8080 (default credentials: `airflow` / `airflow`).

In the example DAG in `dags/`, don't forget to fill in the database credentials. Please do not commit passwords or any other credentials to Git.
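
A quick way to sanity-check those credentials before running the DAG is a short pymysql session outside Airflow. This is only a sketch: the host, user, password, database, and charset below are placeholders for whatever values you put into `dags/workshop4.py`.
```
import pymysql.cursors

# Placeholder credentials: use the same values you fill into the DAG's Config class.
connection = pymysql.connect(host="your-mysql-host",
                             port=3306,
                             user="your-user",
                             password="your-password",
                             db="your-db",
                             charset="utf8mb4",
                             cursorclass=pymysql.cursors.DictCursor)

with connection.cursor() as cursor:
    cursor.execute("SELECT 1")
    print(cursor.fetchone())  # {'1': 1} means the connection and credentials work
```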

## Keeping credentials in Environment Variables

The `airflow-env-vars/` directory contains a variant of the demo that keeps the credentials in a `.env` file. Those variables are then set inside the containers by the `environment:` section of `docker-compose.yml`.

An example `.env` file:
```
AIRFLOW_UID=
AIRFLOW_GID=
MYSQL_HOST=
MYSQL_PORT=
MYSQL_USER=
MYSQL_PASSWORD=
MYSQL_DB=
MYSQL_CHARSET=
```
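
For reference, the DAG in `airflow-env-vars/dags/workshop4.py` reads these variables with `os.getenv`, along these lines:
```
import os

class Config:
    MYSQL_HOST = os.getenv("MYSQL_HOST")
    MYSQL_PORT = int(os.getenv("MYSQL_PORT"))  # pymysql expects the port as an int
    MYSQL_USER = os.getenv("MYSQL_USER")
    MYSQL_PASSWORD = os.getenv("MYSQL_PASSWORD")
    MYSQL_DB = os.getenv("MYSQL_DB")
    MYSQL_CHARSET = os.getenv("MYSQL_CHARSET")
```

If any of these come back as `None` (or the `int()` call fails), check that the variable is present in `.env` and passed through in the `environment:` section of `docker-compose.yml`.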
"https://de-training-2020-7au6fmnprq-de.a.run.app/currency_gbp/all" 49 | response = requests.get(url) 50 | result_conversion_rate = response.json() 51 | 52 | conversion_rate = pd.DataFrame.from_dict(result_conversion_rate) 53 | conversion_rate = conversion_rate.reset_index().rename(columns={"index":"date"}) 54 | 55 | conversion_rate['date'] = pd.to_datetime(conversion_rate['date']).dt.date 56 | conversion_rate.to_csv("/home/airflow/data/conversion_rate_from_api.csv", index=False) 57 | 58 | 59 | def convert_to_thb(): 60 | retail = pd.read_csv("/home/airflow/data/retail_from_db.csv") 61 | conversion_rate = pd.read_csv("/home/airflow/data/conversion_rate_from_api.csv") 62 | 63 | final_df = retail.merge(conversion_rate, how="left", left_on="InvoiceDate", right_on="date") 64 | 65 | final_df['THBPrice'] = final_df.apply(lambda x: x['UnitPrice'] * x['Rate'], axis=1) 66 | final_df.to_csv("/home/airflow/data/result.csv", index=False) 67 | 68 | # Default Args 69 | 70 | default_args = { 71 | 'owner': 'datath', 72 | 'depends_on_past': False, 73 | 'catchup': False, 74 | 'start_date': days_ago(0), 75 | 'email': ['airflow@example.com'], 76 | 'email_on_failure': False, 77 | 'email_on_retry': False, 78 | 'retries': 1, 79 | 'retry_delay': timedelta(minutes=5), 80 | } 81 | 82 | # Create DAG 83 | 84 | dag = DAG( 85 | 'Retail_pipeline', 86 | default_args=default_args, 87 | description='Pipeline for ETL online_retail data', 88 | schedule_interval=timedelta(days=1), 89 | ) 90 | 91 | # Tasks 92 | 93 | t1 = PythonOperator( 94 | task_id='db_ingest', 95 | python_callable=get_data_from_db, 96 | dag=dag, 97 | ) 98 | 99 | t2 = PythonOperator( 100 | task_id='api_call', 101 | python_callable=get_data_from_api, 102 | dag=dag, 103 | ) 104 | 105 | t3 = PythonOperator( 106 | task_id='convert_currency', 107 | python_callable=convert_to_thb, 108 | dag=dag, 109 | ) 110 | 111 | 112 | # Dependencies 113 | 114 | [t1, t2] >> t3 -------------------------------------------------------------------------------- /docker-airflow-demo/airflow/docker-compose.yml: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one 2 | # or more contributor license agreements. See the NOTICE file 3 | # distributed with this work for additional information 4 | # regarding copyright ownership. The ASF licenses this file 5 | # to you under the Apache License, Version 2.0 (the 6 | # "License"); you may not use this file except in compliance 7 | # with the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | # 18 | 19 | # Basic Airflow cluster configuration for CeleryExecutor with Redis and PostgreSQL. 20 | # 21 | # WARNING: This configuration is for local development. Do not use it in a production deployment. 22 | # 23 | # This configuration supports basic configuration using environment variables or an .env file 24 | # The following variables are supported: 25 | # 26 | # AIRFLOW_IMAGE_NAME - Docker image name used to run Airflow. 
/docker-airflow-demo/airflow-env-vars/dags/workshop4.py:
--------------------------------------------------------------------------------
import os

from airflow import DAG
from airflow.operators.python import PythonOperator
from airflow.utils.dates import days_ago
from datetime import timedelta

import pymysql.cursors
import pandas as pd
import requests


class Config:
    # Credentials are read from environment variables set via .env and docker-compose.yml.
    MYSQL_HOST = os.getenv("MYSQL_HOST")
    MYSQL_PORT = int(os.getenv("MYSQL_PORT"))
    MYSQL_USER = os.getenv("MYSQL_USER")
    MYSQL_PASSWORD = os.getenv("MYSQL_PASSWORD")
    MYSQL_DB = os.getenv("MYSQL_DB")
    MYSQL_CHARSET = os.getenv("MYSQL_CHARSET")

# For PythonOperator

def get_data_from_db():

    # Connect to the database
    connection = pymysql.connect(host=Config.MYSQL_HOST,
                                 port=Config.MYSQL_PORT,
                                 user=Config.MYSQL_USER,
                                 password=Config.MYSQL_PASSWORD,
                                 db=Config.MYSQL_DB,
                                 charset=Config.MYSQL_CHARSET,
                                 cursorclass=pymysql.cursors.DictCursor)

    with connection.cursor() as cursor:
        # Read the whole online_retail table
        sql = "SELECT * from online_retail"
        cursor.execute(sql)
        result_retail = cursor.fetchall()

    retail = pd.DataFrame(result_retail)
    retail['InvoiceTimestamp'] = retail['InvoiceDate']
    retail['InvoiceDate'] = pd.to_datetime(retail['InvoiceDate']).dt.date
    retail.to_csv("/home/airflow/data/retail_from_db.csv", index=False)


def get_data_from_api():
    url = "https://de-training-2020-7au6fmnprq-de.a.run.app/currency_gbp/all"
    response = requests.get(url)
    result_conversion_rate = response.json()

    conversion_rate = pd.DataFrame.from_dict(result_conversion_rate)
    conversion_rate = conversion_rate.reset_index().rename(columns={"index": "date"})

    conversion_rate['date'] = pd.to_datetime(conversion_rate['date']).dt.date
    conversion_rate.to_csv("/home/airflow/data/conversion_rate_from_api.csv", index=False)


def convert_to_thb():
    retail = pd.read_csv("/home/airflow/data/retail_from_db.csv")
    conversion_rate = pd.read_csv("/home/airflow/data/conversion_rate_from_api.csv")

    final_df = retail.merge(conversion_rate, how="left", left_on="InvoiceDate", right_on="date")

    final_df['THBPrice'] = final_df.apply(lambda x: x['UnitPrice'] * x['Rate'], axis=1)
    final_df.to_csv("/home/airflow/data/result.csv", index=False)

# Default Args

default_args = {
    'owner': 'datath',
    'depends_on_past': False,
    'start_date': days_ago(0),
    'email': ['airflow@example.com'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=5),
}

# Create DAG

dag = DAG(
    'Retail_pipeline',
    default_args=default_args,
    description='Pipeline for ETL online_retail data',
    schedule_interval=timedelta(days=1),
    catchup=False,  # catchup is a DAG-level argument; it has no effect inside default_args
)

# Tasks

t1 = PythonOperator(
    task_id='db_ingest',
    python_callable=get_data_from_db,
    dag=dag,
)

t2 = PythonOperator(
    task_id='api_call',
    python_callable=get_data_from_api,
    dag=dag,
)

t3 = PythonOperator(
    task_id='convert_currency',
    python_callable=convert_to_thb,
    dag=dag,
)


# Dependencies

[t1, t2] >> t3
--------------------------------------------------------------------------------
/docker-airflow-demo/airflow/docker-compose.yml:
--------------------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

# Basic Airflow cluster configuration for CeleryExecutor with Redis and PostgreSQL.
#
# WARNING: This configuration is for local development. Do not use it in a production deployment.
#
# This configuration supports basic configuration using environment variables or an .env file
# The following variables are supported:
#
# AIRFLOW_IMAGE_NAME           - Docker image name used to run Airflow.
#                                Default: apache/airflow:master-python3.8
# AIRFLOW_UID                  - User ID in Airflow containers
#                                Default: 50000
# AIRFLOW_GID                  - Group ID in Airflow containers
#                                Default: 50000
# _AIRFLOW_WWW_USER_USERNAME   - Username for the administrator account.
#                                Default: airflow
# _AIRFLOW_WWW_USER_PASSWORD   - Password for the administrator account.
#                                Default: airflow
#
# Feel free to modify this file to suit your needs.
---
version: '3'
x-airflow-common:
  &airflow-common
  build: .
  image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.1.0}
  environment:
    &airflow-common-env
    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
    AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
    AIRFLOW__CORE__FERNET_KEY: ''
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
    AIRFLOW__CORE__LOAD_EXAMPLES: 'true'
    AIRFLOW__API__AUTH_BACKEND: 'airflow.api.auth.backend.basic_auth'
  volumes:
    - ./dags:/opt/airflow/dags
    - ./logs:/opt/airflow/logs
    - ./plugins:/opt/airflow/plugins
    - ./data:/home/airflow/data
  user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-50000}"
  depends_on:
    redis:
      condition: service_healthy
    postgres:
      condition: service_healthy

services:
  postgres:
    image: postgres:13
    environment:
      POSTGRES_USER: airflow
      POSTGRES_PASSWORD: airflow
      POSTGRES_DB: airflow
    volumes:
      - postgres-db-volume:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "airflow"]
      interval: 5s
      retries: 5
    restart: always

  redis:
    image: redis:latest
    ports:
      - 6379:6379
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 30s
      retries: 50
    restart: always

  airflow-webserver:
    <<: *airflow-common
    command: webserver
    ports:
      - 8080:8080
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  airflow-scheduler:
    <<: *airflow-common
    command: scheduler
    healthcheck:
      test: ["CMD-SHELL", 'airflow jobs check --job-type SchedulerJob --hostname "$${HOSTNAME}"']
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  airflow-worker:
    <<: *airflow-common
    command: celery worker
    healthcheck:
      test:
        - "CMD-SHELL"
        - 'celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  airflow-init:
    <<: *airflow-common
    command: version
    environment:
      <<: *airflow-common-env
      _AIRFLOW_DB_UPGRADE: 'true'
      _AIRFLOW_WWW_USER_CREATE: 'true'
      _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
      _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}

  flower:
    <<: *airflow-common
    command: celery flower
    ports:
      - 5555:5555
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:5555/"]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

volumes:
  postgres-db-volume:
--------------------------------------------------------------------------------
/docker-airflow-demo/airflow-env-vars/docker-compose.yml:
--------------------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

# Basic Airflow cluster configuration for CeleryExecutor with Redis and PostgreSQL.
#
# WARNING: This configuration is for local development. Do not use it in a production deployment.
#
# This configuration supports basic configuration using environment variables or an .env file
# The following variables are supported:
#
# AIRFLOW_IMAGE_NAME           - Docker image name used to run Airflow.
#                                Default: apache/airflow:master-python3.8
# AIRFLOW_UID                  - User ID in Airflow containers
#                                Default: 50000
# AIRFLOW_GID                  - Group ID in Airflow containers
#                                Default: 50000
# _AIRFLOW_WWW_USER_USERNAME   - Username for the administrator account.
#                                Default: airflow
# _AIRFLOW_WWW_USER_PASSWORD   - Password for the administrator account.
#                                Default: airflow
#
# Feel free to modify this file to suit your needs.
---
version: '3'
x-airflow-common:
  &airflow-common
  build: .
  image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.1.0}
  environment:
    &airflow-common-env
    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
    AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
    AIRFLOW__CORE__FERNET_KEY: ''
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
    AIRFLOW__CORE__LOAD_EXAMPLES: 'true'
    AIRFLOW__API__AUTH_BACKEND: 'airflow.api.auth.backend.basic_auth'
    MYSQL_HOST: ${MYSQL_HOST}
    MYSQL_PORT: ${MYSQL_PORT}
    MYSQL_USER: ${MYSQL_USER}
    MYSQL_PASSWORD: ${MYSQL_PASSWORD}
    MYSQL_DB: ${MYSQL_DB}
    MYSQL_CHARSET: ${MYSQL_CHARSET}
  volumes:
    - ./dags:/opt/airflow/dags
    - ./logs:/opt/airflow/logs
    - ./plugins:/opt/airflow/plugins
    - ./data:/home/airflow/data
  user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-50000}"
  depends_on:
    redis:
      condition: service_healthy
    postgres:
      condition: service_healthy

services:
  postgres:
    image: postgres:13
    environment:
      POSTGRES_USER: airflow
      POSTGRES_PASSWORD: airflow
      POSTGRES_DB: airflow
    volumes:
      - postgres-db-volume:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "airflow"]
      interval: 5s
      retries: 5
    restart: always

  redis:
    image: redis:latest
    ports:
      - 6379:6379
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 30s
      retries: 50
    restart: always

  airflow-webserver:
    <<: *airflow-common
    command: webserver
    ports:
      - 8080:8080
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  airflow-scheduler:
    <<: *airflow-common
    command: scheduler
    healthcheck:
      test: ["CMD-SHELL", 'airflow jobs check --job-type SchedulerJob --hostname "$${HOSTNAME}"']
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  airflow-worker:
    <<: *airflow-common
    command: celery worker
    healthcheck:
      test:
        - "CMD-SHELL"
        - 'celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  airflow-init:
    <<: *airflow-common
    command: version
    environment:
      <<: *airflow-common-env
      _AIRFLOW_DB_UPGRADE: 'true'
      _AIRFLOW_WWW_USER_CREATE: 'true'
      _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
      _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}

  flower:
    <<: *airflow-common
    command: celery flower
    ports:
      - 5555:5555
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:5555/"]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

volumes:
  postgres-db-volume:
--------------------------------------------------------------------------------