├── .gitignore ├── DAG_ingestion_pipeline ├── .env ├── README.md ├── dags │ └── data_ingestion_dag │ │ └── main.py ├── docker-compose.yaml ├── plugins │ └── macros.py └── raw_data │ ├── 2021-08-14 │ ├── 20 │ │ └── booking.csv │ ├── 21 │ │ └── booking.csv │ └── 22 │ │ └── booking.csv │ ├── 2021-08-15 │ ├── 10 │ │ └── booking.csv │ ├── 11 │ │ └── booking.csv │ ├── 12 │ │ └── booking.csv │ ├── 13 │ │ └── booking.csv │ ├── 20 │ │ └── booking.csv │ └── 21 │ │ └── booking.csv │ ├── 2021-08-18 │ ├── 18 │ │ └── booking.csv │ ├── 19 │ │ └── booking.csv │ ├── 20 │ │ └── booking.csv │ ├── 21 │ │ └── booking.csv │ ├── 22 │ │ └── booking.csv │ └── 23 │ │ └── booking.csv │ ├── 2021-08-19 │ ├── 18 │ │ └── booking.csv │ ├── 19 │ │ └── booking.csv │ ├── 20 │ │ └── booking.csv │ ├── 21 │ │ └── booking.csv │ ├── 22 │ │ └── booking.csv │ └── 23 │ │ └── booking.csv │ ├── client.csv │ └── hotel.csv ├── LICENSE ├── Monitoring_Dag ├── configs │ ├── airflow.cfg │ ├── prometheus.yaml │ └── statsd.yaml ├── dags │ └── My_Dag │ │ └── main.py └── docker-compose.yaml ├── airflow-minimal ├── docker-localExecutor-mysql │ ├── .env │ ├── dags │ │ └── main_dag.py │ └── docker-compose.yaml ├── docker-sequencial-mysql │ ├── .env │ └── docker-compose.yaml └── docker-sequencial-sqlite │ ├── .env │ ├── dags │ ├── custom_sensors.py │ └── my_sensors_dag_demo.py │ └── docker-compose.yaml └── slack-alert-dag ├── .env ├── README.md ├── dags └── slack-alert-dag.py └── docker-compose.yaml /.gitignore: -------------------------------------------------------------------------------- 1 | .dev/ 2 | .idea/ 3 | venv 4 | .pytest_cache 5 | *.pyc 6 | logs/ 7 | db/ 8 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/.env: -------------------------------------------------------------------------------- 1 | AIRFLOW_UID=50000 2 | AIRFLOW_GID=50000 3 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/README.md: -------------------------------------------------------------------------------- 1 | ## First Airflow DAG: Data ingestion pipeline 2 | 3 | 1. Prepare the database first: `docker-compose up airflow-init` 4 | 5 | This will create the `db/airflow.db` SQLite database 6 | 7 | 2. Add raw data for the current execution date and hour to be ingested 8 | 9 | 3. Launch Airflow: `docker-compose up` 10 | 11 | Wait for the scheduler and webserver to become healthy, then go to `localhost:8081` 12 | 13 | ``` 14 | username: admin 15 | password: airflow 16 | ``` 17 | 18 | 4. Enable the DAG and watch it ingest data. 
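Once a run has completed, you can sanity-check the result from the host. A minimal sketch (run from the project root; assumes the DAG has already written to the mounted `db/datascience.db`):

```python
import sqlite3

# ./db is bind-mounted into the containers, so the ingested database is visible on the host
conn = sqlite3.connect("db/datascience.db")
count, latest = conn.execute(
    "SELECT COUNT(*), MAX(booking_date) FROM booking_record"
).fetchone()
print(f"{count} rows ingested, latest booking_date: {latest}")
conn.close()
```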
19 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/dags/data_ingestion_dag/main.py: -------------------------------------------------------------------------------- 1 | # We'll start by importing the DAG object 2 | from datetime import timedelta, datetime 3 | from pathlib import Path 4 | 5 | from airflow import DAG 6 | # We need to import the operators used in our tasks 7 | from airflow.operators.python import PythonOperator 8 | # We then import the days_ago function 9 | from airflow.utils.dates import days_ago 10 | 11 | import pandas as pd 12 | import sqlite3 13 | import os 14 | 15 | # get dag directory path 16 | dag_path = os.getcwd() 17 | 18 | 19 | def execution_date_to_millis(execution_date): 20 | """converts execution date (in DAG timezone) to epoch millis 21 | 22 | Args: 23 | execution_date (str): date string in %Y-%m-%d format 24 | 25 | Returns: 26 | milliseconds 27 | """ 28 | date = datetime.strptime(execution_date, "%Y-%m-%d") 29 | epoch = datetime.utcfromtimestamp(0) 30 | return (date - epoch).total_seconds() * 1000.0 31 | 32 | 33 | def transform_data(exec_date): 34 | try: 35 | print(f"Ingesting data for date: {exec_date}") 36 | date = datetime.strptime(exec_date, '%Y-%m-%d %H') 37 | file_date_path = f"{date.strftime('%Y-%m-%d')}/{date.hour}" 38 | 39 | booking = pd.read_csv(f"{dag_path}/raw_data/{file_date_path}/booking.csv", low_memory=False) 40 | client = pd.read_csv(f"{dag_path}/raw_data/client.csv", low_memory=False) 41 | hotel = pd.read_csv(f"{dag_path}/raw_data/hotel.csv", low_memory=False) 42 | 43 | # merge booking with client 44 | data = pd.merge(booking, client, on='client_id') 45 | data.rename(columns={'name': 'client_name', 'type': 'client_type'}, inplace=True) 46 | 47 | # merge booking, client & hotel 48 | data = pd.merge(data, hotel, on='hotel_id') 49 | data.rename(columns={'name': 'hotel_name'}, inplace=True) 50 | 51 | # make date format consistent 52 | data.booking_date = pd.to_datetime(data.booking_date, infer_datetime_format=True) 53 | 54 | # make all cost in GBP currency 55 | data.loc[data.currency == 'EUR', ['booking_cost']] = data.booking_cost * 0.8 56 | data.currency.replace("EUR", "GBP", inplace=True) 57 | 58 | # remove unnecessary columns 59 | data = data.drop(columns=['address']) 60 | 61 | # load processed data 62 | output_dir = Path(f'{dag_path}/processed_data/{file_date_path}') 63 | output_dir.mkdir(parents=True, exist_ok=True) 64 | # processed_data/2021-08-15/12/2021-08-15_12.csv 65 | data.to_csv(output_dir / f"{file_date_path}.csv".replace("/", "_"), index=False, mode='a') 66 | 67 | except ValueError as e: 68 | print("datetime format should match %Y-%m-%d %H", e) 69 | raise e 70 | 71 | 72 | def load_data(exec_date): 73 | print(f"Loading data for date: {exec_date}") 74 | date = datetime.strptime(exec_date, '%Y-%m-%d %H') 75 | file_date_path = f"{date.strftime('%Y-%m-%d')}/{date.hour}" 76 | 77 | conn = sqlite3.connect("/usr/local/airflow/db/datascience.db") 78 | c = conn.cursor() 79 | c.execute(''' 80 | CREATE TABLE IF NOT EXISTS booking_record ( 81 | client_id INTEGER NOT NULL, 82 | booking_date TEXT NOT NULL, 83 | room_type TEXT(512) NOT NULL, 84 | hotel_id INTEGER NOT NULL, 85 | booking_cost NUMERIC, 86 | currency TEXT, 87 | age INTEGER, 88 | client_name TEXT(512), 89 | client_type TEXT(512), 90 | hotel_name TEXT(512) 91 | ); 92 | ''') 93 | processed_file = f"{dag_path}/processed_data/{file_date_path}/{file_date_path.replace('/', '_')}.csv" 94 | records = pd.read_csv(processed_file) 95 |
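# note: transform_data appends to the processed CSV (mode='a') and the insert below
# appends to the table, so re-running the same logical date writes duplicate rows;
# clear the partition (or dedupe downstream) before a re-run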
records.to_sql('booking_record', conn, index=False, if_exists='append') 96 | conn.close() 97 | 98 | # initializing the default arguments that we'll pass to our DAG 99 | default_args = { 100 | 'owner': 'airflow', 101 | 'start_date': days_ago(5) 102 | } 103 | 104 | ingestion_dag = DAG( 105 | 'booking_ingestion', 106 | default_args=default_args, 107 | description='Aggregates booking records for data analysis', 108 | schedule_interval=timedelta(hours=1), 109 | catchup=False, 110 | user_defined_macros={'date_to_millis': execution_date_to_millis} 111 | ) 112 | 113 | task_1 = PythonOperator( 114 | task_id='transform_data', 115 | python_callable=transform_data, 116 | op_args=["{{ ds }} {{ execution_date.hour }}"], 117 | dag=ingestion_dag, 118 | ) 119 | 120 | task_2 = PythonOperator( 121 | task_id='load_data', 122 | python_callable=load_data, 123 | op_args=["{{ ds }} {{ execution_date.hour }}"], 124 | dag=ingestion_dag, 125 | ) 126 | 127 | 128 | task_1 >> task_2 129 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one 2 | # or more contributor license agreements.  See the NOTICE file 3 | # distributed with this work for additional information 4 | # regarding copyright ownership.  The ASF licenses this file 5 | # to you under the Apache License, Version 2.0 (the 6 | # "License"); you may not use this file except in compliance 7 | # with the License.  You may obtain a copy of the License at 8 | # 9 | #   http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied.  See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License.
17 | # 18 | 19 | --- 20 | version: '3' 21 | 22 | x-airflow-common: 23 | &airflow-common 24 | image: apache/airflow:2.1.1-python3.8 25 | environment: 26 | &airflow-common-env 27 | AIRFLOW__CORE__SQL_ALCHEMY_CONN: sqlite:////usr/local/airflow/db/airflow.db 28 | AIRFLOW__CORE__LOAD_EXAMPLES: 'false' 29 | volumes: 30 | - ./dags:/opt/airflow/dags 31 | - ./logs:/opt/airflow/logs 32 | - ./plugins:/opt/airflow/plugins 33 | - ./db:/usr/local/airflow/db 34 | - ./raw_data:/opt/airflow/raw_data 35 | - ./processed_data:/opt/airflow/processed_data 36 | user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-50000}" 37 | 38 | services: 39 | airflow-init: 40 | <<: *airflow-common 41 | entrypoint: /bin/bash -c "/bin/bash -c \"$${@}\"" 42 | command: | 43 | /bin/bash -c " 44 | airflow db init 45 | airflow db upgrade 46 | airflow users create -r Admin -u admin -e airflow@airflow.com -f admin -l user -p airflow 47 | " 48 | environment: 49 | <<: *airflow-common-env 50 | 51 | airflow-scheduler: 52 | <<: *airflow-common 53 | command: scheduler 54 | environment: 55 | <<: *airflow-common-env 56 | restart: always 57 | 58 | airflow-webserver: 59 | <<: *airflow-common 60 | command: webserver 61 | ports: 62 | - 8081:8080 63 | healthcheck: 64 | test: ["CMD", "curl", "--fail", "http://localhost:8080/health"] 65 | interval: 10s 66 | timeout: 10s 67 | retries: 5 68 | restart: always 69 | environment: 70 | <<: *airflow-common-env 71 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/plugins/macros.py: -------------------------------------------------------------------------------- 1 | # Defining the plugin class 2 | from datetime import datetime 3 | 4 | from airflow.plugins_manager import AirflowPlugin 5 | 6 | 7 | def execution_date_to_millis(execution_date): 8 | """converts execution date (in DAG timezone) to epoch millis 9 | 10 | Args: 11 | execution_date (str): date string in %Y-%m-%d format 12 | 13 | Returns: 14 | milliseconds 15 | """ 16 | date = datetime.strptime(execution_date, "%Y-%m-%d") 17 | epoch = datetime.utcfromtimestamp(0) 18 | return (date - epoch).total_seconds() * 1000.0 19 | 20 | 21 | class MyPlugins(AirflowPlugin): 22 | name = "custom_macros" 23 | macros = [execution_date_to_millis] 24 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-14/20/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 2,2021-08-14T20:00:00,standard_1_bed,1,2910.0,GBP 3 | 4,2021-08-14T20:00:00,standard_1_bed,1,2910.0,GBP 4 | 5,2021-08-14T20:00:00,standard_1_bed,4,2910.0,EUR 5 | 6 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-14/21/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 1,2021-08-14T21:00:00,standard_1_bed,2,2910.0,GBP 3 | 3,2021-08-14T21:00:00,first_class_2_bed,1,2910.0,GBP 4 | 6,2021-08-14T21:00:00,balcony_2_bed,4,1100.0,EUR 5 | 6 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-14/22/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 7,2021-08-14T22:00:00,standard_3_bed,2,2910.0,GBP 3 | 8,2021-08-14T22:00:00,standard_2_bed,5,1700.0,GBP
4 | 9,2021-08-14T22:00:00,balcony_2_bed,4,1100.0,EUR 5 | 6 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-15/10/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 2,2021-08-15T10:00:00,standard_1_bed,1,2910.0,GBP 3 | 4,2021-08-15T10:00:00,standard_1_bed,1,2910.0,GBP 4 | 5,2021-08-15T10:00:00,standard_1_bed,4,2910.0,EUR 5 | 6 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-15/11/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 1,2021-08-15T11:00:00,standard_1_bed,2,2910.0,GBP 3 | 3,2021-08-15T11:00:00,first_class_2_bed,1,2910.0,GBP 4 | 6,2021-08-15T11:00:00,balcony_2_bed,4,1100.0,EUR 5 | 6 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-15/12/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 7,2021-08-15T12:00:00,standard_3_bed,2,2910.0,GBP 3 | 8,2021-08-15T12:00:00,standard_2_bed,5,1700.0,GBP 4 | 9,2021-08-15T12:00:00,balcony_2_bed,4,1100.0,EUR 5 | 6 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-15/13/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 3,2021-08-15T13:00:00,standard_3_bed,3,2910.0,GBP 3 | 7,2021-08-15T13:00:00,standard_2_bed,2,1700.0,GBP 4 | 8,2021-08-15T13:00:00,balcony_2_bed,1,1100.0,EUR 5 | 6 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-15/20/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 1,2021-08-15T20:00:00,standard_3_bed,3,2910.0,GBP 3 | 2,2021-08-15T20:00:00,standard_2_bed,2,1700.0,GBP 4 | 5,2021-08-15T20:00:00,balcony_2_bed,1,1100.0,EUR 5 | 6 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-15/21/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 8,2021-08-15T21:00:00,standard_3_bed,3,2910.0,GBP 3 | 2,2021-08-15T21:00:00,standard_2_bed,1,1700.0,GBP 4 | 4,2021-08-15T21:00:00,balcony_2_bed,1,1100.0,EUR 5 | 6 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-18/18/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 2,2021-08-18T18:00:00,standard_1_bed,1,2910.0,GBP 3 | 4,2021-08-18T18:00:00,standard_1_bed,1,2910.0,GBP 4 | 5,2021-08-18T18:00:00,standard_1_bed,4,2910.0,EUR 5 | 6 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-18/19/booking.csv: -------------------------------------------------------------------------------- 1 | 
client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 3 | 4 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-18/20/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 2,2021-08-18T20:00:00,standard_1_bed,1,2910.0,GBP 3 | 4,2021-08-18T20:00:00,standard_1_bed,1,2910.0,GBP 4 | 5,2021-08-18T20:00:00,standard_1_bed,4,2910.0,EUR 5 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-18/21/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 2,2021-08-18T21:00:00,standard_1_bed,1,2910.0,GBP 3 | 4,2021-08-18T21:00:00,standard_1_bed,1,2910.0,GBP 4 | 5,2021-08-18T21:00:00,standard_1_bed,4,2910.0,EUR 5 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-18/22/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 2,2021-08-18T22:00:00,standard_1_bed,1,2910.0,GBP 3 | 4,2021-08-18T22:00:00,standard_1_bed,1,2910.0,GBP 4 | 5,2021-08-18T22:00:00,standard_1_bed,4,2910.0,EUR 5 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-18/23/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 2,2021-08-18T23:00:00,standard_1_bed,1,2910.0,GBP 3 | 4,2021-08-18T23:00:00,standard_1_bed,1,2910.0,GBP 4 | 5,2021-08-18T23:00:00,standard_1_bed,4,2910.0,EUR 5 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-19/18/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 2,2021-08-19T18:00:00,standard_1_bed,1,2910.0,GBP 3 | 4,2021-08-19T18:00:00,standard_1_bed,1,2910.0,GBP 4 | 5,2021-08-19T18:00:00,standard_1_bed,4,2910.0,EUR 5 | 6 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-19/19/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 1,2021-08-19T19:00:00,standard_1_bed,2,2910.0,GBP 3 | 3,2021-08-19T19:00:00,first_class_2_bed,1,2910.0,GBP 4 | 6,2021-08-19T19:00:00,balcony_2_bed,4,1100.0,EUR 5 | 6 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-19/20/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 7,2021-08-20T20:00:00,standard_3_bed,2,2910.0,GBP 3 | 8,2021-08-20T20:00:00,standard_2_bed,5,1700.0,GBP 4 | 9,2021-08-20T20:00:00,balcony_2_bed,4,1100.0,EUR 5 | 6 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-19/21/booking.csv: -------------------------------------------------------------------------------- 1 | 
client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 2,2021-08-19T21:00:00,standard_1_bed,1,2910.0,GBP 3 | 4,2021-08-19T21:00:00,standard_1_bed,1,2910.0,GBP 4 | 5,2021-08-19T21:00:00,standard_1_bed,4,2910.0,EUR 5 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-19/22/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 2,2021-08-19T22:00:00,standard_1_bed,1,2910.0,GBP 3 | 4,2021-08-19T22:00:00,standard_1_bed,1,2910.0,GBP 4 | 5,2021-08-19T22:00:00,standard_1_bed,4,2910.0,EUR 5 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/2021-08-19/23/booking.csv: -------------------------------------------------------------------------------- 1 | client_id,booking_date,room_type,hotel_id,booking_cost,currency 2 | 2,2021-08-19T23:00:00,standard_1_bed,1,2910.0,GBP 3 | 4,2021-08-19T23:00:00,standard_1_bed,1,2910.0,GBP 4 | 5,2021-08-19T23:00:00,standard_1_bed,4,2910.0,EUR 5 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/client.csv: -------------------------------------------------------------------------------- 1 | client_id,age,name,type 2 | 1,,Ann,standard 3 | 2,38.0,Ben,standard 4 | 3,30.0,Tom,standard 5 | 4,43.0,Bianca,VIP 6 | 5,49.0,Caroline,standard 7 | 6,28.0,Kate,VIP 8 | -------------------------------------------------------------------------------- /DAG_ingestion_pipeline/raw_data/hotel.csv: -------------------------------------------------------------------------------- 1 | hotel_id,name,address 2 | 1,Astro Resort,address1 3 | 2,Dream Connect,address2 4 | 3,Green Acres,address3 5 | 4,Millennium Times Square,address5 6 | 5,The Clift Royal,address5 7 | 6,The New View,address6 8 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /Monitoring_Dag/configs/airflow.cfg: -------------------------------------------------------------------------------- 1 | [core] 2 | executor = LocalExecutor 3 | sql_alchemy_conn = mysql+mysqldb://airflow:airflow@mysql:3306/airflow 4 | 5 | [metrics] 6 | statsd_on = True 7 | statsd_host = statsd-exporter 8 | statsd_port = 9125 9 | statsd_prefix = airflow 10 | -------------------------------------------------------------------------------- /Monitoring_Dag/configs/prometheus.yaml: -------------------------------------------------------------------------------- 1 | global: 2 | scrape_interval: 5s 3 | evaluation_interval: 15s 4 | 5 | scrape_configs: 6 | - job_name: airflow 7 | scheme: http 8 | metrics_path: metrics 9 | static_configs: 10 | - targets: ['host.docker.internal:9102'] 11 | labels: 12 | airflow_id: 'airflow' 13 | -------------------------------------------------------------------------------- /Monitoring_Dag/configs/statsd.yaml: -------------------------------------------------------------------------------- 1 | mappings: 2 | # Airflow StatsD metrics mappings (https://airflow.apache.org/docs/apache-airflow/stable/logging-monitoring/metrics.html) 3 | # === Counters === 4 | - match: "*.ti_failures" 5 | match_metric_type: counter 6 | name: "af_agg_ti_failures" 7 | labels: 8 | airflow_id: "$1" 9 | - match: "*.ti_successes" 10 | match_metric_type: counter 11 | name: "af_agg_ti_successes" 12 | labels: 13 | airflow_id: "$1" 14 | - match: "*.zombies_killed" 15 | match_metric_type: counter 16 | name: "af_agg_zombies_killed" 17 | labels: 18 | airflow_id: "$1" 19 | - match: "*.scheduler_heartbeat" 20 | match_metric_type: counter 21 | name: "af_agg_scheduler_heartbeat" 22 | labels: 23 | airflow_id: "$1" 24 | - match: "*.dag_processing.processes" 25 | match_metric_type: counter 26 | name: "af_agg_dag_processing_processes" 27 | labels: 28 | airflow_id: "$1" 29 | - match: "*.scheduler.tasks.killed_externally" 30 | match_metric_type: counter 31 | name: "af_agg_scheduler_tasks_killed_externally" 32 | labels: 33 | airflow_id: "$1" 34 | - match: "*.scheduler.tasks.running" 35 | match_metric_type: counter 36 | name: "af_agg_scheduler_tasks_running" 37 | labels: 38 | airflow_id: "$1" 39 | - match: "*.scheduler.tasks.starving" 40 | match_metric_type: counter 41 | name: "af_agg_scheduler_tasks_starving" 42 | labels: 43 | airflow_id: "$1" 44 | - match: "*.scheduler.orphaned_tasks.cleared" 45 | match_metric_type: counter 46 | name: "af_agg_scheduler_orphaned_tasks_cleared" 47 | labels: 48 | airflow_id: "$1" 49 | - match: "*.scheduler.orphaned_tasks.adopted" 50 | match_metric_type: counter 51 | name: "af_agg_scheduler_orphaned_tasks_adopted" 52 | labels: 53 | airflow_id: "$1" 54 | - match: "*.scheduler.critical_section_busy" 55 | match_metric_type: counter 56 | name: "af_agg_scheduler_critical_section_busy" 57 | labels: 58 | airflow_id: "$1" 59 | - match: "*.sla_email_notification_failure" 60 | match_metric_type: counter 61 | name: "af_agg_sla_email_notification_failure" 62 | labels: 63 | airflow_id: "$1" 64 | - match: "*.ti.start.*.*" 65 | match_metric_type: counter 66 | name: "af_agg_ti_start" 67 | labels: 68 | airflow_id: "$1" 69 | dag_id: "$2" 70 | task_id: "$3" 71 | - match: "*.ti.finish.*.*.*" 72 | match_metric_type: counter 73 | name: "af_agg_ti_finish" 74 | labels: 75 | airflow_id: "$1" 76 | dag_id: "$2" 77 | task_id: "$3" 78 | state: "$4" 79 | - match: "*.dag.callback_exceptions" 80 | match_metric_type: counter 81 
| name: "af_agg_dag_callback_exceptions" 82 | labels: 83 | airflow_id: "$1" 84 | - match: "*.celery.task_timeout_error" 85 | match_metric_type: counter 86 | name: "af_agg_celery_task_timeout_error" 87 | labels: 88 | airflow_id: "$1" 89 | 90 | # === Gauges === 91 | - match: "*.dagbag_size" 92 | match_metric_type: gauge 93 | name: "af_agg_dagbag_size" 94 | labels: 95 | airflow_id: "$1" 96 | - match: "*.dag_processing.import_errors" 97 | match_metric_type: gauge 98 | name: "af_agg_dag_processing_import_errors" 99 | labels: 100 | airflow_id: "$1" 101 | - match: "*.dag_processing.total_parse_time" 102 | match_metric_type: gauge 103 | name: "af_agg_dag_processing_total_parse_time" 104 | labels: 105 | airflow_id: "$1" 106 | - match: "*.dag_processing.last_runtime.*" 107 | match_metric_type: gauge 108 | name: "af_agg_dag_processing_last_runtime" 109 | labels: 110 | airflow_id: "$1" 111 | dag_file: "$2" 112 | - match: "*.dag_processing.last_run.seconds_ago.*" 113 | match_metric_type: gauge 114 | name: "af_agg_dag_processing_last_run_seconds" 115 | labels: 116 | airflow_id: "$1" 117 | dag_file: "$2" 118 | - match: "*.dag_processing.processor_timeouts" 119 | match_metric_type: gauge 120 | name: "af_agg_dag_processing_processor_timeouts" 121 | labels: 122 | airflow_id: "$1" 123 | - match: "*.executor.open_slots" 124 | match_metric_type: gauge 125 | name: "af_agg_executor_open_slots" 126 | labels: 127 | airflow_id: "$1" 128 | - match: "*.executor.queued_tasks" 129 | match_metric_type: gauge 130 | name: "af_agg_executor_queued_tasks" 131 | labels: 132 | airflow_id: "$1" 133 | - match: "*.executor.running_tasks" 134 | match_metric_type: gauge 135 | name: "af_agg_executor_running_tasks" 136 | labels: 137 | airflow_id: "$1" 138 | - match: "*.pool.open_slots.*" 139 | match_metric_type: gauge 140 | name: "af_agg_pool_open_slots" 141 | labels: 142 | airflow_id: "$1" 143 | pool_name: "$2" 144 | - match: "*.pool.queued_slots.*" 145 | match_metric_type: gauge 146 | name: "af_agg_pool_queued_slots" 147 | labels: 148 | airflow_id: "$1" 149 | pool_name: "$2" 150 | - match: "*.pool.running_slots.*" 151 | match_metric_type: gauge 152 | name: "af_agg_pool_running_slots" 153 | labels: 154 | airflow_id: "$1" 155 | pool_name: "$2" 156 | - match: "*.pool.starving_tasks.*" 157 | match_metric_type: gauge 158 | name: "af_agg_pool_starving_tasks" 159 | labels: 160 | airflow_id: "$1" 161 | pool_name: "$2" 162 | - match: "*.smart_sensor_operator.poked_tasks" 163 | match_metric_type: gauge 164 | name: "af_agg_smart_sensor_operator_poked_tasks" 165 | labels: 166 | airflow_id: "$1" 167 | - match: "*.smart_sensor_operator.poked_success" 168 | match_metric_type: gauge 169 | name: "af_agg_smart_sensor_operator_poked_success" 170 | labels: 171 | airflow_id: "$1" 172 | - match: "*.smart_sensor_operator.poked_exception" 173 | match_metric_type: gauge 174 | name: "af_agg_smart_sensor_operator_poked_exception" 175 | labels: 176 | airflow_id: "$1" 177 | - match: "*.smart_sensor_operator.exception_failures" 178 | match_metric_type: gauge 179 | name: "af_agg_smart_sensor_operator_exception_failures" 180 | labels: 181 | airflow_id: "$1" 182 | - match: "*.smart_sensor_operator.infra_failures" 183 | match_metric_type: gauge 184 | name: "af_agg_smart_sensor_operator_infra_failures" 185 | labels: 186 | airflow_id: "$1" 187 | 188 | # === Timers === 189 | - match: "*.dagrun.dependency-check.*" 190 | match_metric_type: observer 191 | name: "af_agg_dagrun_dependency_check" 192 | labels: 193 | airflow_id: "$1" 194 | dag_id: "$2" 195 | - match: 
"*.dag.*.*.duration" 196 | match_metric_type: observer 197 | name: "af_agg_dag_task_duration" 198 | labels: 199 | airflow_id: "$1" 200 | dag_id: "$2" 201 | task_id: "$3" 202 | - match: "*.dag_processing.last_duration.*" 203 | match_metric_type: observer 204 | name: "af_agg_dag_processing_duration" 205 | labels: 206 | airflow_id: "$1" 207 | dag_file: "$2" 208 | - match: "*.dagrun.duration.success.*" 209 | match_metric_type: observer 210 | name: "af_agg_dagrun_duration_success" 211 | labels: 212 | airflow_id: "$1" 213 | dag_id: "$2" 214 | - match: "*.dagrun.duration.failed.*" 215 | match_metric_type: observer 216 | name: "af_agg_dagrun_duration_failed" 217 | labels: 218 | airflow_id: "$1" 219 | dag_id: "$2" 220 | - match: "*.dagrun.schedule_delay.*" 221 | match_metric_type: observer 222 | name: "af_agg_dagrun_schedule_delay" 223 | labels: 224 | airflow_id: "$1" 225 | dag_id: "$2" 226 | - match: "*.scheduler.critical_section_duration" 227 | match_metric_type: observer 228 | name: "af_agg_scheduler_critical_section_duration" 229 | labels: 230 | airflow_id: "$1" 231 | - match: "*.dagrun.*.first_task_scheduling_delay" 232 | match_metric_type: observer 233 | name: "af_agg_dagrun_first_task_scheduling_delay" 234 | labels: 235 | airflow_id: "$1" 236 | dag_id: "$2" 237 | - match: "*.dag.*.*.my_counter" 238 | match_metric_type: counter 239 | name: "my_custom_task_counter" 240 | labels: 241 | airflow_id: "$1" 242 | dag_id: "$2" 243 | task_id: "$3" 244 | -------------------------------------------------------------------------------- /Monitoring_Dag/dags/My_Dag/main.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from airflow import DAG 4 | from airflow.operators.python import PythonOperator 5 | from airflow.utils.dates import days_ago 6 | from airflow.configuration import conf 7 | 8 | from statsd import StatsClient 9 | 10 | 11 | STATSD_HOST = conf.get("metrics", "statsd_host") 12 | STATSD_PORT = conf.get("metrics", "statsd_port") 13 | STATSD_PREFIX = conf.get("metrics", "statsd_prefix") 14 | 15 | 16 | def task(run_time, **context): 17 | metric_name = f'dag.{context["dag"].dag_id}.{context["task"].task_id}.my_counter' 18 | client = StatsClient(host=STATSD_HOST, port=STATSD_PORT, prefix=STATSD_PREFIX) 19 | for i in range(0, run_time): 20 | time.sleep(1) 21 | client.incr(metric_name) 22 | print(i) 23 | 24 | 25 | default_args = { 26 | 'owner': 'airflow', 27 | 'start_date': days_ago(5) 28 | } 29 | 30 | dag = DAG( 31 | 'my_dag', 32 | default_args=default_args, 33 | description='', 34 | schedule_interval="*/2 * * * *", 35 | catchup=False 36 | ) 37 | 38 | my_task_1 = PythonOperator( 39 | task_id='my_task', 40 | python_callable=task, 41 | op_args=[5], 42 | dag=dag, 43 | provide_context=True 44 | ) 45 | 46 | my_task_2 = PythonOperator( 47 | task_id='my_task_2', 48 | python_callable=task, 49 | op_args=[2], 50 | dag=dag, 51 | provide_context=True 52 | ) 53 | 54 | my_task_1 >> my_task_2 55 | 56 | 57 | 58 | -------------------------------------------------------------------------------- /Monitoring_Dag/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one 2 | # or more contributor license agreements. See the NOTICE file 3 | # distributed with this work for additional information 4 | # regarding copyright ownership. 
The ASF licenses this file 5 | # to you under the Apache License, Version 2.0 (the 6 | # "License"); you may not use this file except in compliance 7 | # with the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | # 18 | 19 | --- 20 | version: '3' 21 | 22 | x-airflow-common: 23 | &airflow-common 24 | image: apache/airflow:2.1.3-python3.8 25 | volumes: 26 | - ./dags:/opt/airflow/dags 27 | - ./logs:/opt/airflow/logs 28 | - ./configs/airflow.cfg:/opt/airflow/airflow.cfg 29 | user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-50000}" 30 | depends_on: 31 | mysql: 32 | condition: service_healthy 33 | 34 | services: 35 | airflow-init: 36 | <<: *airflow-common 37 | entrypoint: /bin/bash -c "/bin/bash -c \"$${@}\"" 38 | command: | 39 | /bin/bash -c " 40 | airflow db init 41 | airflow db upgrade 42 | airflow users create -r Admin -u admin -e airflow@airflow.com -f admin -l user -p airflow 43 | " 44 | 45 | mysql: 46 | image: mysql:5.7 47 | command: mysqld --explicit_defaults_for_timestamp=1 48 | ports: 49 | - "3306:3306" 50 | volumes: 51 | - my-db:/var/lib/mysql 52 | environment: 53 | MYSQL_ROOT_USER: root 54 | MYSQL_ROOT_PASSWORD: airflow 55 | MYSQL_USER: airflow 56 | MYSQL_PASSWORD: airflow 57 | MYSQL_DATABASE: airflow 58 | healthcheck: 59 | test: "/etc/init.d/mysql status" 60 | interval: 2s 61 | retries: 120 62 | 63 | airflow-scheduler: 64 | <<: *airflow-common 65 | command: scheduler 66 | restart: always 67 | 68 | airflow-webserver: 69 | <<: *airflow-common 70 | command: webserver 71 | ports: 72 | - 8081:8080 73 | healthcheck: 74 | test: ["CMD", "curl", "--fail", "http://localhost:8080/health"] 75 | interval: 10s 76 | timeout: 10s 77 | retries: 5 78 | restart: always 79 | 80 | statsd-exporter: 81 | image: prom/statsd-exporter:v0.21.0 82 | volumes: 83 | - ./configs/statsd.yaml:/home/statsd-mapping-configs.yaml 84 | entrypoint: ["/bin/sh", "-c", "--"] 85 | command: ["statsd_exporter --log.level debug --statsd.mapping-config=/home/statsd-mapping-configs.yaml"] 86 | ports: 87 | - 9102:9102 # scrape port 88 | - 9125:9125 # ingest port 89 | restart: always 90 | 91 | prometheus: 92 | image: prom/prometheus:v2.26.0 93 | volumes: 94 | - ./configs/prometheus.yaml:/etc/prometheus/prometheus.yaml 95 | - prometheus_data:/prometheus 96 | command: 97 | - '--config.file=/etc/prometheus/prometheus.yaml' 98 | - '--storage.tsdb.path=/prometheus' 99 | - '--web.console.libraries=/usr/share/prometheus/console_libraries' 100 | - '--web.console.templates=/usr/share/prometheus/consoles' 101 | ports: 102 | - 9092:9090 103 | restart: always 104 | 105 | grafana: 106 | image: grafana/grafana:6.7.2 107 | container_name: grafana 108 | volumes: 109 | - grafana_data:/var/lib/grafana 110 | environment: 111 | - GF_SECURITY_ADMIN_USER=admin 112 | - GF_SECURITY_ADMIN_PASSWORD=grafana 113 | - GF_USERS_ALLOW_SIGN_UP=false 114 | restart: always 115 | ports: 116 | - 3000:3000 117 | 118 | # Names our volume 119 | volumes: 120 | my-db: 121 | prometheus_data: 122 | grafana_data: 123 | -------------------------------------------------------------------------------- /airflow-minimal/docker-localExecutor-mysql/.env: 
-------------------------------------------------------------------------------- 1 | AIRFLOW_UID=50000 2 | AIRFLOW_GID=50000 3 | -------------------------------------------------------------------------------- /airflow-minimal/docker-localExecutor-mysql/dags/main_dag.py: -------------------------------------------------------------------------------- 1 | # We'll start by importing the DAG object 2 | from airflow import DAG 3 | from airflow.operators.bash import BashOperator 4 | from airflow.utils.dates import days_ago 5 | 6 | 7 | # initializing the default arguments that we'll pass to our DAG 8 | default_args = { 9 | 'owner': 'airflow', 10 | 'start_date': days_ago(5) 11 | } 12 | 13 | dag = DAG( 14 | 'my_dag', 15 | default_args=default_args, 16 | description='', 17 | schedule_interval="*/5 * * * *", 18 | catchup=False 19 | ) 20 | 21 | my_task = BashOperator( 22 | task_id='my_task', 23 | bash_command='for i in {1..5}; do echo $i; sleep 1; done', 24 | retries=0, 25 | dag=dag, 26 | ) 27 | -------------------------------------------------------------------------------- /airflow-minimal/docker-localExecutor-mysql/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one 2 | # or more contributor license agreements.  See the NOTICE file 3 | # distributed with this work for additional information 4 | # regarding copyright ownership.  The ASF licenses this file 5 | # to you under the Apache License, Version 2.0 (the 6 | # "License"); you may not use this file except in compliance 7 | # with the License.  You may obtain a copy of the License at 8 | # 9 | #   http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied.  See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License.
17 | # 18 | 19 | --- 20 | version: '3' 21 | 22 | x-airflow-common: 23 | &airflow-common 24 | image: apache/airflow:2.6.2-python3.8 25 | environment: 26 | &airflow-common-env 27 | AIRFLOW__CORE__EXECUTOR: LocalExecutor 28 | AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: mysql+mysqldb://airflow:airflow@mysql:3306/airflow 29 | AIRFLOW__CORE__LOAD_EXAMPLES: 'false' 30 | AIRFLOW__DATABASE__SQL_ALCHEMY_POOL_RECYCLE: 3600 31 | volumes: 32 | - ./dags:/opt/airflow/dags 33 | - ./logs:/opt/airflow/logs 34 | - ./db:/usr/local/airflow/db 35 | user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-50000}" 36 | depends_on: 37 | mysql: 38 | condition: service_healthy 39 | 40 | services: 41 | airflow-init: 42 | <<: *airflow-common 43 | entrypoint: /bin/bash -c "/bin/bash -c \"$${@}\"" 44 | command: | 45 | /bin/bash -c " 46 | airflow db init 47 | airflow db upgrade 48 | airflow users create -r Admin -u admin -e airflow@airflow.com -f admin -l user -p airflow 49 | " 50 | environment: 51 | <<: *airflow-common-env 52 | 53 | mysql: 54 | image: mysql:5.7 55 | command: mysqld --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci --explicit_defaults_for_timestamp=1 56 | ports: 57 | - "3306:3306" 58 | volumes: 59 | - my-db:/var/lib/mysql 60 | environment: 61 | MYSQL_ROOT_USER: root 62 | MYSQL_ROOT_PASSWORD: airflow 63 | MYSQL_USER: airflow 64 | MYSQL_PASSWORD: airflow 65 | MYSQL_DATABASE: airflow 66 | healthcheck: 67 | test: "mysqladmin ping -h localhost -uairflow -pairflow" 68 | interval: 2s 69 | retries: 120 70 | 71 | airflow-scheduler: 72 | <<: *airflow-common 73 | command: scheduler 74 | environment: 75 | <<: *airflow-common-env 76 | restart: always 77 | 78 | airflow-webserver: 79 | <<: *airflow-common 80 | command: webserver 81 | ports: 82 | - 8081:8080 83 | healthcheck: 84 | test: ["CMD", "curl", "--fail", "http://localhost:8080/health"] 85 | interval: 10s 86 | timeout: 10s 87 | retries: 5 88 | restart: always 89 | environment: 90 | <<: *airflow-common-env 91 | 92 | # Names our volume 93 | volumes: 94 | my-db: 95 | -------------------------------------------------------------------------------- /airflow-minimal/docker-sequencial-mysql/.env: -------------------------------------------------------------------------------- 1 | AIRFLOW_UID=50000 2 | AIRFLOW_GID=50000 3 | -------------------------------------------------------------------------------- /airflow-minimal/docker-sequencial-mysql/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one 2 | # or more contributor license agreements.  See the NOTICE file 3 | # distributed with this work for additional information 4 | # regarding copyright ownership.  The ASF licenses this file 5 | # to you under the Apache License, Version 2.0 (the 6 | # "License"); you may not use this file except in compliance 7 | # with the License.  You may obtain a copy of the License at 8 | # 9 | #   http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied.  See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License.
17 | # 18 | 19 | --- 20 | version: '3' 21 | 22 | x-airflow-common: 23 | &airflow-common 24 | image: apache/airflow:2.1.1-python3.8 25 | environment: 26 | &airflow-common-env 27 | AIRFLOW__CORE__SQL_ALCHEMY_CONN: mysql://airflow:airflow@mysql:3306/airflow 28 | AIRFLOW__CORE__LOAD_EXAMPLES: 'true' 29 | volumes: 30 | - ./dags:/opt/airflow/dags 31 | - ./logs:/opt/airflow/logs 32 | - ./plugins:/opt/airflow/plugins 33 | user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-50000}" 34 | depends_on: 35 | mysql: 36 | condition: service_healthy 37 | 38 | services: 39 | mysql: 40 | image: mysql:5.7 41 | command: --explicit_defaults_for_timestamp 42 | ports: 43 | - "3306:3306" 44 | volumes: 45 | - my-db:/var/lib/mysql 46 | environment: 47 | MYSQL_ROOT_USER: airflow 48 | MYSQL_ROOT_PASSWORD: airflow 49 | MYSQL_USER: airflow 50 | MYSQL_PASSWORD: airflow 51 | MYSQL_DATABASE: airflow 52 | healthcheck: 53 | test: "/etc/init.d/mysql status" 54 | interval: 2s 55 | retries: 120 56 | 57 | airflow-init: 58 | <<: *airflow-common 59 | entrypoint: /bin/bash -c "/bin/bash -c \"$${@}\"" 60 | command: | 61 | /bin/bash -c " 62 | airflow db init 63 | airflow db upgrade 64 | airflow users create -r Admin -u admin -e airflow@airflow.com -f admin -l user -p airflow 65 | " 66 | environment: 67 | <<: *airflow-common-env 68 | 69 | airflow-scheduler: 70 | <<: *airflow-common 71 | command: scheduler 72 | environment: 73 | <<: *airflow-common-env 74 | restart: always 75 | 76 | airflow-webserver: 77 | <<: *airflow-common 78 | command: webserver 79 | ports: 80 | - 8081:8080 81 | healthcheck: 82 | test: ["CMD", "curl", "--fail", "http://localhost:8080/health"] 83 | interval: 10s 84 | timeout: 10s 85 | retries: 5 86 | restart: always 87 | environment: 88 | <<: *airflow-common-env 89 | 90 | # Names our volume 91 | volumes: 92 | my-db: 93 | -------------------------------------------------------------------------------- /airflow-minimal/docker-sequencial-sqlite/.env: -------------------------------------------------------------------------------- 1 | AIRFLOW_UID=50000 2 | AIRFLOW_GID=50000 3 | -------------------------------------------------------------------------------- /airflow-minimal/docker-sequencial-sqlite/dags/custom_sensors.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from airflow.sensors.base import BaseSensorOperator 4 | 5 | 6 | class MyCustomFileSensor(BaseSensorOperator): 7 | def __init__(self, filepath, *args, **kwargs): 8 | self.filepath = filepath 9 | super().__init__(*args, **kwargs) 10 | 11 | def poke(self, context): 12 | # Check if the file exists 13 | print(f'checking for file `{self.filepath}`...') 14 | return os.path.exists(self.filepath) 15 | -------------------------------------------------------------------------------- /airflow-minimal/docker-sequencial-sqlite/dags/my_sensors_dag_demo.py: -------------------------------------------------------------------------------- 1 | import json 2 | from datetime import datetime, timedelta 3 | 4 | from airflow import DAG 5 | from airflow.decorators import task 6 | from airflow.models.baseoperator import chain 7 | from airflow.sensors.bash import BashSensor 8 | 9 | from custom_sensors import MyCustomFileSensor 10 | 11 | 12 | @task 13 | def final_task(): 14 | return json.dumps({'return': 'i am done'}) 15 | 16 | 17 | with DAG( 18 | dag_id="sensors_dag_demo", 19 | schedule=None, 20 | start_date=datetime(2021, 1, 1), 21 | catchup=False, 22 | ) as dag: 23 | 24 | sleep_10sec_sensor = BashSensor( 25 | 
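# poke_interval: seconds between pokes; timeout: total seconds before the sensor
# gives up; with soft_fail=False a timed-out sensor is marked FAILED, not SKIPPED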
task_id='wait_for_10sec', 26 | poke_interval=2, 27 | timeout=30, 28 | soft_fail=False, 29 | retries=0, 30 | bash_command="sleep 10", 31 | dag=dag) 32 | 33 | wait_for_file = MyCustomFileSensor( 34 | task_id='check_file', 35 | filepath='/opt/airflow/abc.txt', 36 | poke_interval=10, 37 | timeout=60, 38 | dag=dag 39 | ) 40 | 41 | end_task = final_task() 42 | 43 | chain(sleep_10sec_sensor, 44 | wait_for_file, 45 | end_task) -------------------------------------------------------------------------------- /airflow-minimal/docker-sequencial-sqlite/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one 2 | # or more contributor license agreements.  See the NOTICE file 3 | # distributed with this work for additional information 4 | # regarding copyright ownership.  The ASF licenses this file 5 | # to you under the Apache License, Version 2.0 (the 6 | # "License"); you may not use this file except in compliance 7 | # with the License.  You may obtain a copy of the License at 8 | # 9 | #   http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied.  See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | # 18 | 19 | --- 20 | version: '3' 21 | 22 | x-airflow-common: 23 | &airflow-common 24 | image: apache/airflow:2.6.2-python3.8 25 | environment: 26 | &airflow-common-env 27 | AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: sqlite:////usr/local/airflow/db/airflow.db 28 | AIRFLOW__CORE__LOAD_EXAMPLES: 'false' 29 | volumes: 30 | - ./dags:/opt/airflow/dags 31 | - ./logs:/opt/airflow/logs 32 | - ./plugins:/opt/airflow/plugins 33 | - ./db:/usr/local/airflow/db 34 | user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-50000}" 35 | 36 | services: 37 | airflow-init: 38 | <<: *airflow-common 39 | entrypoint: /bin/bash -c "/bin/bash -c \"$${@}\"" 40 | command: | 41 | /bin/bash -c " 42 | airflow db init 43 | airflow db upgrade 44 | airflow users create -r Admin -u admin -e airflow@airflow.com -f admin -l user -p airflow 45 | " 46 | environment: 47 | <<: *airflow-common-env 48 | 49 | airflow-scheduler: 50 | <<: *airflow-common 51 | command: scheduler 52 | environment: 53 | <<: *airflow-common-env 54 | restart: always 55 | depends_on: 56 | airflow-webserver: 57 | condition: service_healthy 58 | 59 | airflow-webserver: 60 | <<: *airflow-common 61 | command: webserver 62 | ports: 63 | - 8081:8080 64 | healthcheck: 65 | test: ["CMD", "curl", "--fail", "http://localhost:8080/health"] 66 | interval: 10s 67 | timeout: 10s 68 | retries: 5 69 | restart: always 70 | environment: 71 | <<: *airflow-common-env 72 | -------------------------------------------------------------------------------- /slack-alert-dag/.env: -------------------------------------------------------------------------------- 1 | AIRFLOW_UID=50000 2 | AIRFLOW_GID=50000 3 | -------------------------------------------------------------------------------- /slack-alert-dag/README.md: -------------------------------------------------------------------------------- 1 | # Slack alert DAG 2 | 3 | Create a Slack app first: 4 | 5 | https://api.slack.com/apps/ 6 | 7 | Go to `Incoming Webhooks` and activate them. 8 | 9 | Click `Add New Webhook to Workspace`. 
23 | 
24 | Watch the YouTube video here: https://youtu.be/2Cbz9Z06KJo
25 | 
--------------------------------------------------------------------------------
/slack-alert-dag/dags/slack-alert-dag.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import time
3 | 
4 | from datetime import datetime
5 | from typing import Optional
6 | 
7 | from airflow.decorators import dag, task
8 | from airflow.models import TaskInstance
9 | from airflow.providers.slack.hooks.slack_webhook import SlackWebhookHook
10 | 
11 | SLACK_WEBHOOK_URL = "---PASTE-YOUR-WEBHOOK-HERE---"
12 | 
13 | 
14 | def alert_slack_channel(context: dict):
15 |     """Send an alert to a Slack channel when a task or DAG run fails.
16 | 
17 |     :param context: Airflow context object
18 |     """
19 |     if not SLACK_WEBHOOK_URL:
20 |         # Do nothing if the Slack webhook is not configured
21 |         return
22 | 
23 |     last_task: Optional[TaskInstance] = context.get('task_instance')
24 |     dag_name = last_task.dag_id
25 |     error_message = context.get('exception') or context.get('reason')
26 |     execution_date = context.get('execution_date')
27 |     dag_run = context.get('dag_run')
28 |     task_instances = dag_run.get_task_instances()
29 |     # Slack link syntax <url|label> renders each failed task as a link to its log
30 |     file_and_link_template = "<{log_url}|{name}>"
31 |     failed_tis = [file_and_link_template.format(log_url=ti.log_url, name=ti.task_id)
32 |                   for ti in task_instances
33 |                   if ti.state == 'failed']
34 |     title = f':red_circle: Dag: *{dag_name}* has failed, with ({len(failed_tis)}) failed tasks'
35 |     msg_parts = {
36 |         'Execution date': execution_date,
37 |         'Failed Tasks': ', '.join(failed_tis),
38 |         'Error': error_message
39 |     }
40 |     msg = "\n".join([title, *[f"*{key}*: {value}" for key, value in msg_parts.items()]]).strip()
41 | 
42 |     SlackWebhookHook(
43 |         webhook_token=SLACK_WEBHOOK_URL,
44 |         message=msg,
45 |     ).execute()
46 | 
47 | 
48 | default_args = {
49 |     'owner': 'airflow',
50 | }
51 | 
52 | 
53 | @dag(
54 |     description='Hello World DAG',
55 |     default_args=default_args,
56 |     schedule_interval='0 12 * * *',
57 |     on_failure_callback=alert_slack_channel,
58 |     start_date=datetime(2017, 3, 20), catchup=False
59 | )
60 | def SlackAlertTestDag():
61 |     @task(on_failure_callback=alert_slack_channel)
62 |     def task1():
63 |         print('Hello world from first task. I am failing :(')
64 |         sys.exit(1)
65 | 
66 |     @task
67 |     def task2():
68 |         print('Hello world from second task. I am failing :(')
69 |         time.sleep(10)
70 |         sys.exit(1)
71 | 
72 |     @task
73 |     def task3():
74 |         time.sleep(10)
75 |         print('Hello world from third task')
76 | 
77 |     @task
78 |     def task4():
79 |         time.sleep(10)
80 |         print('Hello world from fourth task. I am failing :(')
81 |         sys.exit(1)
82 | 
83 |     task1() >> task2()
84 |     task3()
85 |     task4()
86 | 
87 | 
88 | dag = SlackAlertTestDag()
89 | 
--------------------------------------------------------------------------------
/slack-alert-dag/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | # Licensed to the Apache Software Foundation (ASF) under one
2 | # or more contributor license agreements.  See the NOTICE file
3 | # distributed with this work for additional information
4 | # regarding copyright ownership.  The ASF licenses this file
5 | # to you under the Apache License, Version 2.0 (the
6 | # "License"); you may not use this file except in compliance
7 | # with the License.  You may obtain a copy of the License at
8 | #
9 | #   http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing,
12 | # software distributed under the License is distributed on an
13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 | # KIND, either express or implied.  See the License for the
15 | # specific language governing permissions and limitations
16 | # under the License.
17 | #
18 | 
19 | ---
20 | version: '3'
21 | 
22 | x-airflow-common:
23 |   &airflow-common
24 |   image: apache/airflow:2.3.3-python3.8
25 |   environment:
26 |     &airflow-common-env
27 |     AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: sqlite:////usr/local/airflow/db/airflow.db
28 |     AIRFLOW__CORE__LOAD_EXAMPLES: 'true'
29 |   volumes:
30 |     - ./dags:/opt/airflow/dags
31 |     - ./logs:/opt/airflow/logs
32 |     - ./plugins:/opt/airflow/plugins
33 |     - ./db:/usr/local/airflow/db
34 |   user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-50000}"
35 | 
36 | services:
37 |   airflow-init:
38 |     <<: *airflow-common
39 |     entrypoint: /bin/bash -c "/bin/bash -c \"$${@}\""
40 |     command: |
41 |       /bin/bash -c "
42 |       airflow db init
43 |       airflow db upgrade
44 |       airflow users create -r Admin -u admin -e airflow@airflow.com -f admin -l user -p airflow
45 |       "
46 |     environment:
47 |       <<: *airflow-common-env
48 | 
49 |   airflow-scheduler:
50 |     <<: *airflow-common
51 |     command: scheduler
52 |     environment:
53 |       <<: *airflow-common-env
54 |     restart: always
55 |     depends_on:
56 |       airflow-webserver:
57 |         condition: service_healthy
58 | 
59 |   airflow-webserver:
60 |     <<: *airflow-common
61 |     command: webserver
62 |     ports:
63 |       - 8081:8080
64 |     healthcheck:
65 |       test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
66 |       interval: 10s
67 |       timeout: 10s
68 |       retries: 5
69 |     restart: always
70 |     environment:
71 |       <<: *airflow-common-env
72 | 
--------------------------------------------------------------------------------