├── .gitignore ├── src ├── gold │ ├── .gitkeep │ ├── daily_report.sql │ ├── ingestao.py │ ├── monthly_report.sql │ └── churn_report.sql ├── silver │ ├── .gitkeep │ ├── produtos.sql │ ├── cliente.sql │ ├── transacoes.sql │ ├── transacao_produto.sql │ └── ingestao.py ├── silver_fs │ └── .gitkeep ├── workflows │ ├── requirements.txt │ ├── main.py │ ├── ds-bricks.json │ └── upsell.json ├── bronze │ ├── customers.json │ ├── transactions.json │ ├── transactions_product.json │ └── ingestao.py └── lib │ ├── utils.py │ └── ingestors.py ├── .github ├── workflows │ └── main.yml └── ISSUE_TEMPLATE │ ├── solicitação-de-recurso.md │ ├── relatório-de-problema-na-documentação.md │ └── relatório-de-bug.md ├── README.md └── LICENSE /.gitignore: -------------------------------------------------------------------------------- 1 | .env -------------------------------------------------------------------------------- /src/gold/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/silver/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/silver_fs/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/workflows/requirements.txt: -------------------------------------------------------------------------------- 1 | python-dotenv==1.0.1 2 | Requests==2.32.3 3 | -------------------------------------------------------------------------------- /src/silver/produtos.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | DISTINCT NameProduct AS descProduto 3 | 4 | FROM bronze.upsell.transactions_product 5 | 6 | ORDER BY 1 -------------------------------------------------------------------------------- /src/silver/cliente.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | idCustomer AS idCliente, 3 | PointsCustomer AS nrPontosCliente, 4 | flEmail AS flEmailCliente 5 | 6 | FROM bronze.upsell.customers -------------------------------------------------------------------------------- /src/silver/transacoes.sql: -------------------------------------------------------------------------------- 1 | SELECT idTransaction AS idTransacao, 2 | idCustomer AS idCliente, 3 | dtTransaction AS dtTransacao, 4 | pointsTransaction AS nrPontosTransacao 5 | 6 | FROM bronze.upsell.transactions -------------------------------------------------------------------------------- /src/silver/transacao_produto.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | idTransactionCart AS idTransacaoProduto, 3 | idTransaction AS idTransacao, 4 | NameProduct AS descNomeProduto, 5 | QuantityProduct AS nrQuantidadeProduto 6 | 7 | FROM bronze.upsell.transactions_product -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: main 2 | on: 3 | push: 4 | branches: 5 | - main 6 | 7 | jobs: 8 | sync_jobs: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v2 12 | - uses: actions/setup-python@v2 13 | with: 14 | python-version: '3.12' 15 | - name: Instalando Requeriments 16 | working-directory: src/workflows 17 | run: pip install -r requirements.txt 18 | - name: Sync dos Jobs 19 | env: 20 | DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }} 21 | DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} 22 | working-directory: src/workflows 23 | run: python main.py 24 | -------------------------------------------------------------------------------- /src/bronze/customers.json: -------------------------------------------------------------------------------- 1 | { 2 | "fields": [ 3 | { 4 | "metadata": {}, 5 | "name": "idCustomer", 6 | "nullable": true, 7 | "type": "string" 8 | }, 9 | { 10 | "metadata": {}, 11 | "name": "PointsCustomer", 12 | "nullable": true, 13 | "type": "long" 14 | }, 15 | { 16 | "metadata": {}, 17 | "name": "flEmail", 18 | "nullable": true, 19 | "type": "long" 20 | }, 21 | { 22 | "metadata": {}, 23 | "name": "OP", 24 | "nullable": true, 25 | "type": "string" 26 | }, 27 | { 28 | "metadata": {}, 29 | "name": "modified_date", 30 | "nullable": true, 31 | "type": "timestamp_ntz" 32 | } 33 | ], 34 | "type": "struct" 35 | } -------------------------------------------------------------------------------- /src/gold/daily_report.sql: -------------------------------------------------------------------------------- 1 | SELECT DATE(t1.dtTransacao) AS dtRef, 2 | t2.descNomeProduto, 3 | count(DISTINCT t1.idTransacao) AS nrQuantidadeTrasacoes, 4 | count(DISTINCT t1.idCliente) AS nrQuantidadeClientes, 5 | sum(t1.nrPontosTransacao) AS nrQuantidadePontos, 6 | sum(CASE WHEN t1.nrPontosTransacao > 0 THEN t1.nrPontosTransacao ELSE 0 END) AS nrQuantidadePontosPos, 7 | sum(CASE WHEN t1.nrPontosTransacao < 0 THEN t1.nrPontosTransacao ELSE 0 END) AS nrQuantidadePontosNeg 8 | 9 | FROM silver.upsell.transacoes AS t1 10 | 11 | LEFT JOIN silver.upsell.transacao_produto AS t2 12 | ON t1.idTransacao = t2.idTransacao 13 | 14 | WHERE DATE(t1.dtTransacao) = '{dt_ref}' 15 | 16 | GROUP BY dtRef, t2.descNomeProduto GROUPING SETS ((dtRef, t2.descNomeProduto), (dtRef)) 17 | ORDER BY dtRef -------------------------------------------------------------------------------- /src/bronze/transactions.json: -------------------------------------------------------------------------------- 1 | { 2 | "fields": [ 3 | { 4 | "metadata": {}, 5 | "name": "idTransaction", 6 | "nullable": true, 7 | "type": "string" 8 | }, 9 | { 10 | "metadata": {}, 11 | "name": "idCustomer", 12 | "nullable": true, 13 | "type": "string" 14 | }, 15 | { 16 | "metadata": {}, 17 | "name": "dtTransaction", 18 | "nullable": true, 19 | "type": "string" 20 | }, 21 | { 22 | "metadata": {}, 23 | "name": "pointsTransaction", 24 | "nullable": true, 25 | "type": "long" 26 | }, 27 | { 28 | "metadata": {}, 29 | "name": "OP", 30 | "nullable": true, 31 | "type": "string" 32 | }, 33 | { 34 | "metadata": {}, 35 | "name": "modified_date", 36 | "nullable": true, 37 | "type": "timestamp_ntz" 38 | } 39 | ], 40 | "type": "struct" 41 | } -------------------------------------------------------------------------------- /src/bronze/transactions_product.json: -------------------------------------------------------------------------------- 1 | { 2 | "fields": [ 3 | { 4 | "metadata": {}, 5 | "name": "idTransactionCart", 6 | "nullable": true, 7 | "type": "string" 8 | }, 9 | { 10 | "metadata": {}, 11 | "name": "idTransaction", 12 | "nullable": true, 13 | "type": "string" 14 | }, 15 | { 16 | "metadata": {}, 17 | "name": "NameProduct", 18 | "nullable": true, 19 | "type": "string" 20 | }, 21 | { 22 | "metadata": {}, 23 | "name": "QuantityProduct", 24 | "nullable": true, 25 | "type": "long" 26 | }, 27 | { 28 | "metadata": {}, 29 | "name": "OP", 30 | "nullable": true, 31 | "type": "string" 32 | }, 33 | { 34 | "metadata": {}, 35 | "name": "modified_date", 36 | "nullable": true, 37 | "type": "timestamp_ntz" 38 | } 39 | ], 40 | "type": "struct" 41 | } -------------------------------------------------------------------------------- /src/gold/ingestao.py: -------------------------------------------------------------------------------- 1 | # Databricks notebook source 2 | # DBTITLE 1,IMPORTS 3 | import tqdm 4 | import sys 5 | import datetime 6 | 7 | sys.path.insert(0, "../lib") 8 | 9 | import utils 10 | import ingestors 11 | 12 | # COMMAND ---------- 13 | 14 | # DBTITLE 1,SETUP 15 | catalog = "gold" 16 | schemaname = 'upsell' 17 | tablename = dbutils.widgets.get("tablename") 18 | 19 | start = dbutils.widgets.get("dt_start") # now 20 | stop = dbutils.widgets.get("dt_stop") # now 21 | 22 | if start == datetime.datetime.now().strftime('%Y-%m-%d'): 23 | start = (datetime.datetime.now() - datetime.timedelta(days=1)).strftime("%Y-%m-%d") 24 | 25 | 26 | # COMMAND ---------- 27 | 28 | ingestor = ingestors.IngestorCubo(spark=spark, 29 | catalog=catalog, 30 | schemaname=schemaname, 31 | tablename=tablename) 32 | 33 | ingestor.backfill(start, stop) 34 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/solicitação-de-recurso.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Solicitação de Recurso 3 | about: 'Sua solicitação de recurso está relacionada a algum problema? Por favor, descreva:' 4 | title: Novo Recurso 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | # **Solicitação de Recurso** 11 | 12 | ## **Sua solicitação de recurso está relacionada a algum problema? Por favor, descreva.** 13 | 14 | 15 | - 16 | 17 | --- 18 | 19 | ## **Descreva a solução que você gostaria** 20 | 21 | - 22 | 23 | --- 24 | 25 | ## **Descreva alternativas que você considerou** 26 | 27 | 28 | - 29 | --- 30 | 31 | ### **Contexto adicional** 32 | 33 | - 34 | -------------------------------------------------------------------------------- /src/gold/monthly_report.sql: -------------------------------------------------------------------------------- 1 | SELECT DATE('{dt_ref}') AS dtRef, 2 | t2.descNomeProduto, 3 | count(DISTINCT t1.idTransacao) AS nrQuantidadeTrasacoes, 4 | count(DISTINCT t1.idCliente) AS nrQuantidadeClientes, 5 | count(DISTINCT t1.idTransacao) / count(DISTINCT t1.idCliente) AS nrQuantidadeTransacaoCliente, 6 | sum(t1.nrPontosTransacao) AS nrQuantidadePontos, 7 | sum(CASE WHEN t1.nrPontosTransacao > 0 THEN t1.nrPontosTransacao ELSE 0 END) AS nrQuantidadePontosPos, 8 | sum(CASE WHEN t1.nrPontosTransacao < 0 THEN t1.nrPontosTransacao ELSE 0 END) AS nrQuantidadePontosNeg 9 | 10 | FROM silver.upsell.transacoes AS t1 11 | 12 | LEFT JOIN silver.upsell.transacao_produto AS t2 13 | ON t1.idTransacao = t2.idTransacao 14 | 15 | WHERE DATE(t1.dtTransacao) <= '{dt_ref}' 16 | AND DATE(t1.dtTransacao) > '{dt_ref}' - INTERVAL 28 DAY 17 | 18 | GROUP BY dtRef, t2.descNomeProduto GROUPING SETS ((dtRef, t2.descNomeProduto), (dtRef)) 19 | ORDER BY dtRef, t2.descNomeProduto -------------------------------------------------------------------------------- /src/gold/churn_report.sql: -------------------------------------------------------------------------------- 1 | WITH tb_new AS ( 2 | 3 | SELECT DISTINCT 4 | date('{dt_ref}')AS dtRef, 5 | t1.idCliente 6 | 7 | FROM silver.upsell.transacoes AS t1 8 | 9 | WHERE DATE(t1.dtTransacao) <= '{dt_ref}' 10 | AND DATE(t1.dtTransacao) > '{dt_ref}' - INTERVAL 28 DAY 11 | 12 | ), 13 | 14 | tb_old AS ( 15 | 16 | SELECT DISTINCT 17 | date('{dt_ref}' - INTERVAL 28 DAY)AS dtRef, 18 | t1.idCliente 19 | 20 | FROM silver.upsell.transacoes AS t1 21 | 22 | WHERE DATE(t1.dtTransacao) <= '{dt_ref}' - INTERVAL 28 DAY 23 | AND DATE(t1.dtTransacao) > '{dt_ref}' - INTERVAL 56 DAY 24 | 25 | ) 26 | 27 | select date('{dt_ref}') AS dtRef, 28 | count(t1.idCliente) AS qtdeBaseOld, 29 | count(t2.idCliente) AS qtdeBaseNewNotChurn, 30 | count(t1.idCliente) - count(t2.idCliente) AS nrQtdeChurn, 31 | 1 - count(t2.idCliente) / count(t1.idCliente) AS ChurnRate 32 | 33 | FROM tb_old as t1 34 | 35 | LEFT JOIN tb_new AS t2 36 | ON t1.idCliente = t2.idCliente 37 | 38 | GROUP BY ALL -------------------------------------------------------------------------------- /src/workflows/main.py: -------------------------------------------------------------------------------- 1 | # %% 2 | import os 3 | import dotenv 4 | import requests 5 | import json 6 | 7 | dotenv.load_dotenv(".env") 8 | 9 | DATABRICKS_HOST = os.getenv("DATABRICKS_HOST") 10 | DATABRICKS_TOKEN = os.getenv("DATABRICKS_TOKEN") 11 | 12 | def list_job_names(): 13 | return [i.replace(".json", "") for i in os.listdir(".") if i.endswith(".json")] 14 | 15 | 16 | def load_settings(job_name): 17 | with open(f"{job_name}.json", "r") as open_file: 18 | settings = json.load(open_file) 19 | return settings 20 | 21 | 22 | def reset_job(settings): 23 | url = f"https://{DATABRICKS_HOST}/api/2.1/jobs/reset" 24 | header = {"Authorization": f"Bearer {DATABRICKS_TOKEN}"} 25 | resp = requests.post(url=url, headers=header, json=settings) 26 | return resp 27 | 28 | 29 | def main(): 30 | for i in list_job_names(): 31 | settings = load_settings(job_name=i) 32 | resp = reset_job(settings=settings) 33 | if resp.status_code == 200: 34 | print(f"Job '{i}' atualizado com sucesso!") 35 | else: 36 | print(f"Não foi possível atualizar o job '{i}'. Error: {resp.text}") 37 | 38 | if __name__ == "__main__": 39 | main() -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/relatório-de-problema-na-documentação.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Relatório de Problema na Documentação 3 | about: Uma descrição do que precisa de documentação 4 | title: Documentação 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | # **Relatório de Problema na Documentação** 11 | 12 | ## **Descreva o Documentação** 13 | 14 | 15 | - 16 | 17 | --- 18 | 19 | ### **Para Reproduzir** 20 | 21 | 27 | 28 | 29 | 30 | 1. 31 | 2. 32 | 3. 33 | 4. 34 | 35 | --- 36 | 37 | ### **Prova em mídia** 38 | 39 | 40 | --- 41 | 42 | ## **Descreva a solução desejada** 43 | 44 | - 45 | 46 | --- 47 | 48 | ### **Contexto adicional** 49 | 50 | - 51 | -------------------------------------------------------------------------------- /src/silver/ingestao.py: -------------------------------------------------------------------------------- 1 | # Databricks notebook source 2 | # DBTITLE 1,SETUP 3 | import sys 4 | 5 | sys.path.insert(0, "../lib") 6 | 7 | import utils 8 | import ingestors 9 | 10 | tablename = dbutils.widgets.get("tablename") 11 | idfield = dbutils.widgets.get("id_field") 12 | idfield_old = dbutils.widgets.get("id_field_old") 13 | 14 | # tablename = "transacoes" 15 | # idfield = "idTransacao" 16 | # idfield_old = "idTransaction" 17 | 18 | catalog = "silver" 19 | schemaname = "upsell" 20 | 21 | # COMMAND ---------- 22 | 23 | # DBTITLE 1,INGESTAO FULL-LOAD 24 | 25 | remove_checkpoint = False 26 | 27 | if not utils.table_exists(spark, "silver", "upsell", tablename): 28 | 29 | print("Criando a tabela", tablename) 30 | query = utils.import_query(f"{tablename}.sql") 31 | (spark.sql(query) 32 | .write 33 | .format("delta") 34 | .mode("overwrite") 35 | .option("overwriteSchema", "true") 36 | .saveAsTable(f"silver.upsell.{tablename}")) 37 | 38 | remove_checkpoint = True 39 | 40 | # COMMAND ---------- 41 | 42 | # DBTITLE 1,Ingestão CDF + Streaming 43 | print("Iniciando CDF...") 44 | 45 | ingest = ingestors.IngestorCDF(spark=spark, 46 | catalog=catalog, 47 | schemaname=schemaname, 48 | tablename=tablename, 49 | id_field=idfield, 50 | idfield_old=idfield_old) 51 | 52 | if remove_checkpoint: 53 | dbutils.fs.rm(ingest.checkpoint_location, True) 54 | 55 | stream = ingest.execute() 56 | print("Ok.") 57 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/relatório-de-bug.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Relatório de Bug 3 | about: Descreva o bug 4 | title: Report de Bug 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | # **Relatório de Bug** 11 | 12 | ## **Descreva o bug** 13 | 14 | 15 | - 16 | 17 | --- 18 | 19 | ### **Isso é uma regressão?** 20 | 21 | 22 | 23 | --- 24 | 25 | ### **Para reproduzir** 26 | 27 | 33 | 34 | 35 | 36 | 1. 37 | 2. 38 | 3. 39 | 4. 40 | 41 | --- 42 | 43 | ### **Comportamento esperado** 44 | 45 | 46 | - 47 | 48 | --- 49 | 50 | ### **Prova em mídia** 51 | 52 | 53 | --- 54 | 55 | ### **Seu ambiente** 56 | 57 | 59 | 60 | * SO: 61 | * Versão de Pacote: 62 | * Nome e versão do navegador: 63 | 64 | --- 65 | 66 | ### **Contexto adicional** 67 | 68 | 69 | - 70 | -------------------------------------------------------------------------------- /src/bronze/ingestao.py: -------------------------------------------------------------------------------- 1 | # Databricks notebook source 2 | # DBTITLE 1,Imports 3 | import delta 4 | import sys 5 | 6 | sys.path.insert(0, "../lib/") 7 | 8 | import utils 9 | import ingestors 10 | 11 | # COMMAND ---------- 12 | 13 | # DBTITLE 1,SETUP 14 | catalog = "bronze" 15 | schemaname = "upsell" 16 | 17 | tablename = dbutils.widgets.get("tablename") 18 | id_field = dbutils.widgets.get("id_field") 19 | timestamp_field = dbutils.widgets.get("timestamp_field") 20 | 21 | cdc_path = f"/Volumes/raw/{schemaname}/cdc/{tablename}/" 22 | full_load_path = f"/Volumes/raw/{schemaname}/full_load/{tablename}/" 23 | checkpoint_location = f"/Volumes/raw/{schemaname}/cdc/{tablename}_checkpoint/" 24 | 25 | # COMMAND ---------- 26 | 27 | # DBTITLE 1,Ingestão do Full Load 28 | if not utils.table_exists(spark, catalog, schemaname, tablename): 29 | 30 | print("Tabela não existente, criando...") 31 | 32 | dbutils.fs.rm(checkpoint_location, True) 33 | 34 | ingest_full_load = ingestors.Ingestor(spark=spark, 35 | catalog=catalog, 36 | schemaname=schemaname, 37 | tablename=tablename, 38 | data_format="parquet") 39 | 40 | ingest_full_load.execute(full_load_path) 41 | print("Tabela criada com sucesso!") 42 | 43 | else: 44 | print("Tabela já existente, ignorando full-load") 45 | 46 | # COMMAND ---------- 47 | 48 | # DBTITLE 1,CDC 49 | ingest_cdc = ingestors.IngestorCDC(spark=spark, 50 | catalog=catalog, 51 | schemaname=schemaname, 52 | tablename=tablename, 53 | data_format="parquet", 54 | id_field=id_field, 55 | timestamp_field=timestamp_field) 56 | 57 | stream = ingest_cdc.execute(cdc_path) 58 | -------------------------------------------------------------------------------- /src/lib/utils.py: -------------------------------------------------------------------------------- 1 | import json 2 | from pyspark.sql import types 3 | import datetime 4 | 5 | def import_query(path): 6 | with open(path, "r") as open_file: 7 | return open_file.read() 8 | 9 | def table_exists(spark, catalog, database, table): 10 | count = (spark.sql(f"SHOW TABLES FROM {catalog}.{database}") 11 | .filter(f"database = '{database}' AND tableName = '{table}'") 12 | .count()) 13 | return count == 1 14 | 15 | 16 | def import_schema(tablename:str): 17 | with open(f"{tablename}.json", "r") as open_file: 18 | schema_json = json.load(open_file) ## dicionário 19 | 20 | schema_df = types.StructType.fromJson(schema_json) 21 | return schema_df 22 | 23 | 24 | def extract_from(query:str): 25 | tablename = (query.lower() 26 | .split("from")[-1] 27 | .strip(" ") 28 | .split(" ")[0] 29 | .split("\n")[0] 30 | .strip(" ")) 31 | return tablename 32 | 33 | 34 | def add_generic_from(query:str, generic_from="df"): 35 | tablename = extract_from(query) 36 | query = query.replace(tablename, generic_from) 37 | return query 38 | 39 | 40 | def add_fields(query:str, fields:list): 41 | select = query.split("FROM")[0].strip(" \n") 42 | fields = ",\n".join(fields) 43 | from_query = f"\n\nFROM{query.split('FROM')[-1]}" 44 | query_new = f"{select},\n{fields}{from_query}" 45 | return query_new 46 | 47 | 48 | def format_query_cdf(query:str, from_table:str): 49 | fields = ["_change_type", "_commit_version", "_commit_timestamp"] 50 | query = add_fields(query=query, fields=fields) 51 | query = add_generic_from(query=query, generic_from=from_table) 52 | return query 53 | 54 | 55 | def date_range(start, stop): 56 | dt_start = datetime.datetime.strptime(start, "%Y-%m-%d") 57 | dt_stop = datetime.datetime.strptime(stop, "%Y-%m-%d") 58 | dates = [] 59 | while dt_start < dt_stop: 60 | dates.append(dt_start.strftime("%Y-%m-%d")) 61 | dt_start += datetime.timedelta(days=1) 62 | return dates -------------------------------------------------------------------------------- /src/workflows/ds-bricks.json: -------------------------------------------------------------------------------- 1 | { 2 | "job_id": 196700556036763, 3 | "new_settings": { 4 | "name": "ds-bricks", 5 | "email_notifications": { 6 | "no_alert_for_skipped_runs": false 7 | }, 8 | "webhook_notifications": {}, 9 | "timeout_seconds": 0, 10 | "max_concurrent_runs": 1, 11 | "tasks": [ 12 | { 13 | "task_key": "fs_dia_horario", 14 | "run_if": "ALL_SUCCESS", 15 | "notebook_task": { 16 | "notebook_path": "src/feature_store/ingestao", 17 | "base_parameters": { 18 | "table": "fs_dia_horario", 19 | "dt_start": "{{job.start_time.iso_date}}", 20 | "dt_stop": "{{job.start_time.iso_date}}" 21 | }, 22 | "source": "GIT" 23 | }, 24 | "existing_cluster_id": "0809-155233-nc569ju7", 25 | "timeout_seconds": 0, 26 | "email_notifications": {}, 27 | "notification_settings": { 28 | "no_alert_for_skipped_runs": false, 29 | "no_alert_for_canceled_runs": false, 30 | "alert_on_last_attempt": false 31 | }, 32 | "webhook_notifications": {} 33 | }, 34 | { 35 | "task_key": "fs_geral", 36 | "run_if": "ALL_SUCCESS", 37 | "notebook_task": { 38 | "notebook_path": "src/feature_store/ingestao", 39 | "base_parameters": { 40 | "table": "fs_geral", 41 | "dt_start": "{{job.start_time.iso_date}}", 42 | "dt_stop": "{{job.start_time.iso_date}}" 43 | }, 44 | "source": "GIT" 45 | }, 46 | "existing_cluster_id": "0809-155233-nc569ju7", 47 | "timeout_seconds": 0, 48 | "email_notifications": {}, 49 | "notification_settings": { 50 | "no_alert_for_skipped_runs": false, 51 | "no_alert_for_canceled_runs": false, 52 | "alert_on_last_attempt": false 53 | }, 54 | "webhook_notifications": {} 55 | }, 56 | { 57 | "task_key": "fs_pontos", 58 | "run_if": "ALL_SUCCESS", 59 | "notebook_task": { 60 | "notebook_path": "src/feature_store/ingestao", 61 | "base_parameters": { 62 | "table": "fs_pontos", 63 | "dt_start": "{{job.start_time.iso_date}}", 64 | "dt_stop": "{{job.start_time.iso_date}}" 65 | }, 66 | "source": "GIT" 67 | }, 68 | "existing_cluster_id": "0809-155233-nc569ju7", 69 | "timeout_seconds": 0, 70 | "email_notifications": {}, 71 | "notification_settings": { 72 | "no_alert_for_skipped_runs": false, 73 | "no_alert_for_canceled_runs": false, 74 | "alert_on_last_attempt": false 75 | }, 76 | "webhook_notifications": {} 77 | }, 78 | { 79 | "task_key": "fs_transacoes", 80 | "run_if": "ALL_SUCCESS", 81 | "notebook_task": { 82 | "notebook_path": "src/feature_store/ingestao", 83 | "base_parameters": { 84 | "table": "fs_transacoes", 85 | "dt_start": "{{job.start_time.iso_date}}", 86 | "dt_stop": "{{job.start_time.iso_date}}" 87 | }, 88 | "source": "GIT" 89 | }, 90 | "existing_cluster_id": "0809-155233-nc569ju7", 91 | "timeout_seconds": 0, 92 | "email_notifications": {}, 93 | "notification_settings": { 94 | "no_alert_for_skipped_runs": false, 95 | "no_alert_for_canceled_runs": false, 96 | "alert_on_last_attempt": false 97 | }, 98 | "webhook_notifications": {} 99 | }, 100 | { 101 | "task_key": "model_churn", 102 | "depends_on": [ 103 | { 104 | "task_key": "fs_dia_horario" 105 | }, 106 | { 107 | "task_key": "fs_geral" 108 | }, 109 | { 110 | "task_key": "fs_pontos" 111 | }, 112 | { 113 | "task_key": "fs_transacoes" 114 | } 115 | ], 116 | "run_if": "ALL_SUCCESS", 117 | "notebook_task": { 118 | "notebook_path": "src/model_churn/predict", 119 | "base_parameters": { 120 | "date": "{{job.start_time.iso_date}}" 121 | }, 122 | "source": "GIT" 123 | }, 124 | "existing_cluster_id": "0809-155233-nc569ju7", 125 | "libraries": [ 126 | { 127 | "pypi": { 128 | "package": "databricks_feature_engineering==0.6.0" 129 | } 130 | }, 131 | { 132 | "pypi": { 133 | "package": "pandas==2.2.2" 134 | } 135 | }, 136 | { 137 | "pypi": { 138 | "package": "mlflow==2.15.1" 139 | } 140 | }, 141 | { 142 | "pypi": { 143 | "package": "feature_engine==1.8.0" 144 | } 145 | }, 146 | { 147 | "pypi": { 148 | "package": "scikit_learn==1.5.1" 149 | } 150 | }, 151 | { 152 | "pypi": { 153 | "package": "cloudpickle==3.0.0" 154 | } 155 | } 156 | ], 157 | "timeout_seconds": 0, 158 | "email_notifications": {}, 159 | "notification_settings": { 160 | "no_alert_for_skipped_runs": false, 161 | "no_alert_for_canceled_runs": false, 162 | "alert_on_last_attempt": false 163 | }, 164 | "webhook_notifications": {} 165 | } 166 | ], 167 | "git_source": { 168 | "git_url": "https://github.com/TeoMeWhy/ds-bricks", 169 | "git_provider": "gitHub", 170 | "git_branch": "main" 171 | }, 172 | "queue": { 173 | "enabled": true 174 | }, 175 | "run_as": { 176 | "user_name": "teomewhy@gmail.com" 177 | } 178 | } 179 | } -------------------------------------------------------------------------------- /src/lib/ingestors.py: -------------------------------------------------------------------------------- 1 | import delta 2 | import utils 3 | import tqdm 4 | 5 | class Ingestor: 6 | 7 | def __init__(self, spark, catalog, schemaname, tablename, data_format): 8 | self.spark = spark 9 | self.catalog = catalog 10 | self.schemaname = schemaname 11 | self.tablename = tablename 12 | self.format = data_format 13 | self.set_schema() 14 | 15 | def set_schema(self): 16 | self.data_schema = utils.import_schema(self.tablename) 17 | 18 | def load(self, path): 19 | df = (self.spark 20 | .read 21 | .format(self.format) 22 | .schema(self.data_schema) 23 | .load(path)) 24 | return df 25 | 26 | def save(self, df): 27 | (df.write 28 | .format("delta") 29 | .mode("overwrite") 30 | .saveAsTable(f"{self.catalog}.{self.schemaname}.{self.tablename}")) 31 | return True 32 | 33 | def execute(self, path): 34 | df = self.load(path) 35 | return self.save(df) 36 | 37 | 38 | class IngestorCDC(Ingestor): 39 | 40 | def __init__(self, spark, catalog, schemaname, tablename, data_format, id_field, timestamp_field): 41 | super().__init__(spark, catalog, schemaname, tablename, data_format) 42 | self.id_field = id_field 43 | self.timestamp_field = timestamp_field 44 | self.set_deltatable() 45 | 46 | def set_deltatable(self): 47 | tablename = f"{self.catalog}.{self.schemaname}.{self.tablename}" 48 | self.deltatable = delta.DeltaTable.forName(self.spark, tablename) 49 | 50 | def upsert(self, df): 51 | df.createOrReplaceGlobalTempView(f"view_{self.tablename}") 52 | query = f''' 53 | SELECT * 54 | FROM global_temp.view_{self.tablename} 55 | QUALIFY ROW_NUMBER() OVER (PARTITION BY {self.id_field} ORDER BY {self.timestamp_field} DESC) = 1 56 | ''' 57 | 58 | df_cdc = self.spark.sql(query) 59 | 60 | (self.deltatable 61 | .alias("b") 62 | .merge(df_cdc.alias("d"), f"b.{self.id_field} = d.{self.id_field}") 63 | .whenMatchedDelete(condition = "d.OP = 'D'") 64 | .whenMatchedUpdateAll(condition = "d.OP = 'U'") 65 | .whenNotMatchedInsertAll(condition = "d.OP = 'I' OR d.OP = 'U'") 66 | .execute()) 67 | 68 | def load(self, path): 69 | df = (self.spark 70 | .readStream 71 | .format("cloudFiles") 72 | .option("cloudFiles.format", self.format) 73 | .schema(self.data_schema) 74 | .load(path)) 75 | return df 76 | 77 | def save(self, df): 78 | stream = (df.writeStream 79 | .option("checkpointLocation", f"/Volumes/raw/{self.schemaname}/cdc/{self.tablename}_checkpoint/") 80 | .foreachBatch(lambda df, batchID: self.upsert(df)) 81 | .trigger(availableNow=True)) 82 | return stream.start() 83 | 84 | 85 | class IngestorCDF(IngestorCDC): 86 | 87 | def __init__(self, spark, catalog, schemaname, tablename, id_field, idfield_old): 88 | 89 | super().__init__(spark=spark, 90 | catalog=catalog, 91 | schemaname=schemaname, 92 | tablename=tablename, 93 | data_format='delta', 94 | id_field=id_field, 95 | timestamp_field='_commit_timestamp') 96 | 97 | self.idfield_old = idfield_old 98 | self.set_query() 99 | self.checkpoint_location = f"/Volumes/raw/{schemaname}/cdc/{catalog}_{tablename}_checkpoint/" 100 | 101 | def set_schema(self): 102 | return 103 | 104 | def set_query(self): 105 | query = utils.import_query(f"{self.tablename}.sql") 106 | self.from_table = utils.extract_from(query=query) 107 | self.original_query = query 108 | self.query = utils.format_query_cdf(query, "{df}") 109 | 110 | def load(self): 111 | df = (self.spark.readStream 112 | .format('delta') 113 | .option("readChangeFeed", "true") 114 | .table(self.from_table)) 115 | return df 116 | 117 | def save(self, df): 118 | stream = (df.writeStream 119 | .option("checkpointLocation", self.checkpoint_location) 120 | .foreachBatch(lambda df, batchID: self.upsert(df) ) 121 | .trigger(availableNow=True)) 122 | return stream.start() 123 | 124 | def upsert(self, df): 125 | df.createOrReplaceGlobalTempView(f"silver_{self.tablename}") 126 | 127 | query_last = f""" 128 | SELECT * 129 | FROM global_temp.silver_{self.tablename} 130 | WHERE _change_type <> 'update_preimage' 131 | QUALIFY ROW_NUMBER() OVER (PARTITION BY {self.idfield_old} ORDER BY _commit_timestamp DESC) = 1 132 | """ 133 | df_last = self.spark.sql(query_last) 134 | df_upsert = self.spark.sql(self.query, df=df_last) 135 | 136 | (self.deltatable 137 | .alias("s") 138 | .merge(df_upsert.alias("d"), f"s.{self.id_field} = d.{self.id_field}") 139 | .whenMatchedDelete(condition = "d._change_type = 'delete'") 140 | .whenMatchedUpdateAll(condition = "d._change_type = 'update_postimage'") 141 | .whenNotMatchedInsertAll(condition = "d._change_type = 'insert' OR d._change_type = 'update_postimage'") 142 | .execute()) 143 | 144 | def execute(self): 145 | df = self.load() 146 | return self.save(df) 147 | 148 | class IngestorCubo: 149 | 150 | def __init__(self, spark, catalog, schemaname, tablename): 151 | self.spark = spark 152 | self.catalog = catalog 153 | self.schemaname = schemaname 154 | self.tablename = tablename 155 | self.table = f"{catalog}.{schemaname}.{tablename}" 156 | self.set_query() 157 | 158 | def set_query(self): 159 | self.query = utils.import_query(f"{self.tablename}.sql") 160 | 161 | def load(self, **kwargs): 162 | df = self.spark.sql(self.query.format(**kwargs)) 163 | return df 164 | 165 | def save(self, df, dt_ref): 166 | self.spark.sql(f"DELETE FROM {self.table} WHERE dtRef = '{dt_ref}'") 167 | 168 | (df.write 169 | .mode("append") 170 | .saveAsTable(self.table)) 171 | 172 | def backfill(self, dt_start, dt_stop): 173 | dates = utils.date_range(dt_start, dt_stop) 174 | 175 | if not utils.table_exists(self.spark, self.catalog, self.schemaname, self.tablename): 176 | df = self.load(dt_ref=dates.pop(0)) 177 | df.write.saveAsTable(self.table) 178 | 179 | for dt in tqdm.tqdm(dates): 180 | df = self.load(dt_ref=dt) 181 | self.save(df=df, dt_ref=dt) 182 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # O Lago do Mago 2 | 3 | Construção de um Lakehouse completamente do zero! 4 | 5 | Seja membro ou sub de nossos canais e assista todos os vídeos deste projeto. 6 | 7 | - [YouTube](https://www.youtube.com/playlist?list=PLvlkVRRKOYFTcLehYZ2Bd5hGIcLH0dJHE) 8 | - [Twitch](https://www.twitch.tv/collections/2e8D0Vgd3hf04g) 9 | 10 | Datalake-upsell 11 | 12 | ## Sobre 13 | 14 | A partir dos dados do nosso sistema de pontos, vamos construir ingestões de dados no Databricks. 15 | 16 | DB -> Raw -> Bronze -> Silver -> Silver FS -> Modelo I.A. 17 | 18 | ### Envio dos dados para o bucket S3 19 | 20 | Criamos um script em Python que verifica cada novo registro (ou atualização) que ocorre no banco de dados em produto. Este mesmo script envia os dados de cada tabela para o S3 em formato `.parquet`, simulando um `Change Data Capture` (CDC). 21 | 22 | Foi realizda uma carga `full-load` dia 13/06/2024, para o mesmo bucket, em um diretório específico. 23 | 24 | A criação deste script foi realizado em algumas lives aleatórias do dia a dia (estamos online todos os dias 9AM na [Twitch](https://twitch.tv/teomewhy). 25 | 26 | ### Setup Databricks 27 | 28 | No primeiro dia de projeto, mostramos como realizar o setup do ambiente do Databricks. Isto é: 29 | - Criação do Workspace + Unity Catalog 30 | - Setup do External Location (S3 em Raw) 31 | - Adição do Volume dos dados em Raw 32 | 33 | ### Consumo dos dados para Bronze 34 | 35 | Seguimos no projeto para realizar as primeiras ingestões de dados. 36 | 37 | Criamos nosso primeiro notebook e fizemos a leitura dos dados `full-load` em Raw e salvamos em Bronze. 38 | 39 | Algo similar à este script: 40 | 41 | ```python 42 | df_full = (spark.read 43 | .format("parquet") 44 | .load(f"/Volumes/raw/upsell/full_load/{tablename}/")) 45 | 46 | (df_full.coalesce(1) 47 | .write 48 | .format("delta") 49 | .mode("overwrite") 50 | .saveAsTable(f"{catalog}.{schema}.{tablename}")) 51 | ``` 52 | 53 | Ainda neste mesmo dia, realizamos a ingestão de todos os dados em CDC com Upsert em Delta. 54 | 55 | Ou seja, identificamos a última versão válida do dado com base na `primary key` e no campo `modified date` que vem do CDC. 56 | 57 | ```python 58 | (spark.read 59 | .format("parquet") 60 | .load(f"/Volumes/raw/upsell/cdc/{tablename}/") 61 | .createOrReplaceTempView(f"view_{tablename}")) 62 | 63 | query = f''' 64 | SELECT * 65 | FROM "view_{tablename}" 66 | QUALIFY ROW_NUMBER() OVER (PARTITION BY {primary_key} ORDER BY modified_date DESC) = 1 67 | ''' 68 | 69 | df_cdc_unique = spark.sql(query) 70 | 71 | bronze = delta.DeltaTable.forName(spark, f"{catalog}.{schema}.{tablename}") 72 | 73 | (bronze.alias("b") 74 | .merge(df_cdc_unique.alias("d"), f"b.{primary_key} = d.{primary_key}") 75 | .whenMatchedDelete(condition = "d.OP = 'D'") 76 | .whenMatchedUpdateAll(condition = "d.OP = 'U'") 77 | .whenNotMatchedInsertAll(condition = "d.OP = 'I' OR d.OP = 'U'") 78 | .execute()) 79 | ``` 80 | 81 | Apesar deste código ser funcional, não é muito bacana. Pois a cada nova carga em CDC, todos os arquivos são lidos e processados. No dia seguinte mostramos uma solução interessante para esta questão, utilizando Spark Streaming (`CloudFiles`). 82 | 83 | ### Consumo por Streaming (CloudFiles) 84 | 85 | Embora a solução anterior em `batch` para `CDC` tenha funcionado, essa não é uma solução muito performática. Uma vez que a cada nova carga, todos os dados na pasta `CDC` serão lidos e processados. existem algumas alternativas para solucionar essa perda de performance, como: 86 | 87 | - Particionamento dos dados em pastas de data (yyyy-mm-dd) 88 | - Movimentação dos arquivos lidos para outro diretório/bucket 89 | - Leitura via Streaming 90 | 91 | Adotaremos a última opção, realizando a leitura dos dados utilizando Streaming com Apache Spark: 92 | 93 | ```python 94 | df_stream = (spark.readStream 95 | .format("cloudFiles") 96 | .option("cloudFiles.format", "parquet") 97 | .schema(schema) 98 | .load(f"/Volumes/raw/upsell/cdc/{tablename}/")) 99 | ``` 100 | 101 | A opção de formato `cloudFiles` é algo específico do Databricks. Você pode utilizar apenas `parquet` caso esteja trabalhando com o Apache Spark Vanilla. Vale ressaltar que para o streaming funcionar, é necessário passar o `schema` dos arquivos a serem lidos. 102 | 103 | O próximo passo é realizar a escrita dos dados a partir do Dataframe criado com Streaming: 104 | 105 | ```python 106 | stream = (df_stream.writeStream 107 | .option("checkpointLocation", f"/Volumes/raw/upsell/cdc/{tablename}_checkpoint/") 108 | .foreachBatch(lambda df, batchID: upsert(df, bronze)) 109 | .trigger(availableNow=True)) 110 | ``` 111 | 112 | Pontos de destaque: 113 | - `checkpointLocation`:trata-se de uma diretório especial onde o Spark conseguirá identificar a partir de qual arquivo ele deve ler na próxima iteração. 114 | - `.foreachBatch`: definição de como cada batch do streaming será processado, i.e. como lidaremos com os dados. 115 | - `.trigger(availableNow=True))`: garante que após o processamento de todos os arvuiso disponíveis, a stream é encerrada. 116 | 117 | Agora, precisamos definir a função `upsert`. Ela seguirá o mesmo racional apresentado na etapa de CDC anteriormente, isto é, consolidação dos dados novos para realizar o merge na tabela já existente em bronze. 118 | 119 | ```python 120 | def upsert(df, deltatable): 121 | df.createOrReplaceGlobalTempView(f"view_{tablename}") 122 | 123 | query = f''' 124 | SELECT * 125 | FROM global_temp.view_{tablename} 126 | QUALIFY ROW_NUMBER() OVER (PARTITION BY {id_field} ORDER BY {timestamp_field} DESC) = 1 127 | ''' 128 | 129 | df_cdc = spark.sql(query) 130 | 131 | (deltatable.alias("b") 132 | .merge(df_cdc.alias("d"), f"b.{id_field} = d.{id_field}") 133 | .whenMatchedDelete(condition = "d.OP = 'D'") 134 | .whenMatchedUpdateAll(condition = "d.OP = 'U'") 135 | .whenNotMatchedInsertAll(condition = "d.OP = 'I' OR d.OP = 'U'") 136 | .execute()) 137 | ``` 138 | 139 | Ou seja, desta forma, para cada batch lido na stream, realizamos o upsert dos dos em bronze. 140 | 141 | ### Classes de ingestão 142 | 143 | Buscando melhorar ainda mais o nosso código, decidimos construir algumas classes para ingestão desses dados. Isso nos ajudará aplicar esses mesmos métodos e estratégias de ingestão em outros contextos ou necessidades. 144 | 145 | #### Classe para carga Full-load 146 | 147 | ```python 148 | class Ingestor: 149 | 150 | def __init__(self, spark, catalog, schemaname, tablename, data_format): 151 | self.spark = spark 152 | self.catalog = catalog 153 | self.schemaname = schemaname 154 | self.tablename = tablename 155 | self.format = data_format 156 | self.set_schema() 157 | 158 | def set_schema(self): 159 | self.data_schema = utils.import_schema(self.tablename) 160 | 161 | def load(self, path): 162 | df = (self.spark 163 | .read 164 | .format(self.format) 165 | .schema(self.data_schema) 166 | .load(path)) 167 | return df 168 | 169 | def save(self, df): 170 | (df.write 171 | .format("delta") 172 | .mode("overwrite") 173 | .saveAsTable(f"{self.catalog}.{self.schemaname}.{self.tablename}")) 174 | return True 175 | 176 | def execute(self, path): 177 | df = self.load(path) 178 | return self.save(df) 179 | ``` 180 | 181 | #### Classe para carga CDC 182 | 183 | ```python 184 | class IngestorCDC(Ingestor): 185 | 186 | def __init__(self, spark, catalog, schemaname, tablename, data_format, id_field, timestamp_field): 187 | super().__init__(spark, catalog, schemaname, tablename, data_format) 188 | self.id_field = id_field 189 | self.timestamp_field = timestamp_field 190 | self.set_deltatable() 191 | 192 | def set_deltatable(self): 193 | tablename = f"{self.catalog}.{self.schemaname}.{self.tablename}" 194 | self.deltatable = delta.DeltaTable.forName(self.spark, tablename) 195 | 196 | def upsert(self, df): 197 | df.createOrReplaceGlobalTempView(f"view_{self.tablename}") 198 | query = f''' 199 | SELECT * 200 | FROM global_temp.view_{self.tablename} 201 | QUALIFY ROW_NUMBER() OVER (PARTITION BY {self.id_field} ORDER BY {self.timestamp_field} DESC) = 1 202 | ''' 203 | 204 | df_cdc = self.spark.sql(query) 205 | 206 | (self.deltatable 207 | .alias("b") 208 | .merge(df_cdc.alias("d"), f"b.{self.id_field} = d.{self.id_field}") 209 | .whenMatchedDelete(condition = "d.OP = 'D'") 210 | .whenMatchedUpdateAll(condition = "d.OP = 'U'") 211 | .whenNotMatchedInsertAll(condition = "d.OP = 'I' OR d.OP = 'U'") 212 | .execute()) 213 | 214 | def load(self, path): 215 | df = (self.spark 216 | .readStream 217 | .format("cloudFiles") 218 | .option("cloudFiles.format", self.format) 219 | .schema(self.data_schema) 220 | .load(path)) 221 | return df 222 | 223 | def save(self, df): 224 | stream = (df.writeStream 225 | .option("checkpointLocation", f"/Volumes/raw/{self.schemaname}/cdc/{self.tablename}_checkpoint/") 226 | .foreachBatch(lambda df, batchID: self.upsert(df)) 227 | .trigger(availableNow=True)) 228 | return stream.start() 229 | ``` 230 | 231 | #### Execução 232 | 233 | Com isso em mente, podemos apenas invorcar as classes e realizar sua execução: 234 | 235 | ```python 236 | if not utils.table_exists(spark, catalog, schemaname, tablename): 237 | 238 | print("Tabela não existente, criando...") 239 | 240 | dbutils.fs.rm(checkpoint_location, True) 241 | 242 | ingest_full_load = ingestors.Ingestor(spark=spark, 243 | catalog=catalog, 244 | schemaname=schemaname, 245 | tablename=tablename, 246 | data_format="parquet") 247 | 248 | ingest_full_load.execute(full_load_path) 249 | print("Tabela criada com sucesso!") 250 | 251 | else: 252 | print("Tabela já existente, ignorando full-load") 253 | 254 | print("Executando carga cdc...") 255 | ingest_cdc = ingestors.IngestorCDC(spark=spark, 256 | catalog=catalog, 257 | schemaname=schemaname, 258 | tablename=tablename, 259 | data_format="parquet", 260 | id_field=id_field, 261 | timestamp_field=timestamp_field) 262 | 263 | stream = ingest_cdc.execute(cdc_path) 264 | print("ok") 265 | ``` 266 | -------------------------------------------------------------------------------- /src/workflows/upsell.json: -------------------------------------------------------------------------------- 1 | { 2 | "job_id": 557670198108589, 3 | "new_settings": { 4 | "name": "Upsell", 5 | "email_notifications": { 6 | "no_alert_for_skipped_runs": false 7 | }, 8 | "webhook_notifications": {}, 9 | "timeout_seconds": 0, 10 | "schedule": { 11 | "quartz_cron_expression": "00 00 7 * * ?", 12 | "timezone_id": "America/Sao_Paulo", 13 | "pause_status": "UNPAUSED" 14 | }, 15 | "max_concurrent_runs": 1, 16 | "tasks": [ 17 | { 18 | "task_key": "bronze_customers", 19 | "run_if": "ALL_SUCCESS", 20 | "notebook_task": { 21 | "notebook_path": "src/bronze/ingestao", 22 | "base_parameters": { 23 | "tablename": "customers", 24 | "id_field": "idCustomer", 25 | "timestamp_field": "modified_date" 26 | }, 27 | "source": "GIT" 28 | }, 29 | "existing_cluster_id": "0809-155233-nc569ju7", 30 | "timeout_seconds": 0, 31 | "email_notifications": {}, 32 | "notification_settings": { 33 | "no_alert_for_skipped_runs": false, 34 | "no_alert_for_canceled_runs": false, 35 | "alert_on_last_attempt": false 36 | }, 37 | "webhook_notifications": {} 38 | }, 39 | { 40 | "task_key": "bronze_transactions", 41 | "run_if": "ALL_SUCCESS", 42 | "notebook_task": { 43 | "notebook_path": "src/bronze/ingestao", 44 | "base_parameters": { 45 | "tablename": "transactions", 46 | "id_field": "idTransaction", 47 | "timestamp_field": "modified_date" 48 | }, 49 | "source": "GIT" 50 | }, 51 | "existing_cluster_id": "0809-155233-nc569ju7", 52 | "timeout_seconds": 0, 53 | "email_notifications": {}, 54 | "notification_settings": { 55 | "no_alert_for_skipped_runs": false, 56 | "no_alert_for_canceled_runs": false, 57 | "alert_on_last_attempt": false 58 | }, 59 | "webhook_notifications": {} 60 | }, 61 | { 62 | "task_key": "bronze_transactions_product", 63 | "run_if": "ALL_SUCCESS", 64 | "notebook_task": { 65 | "notebook_path": "src/bronze/ingestao", 66 | "base_parameters": { 67 | "tablename": "transactions_product", 68 | "id_field": "idTransactionCart", 69 | "timestamp_field": "modified_date" 70 | }, 71 | "source": "GIT" 72 | }, 73 | "existing_cluster_id": "0809-155233-nc569ju7", 74 | "timeout_seconds": 0, 75 | "email_notifications": {}, 76 | "notification_settings": { 77 | "no_alert_for_skipped_runs": false, 78 | "no_alert_for_canceled_runs": false, 79 | "alert_on_last_attempt": false 80 | }, 81 | "webhook_notifications": {} 82 | }, 83 | { 84 | "task_key": "silver_cliente", 85 | "depends_on": [ 86 | { 87 | "task_key": "bronze_customers" 88 | } 89 | ], 90 | "run_if": "ALL_SUCCESS", 91 | "notebook_task": { 92 | "notebook_path": "src/silver/ingestao", 93 | "base_parameters": { 94 | "tablename": "cliente", 95 | "id_field": "idCliente", 96 | "id_field_old": "idCustomer" 97 | }, 98 | "source": "GIT" 99 | }, 100 | "existing_cluster_id": "0809-155233-nc569ju7", 101 | "timeout_seconds": 0, 102 | "email_notifications": {} 103 | }, 104 | { 105 | "task_key": "silver_transacao_produto", 106 | "depends_on": [ 107 | { 108 | "task_key": "bronze_transactions_product" 109 | } 110 | ], 111 | "run_if": "ALL_SUCCESS", 112 | "notebook_task": { 113 | "notebook_path": "src/silver/ingestao", 114 | "base_parameters": { 115 | "tablename": "transacao_produto", 116 | "id_field": "idTransacaoProduto", 117 | "id_field_old": "idTransactionCart" 118 | }, 119 | "source": "GIT" 120 | }, 121 | "existing_cluster_id": "0809-155233-nc569ju7", 122 | "timeout_seconds": 0, 123 | "email_notifications": {} 124 | }, 125 | { 126 | "task_key": "silver_transacoes", 127 | "depends_on": [ 128 | { 129 | "task_key": "bronze_transactions" 130 | } 131 | ], 132 | "run_if": "ALL_SUCCESS", 133 | "notebook_task": { 134 | "notebook_path": "src/silver/ingestao", 135 | "base_parameters": { 136 | "tablename": "transacoes", 137 | "id_field": "idTransacao", 138 | "id_field_old": "idTransaction" 139 | }, 140 | "source": "GIT" 141 | }, 142 | "existing_cluster_id": "0809-155233-nc569ju7", 143 | "timeout_seconds": 0, 144 | "email_notifications": {} 145 | }, 146 | { 147 | "task_key": "silver_produtos", 148 | "depends_on": [ 149 | { 150 | "task_key": "bronze_transactions_product" 151 | } 152 | ], 153 | "run_if": "ALL_SUCCESS", 154 | "notebook_task": { 155 | "notebook_path": "src/silver/ingestao", 156 | "base_parameters": { 157 | "tablename": "produtos", 158 | "id_field": "descProduto", 159 | "id_field_old": "NameProduct" 160 | }, 161 | "source": "GIT" 162 | }, 163 | "existing_cluster_id": "0809-155233-nc569ju7", 164 | "timeout_seconds": 0, 165 | "email_notifications": {} 166 | }, 167 | { 168 | "task_key": "gold_daily_report", 169 | "depends_on": [ 170 | { 171 | "task_key": "silver_transacoes" 172 | }, 173 | { 174 | "task_key": "silver_transacao_produto" 175 | } 176 | ], 177 | "run_if": "ALL_SUCCESS", 178 | "notebook_task": { 179 | "notebook_path": "src/gold/ingestao", 180 | "base_parameters": { 181 | "tablename": "daily_report", 182 | "dt_start": "{{job.start_time.iso_date}}", 183 | "dt_stop": "{{job.start_time.iso_date}}" 184 | }, 185 | "source": "GIT" 186 | }, 187 | "existing_cluster_id": "0809-155233-nc569ju7", 188 | "timeout_seconds": 0, 189 | "email_notifications": {} 190 | }, 191 | { 192 | "task_key": "gold_monthly_report", 193 | "depends_on": [ 194 | { 195 | "task_key": "silver_transacoes" 196 | }, 197 | { 198 | "task_key": "silver_transacao_produto" 199 | } 200 | ], 201 | "run_if": "ALL_SUCCESS", 202 | "notebook_task": { 203 | "notebook_path": "src/gold/ingestao", 204 | "base_parameters": { 205 | "tablename": "monthly_report", 206 | "dt_start": "{{job.start_time.iso_date}}", 207 | "dt_stop": "{{job.start_time.iso_date}}" 208 | }, 209 | "source": "GIT" 210 | }, 211 | "existing_cluster_id": "0809-155233-nc569ju7", 212 | "timeout_seconds": 0, 213 | "email_notifications": {} 214 | }, 215 | { 216 | "task_key": "gold_churn_report", 217 | "depends_on": [ 218 | { 219 | "task_key": "silver_transacoes" 220 | }, 221 | { 222 | "task_key": "silver_transacao_produto" 223 | } 224 | ], 225 | "run_if": "ALL_SUCCESS", 226 | "notebook_task": { 227 | "notebook_path": "src/gold/ingestao", 228 | "base_parameters": { 229 | "tablename": "churn_report", 230 | "dt_start": "{{job.start_time.iso_date}}", 231 | "dt_stop": "{{job.start_time.iso_date}}" 232 | }, 233 | "source": "GIT" 234 | }, 235 | "existing_cluster_id": "0809-155233-nc569ju7", 236 | "timeout_seconds": 0, 237 | "email_notifications": {} 238 | }, 239 | { 240 | "task_key": "DS-BRICKS", 241 | "depends_on": [ 242 | { 243 | "task_key": "silver_cliente" 244 | }, 245 | { 246 | "task_key": "silver_produtos" 247 | }, 248 | { 249 | "task_key": "silver_transacao_produto" 250 | }, 251 | { 252 | "task_key": "silver_transacoes" 253 | } 254 | ], 255 | "run_if": "ALL_SUCCESS", 256 | "run_job_task": { 257 | "job_id": 196700556036763 258 | }, 259 | "timeout_seconds": 0, 260 | "email_notifications": {} 261 | } 262 | ], 263 | "git_source": { 264 | "git_url": "https://github.com/TeoMeWhy/lago-mago", 265 | "git_provider": "gitHub", 266 | "git_branch": "main" 267 | }, 268 | "run_as": { 269 | "user_name": "teomewhy@gmail.com" 270 | } 271 | } 272 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU General Public License is a free, copyleft license for 11 | software and other kinds of works. 12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. You can apply it to 20 | your programs, too. 21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". "Licensees" and 82 | "recipients" may be individuals or organizations. 83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 386 | those licensors and authors. 387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. "Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 633 | 634 | 635 | Copyright (C) 636 | 637 | This program is free software: you can redistribute it and/or modify 638 | it under the terms of the GNU General Public License as published by 639 | the Free Software Foundation, either version 3 of the License, or 640 | (at your option) any later version. 641 | 642 | This program is distributed in the hope that it will be useful, 643 | but WITHOUT ANY WARRANTY; without even the implied warranty of 644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 645 | GNU General Public License for more details. 646 | 647 | You should have received a copy of the GNU General Public License 648 | along with this program. If not, see . 649 | 650 | Also add information on how to contact you by electronic and paper mail. 651 | 652 | If the program does terminal interaction, make it output a short 653 | notice like this when it starts in an interactive mode: 654 | 655 | Copyright (C) 656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 657 | This is free software, and you are welcome to redistribute it 658 | under certain conditions; type `show c' for details. 659 | 660 | The hypothetical commands `show w' and `show c' should show the appropriate 661 | parts of the General Public License. Of course, your program's commands 662 | might be different; for a GUI interface, you would use an "about box". 663 | 664 | You should also get your employer (if you work as a programmer) or school, 665 | if any, to sign a "copyright disclaimer" for the program, if necessary. 666 | For more information on this, and how to apply and follow the GNU GPL, see 667 | . 668 | 669 | The GNU General Public License does not permit incorporating your program 670 | into proprietary programs. If your program is a subroutine library, you 671 | may consider it more useful to permit linking proprietary applications with 672 | the library. If this is what you want to do, use the GNU Lesser General 673 | Public License instead of this License. But first, please read 674 | . 675 | --------------------------------------------------------------------------------