├── .deepsource.toml ├── .dockerignore ├── .env.template ├── .github ├── ISSUE_TEMPLATE │ ├── c4gt.md │ └── dmp.md └── workflows │ ├── dalgo-cd.yml │ ├── dalgo-ci.yml │ ├── dalgo-docker-release.yml │ └── dbt-automation-ui4t-ci.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .python-version ├── .vscode └── settings.json ├── CODE_OF_CONDUCT.md ├── Docker ├── Dockerfile.dev.deploy ├── Dockerfile.main ├── docker-compose.dev.yml ├── docker-compose.yml ├── entrypoint.sh └── mount │ └── whitelist_deploy.py ├── LICENSE ├── Plan.md ├── README.NotificationSystem.md ├── README.md ├── admin.py ├── ddpui ├── __init__.py ├── api │ ├── __init__.py │ ├── airbyte_api.py │ ├── dashboard_api.py │ ├── data_api.py │ ├── dbt_api.py │ ├── notifications_api.py │ ├── org_preferences_api.py │ ├── orgtask_api.py │ ├── pipeline_api.py │ ├── superset_api.py │ ├── task_api.py │ ├── transform_api.py │ ├── user_org_api.py │ ├── user_preferences_api.py │ ├── warehouse_api.py │ └── webhook_api.py ├── asgi.py ├── assets │ ├── __init__.py │ └── whitelist.template.py ├── auth.py ├── celery.py ├── celeryworkers │ ├── __init__.py │ ├── moretasks.py │ └── tasks.py ├── core │ ├── __init__.py │ ├── dbtautomation_service.py │ ├── dbtfunctions.py │ ├── infra_service.py │ ├── llm_service.py │ ├── notifications_service.py │ ├── orgdbt_manager.py │ ├── orgfunctions.py │ ├── orgtaskfunctions.py │ ├── orguserfunctions.py │ ├── pipelinefunctions.py │ ├── transformfunctions.py │ └── warehousefunctions.py ├── datainsights │ ├── __init__.py │ ├── generate_result.py │ ├── insights │ │ ├── __init__.py │ │ ├── boolean_type │ │ │ ├── __init__.py │ │ │ ├── boolean_insights.py │ │ │ └── queries.py │ │ ├── common │ │ │ ├── __init__.py │ │ │ ├── base_insights.py │ │ │ └── queries.py │ │ ├── datetime_type │ │ │ ├── __init__.py │ │ │ ├── datetime_insight.py │ │ │ └── queries.py │ │ ├── insight_factory.py │ │ ├── insight_interface.py │ │ ├── numeric_type │ │ │ ├── __init__.py │ │ │ ├── numeric_insight.py │ │ │ └── queries.py │ │ └── string_type │ │ │ ├── __init__.py │ │ │ ├── queries.py │ │ │ └── string_insights.py │ ├── query_builder.py │ └── warehouse │ │ ├── __init__.py │ │ ├── bigquery.py │ │ ├── postgres.py │ │ ├── warehouse_factory.py │ │ └── warehouse_interface.py ├── dbt_automation │ ├── __init__.py │ ├── assets │ │ ├── __init__.py │ │ ├── generate_schema_name.sql │ │ ├── operations.template.yml │ │ ├── packages.yml │ │ └── unpivot.sql │ ├── operations │ │ ├── __init__.py │ │ ├── aggregate.py │ │ ├── arithmetic.py │ │ ├── casewhen.py │ │ ├── castdatatypes.py │ │ ├── coalescecolumns.py │ │ ├── concatcolumns.py │ │ ├── droprenamecolumns.py │ │ ├── flattenairbyte.py │ │ ├── flattenjson.py │ │ ├── generic.py │ │ ├── groupby.py │ │ ├── joins.py │ │ ├── mergeoperations.py │ │ ├── mergetables.py │ │ ├── pivot.py │ │ ├── rawsql.py │ │ ├── regexextraction.py │ │ ├── replace.py │ │ ├── scaffold.py │ │ ├── syncsources.py │ │ ├── unpivot.py │ │ └── wherefilter.py │ ├── seeds │ │ ├── __init__.py │ │ ├── sample_sheet1.json │ │ ├── sample_sheet2.json │ │ ├── seed.py │ │ └── seed_001.yml │ └── utils │ │ ├── __init__.py │ │ ├── bigquery.py │ │ ├── columnutils.py │ │ ├── dbtconfigs.py │ │ ├── dbtproject.py │ │ ├── dbtsources.py │ │ ├── interfaces │ │ ├── __init__.py │ │ └── warehouse_interface.py │ │ ├── postgres.py │ │ ├── sourceschemas.py │ │ ├── tableutils.py │ │ └── warehouseclient.py ├── ddpairbyte │ ├── __init__.py │ ├── airbyte_service.py │ ├── airbytehelpers.py │ └── schema.py ├── ddpdbt │ ├── __init__.py │ ├── dbt_service.py │ ├── 
elementary_service.py │ └── schema.py ├── ddpprefect │ ├── __init__.py │ ├── prefect_service.py │ └── schema.py ├── html │ ├── docs.py │ └── elementary.py ├── management │ └── commands │ │ ├── __init__.py │ │ ├── addparamstodbtcliprofile.py │ │ ├── addusertoorg.py │ │ ├── create-system-orguser.py │ │ ├── create_notification.py │ │ ├── createedrsendreportdataflow.py │ │ ├── createorganduser.py │ │ ├── createorgplan.py │ │ ├── createorgsuperset.py │ │ ├── createsupersetusageuser.py │ │ ├── cypress_tests_cleanup.py │ │ ├── dbt_cloud_integration.py │ │ ├── delete_notification.py │ │ ├── deleteorg.py │ │ ├── deleteorgdbtsource.py │ │ ├── dumpconfig.py │ │ ├── estimate-time-for-queued-runs.py │ │ ├── fetch-airbyte-sync-stats.py │ │ ├── github_to_ui4t.py │ │ ├── importconfig.py │ │ ├── manage-transform-tasks.py │ │ ├── manage-user-attributes.py │ │ ├── move-deployments-to-workerpools.py │ │ ├── reconcile-prefect-flow-runs-in-db.py │ │ ├── refresh_deployment_schedule.py │ │ ├── removetargetfromdbtcommands.py │ │ ├── role_based_access.py │ │ ├── saveflowrunlogstos3.py │ │ ├── setalldeploymentqueuenames.py │ │ ├── setdockerinfoindestinations.py │ │ ├── setup-reset-connection-run-via-deployments.py │ │ ├── setup_clear_connection_run_via_deployments.py │ │ ├── setworkerpoolfordeployment.py │ │ ├── showentities.py │ │ ├── sync-flow-runs-of-dataflow.py │ │ ├── sync-the-pipeline-transform-tasks-seq.py │ │ ├── toggle_user_preferences.py │ │ ├── unlock-sync-sources.py │ │ ├── update_airbyte_server_blocks.py │ │ ├── update_bigquery_warehouse_location.py │ │ ├── update_custom_macros_for_clients.py │ │ ├── update_orgdbt_file_paths.py │ │ ├── updateedrsendreportdataflowtarget.py │ │ ├── userpermissions2orgpermissions.py │ │ └── verify-warehouse-in-secrets-manager.py ├── migrations │ ├── 0001_initial.py │ ├── 0002_remove_adminuser_active_remove_adminuser_email_and_more.py │ ├── 0003_orguser_role.py │ ├── 0004_invitation_invited_role.py │ ├── 0005_orgprefectblock_displayname.py │ ├── 0006_rename_dbtversion_orgdbt_dbt_version_and_more.py │ ├── 0007_orgdbt_gitrepo_access_token_secret.py │ ├── 0008_orgprefectblock_seq.py │ ├── 0009_remove_orgdbt_database_remove_orgdbt_host_and_more.py │ ├── 0010_remove_orgdbt_database_remove_orgdbt_host_and_more.py │ ├── 0011_orgflow.py │ ├── 0012_alter_org_dbt.py │ ├── 0013_orgwarehouse_airbyte_destination_id.py │ ├── 0014_alter_orgdbt_target_schema.py │ ├── 0015_orgwarehouse_airbyte_norm_op_id.py │ ├── 0015_rename_target_schema_orgdbt_default_schema_and_more.py │ ├── 0016_merge_20230518_1427.py │ ├── 0017_alter_orgdataflow_cron.py │ ├── 0018_orgdataflow_deployment_name.py │ ├── 0019_remove_orgdataflow_flow_id_orgdataflow_connection_id.py │ ├── 0020_orgprefectblock_command_and_more.py │ ├── 0021_orguser_email_verified.py │ ├── 0022_org_viz_login_type_org_viz_url.py │ ├── 0023_remove_orgdbt_dbt_version_orgdbt_dbt_venv.py │ ├── 0024_userattributes.py │ ├── 0025_orgwarehouse_name.py │ ├── 0026_blocklock.py │ ├── 0027_remove_blocklock_block_blocklock_opb_dataflowblock.py │ ├── 0028_orgwarehouse_superset_creds.py │ ├── 0029_prefectflowrun.py │ ├── 0030_orgdataflow_dataflow_type.py │ ├── 0031_task_orgtask_datafloworgtask.py │ ├── 0032_orgprefectblockv1.py │ ├── 0033_orgdataflowv1.py │ ├── 0034_alter_datafloworgtask_dataflow.py │ ├── 0035_tasklock.py │ ├── 0036_orgtnc.py │ ├── 0037_userattributes_is_consultant.py │ ├── 0038_org_is_demo.py │ ├── 0039_remove_orgwarehouse_superset_creds.py │ ├── 0040_tasklock_locking_dataflow.py │ ├── 0041_orgtask_parameters.py │ ├── 
0042_orgtask_generated_by_task_is_system.py │ ├── 0043_alter_orgdbt_gitrepo_url.py │ ├── 0043_orgwarehouse_bq_location.py │ ├── 0044_merge_20240212_1548.py │ ├── 0045_orgdbtmodel.py │ ├── 0046_orgdbtmodel_schema.py │ ├── 0047_userattributes_is_platform_admin.py │ ├── 0048_datafloworgtask_seq_orgtask_uuid.py │ ├── 0049_dbtedge.py │ ├── 0050_remove_orgdbtmodel_config.py │ ├── 0051_orgdbtmodel_type.py │ ├── 0052_orgdbtmodel_uuid.py │ ├── 0053_orgwarehouse_airbyte_docker_image_tag_and_more.py │ ├── 0053_remove_dbtedge_config_remove_dbtedge_source_and_more.py │ ├── 0054_orgdbtmodel_source_name.py │ ├── 0055_merge_20240228_1312.py │ ├── 0056_orgdbt_transform_type.py │ ├── 0056_remove_orgdbtmodel_config_orgdbtmodel_output_cols.py │ ├── 0057_orgdbtoperation.py │ ├── 0058_orgdbtmodel_under_construction.py │ ├── 0059_alter_orgdbtmodel_display_name_and_more.py │ ├── 0060_merge_20240302_1014.py │ ├── 0061_alter_orgdbtmodel_display_name_and_more.py │ ├── 0062_alter_orgdbtmodel_schema_and_more.py │ ├── 0063_permission_role_rolepermission.py │ ├── 0064_orguser_new_role.py │ ├── 0065_role_level_alter_permission_uuid_alter_role_uuid.py │ ├── 0066_invitation_invited_new_role.py │ ├── 0067_alter_orgwarehouse_credentials.py │ ├── 0067_canvaslock.py │ ├── 0068_merge_20240422_0043.py │ ├── 0069_tasklock_celery_task_id.py │ ├── 0070_org_ses_whitelisted_email.py │ ├── 0071_alter_orgtask_connection_id.py │ ├── 0072_orgdataflowv1_reset_conn_dataflow.py │ ├── 0073_orgschemachange.py │ ├── 0073_remove_dataflowblock_dataflow_and_more.py │ ├── 0074_rename_schema_change_orgschemachange_change_type.py │ ├── 0075_remove_orgschemachange_name.py │ ├── 0076_merge_20240526_2356.py │ ├── 0077_delete_orgprefectblock.py │ ├── 0078_remove_orgwarehouse_airbyte_norm_op_id.py │ ├── 0079_assistantprompt.py │ ├── 0080_llmsession.py │ ├── 0081_llmsession_airbyte_job_id.py │ ├── 0082_llmsession_task_id_alter_llmsession_flow_run_id_and_more.py │ ├── 0082_notification_userpreferences_notificationrecipient.py │ ├── 0083_merge_20240703_1732.py │ ├── 0084_llmsession_session_status.py │ ├── 0085_llmsession_session_name.py │ ├── 0085_syncstats.py │ ├── 0086_alter_syncstats_sync_data_volume_b_and_more.py │ ├── 0087_alter_datafloworgtask_dataflow_and_more.py │ ├── 0088_alter_orgdbtoperation_dbtmodel.py │ ├── 0089_alter_prefectflowrun_deployment_id.py │ ├── 0090_syncstats_job_id.py │ ├── 0091_prefectflowrun_retries.py │ ├── 0092_assistantprompt_created_at_and_more.py │ ├── 0093_remove_org_is_demo_org_type.py │ ├── 0094_merge_20240903_0610.py │ ├── 0095_alter_llmsession_session_name.py │ ├── 0096_llmsession_request_meta_llmsession_session_type.py │ ├── 0097_userprompt.py │ ├── 0098_remove_assistantprompt_created_at_and_more.py │ ├── 0099_userprompt_label.py │ ├── 0100_llmsession_feedback.py │ ├── 0101_llmsession_updated_by.py │ ├── 0102_orguser_llm_optin.py │ ├── 0103_connectionjob_connectionmeta_and_more.py │ ├── 0104_orgdataflowv1_clear_conn_dataflow.py │ ├── 0104_remove_userpreferences_discord_webhook_and_more.py │ ├── 0105_orgpreferences.py │ ├── 0106_alter_orgpreferences_llm_optin_approved_by.py │ ├── 0107_orgsupersets.py │ ├── 0108_userpreferences_llm_optin.py │ ├── 0109_alter_orgpreferences_org_alter_orgsupersets_org.py │ ├── 0110_userpreferences_discord_webhook_and_more.py │ ├── 0111_remove_orgpreferences_trial_end_date_and_more.py │ ├── 0112_rename_llm_optin_userpreferences_disclaimer_shown.py │ ├── 0113_orgplans_upgrade_requested_and_more.py │ ├── 0114_merge_20241124_1514.py │ ├── 0115_remove_org_type.py │ ├── 
0116_notification_email_subject.py │ ├── 0117_orgwren.py │ ├── 0118_orgdataflowv1_meta.py │ ├── 0119_prefectflowrun_orguser.py │ ├── 0119_remove_connectionmeta_schedule_large_jobs.py │ ├── 0120_alter_prefectflowrun_start_time.py │ ├── 0121_merge_20250513_1951.py │ ├── 0122_set_uuid_in_orgtask.py │ ├── 0123_alter_orgtask_uuid.py │ └── __init__.py ├── models │ ├── __init__.py │ ├── admin_user.py │ ├── canvaslock.py │ ├── dbt_workflow.py │ ├── flow_runs.py │ ├── llm.py │ ├── notifications.py │ ├── org.py │ ├── org_plans.py │ ├── org_preferences.py │ ├── org_supersets.py │ ├── org_user.py │ ├── org_wren.py │ ├── orgtnc.py │ ├── role_based_access.py │ ├── syncstats.py │ ├── tasks.py │ └── userpreferences.py ├── oldmigrations │ ├── 0001_initial.py │ ├── 0002_invitation.py │ ├── 0003_clientdbt_remove_clientorg_dbt_repo_url_and_more.py │ ├── 0004_clientorg_slug.py │ ├── 0005_clientprefectblock.py │ └── 0006_alter_clientorg_dbt.py ├── routes.py ├── schemas │ ├── __init__.py │ ├── dbt_workflow_schema.py │ ├── notifications_api_schemas.py │ ├── org_preferences_schema.py │ ├── org_task_schema.py │ ├── userpreferences_schema.py │ └── warehouse_api_schemas.py ├── settings.py ├── tests │ ├── __init__.py │ ├── api_tests │ │ ├── __init__.py │ │ ├── test_airbyte_api.py │ │ ├── test_airbyte_api_v1.py │ │ ├── test_dashboard_api.py │ │ ├── test_dbt_api.py │ │ ├── test_notifications_api.py │ │ ├── test_orgtask_api.py │ │ ├── test_pipeline_api.py │ │ ├── test_transform_api.py │ │ ├── test_user_org_api.py │ │ ├── test_user_preferences_api.py │ │ ├── test_warehouse_api.py │ │ └── test_webhook_api.py │ ├── core │ │ ├── __init__.py │ │ ├── datainsights │ │ │ ├── __init__.py │ │ │ ├── factories │ │ │ │ ├── __init__.py │ │ │ │ ├── test_insight_factory.py │ │ │ │ └── test_warehouse_factory.py │ │ │ ├── insights │ │ │ │ ├── __init__.py │ │ │ │ ├── test_base_insights.py │ │ │ │ ├── test_boolean_insights.py │ │ │ │ ├── test_datetime_insights.py │ │ │ │ ├── test_numeric_insights.py │ │ │ │ └── test_string_insights.py │ │ │ ├── interfaces │ │ │ │ ├── __init__.py │ │ │ │ ├── test_insight_interface.py │ │ │ │ └── test_warehouse_interface.py │ │ │ └── test_query_builder.py │ │ ├── dbt_automation │ │ │ ├── __init__.py │ │ │ ├── test_bigquery.py │ │ │ ├── test_columnutils.py │ │ │ ├── test_dbtconfigs.py │ │ │ ├── test_dbtproject.py │ │ │ ├── test_dbtsources.py │ │ │ ├── test_postgres.py │ │ │ ├── test_sourceschemas.py │ │ │ ├── test_tableutils.py │ │ │ └── test_warehouseclient.py │ │ ├── test_celery_tasks.py │ │ ├── test_dbtfunctions.py │ │ ├── test_notifications_service.py │ │ ├── test_orgtaskfunctions.py │ │ └── test_pipelinefunctions.py │ ├── helper │ │ ├── __init__.py │ │ ├── test_airbyte_unit_schemas.py │ │ ├── test_airbytehelpers.py │ │ ├── test_auth.py │ │ ├── test_dev_secretsmanager.py │ │ ├── test_prefectlogs.py │ │ └── test_secretsmanager.py │ ├── integration_tests │ │ ├── __init__.py │ │ ├── dbt_automation │ │ │ ├── __init__.py │ │ │ ├── test_bigquery_ops.py │ │ │ └── test_postgres_ops.py │ │ ├── test_airbyte_integration.py │ │ └── test_prefect_integration.py │ ├── services │ │ ├── __init__.py │ │ ├── test_airbyte_service.py │ │ ├── test_dbt_service.py │ │ ├── test_dbtautomation_service.py │ │ ├── test_elementary_service.py │ │ └── test_prefect_service.py │ ├── utils │ │ ├── __init__.py │ │ ├── test_deleteorg.py │ │ └── test_helpers.py │ └── websockets │ │ └── test_airbyte_consumer.py ├── urls.py ├── utils │ ├── __init__.py │ ├── ab_logger.py │ ├── awsses.py │ ├── constants.py │ ├── custom_logger.py │ ├── dbtdocs.py │ 
├── ddp_logger.py │ ├── deleteorg.py │ ├── discord.py │ ├── flags.py │ ├── helpers.py │ ├── http.py │ ├── orguserhelpers.py │ ├── prefectlogs.py │ ├── redis_client.py │ ├── secretsmanager.py │ ├── sendgrid.py │ ├── singletaskprogress.py │ ├── taskprogress.py │ ├── thread.py │ ├── timezone.py │ ├── transform_workflow_helpers.py │ └── webhook_helpers.py ├── websockets │ ├── __init__.py │ ├── airbyte_consumer.py │ └── schemas.py └── wsgi.py ├── gunicorn-log.conf ├── kill.sh ├── manage.py ├── pyproject.toml ├── pyproject_deprecated.toml ├── refresh-dbt-automation.sh ├── requirements_dbt.txt ├── requirements_deprecated.txt ├── scripts ├── README.createsources.md ├── README.generatecreatesourcesyml.md ├── createsources.example.env ├── createsources.py ├── dbt_automation │ ├── checkmergetables.py │ ├── comparecolumnschemas.py │ ├── dbdiff.py │ ├── diffjsonfields.py │ ├── dropcolumnfromcsv.py │ ├── main.py │ ├── mknormalized.py │ ├── operations.yaml │ ├── operations1.yaml │ ├── seedtestdb.py │ ├── showcolumnfromtables.py │ └── trimtosyncairbyteraw.py ├── generate_role_permission_seed_json.py ├── generatecreatesourcesyml.py ├── parseprefectlogs.py ├── resetNGO.py ├── resetdb.py ├── test-airbyte-service.py ├── test-clientapi.py └── testtransformapi.py ├── seed ├── 001_roles.json ├── 002_permissions.json ├── 003_role_permissions.json ├── assistant_prompts.json ├── tasks.json └── user_prompts.json ├── start-celery-worker.sh ├── start.sh ├── testclient ├── .env.test.template ├── config.yaml └── testclient.py └── uv.lock /.deepsource.toml: -------------------------------------------------------------------------------- 1 | version = 1 2 | test_patterns = [ 3 | "ddpui/tests/**", 4 | "scripts/test*.py" 5 | ] 6 | 7 | [[analyzers]] 8 | name = "python" 9 | 10 | [analyzers.meta] 11 | runtime_version = "3.x.x" 12 | max_line_length = 200 13 | cyclomatic_complexity_threshold = "high" -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/dmp.md: -------------------------------------------------------------------------------- 1 | ## Description 2 | 3 | 4 | ## Goals 5 | - [ ] 6 | - [ ] 7 | 8 | ## Expected Outcome 9 | 10 | ## Acceptance Criteria 11 | - [ ] 12 | - [ ] 13 | 14 | ## Implementation Details 15 | 16 | 17 | ## Mockups / Wireframes 18 | 19 | ### Product Name 20 | Dalgo 21 | 22 | 23 | ### Project Name 24 | 25 | 26 | 27 | ### Organization Name: 28 | Project Tech4Dev 29 | 30 | ### Domain 31 | Other 32 | 33 | 34 | ### Tech Skills Needed: 35 | Python, Django, JavaScript, NextJS 36 | 37 | ### Mentor(s) 38 | @fatchat 39 | 40 | ### Complexity 41 | High 42 | 43 | ### Category 44 | 45 | 46 | ### Sub Category 47 | -------------------------------------------------------------------------------- /.github/workflows/dalgo-cd.yml: -------------------------------------------------------------------------------- 1 | name: Dalgo CD 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | 7 | jobs: 8 | deploy: 9 | runs-on: ubuntu-latest 10 | 11 | steps: 12 | - name: Deploy code to EC2 server 13 | uses: appleboy/ssh-action@v1.2.0 14 | with: 15 | host: ${{ secrets.SERVERIP }} 16 | username: ${{ secrets.SSH_USERNAME }} 17 | key: ${{ secrets.SSH_PRIVATE_KEY }} 18 | port: 22 19 | command_timeout: 500s 20 | script: | 21 | set -e 22 | source ~/.nvm/nvm.sh 23 | cd /home/ddp/DDP_backend 24 | current_branch=$(git rev-parse --abbrev-ref HEAD) 25 | if [ "$current_branch" != "main" ]; then 26 | echo "Error: You are not on the main branch. Current branch is $current_branch." 
27 | exit 1 28 | fi 29 | git pull 30 | /home/ddp/.local/bin/uv run python manage.py migrate 31 | /home/ddp/.local/bin/uv run python manage.py loaddata seed/*.json 32 | /home/ddp/.yarn/bin/pm2 restart django-celery-worker django-backend-asgi 33 | -------------------------------------------------------------------------------- /.github/workflows/dalgo-docker-release.yml: -------------------------------------------------------------------------------- 1 | # This workflow builds and pushes a new docker image 2 | # to dockerhub whenever a new release is published 3 | 4 | name: Publish Docker image 5 | 6 | on: 7 | release: 8 | types: [published] 9 | 10 | jobs: 11 | push_image_to_registry: 12 | name: Push Docker image to docker hub 13 | runs-on: ubuntu-latest 14 | steps: 15 | - name: Check out the repo 16 | uses: actions/checkout@v4 17 | 18 | - name: Set build date 19 | run: echo "BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_ENV 20 | 21 | - name: Set release number 22 | run: echo "RELEASE_NUMBER=${{ github.event.release.tag_name }}" >> $GITHUB_ENV 23 | 24 | - name: Set up QEMU 25 | uses: docker/setup-qemu-action@v3 26 | 27 | - name: Set up Docker Buildx 28 | uses: docker/setup-buildx-action@v3 29 | 30 | - name: Login to docker registry 31 | uses: docker/login-action@v3 32 | with: 33 | username: ${{secrets.DOCKERHUB_USERNAME}} 34 | password: ${{secrets.DOCKERHUB_PASSWORD}} 35 | 36 | - name: Build and push docker image to registry 37 | uses: docker/build-push-action@v5 38 | with: 39 | context: . 40 | file: ./Docker/Dockerfile.main 41 | platforms: linux/amd64,linux/arm64 42 | build-args: | 43 | BUILD_DATE=${{ env.BUILD_DATE }} 44 | BUILD_VERSION=${{ env.RELEASE_NUMBER }}" 45 | push: true 46 | tags: | 47 | tech4dev/dalgo_backend:${{ env.RELEASE_NUMBER }} 48 | tech4dev/dalgo_backend:latest 49 | cache-from: type=registry,ref=tech4dev/dalgo_backend:latest 50 | cache-to: type=inline 51 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/psf/black 3 | rev: 23.3.0 4 | hooks: 5 | - id: black 6 | language_version: python3.10 7 | # - repo: https://github.com/pre-commit/pre-commit-hooks # TODO: fix all lint errors & uncomment this 8 | # rev: v1.2.3 9 | # hooks: 10 | # - id: flake8 11 | # - repo: local 12 | # hooks: 13 | # - id: pylint 14 | # name: pylint 15 | # entry: "pylint" 16 | # language: system 17 | # types: [python] 18 | files: 'ddpui\/.*\.pyi?$' 19 | -------------------------------------------------------------------------------- /.python-version: -------------------------------------------------------------------------------- 1 | 3.10.12 2 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "editor.defaultFormatter": "ms-python.black-formatter", 3 | "pylint.enabled": true, 4 | "pylint.args": [ 5 | "--load-plugins", 6 | "pylint_django" 7 | ], 8 | } -------------------------------------------------------------------------------- /Docker/Dockerfile.dev.deploy: -------------------------------------------------------------------------------- 1 | # This is a dev Dockerfile to build a dev docker image for the Dalgo backend 2 | # application based on the image built using Dockerfile.main. 
3 | # This is mainly for development purposes 4 | 5 | FROM dalgo_backend_main_image:0.1 as build 6 | 7 | USER root 8 | 9 | 10 | # Copy whitelist file 11 | COPY Docker/mount/whitelist_deploy.py /usr/src/backend/ddpui/assets/whitelist.py 12 | 13 | RUN chmod +x /entrypoint.sh 14 | 15 | 16 | USER container_user 17 | 18 | # Make port 8002 available to the world outside this container 19 | EXPOSE 8002 20 | 21 | ENTRYPOINT [ "/entrypoint.sh" ] -------------------------------------------------------------------------------- /Docker/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.8' 2 | services: 3 | redis_server: 4 | image: redis:latest 5 | ports: 6 | - "6379:6379" 7 | volumes: 8 | - redis_data:/data 9 | networks: 10 | - dalgo-network 11 | backend: 12 | image: dalgo_backend:latest 13 | command: backend 14 | restart: always 15 | ports: 16 | - "8002:8002" 17 | env_file: 18 | - .env.docker 19 | volumes: 20 | - ${CLIENTS_DBT_MOUNT}:/data/clients_dbt 21 | - ${DEV_SECRETS_MOUNT}:/data/secrets 22 | - ${LOGS_MOUNT}:/usr/src/backend/ddpui/logs 23 | networks: 24 | - dalgo-network 25 | celery_beat: 26 | image: dalgo_backend:latest 27 | command: beat 28 | depends_on: 29 | - backend 30 | - redis_server 31 | env_file: 32 | - .env.docker 33 | volumes: 34 | - celerybeat_volume:/data 35 | networks: 36 | - dalgo-network 37 | 38 | celery_worker: 39 | image: dalgo_backend:latest 40 | command: worker 41 | depends_on: 42 | - backend 43 | - redis_server 44 | env_file: 45 | - .env.docker 46 | volumes: 47 | - ${CLIENTS_DBT_MOUNT}:/data/clients_dbt 48 | - ${DEV_SECRETS_MOUNT}:/data/secrets 49 | - ${LOGS_MOUNT}:/usr/src/backend/ddpui/logs 50 | networks: 51 | - dalgo-network 52 | 53 | volumes: 54 | redis_data: 55 | celerybeat_volume: 56 | 57 | networks: 58 | dalgo-network: 59 | driver: bridge 60 | -------------------------------------------------------------------------------- /Docker/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Apply database migrations 4 | 5 | # create case of different containers 6 | case "$1" in 7 | worker) 8 | echo "Starting celery worker" 9 | celery -A ddpui worker -n ddpui 10 | ;; 11 | beat) 12 | echo "Starting celery beat" 13 | celery -A ddpui beat --schedule=/data/celerybeat-schedule --loglevel=error --max-interval 60 14 | ;; 15 | backend) 16 | echo "Starting backend" 17 | 18 | # Start server 19 | echo "Starting server" 20 | uvicorn ddpui.asgi:application --workers 4 --host 0.0.0.0 --port 8002 --timeout-keep-alive 60 21 | ;; 22 | initdb) 23 | echo "Apply database migrations" 24 | python manage.py migrate 25 | 26 | echo "Seed database" 27 | python manage.py loaddata seed/*.json 28 | 29 | echo "Create first user ${FIRST_USER_EMAIL} in organization ${FIRST_ORG_NAME}" 30 | python manage.py createorganduser ${FIRST_ORG_NAME} ${FIRST_USER_EMAIL} ${FIRST_USER_PASSWORD} --role ${FIRST_USER_ROLE} 31 | 32 | echo "Create system user if it does not exist" 33 | python manage.py create-system-orguser 34 | ;; 35 | *) 36 | exec "$@" 37 | esac -------------------------------------------------------------------------------- /Docker/mount/whitelist_deploy.py: -------------------------------------------------------------------------------- 1 | # whitelist airbyte sources and destinations here 2 | import json 3 | 4 | DEMO_WHITELIST_SOURCES = [ 5 | { 6 | "type": "Google Sheets", 7 | "config": { 8 | "row_batch_size": 500, 9 | "spreadsheet_id": 
"https://docs.google.com/spreadsheets/d/18GqjB6nFBxZbAPnk6UOAvERE3iMfi4AkidjV6HkFKQk/edit#gid=0", # skip-cq: FLK-E501 10 | "credentials": { 11 | "auth_type": "Service", 12 | "service_account_info": json.dumps( 13 | {"key": "big service account json dictionary"} 14 | ), 15 | }, 16 | }, 17 | }, 18 | { 19 | "type": "Postgres", 20 | "config": { 21 | "database": "test_db", 22 | "host": "host_domain", 23 | "username": "user_name", 24 | "password": "password", 25 | "port": 5432, 26 | "ssl_mode": {"mode": "disable"}, 27 | "schemas": ["schema1", "schema2"], 28 | "tunnel_method": { 29 | "tunnel_method": "NO_TUNNEL", 30 | }, 31 | }, 32 | }, 33 | ] 34 | -------------------------------------------------------------------------------- /ddpui/__init__.py: -------------------------------------------------------------------------------- 1 | # This will make sure the app is always imported when 2 | # Django starts so that shared_task will use this app. 3 | from .celery import app as celery_app 4 | 5 | __all__ = ("celery_app",) 6 | -------------------------------------------------------------------------------- /ddpui/api/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/api/__init__.py -------------------------------------------------------------------------------- /ddpui/asgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | ASGI config for ddpui project. 3 | 4 | It exposes the ASGI callable as a module-level variable named ``application``. 5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/4.1/howto/deployment/asgi/ 8 | """ 9 | 10 | import os 11 | from django.core.asgi import get_asgi_application 12 | 13 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ddpui.settings") 14 | 15 | django_asgi_app = get_asgi_application() 16 | 17 | from django.urls import path 18 | from channels.auth import AuthMiddlewareStack 19 | from channels.routing import ProtocolTypeRouter, URLRouter, ChannelNameRouter 20 | from channels.security.websocket import AllowedHostsOriginValidator 21 | from ddpui.urls import ws_urlpatterns 22 | 23 | 24 | application = ProtocolTypeRouter( 25 | { 26 | "http": django_asgi_app, 27 | "websocket": AllowedHostsOriginValidator(AuthMiddlewareStack(URLRouter(ws_urlpatterns))), 28 | } 29 | ) 30 | -------------------------------------------------------------------------------- /ddpui/assets/__init__.py: -------------------------------------------------------------------------------- 1 | # store all config, images, static assets here 2 | -------------------------------------------------------------------------------- /ddpui/assets/whitelist.template.py: -------------------------------------------------------------------------------- 1 | # whitelist airbyte sources and destinations here 2 | import json 3 | 4 | DEMO_WHITELIST_SOURCES = [ 5 | { 6 | "type": "Google Sheets", 7 | "config": { 8 | "row_batch_size": 500, 9 | "spreadsheet_id": "https://docs.google.com/spreadsheets/d/18GqjB6nFBxZbAPnk6UOAvERE3iMfi4AkidjV6HkFKQk/edit#gid=0", 10 | "credentials": { 11 | "auth_type": "Service", 12 | "service_account_info": json.dumps({"key": "big service account json dictionary"}), 13 | }, 14 | }, 15 | }, 16 | { 17 | "type": "Postgres", 18 | "config": { 19 | "database": "test_db", 20 | "host": "host_domain", 21 | "username": "user_name", 22 | "password": "password", 23 | "port": 5432, 24 | 
"ssl_mode": {"mode": "disable"}, 25 | "schemas": ["schema1", "schema2"], 26 | "tunnel_method": { 27 | "tunnel_method": "NO_TUNNEL", 28 | }, 29 | }, 30 | }, 31 | ] 32 | -------------------------------------------------------------------------------- /ddpui/celery.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from celery import Celery 4 | 5 | # Set the default Django settings module for the 'celery' program. 6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ddpui.settings") 7 | 8 | REDIS_HOST = os.getenv("REDIS_HOST", "localhost") 9 | REDIS_PORT = int(os.getenv("REDIS_PORT", "6379")) 10 | 11 | # Here we use redis as both Celery message broker(delivering task messages) and backend(for task status storage) 12 | app = Celery( 13 | "ddpui", 14 | backend=f"redis://{REDIS_HOST}:{REDIS_PORT}", 15 | broker=f"redis://{REDIS_HOST}:{REDIS_PORT}", 16 | ) 17 | 18 | # Using a string here means the worker doesn't have to serialize 19 | # the configuration object to child processes. 20 | # - namespace='CELERY' means all celery-related configuration keys 21 | # should have a `CELERY_` prefix. 22 | app.config_from_object("django.conf:settings", namespace="CELERY") 23 | 24 | # Load task modules from all registered Django apps. 25 | app.autodiscover_tasks() 26 | -------------------------------------------------------------------------------- /ddpui/celeryworkers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/celeryworkers/__init__.py -------------------------------------------------------------------------------- /ddpui/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/core/__init__.py -------------------------------------------------------------------------------- /ddpui/datainsights/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/datainsights/__init__.py -------------------------------------------------------------------------------- /ddpui/datainsights/insights/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/datainsights/insights/__init__.py -------------------------------------------------------------------------------- /ddpui/datainsights/insights/boolean_type/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/datainsights/insights/boolean_type/__init__.py -------------------------------------------------------------------------------- /ddpui/datainsights/insights/boolean_type/boolean_insights.py: -------------------------------------------------------------------------------- 1 | from ddpui.datainsights.insights.insight_interface import ( 2 | DataTypeColInsights, 3 | ColInsight, 4 | ) 5 | from ddpui.datainsights.insights.boolean_type.queries import ( 6 | DataStats, 7 | ) 8 | 9 | 10 | class BooleanColInsights(DataTypeColInsights): 11 | """ 12 | Class that maintains a list of ColInsight queries for a numeric type col 
13 | """ 14 | 15 | def __init__( 16 | self, 17 | columns: list[dict], 18 | db_table: str, 19 | db_schema: str, 20 | filter_: dict = None, 21 | wtype: str = None, 22 | ): 23 | super().__init__(columns, db_table, db_schema, filter_, wtype) 24 | self.insights: list[ColInsight] = [ 25 | DataStats(self.columns, self.db_table, self.db_schema, self.filter, self.wtype), 26 | ] 27 | -------------------------------------------------------------------------------- /ddpui/datainsights/insights/common/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/datainsights/insights/common/__init__.py -------------------------------------------------------------------------------- /ddpui/datainsights/insights/common/base_insights.py: -------------------------------------------------------------------------------- 1 | from ddpui.datainsights.insights.insight_interface import ( 2 | DataTypeColInsights, 3 | ColInsight, 4 | ) 5 | from ddpui.datainsights.insights.common.queries import BaseDataStats 6 | 7 | 8 | class BaseInsights(DataTypeColInsights): 9 | """ 10 | Class that maintains a list of shared queries across all datatypes 11 | """ 12 | 13 | def __init__( 14 | self, 15 | columns: list[dict], 16 | db_table: str, 17 | db_schema: str, 18 | filter_: dict = None, 19 | wtype: str = None, 20 | ): 21 | super().__init__(columns, db_table, db_schema, filter_, wtype) 22 | self.insights: list[ColInsight] = [ 23 | BaseDataStats(self.columns, self.db_table, self.db_schema, self.filter, self.wtype), 24 | ] 25 | -------------------------------------------------------------------------------- /ddpui/datainsights/insights/datetime_type/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/datainsights/insights/datetime_type/__init__.py -------------------------------------------------------------------------------- /ddpui/datainsights/insights/datetime_type/datetime_insight.py: -------------------------------------------------------------------------------- 1 | from ddpui.datainsights.insights.insight_interface import ( 2 | DataTypeColInsights, 3 | ColInsight, 4 | ) 5 | from ddpui.datainsights.insights.datetime_type.queries import ( 6 | DistributionChart, 7 | ) 8 | 9 | 10 | class DatetimeColInsights(DataTypeColInsights): 11 | """ 12 | Class that maintains a list of ColInsight queries for a numeric type col 13 | """ 14 | 15 | def __init__( 16 | self, 17 | columns: list[dict], 18 | db_table: str, 19 | db_schema: str, 20 | filter_: dict = None, 21 | wtype: str = None, 22 | ): 23 | super().__init__(columns, db_table, db_schema, filter_, wtype) 24 | self.insights: list[ColInsight] = [ 25 | DistributionChart(self.columns, self.db_table, self.db_schema, self.filter, self.wtype), 26 | ] 27 | -------------------------------------------------------------------------------- /ddpui/datainsights/insights/numeric_type/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/datainsights/insights/numeric_type/__init__.py -------------------------------------------------------------------------------- /ddpui/datainsights/insights/numeric_type/numeric_insight.py: 
-------------------------------------------------------------------------------- 1 | from ddpui.datainsights.insights.insight_interface import ( 2 | DataTypeColInsights, 3 | ColInsight, 4 | ) 5 | from ddpui.datainsights.insights.numeric_type.queries import DataStats 6 | 7 | 8 | class NumericColInsights(DataTypeColInsights): 9 | """ 10 | Class that maintains a list of ColInsight queries for a numeric type col 11 | """ 12 | 13 | def __init__( 14 | self, 15 | columns: list[dict], 16 | db_table: str, 17 | db_schema: str, 18 | filter_: dict = None, 19 | wtype: str = None, 20 | ): 21 | super().__init__(columns, db_table, db_schema, filter_, wtype) 22 | self.insights: list[ColInsight] = [ 23 | DataStats(self.columns, self.db_table, self.db_schema, self.filter, self.wtype), 24 | ] 25 | -------------------------------------------------------------------------------- /ddpui/datainsights/insights/string_type/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/datainsights/insights/string_type/__init__.py -------------------------------------------------------------------------------- /ddpui/datainsights/insights/string_type/string_insights.py: -------------------------------------------------------------------------------- 1 | from ddpui.datainsights.insights.insight_interface import ( 2 | DataTypeColInsights, 3 | ColInsight, 4 | ) 5 | from ddpui.datainsights.insights.string_type.queries import ( 6 | DistributionChart, 7 | StringLengthStats, 8 | ) 9 | 10 | 11 | class StringColInsights(DataTypeColInsights): 12 | """ 13 | Class that maintains a list of ColInsight queries for a numeric type col 14 | """ 15 | 16 | def __init__( 17 | self, 18 | columns: list[str], 19 | db_table: str, 20 | db_schema: str, 21 | filter_: dict = None, 22 | wtype: str = None, 23 | ): 24 | super().__init__(columns, db_table, db_schema, filter_, wtype) 25 | self.insights: list[ColInsight] = [ 26 | DistributionChart(self.columns, self.db_table, self.db_schema, self.filter, self.wtype), 27 | StringLengthStats(self.columns, self.db_table, self.db_schema, self.filter, self.wtype), 28 | ] 29 | -------------------------------------------------------------------------------- /ddpui/datainsights/warehouse/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/datainsights/warehouse/__init__.py -------------------------------------------------------------------------------- /ddpui/datainsights/warehouse/warehouse_factory.py: -------------------------------------------------------------------------------- 1 | from ddpui.datainsights.warehouse.warehouse_interface import Warehouse 2 | from ddpui.datainsights.warehouse.postgres import PostgresClient 3 | from ddpui.datainsights.warehouse.bigquery import BigqueryClient 4 | from ddpui.datainsights.warehouse.warehouse_interface import WarehouseType 5 | 6 | 7 | class WarehouseFactory: 8 | @classmethod 9 | def connect(cls, creds: dict, wtype: str) -> Warehouse: 10 | if wtype == WarehouseType.POSTGRES: 11 | return PostgresClient(creds) 12 | elif wtype == WarehouseType.BIGQUERY: 13 | return BigqueryClient(creds) 14 | else: 15 | raise ValueError("Column type not supported for insights generation") 16 | -------------------------------------------------------------------------------- 
/ddpui/datainsights/warehouse/warehouse_interface.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from enum import Enum 3 | 4 | 5 | class WarehouseType(str, Enum): 6 | """ 7 | warehouse types available; this will be same as what is stored in OrgWarehouse.wtype 8 | """ 9 | 10 | POSTGRES = "postgres" 11 | BIGQUERY = "bigquery" 12 | 13 | 14 | class Warehouse(ABC): 15 | @abstractmethod 16 | def execute(self, sql_statement: str): 17 | pass 18 | 19 | @abstractmethod 20 | def get_table_columns(self, db_schema: str, db_table: str) -> dict: 21 | pass 22 | 23 | @abstractmethod 24 | def get_col_python_type(self, db_schema: str, db_table: str, column_name: str): 25 | pass 26 | 27 | @abstractmethod 28 | def get_wtype(self): 29 | pass 30 | -------------------------------------------------------------------------------- /ddpui/dbt_automation/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.0.1" 2 | -------------------------------------------------------------------------------- /ddpui/dbt_automation/assets/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/dbt_automation/assets/__init__.py -------------------------------------------------------------------------------- /ddpui/dbt_automation/assets/generate_schema_name.sql: -------------------------------------------------------------------------------- 1 | {% macro generate_schema_name(custom_schema_name, node) -%} 2 | 3 | {%- set default_schema = target.schema -%} 4 | {%- if custom_schema_name is none -%} 5 | 6 | {{ default_schema }} 7 | 8 | {%- else -%} 9 | 10 | {{ custom_schema_name | trim }} 11 | 12 | {%- endif -%} 13 | 14 | {%- endmacro %} -------------------------------------------------------------------------------- /ddpui/dbt_automation/assets/packages.yml: -------------------------------------------------------------------------------- 1 | packages: 2 | - package: dbt-labs/dbt_utils 3 | version: 1.1.1 -------------------------------------------------------------------------------- /ddpui/dbt_automation/operations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/dbt_automation/operations/__init__.py -------------------------------------------------------------------------------- /ddpui/dbt_automation/seeds/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/dbt_automation/seeds/__init__.py -------------------------------------------------------------------------------- /ddpui/dbt_automation/seeds/seed_001.yml: -------------------------------------------------------------------------------- 1 | version: 1 2 | description: "Yaml template to get you started on automating your dbt work. 
DO NOT EDIT this, make a copy and use" 3 | warehouse: bigquery 4 | 5 | seed_data: 6 | - schema: tests_001 7 | tables: 8 | - name: model_001 9 | csv: seed_001.csv 10 | -------------------------------------------------------------------------------- /ddpui/dbt_automation/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/dbt_automation/utils/__init__.py -------------------------------------------------------------------------------- /ddpui/dbt_automation/utils/dbtconfigs.py: -------------------------------------------------------------------------------- 1 | """helpers to create .yml dbt files""" 2 | 3 | 4 | # ================================================================================ 5 | def mk_model_config(schemaname: str, modelname_: str, columnspec: list): 6 | """creates a model config with the given column spec""" 7 | columns = [ 8 | { 9 | "name": "_airbyte_ab_id", 10 | "description": "", 11 | "tests": ["unique", "not_null"], 12 | } 13 | ] 14 | for column in columnspec: 15 | columns.append( 16 | { 17 | "name": column, 18 | "description": "", 19 | } 20 | ) 21 | return { 22 | "name": modelname_, 23 | "description": "", 24 | "+schema": schemaname, 25 | "columns": columns, 26 | } 27 | 28 | 29 | def get_columns_from_model(models: dict, modelname: str): 30 | """reads a models.yml, finds the modelname and returns the columns""" 31 | for model in models["models"]: 32 | if model["name"] == modelname: 33 | return [x["name"] for x in model["columns"]] 34 | return None 35 | -------------------------------------------------------------------------------- /ddpui/dbt_automation/utils/interfaces/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/dbt_automation/utils/interfaces/__init__.py -------------------------------------------------------------------------------- /ddpui/dbt_automation/utils/interfaces/warehouse_interface.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | 4 | class WarehouseInterface(ABC): 5 | @abstractmethod 6 | def execute(self, statement: str): 7 | pass 8 | 9 | @abstractmethod 10 | def get_tables(self, schema: str): 11 | pass 12 | 13 | @abstractmethod 14 | def get_schemas(self): 15 | pass 16 | 17 | @abstractmethod 18 | def get_table_data( 19 | self, 20 | schema: str, 21 | table: str, 22 | limit: int, 23 | page: int = 1, 24 | order_by: str = None, 25 | order: int = 1, # ASC 26 | ): 27 | pass 28 | 29 | @abstractmethod 30 | def get_table_columns(self, schema: str, table: str): 31 | pass 32 | 33 | @abstractmethod 34 | def get_columnspec(self, schema: str, table_id: str): 35 | pass 36 | 37 | @abstractmethod 38 | def get_json_columnspec(self, schema: str, table: str, column: str): 39 | pass 40 | 41 | @abstractmethod 42 | def ensure_schema(self, schema: str): 43 | pass 44 | 45 | @abstractmethod 46 | def ensure_table(self, schema: str, table: str, columns: list): 47 | pass 48 | 49 | @abstractmethod 50 | def drop_table(self, schema: str, table: str): 51 | pass 52 | 53 | @abstractmethod 54 | def insert_row(self, schema: str, table: str, row: dict): 55 | pass 56 | 57 | @abstractmethod 58 | def json_extract_op(self, json_column: str, json_field: str, sql_column: str): 59 | pass 60 | 61 | @abstractmethod 62 | def 
close(self): 63 | pass 64 | 65 | @abstractmethod 66 | def generate_profiles_yaml_dbt(self, project_name, default_schema): 67 | pass 68 | -------------------------------------------------------------------------------- /ddpui/dbt_automation/utils/sourceschemas.py: -------------------------------------------------------------------------------- 1 | """helpers for working with dbt source configs""" 2 | 3 | import yaml 4 | 5 | 6 | # ================================================================================ 7 | def mksourcedefinition(sourcename: str, input_schema: str, tables: list): 8 | """generates the data structure for a dbt sources.yml""" 9 | airbyte_prefix = "_airbyte_raw_" 10 | 11 | source = {"name": sourcename, "schema": input_schema, "tables": []} 12 | 13 | for tablename in tables: 14 | cleaned_name = tablename 15 | source["tables"].append( 16 | { 17 | "name": cleaned_name, 18 | "identifier": tablename, 19 | "description": "", 20 | } 21 | ) 22 | 23 | sourcedefinitions = { 24 | "version": 2, 25 | "sources": [source], 26 | } 27 | return sourcedefinitions 28 | 29 | 30 | # ================================================================================ 31 | def get_source(filename: str, input_schema: str) -> dict: 32 | """read the config file containing `sources` keys and return the source 33 | matching the input schema""" 34 | with open(filename, "r", encoding="utf-8") as sources_file: 35 | sources = yaml.safe_load(sources_file) 36 | 37 | return next((src for src in sources["sources"] if src["schema"] == input_schema), None) 38 | -------------------------------------------------------------------------------- /ddpui/dbt_automation/utils/tableutils.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file will take all helpers need to work with dbt sources and dbt models 3 | """ 4 | 5 | 6 | def source_or_ref(source_name: str, input_name: str, input_type: str) -> str: 7 | if input_type not in ["source", "model", "cte"]: 8 | raise ValueError("invalid input type to select from") 9 | 10 | if input_type == "cte": 11 | return input_name 12 | 13 | source_or_ref = f"ref('{input_name}')" 14 | 15 | if input_type == "source": 16 | source_or_ref = f"source('{source_name}', '{input_name}')" 17 | 18 | return source_or_ref 19 | -------------------------------------------------------------------------------- /ddpui/dbt_automation/utils/warehouseclient.py: -------------------------------------------------------------------------------- 1 | """constructs and returns an instance of the client for the right warehouse""" 2 | 3 | from ddpui.dbt_automation.utils.postgres import PostgresClient 4 | from ddpui.dbt_automation.utils.bigquery import BigQueryClient 5 | from ddpui.dbt_automation.utils.interfaces.warehouse_interface import WarehouseInterface 6 | 7 | 8 | def get_client(warehouse: str, conn_info: dict = None, location: str = None) -> WarehouseInterface: 9 | """constructs and returns an instance of the client for the right warehouse""" 10 | if warehouse == "postgres": 11 | client = PostgresClient(conn_info) 12 | elif warehouse == "bigquery": 13 | client = BigQueryClient(conn_info, location) 14 | else: 15 | raise ValueError("unknown warehouse") 16 | return client 17 | -------------------------------------------------------------------------------- /ddpui/ddpairbyte/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/ddpairbyte/__init__.py -------------------------------------------------------------------------------- /ddpui/ddpdbt/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/ddpdbt/__init__.py -------------------------------------------------------------------------------- /ddpui/ddpdbt/schema.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | from ninja import Schema 3 | from pathlib import Path 4 | 5 | 6 | class DbtProjectParams(Schema): 7 | """ 8 | schema to define all parameters required to run a dbt project 9 | """ 10 | 11 | dbt_env_dir: Union[str, Path] 12 | project_dir: Union[str, Path] 13 | org_project_dir: Union[str, Path] 14 | target: str 15 | venv_binary: Union[str, Path] 16 | dbt_binary: Union[str, Path] 17 | 18 | 19 | class DbtCloudJobParams(Schema): 20 | """ 21 | Schema to define all parameters required to run a any dbt command using dbt Cloud. 22 | Extend this if you need to add more params while triggering a dbt cloud job 23 | """ 24 | 25 | job_id: int 26 | -------------------------------------------------------------------------------- /ddpui/html/docs.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from django.http import Http404, HttpResponse 4 | 5 | from ddpui.utils.redis_client import RedisClient 6 | 7 | 8 | def get_dbt_docs(request, tokenhex: str): 9 | """serve the generated docs""" 10 | redis = RedisClient.get_instance() 11 | redis_key = f"dbtdocs-{tokenhex}" 12 | htmlfilename = redis.get(redis_key) 13 | if htmlfilename is None: 14 | raise Http404("link has expired") 15 | 16 | htmlfilename = htmlfilename.decode("utf-8") 17 | if not os.path.exists(htmlfilename): 18 | raise Http404("link has expired") 19 | 20 | with open(htmlfilename, "r", encoding="utf-8") as htmlfile: 21 | html = htmlfile.read() 22 | response = HttpResponse(html) 23 | # the only valid values for x-frame-options are "deny" and "sameorigin", both 24 | # of which will stop the iframe from rendering the docs 25 | # removing the header causes it to be set to "deny" by django 26 | # but if we set it to an invalid value, it makes its way to the browser where 27 | # it is ignored 28 | response.headers["X-Frame-Options"] = "ignore" 29 | response.headers[ 30 | "Content-Security-Policy" 31 | ] = f"frame-src localhost:8002 {request.headers['Host']};" 32 | return response 33 | -------------------------------------------------------------------------------- /ddpui/html/elementary.py: -------------------------------------------------------------------------------- 1 | import os 2 | from django.http import HttpResponse, Http404 3 | 4 | from ddpui.utils.redis_client import RedisClient 5 | 6 | 7 | def get_elementary_report(request, tokenhex: str): 8 | """serve the generated docs""" 9 | redis = RedisClient.get_instance() 10 | redis_key = f"elementary-report-{tokenhex}" 11 | htmlfilename = redis.get(redis_key) 12 | if htmlfilename is None: 13 | raise Http404("link has expired") 14 | 15 | htmlfilename = htmlfilename.decode("utf-8") 16 | if not os.path.exists(htmlfilename): 17 | raise Http404("link has expired") 18 | 19 | with open(htmlfilename, "r", encoding="utf-8") as htmlfile: 20 | html = htmlfile.read() 21 | response = HttpResponse(html) 22 | # the only 
valid values for x-frame-options are "deny" and "sameorigin", both 23 | # of which will stop the iframe from rendering the docs 24 | # removing the header causes it to be set to "deny" by django 25 | # but if we set it to an invalid value, it makes its way to the browser where 26 | # it is ignored 27 | response.headers["X-Frame-Options"] = "ignore" 28 | response.headers[ 29 | "Content-Security-Policy" 30 | ] = f"frame-src localhost:8002 {request.headers['Host']};" 31 | return response 32 | -------------------------------------------------------------------------------- /ddpui/management/commands/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/management/commands/__init__.py -------------------------------------------------------------------------------- /ddpui/management/commands/create-system-orguser.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | from django.core.management.base import BaseCommand 3 | from django.contrib.auth.models import User 4 | 5 | from ddpui.models.org_user import OrgUser, OrgUserRole 6 | from ddpui.utils.custom_logger import CustomLogger 7 | from ddpui.utils.constants import SYSTEM_USER_EMAIL 8 | 9 | logger = CustomLogger("ddpui") 10 | 11 | load_dotenv() 12 | 13 | 14 | class Command(BaseCommand): 15 | """ 16 | Adds user for superset usage dashboard 17 | """ 18 | 19 | help = "Adds auth user and orguser that system will use to lock scheduled pipelines" 20 | 21 | def add_arguments(self, parser): # skipcq: PYL-R0201 22 | pass 23 | 24 | def handle(self, *args, **options): 25 | """create orguser with null org and auth user with no password""" 26 | user = User.objects.filter(email=SYSTEM_USER_EMAIL).first() 27 | if user is None: 28 | user = User.objects.create( 29 | email=SYSTEM_USER_EMAIL, 30 | username=SYSTEM_USER_EMAIL, 31 | password="", 32 | ) 33 | logger.info("created auth user") 34 | orguser = OrgUser.objects.filter(user=user).first() 35 | if orguser is None: 36 | OrgUser.objects.create(user=user, org=None, role=OrgUserRole.ACCOUNT_MANAGER) 37 | logger.info("created system orguser") 38 | -------------------------------------------------------------------------------- /ddpui/management/commands/createorgsuperset.py: -------------------------------------------------------------------------------- 1 | from django.core.management.base import BaseCommand 2 | 3 | from ddpui.models.org import Org 4 | from ddpui.models.org_supersets import OrgSupersets 5 | 6 | 7 | class Command(BaseCommand): 8 | """ 9 | This script creates OrgSupersets for Orgs 10 | """ 11 | 12 | help = "Create an OrgSuperset for an Org" 13 | 14 | def add_arguments(self, parser): 15 | parser.add_argument("--org", type=str, help="Org slug", required=True) 16 | parser.add_argument("--container-name", type=str, help="Container name", required=True) 17 | parser.add_argument("--superset-version", type=str, help="Superset version", required=True) 18 | parser.add_argument("--overwrite", action="store_true", help="Overwrite existing plan") 19 | 20 | def handle(self, *args, **options): 21 | org = Org.objects.get(slug=options["org"]) 22 | 23 | org_superset = OrgSupersets.objects.filter(org=org).first() 24 | if org_superset and not options["overwrite"]: 25 | self.stdout.write(self.style.ERROR(f"Org {options['org']} already has a superset")) 26 | return 27 | 28 | if not org_superset: 29 | 
org_superset = OrgSupersets(org=org) 30 | 31 | org_superset.container_name = options["container_name"] 32 | org_superset.superset_version = options["superset_version"] 33 | 34 | org_superset.save() 35 | print("OrgSuperset created successfully for " + org.slug) 36 | -------------------------------------------------------------------------------- /ddpui/management/commands/createsupersetusageuser.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | from django.core.management.base import BaseCommand 3 | 4 | from ddpui.models.org import Org, OrgWarehouse 5 | from ddpui.utils.secretsmanager import save_superset_usage_dashboard_credentials 6 | 7 | from ddpui.utils.custom_logger import CustomLogger 8 | 9 | logger = CustomLogger("ddpui") 10 | 11 | load_dotenv() 12 | 13 | 14 | class Command(BaseCommand): 15 | """ 16 | Adds user for superset usage dashboard 17 | """ 18 | 19 | help = "Adds user for superset usage dashboard" 20 | 21 | def add_arguments(self, parser): # skipcq: PYL-R0201 22 | parser.add_argument("--username", required=True) 23 | parser.add_argument("--first-name", required=True) 24 | parser.add_argument("--last-name", required=True) 25 | parser.add_argument("--password", required=True) 26 | 27 | def handle(self, *args, **options): 28 | """adds superset credentials to secrets manager""" 29 | secret_id = save_superset_usage_dashboard_credentials( 30 | { 31 | "username": options["username"], 32 | "first_name": options["first_name"], 33 | "last_name": options["last_name"], 34 | "password": options["password"], 35 | }, 36 | ) 37 | logger.info(f"credentials saved to secretId = {secret_id}") 38 | -------------------------------------------------------------------------------- /ddpui/management/commands/cypress_tests_cleanup.py: -------------------------------------------------------------------------------- 1 | from django.core.management.base import BaseCommand 2 | from django.contrib.auth.models import User 3 | 4 | from ddpui.models.org import Org 5 | 6 | 7 | class Command(BaseCommand): 8 | """ 9 | This script cleansup the test users and the test organizations created 10 | by cypress. The org slugs start with 'cypress_' while username/email starts 11 | with 'cypress_'. 
12 | """ 13 | 14 | help = "Deletes the user and org created by cypress while tessting" 15 | 16 | def handle(self, *args, **options): 17 | """Delete cypress user and org""" 18 | Org.objects.filter(slug__startswith="cypress_").delete() 19 | 20 | User.objects.filter(username__startswith="cypress_").delete() 21 | -------------------------------------------------------------------------------- /ddpui/management/commands/dbt_cloud_integration.py: -------------------------------------------------------------------------------- 1 | from django.core.management.base import BaseCommand 2 | 3 | from ddpui.models.org import Org, OrgPrefectBlockv1 4 | from ddpui.models.org import OrgWarehouse 5 | from ddpui.utils import secretsmanager 6 | from ddpui.ddpprefect import DBTCLIPROFILE 7 | from ddpui.ddpprefect import prefect_service 8 | 9 | 10 | class Command(BaseCommand): 11 | """ 12 | This script lets us run tasks/jobs related to dbt cloud integrations in dalgo 13 | """ 14 | 15 | help = "Dbt cloud related tasks" 16 | 17 | def add_arguments(self, parser): 18 | parser.add_argument("org", type=str, help="Org slug") 19 | parser.add_argument( 20 | "--api-key", type=str, help="Api key for your dbt cloud account", required=True 21 | ) 22 | parser.add_argument( 23 | "--account-id", type=int, help="Account id for your dbt cloud account", required=True 24 | ) 25 | 26 | def handle(self, *args, **options): 27 | """ 28 | Create/update dbt cloud creds block 29 | This should be replaced by configuration option on settings panel where users can add this 30 | """ 31 | org = Org.objects.filter(slug=options["org"]).first() 32 | if org is None: 33 | print(f"Org with slug {options['org']} does not exist") 34 | return 35 | 36 | if options["api_key"] and options["account_id"]: 37 | block: OrgPrefectBlockv1 = prefect_service.create_or_update_dbt_cloud_creds_block( 38 | org, options["account_id"], options["api_key"] 39 | ) 40 | print(f"DBT Cloud credentials block created/updated {block.block_name}") 41 | return 42 | -------------------------------------------------------------------------------- /ddpui/management/commands/delete_notification.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from django.core.management.base import BaseCommand 3 | from ddpui.core import notifications_service 4 | 5 | 6 | class Command(BaseCommand): 7 | """Deletes a notification by its ID""" 8 | 9 | help = "Deletes a notification by its ID" 10 | 11 | def add_arguments(self, parser): 12 | """Adds command line arguments""" 13 | parser.add_argument("notification_id", type=int, help="ID of the notification to delete") 14 | 15 | def handle(self, *args, **options): 16 | notification_id = options["notification_id"] 17 | 18 | # Call the notification service to delete the notification 19 | error, result = notifications_service.delete_scheduled_notification(notification_id) 20 | 21 | if error is not None: 22 | self.stderr.write(f"Error: {error}") 23 | sys.exit(1) 24 | 25 | self.stdout.write(f"Notification with ID {notification_id} deleted successfully.") 26 | -------------------------------------------------------------------------------- /ddpui/management/commands/deleteorg.py: -------------------------------------------------------------------------------- 1 | from dotenv import load_dotenv 2 | from django.core.management.base import BaseCommand 3 | 4 | from ddpui.models.org_user import Org 5 | from ddpui.utils.deleteorg import delete_one_org, display_org 6 | 7 | 8 | load_dotenv() 9 | 10 | 11 | class 
Command(BaseCommand): 12 | """ 13 | This script deletes an org and all associated entities 14 | Not only in the Django database, but also in Airbyte and in Prefect 15 | """ 16 | 17 | help = "Deletes an organization" 18 | 19 | def add_arguments(self, parser): # skipcq: PYL-R0201 20 | """The main parameter is the org name""" 21 | parser.add_argument("--org-name", required=True) 22 | parser.add_argument("--yes-really", action="store_true") 23 | 24 | def handle(self, *args, **options): 25 | """Docstring""" 26 | if options["org_name"] == "ALL": 27 | for org in Org.objects.all(): 28 | display_org(org) 29 | else: 30 | org = Org.objects.filter(name=options["org_name"]).first() 31 | if org is None: 32 | org = Org.objects.filter(slug=options["org_name"]).first() 33 | if org is None: 34 | print("no such org") 35 | return 36 | 37 | delete_one_org(org, dry_run=not options["yes_really"]) 38 | -------------------------------------------------------------------------------- /ddpui/management/commands/role_based_access.py: -------------------------------------------------------------------------------- 1 | from django.core.management.base import BaseCommand 2 | 3 | from ddpui.models.org_user import OrgUser, Invitation 4 | from ddpui.models.role_based_access import Role 5 | from ddpui.auth import ACCOUNT_MANAGER_ROLE 6 | 7 | 8 | class Command(BaseCommand): 9 | """ 10 | Scripts needed to move current clients to role based access 11 | """ 12 | 13 | def handle(self, *args, **options): 14 | # Update all orgusers to have role of Super admin 15 | 16 | role = Role.objects.filter(slug=ACCOUNT_MANAGER_ROLE).first() 17 | if role: 18 | OrgUser.objects.filter(new_role__isnull=True).update(new_role=role) 19 | Invitation.objects.filter(invited_new_role__isnull=True).update(invited_new_role=role) 20 | -------------------------------------------------------------------------------- /ddpui/management/commands/setdockerinfoindestinations.py: -------------------------------------------------------------------------------- 1 | from django.core.management.base import BaseCommand 2 | 3 | from ddpui.models.org import OrgWarehouse 4 | from ddpui.ddpairbyte import airbyte_service 5 | 6 | 7 | class Command(BaseCommand): 8 | """Docstring""" 9 | 10 | help = "Updates the airbyte_docker_repository and airbyte_docker_image_tag for all warehouses in the system." 11 | 12 | def handle(self, *args, **options): 13 | """Docstring""" 14 | for warehouse in OrgWarehouse.objects.all(): 15 | try: 16 | destination = airbyte_service.get_destination( 17 | warehouse.org.airbyte_workspace_id, warehouse.airbyte_destination_id 18 | ) 19 | except Exception: 20 | continue 21 | destination_def_id = destination["destinationDefinitionId"] 22 | destination_definition = airbyte_service.get_destination_definition( 23 | warehouse.org.airbyte_workspace_id, destination_def_id 24 | ) 25 | warehouse.airbyte_docker_repository = destination_definition["dockerRepository"] 26 | warehouse.airbyte_docker_image_tag = destination_definition["dockerImageTag"] 27 | warehouse.save() 28 | -------------------------------------------------------------------------------- /ddpui/management/commands/setworkerpoolfordeployment.py: -------------------------------------------------------------------------------- 1 | from django.core.management.base import BaseCommand 2 | 3 | from ddpui.ddpprefect.prefect_service import prefect_put 4 | 5 | 6 | class Command(BaseCommand): 7 | """Docstring""" 8 | 9 | help = "Sets the name of the worker pool for a deployment." 
10 | 11 | def add_arguments(self, parser): 12 | """Docstring""" 13 | parser.add_argument("--deployment-id", type=str, help="The deployment ID.", required=True) 14 | parser.add_argument( 15 | "--work-queue-name", 16 | type=str, 17 | help="The name of the work queue to set for the deployment.", 18 | required=True, 19 | ) 20 | 21 | def handle(self, *args, **options): 22 | """Docstring""" 23 | deployment_id = options["deployment_id"] 24 | work_queue_name = options["work_queue_name"] 25 | 26 | res = prefect_put( 27 | f"v1/deployments/{deployment_id}", 28 | {"work_queue_name": work_queue_name}, 29 | ) 30 | print(res) 31 | -------------------------------------------------------------------------------- /ddpui/management/commands/sync-flow-runs-of-dataflow.py: -------------------------------------------------------------------------------- 1 | """for each org fetch orchestration pipelines for airbyte sync""" 2 | 3 | from datetime import datetime 4 | import pytz 5 | from django.core.management.base import BaseCommand, CommandParser 6 | 7 | from ddpui.models.tasks import DataflowOrgTask 8 | from ddpui.celeryworkers.tasks import sync_flow_runs_of_deployments 9 | from ddpui.models.org import OrgDataFlowv1 10 | 11 | 12 | class Command(BaseCommand): 13 | """Docstring""" 14 | 15 | help = "Syncs the flow runs of deployments by reading them from prefect" 16 | 17 | def add_arguments(self, parser: CommandParser) -> None: 18 | parser.add_argument("--orgslug", type=str, help="org slug; optional") 19 | parser.add_argument( 20 | "--lookbackhours", 21 | type=int, 22 | help="will sync flow runs from the last this many hours till now; default is 24hrs", 23 | default=24, 24 | ) 25 | 26 | def handle(self, *args, **options): 27 | """for each org, fetch orchestration pipeline(s) for airbyte sync""" 28 | query = OrgDataFlowv1.objects 29 | 30 | if options["orgslug"]: 31 | query = query.filter(org__slug=options["orgslug"]) 32 | 33 | deployment_ids = [flow.deployment_id for flow in query.all() if flow.deployment_id] 34 | sync_flow_runs_of_deployments( 35 | deployment_ids=deployment_ids, look_back_hours=options["lookbackhours"] 36 | ) 37 | -------------------------------------------------------------------------------- /ddpui/management/commands/unlock-sync-sources.py: -------------------------------------------------------------------------------- 1 | """script to expire the redis sync-sources key for an org""" 2 | 3 | from time import sleep 4 | from dotenv import load_dotenv 5 | from django.core.management.base import BaseCommand 6 | 7 | from ddpui.utils.redis_client import RedisClient 8 | from ddpui.utils.custom_logger import CustomLogger 9 | from ddpui.models.org import Org 10 | from ddpui.models.tasks import TaskProgressHashPrefix 11 | 12 | logger = CustomLogger("ddpui") 13 | 14 | load_dotenv() 15 | 16 | 17 | class Command(BaseCommand): 18 | """ 19 | This script unlocks a frozen sync-sources 20 | """ 21 | 22 | help = "Unlock an Org's sync-sources" 23 | 24 | def add_arguments(self, parser): # skipcq: PYL-R0201 25 | parser.add_argument("--org", required=True) 26 | 27 | def handle(self, *args, **options): 28 | """expire the redis sync-sources key for the given org""" 29 | org = Org.objects.filter(slug=options["org"]).first() 30 | if org is None: 31 | print("Org not found") 32 | return 33 | redis = RedisClient.get_instance() 34 | hashkey = f"{TaskProgressHashPrefix.SYNCSOURCES}-{org.slug}" 35 | redis.expire(hashkey, 1) 36 | sleep(2) 37 | if len(redis.hkeys(hashkey)) > 0: 38 | print("Sync sources still locked for org: ", org.slug)
39 | return 40 | print("Sync sources unlocked for org: ", org.slug) 41 | -------------------------------------------------------------------------------- /ddpui/management/commands/update_airbyte_server_blocks.py: -------------------------------------------------------------------------------- 1 | from django.core.management.base import BaseCommand 2 | from ddpui.models.org import Org, OrgPrefectBlockv1 3 | from ddpui.ddpprefect import prefect_service 4 | 5 | 6 | class Command(BaseCommand): 7 | """Update airbyte server blocks in prefect""" 8 | 9 | help = "Update airbyte server blocks in prefect" 10 | 11 | def add_arguments(self, parser): 12 | """adds command line arguments""" 13 | parser.add_argument("org", type=str, help="Org slug, use 'all' to update for all orgs") 14 | 15 | def handle(self, *args, **options): 16 | """Use airbyte host and port from the .env""" 17 | orgs = Org.objects.all() 18 | if options["org"] != "all": 19 | orgs = orgs.filter(slug=options["org"]) 20 | 21 | print(f"Updating airbyte server blocks in prefect for {len(orgs)} orgs") 22 | 23 | for org in orgs: 24 | server_block = OrgPrefectBlockv1.objects.filter(org=org).first() 25 | 26 | if not server_block: 27 | print(f"Org {org.slug} does not have a server block") 28 | continue 29 | 30 | # update host and port 31 | prefect_service.update_airbyte_server_block(server_block.block_name) 32 | 33 | print(f"Updated airbyte server block for org {org.slug}") 34 | -------------------------------------------------------------------------------- /ddpui/migrations/0002_remove_adminuser_active_remove_adminuser_email_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-04-05 10:04 2 | 3 | from django.conf import settings 4 | from django.db import migrations, models 5 | import django.db.models.deletion 6 | 7 | 8 | class Migration(migrations.Migration): 9 | dependencies = [ 10 | migrations.swappable_dependency(settings.AUTH_USER_MODEL), 11 | ("ddpui", "0001_initial"), 12 | ] 13 | 14 | operations = [ 15 | migrations.RemoveField( 16 | model_name="adminuser", 17 | name="active", 18 | ), 19 | migrations.RemoveField( 20 | model_name="adminuser", 21 | name="email", 22 | ), 23 | migrations.RemoveField( 24 | model_name="orguser", 25 | name="active", 26 | ), 27 | migrations.RemoveField( 28 | model_name="orguser", 29 | name="email", 30 | ), 31 | migrations.AddField( 32 | model_name="adminuser", 33 | name="user", 34 | field=models.ForeignKey( 35 | default=None, 36 | on_delete=django.db.models.deletion.CASCADE, 37 | to=settings.AUTH_USER_MODEL, 38 | ), 39 | preserve_default=False, 40 | ), 41 | migrations.AddField( 42 | model_name="orguser", 43 | name="user", 44 | field=models.ForeignKey( 45 | default=0, 46 | on_delete=django.db.models.deletion.CASCADE, 47 | to=settings.AUTH_USER_MODEL, 48 | ), 49 | preserve_default=False, 50 | ), 51 | ] 52 | -------------------------------------------------------------------------------- /ddpui/migrations/0003_orguser_role.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-04-07 13:19 2 | 3 | import ddpui.models.org_user 4 | from django.db import migrations, models 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0002_remove_adminuser_active_remove_adminuser_email_and_more"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AddField( 14 | model_name="orguser", 15 | name="role", 16 | field=models.IntegerField( 17 | choices=[ 18 
| (1, "REPORT_VIEWER"), 19 | (2, "PIPELINE_MANAGER"), 20 | (3, "ACCOUNT_MANAGER"), 21 | ], 22 | default=ddpui.models.org_user.OrgUserRole["REPORT_VIEWER"], 23 | ), 24 | ), 25 | ] 26 | -------------------------------------------------------------------------------- /ddpui/migrations/0004_invitation_invited_role.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-04-07 17:51 2 | 3 | import ddpui.models.org_user 4 | from django.db import migrations, models 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0003_orguser_role"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AddField( 14 | model_name="invitation", 15 | name="invited_role", 16 | field=models.IntegerField( 17 | choices=[ 18 | (1, "REPORT_VIEWER"), 19 | (2, "PIPELINE_MANAGER"), 20 | (3, "ACCOUNT_MANAGER"), 21 | ], 22 | default=ddpui.models.org_user.OrgUserRole["REPORT_VIEWER"], 23 | ), 24 | ), 25 | ] 26 | -------------------------------------------------------------------------------- /ddpui/migrations/0005_orgprefectblock_displayname.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-04-15 05:49 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0004_invitation_invited_role"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgprefectblock", 14 | name="displayname", 15 | field=models.CharField(max_length=100, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0006_rename_dbtversion_orgdbt_dbt_version_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-04-17 11:45 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0005_orgprefectblock_displayname"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RenameField( 13 | model_name="orgdbt", 14 | old_name="dbtversion", 15 | new_name="dbt_version", 16 | ), 17 | migrations.RenameField( 18 | model_name="orgdbt", 19 | old_name="targetname", 20 | new_name="target_name", 21 | ), 22 | migrations.RenameField( 23 | model_name="orgdbt", 24 | old_name="targetschema", 25 | new_name="target_schema", 26 | ), 27 | migrations.RenameField( 28 | model_name="orgdbt", 29 | old_name="targettype", 30 | new_name="target_type", 31 | ), 32 | migrations.RenameField( 33 | model_name="orgprefectblock", 34 | old_name="blockid", 35 | new_name="block_id", 36 | ), 37 | migrations.RenameField( 38 | model_name="orgprefectblock", 39 | old_name="blockname", 40 | new_name="block_name", 41 | ), 42 | migrations.RenameField( 43 | model_name="orgprefectblock", 44 | old_name="blocktype", 45 | new_name="block_type", 46 | ), 47 | migrations.RenameField( 48 | model_name="orgprefectblock", 49 | old_name="displayname", 50 | new_name="display_name", 51 | ), 52 | ] 53 | -------------------------------------------------------------------------------- /ddpui/migrations/0007_orgdbt_gitrepo_access_token_secret.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-04-18 08:41 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", 
"0006_rename_dbtversion_orgdbt_dbt_version_and_more"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgdbt", 14 | name="gitrepo_access_token_secret", 15 | field=models.CharField(max_length=100, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0008_orgprefectblock_seq.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-04-18 12:12 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0007_orgdbt_gitrepo_access_token_secret"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgprefectblock", 14 | name="seq", 15 | field=models.SmallIntegerField(null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0009_remove_orgdbt_database_remove_orgdbt_host_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-04-18 14:55 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0008_orgprefectblock_seq"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="OrgWarehouse", 15 | fields=[ 16 | ( 17 | "id", 18 | models.BigAutoField( 19 | auto_created=True, 20 | primary_key=True, 21 | serialize=False, 22 | verbose_name="ID", 23 | ), 24 | ), 25 | ("wtype", models.CharField(max_length=25)), 26 | ("credentials", models.CharField(max_length=200)), 27 | ( 28 | "org", 29 | models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="ddpui.org"), 30 | ), 31 | ], 32 | ), 33 | ] 34 | -------------------------------------------------------------------------------- /ddpui/migrations/0010_remove_orgdbt_database_remove_orgdbt_host_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-04-18 16:52 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0009_remove_orgdbt_database_remove_orgdbt_host_and_more"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RemoveField( 13 | model_name="orgdbt", 14 | name="database", 15 | ), 16 | migrations.RemoveField( 17 | model_name="orgdbt", 18 | name="host", 19 | ), 20 | migrations.RemoveField( 21 | model_name="orgdbt", 22 | name="password", 23 | ), 24 | migrations.RemoveField( 25 | model_name="orgdbt", 26 | name="port", 27 | ), 28 | migrations.RemoveField( 29 | model_name="orgdbt", 30 | name="username", 31 | ), 32 | ] 33 | -------------------------------------------------------------------------------- /ddpui/migrations/0011_orgflow.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-04-22 19:25 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0010_remove_orgdbt_database_remove_orgdbt_host_and_more"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="OrgDataFlow", 15 | fields=[ 16 | ( 17 | "id", 18 | models.BigAutoField( 19 | auto_created=True, 20 | primary_key=True, 21 | serialize=False, 22 | verbose_name="ID", 23 | ), 24 | ), 25 | ("name", 
models.CharField(max_length=100)), 26 | ("deployment_id", models.CharField(max_length=36, unique=True)), 27 | ("cron", models.CharField(max_length=36, unique=True)), 28 | ( 29 | "org", 30 | models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="ddpui.org"), 31 | ), 32 | ], 33 | ), 34 | ] 35 | -------------------------------------------------------------------------------- /ddpui/migrations/0012_alter_org_dbt.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-04-28 13:10 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0011_orgflow"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AlterField( 14 | model_name="org", 15 | name="dbt", 16 | field=models.ForeignKey( 17 | null=True, 18 | on_delete=django.db.models.deletion.SET_NULL, 19 | to="ddpui.orgdbt", 20 | ), 21 | ), 22 | ] 23 | -------------------------------------------------------------------------------- /ddpui/migrations/0013_orgwarehouse_airbyte_destination_id.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-05-10 05:39 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0012_alter_org_dbt"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgwarehouse", 14 | name="airbyte_destination_id", 15 | field=models.TextField(max_length=36, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0014_alter_orgdbt_target_schema.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-05-12 04:24 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0013_orgwarehouse_airbyte_destination_id"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="orgdbt", 14 | name="target_schema", 15 | field=models.CharField(max_length=50), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0015_orgwarehouse_airbyte_norm_op_id.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-05-12 08:42 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0014_alter_orgdbt_target_schema"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgwarehouse", 14 | name="airbyte_norm_op_id", 15 | field=models.TextField(max_length=36, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0015_rename_target_schema_orgdbt_default_schema_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-05-15 08:39 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0014_alter_orgdbt_target_schema"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RenameField( 13 | model_name="orgdbt", 14 | old_name="target_schema", 15 | new_name="default_schema", 16 | ), 17 | migrations.RemoveField( 18 | 
model_name="orgdbt", 19 | name="target_name", 20 | ), 21 | migrations.AddField( 22 | model_name="orgdataflow", 23 | name="flow_id", 24 | field=models.CharField(max_length=36, null=True, unique=True), 25 | ), 26 | migrations.AlterField( 27 | model_name="orgdataflow", 28 | name="cron", 29 | field=models.CharField(max_length=36, null=True, unique=True), 30 | ), 31 | migrations.AlterField( 32 | model_name="orgdataflow", 33 | name="deployment_id", 34 | field=models.CharField(max_length=36, null=True, unique=True), 35 | ), 36 | ] 37 | -------------------------------------------------------------------------------- /ddpui/migrations/0016_merge_20230518_1427.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-05-18 14:27 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0015_orgwarehouse_airbyte_norm_op_id"), 9 | ("ddpui", "0015_rename_target_schema_orgdbt_default_schema_and_more"), 10 | ] 11 | 12 | operations = [] 13 | -------------------------------------------------------------------------------- /ddpui/migrations/0017_alter_orgdataflow_cron.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-05-18 14:27 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0016_merge_20230518_1427"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="orgdataflow", 14 | name="cron", 15 | field=models.CharField(max_length=36, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0018_orgdataflow_deployment_name.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-05-18 14:33 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0017_alter_orgdataflow_cron"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgdataflow", 14 | name="deployment_name", 15 | field=models.CharField(max_length=100, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0019_remove_orgdataflow_flow_id_orgdataflow_connection_id.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-06-01 14:04 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0018_orgdataflow_deployment_name"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RemoveField( 13 | model_name="orgdataflow", 14 | name="flow_id", 15 | ), 16 | migrations.AddField( 17 | model_name="orgdataflow", 18 | name="connection_id", 19 | field=models.CharField(max_length=36, null=True, unique=True), 20 | ), 21 | ] 22 | -------------------------------------------------------------------------------- /ddpui/migrations/0020_orgprefectblock_command_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-06-05 11:35 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0019_remove_orgdataflow_flow_id_orgdataflow_connection_id"), 9 | ] 10 
| 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgprefectblock", 14 | name="command", 15 | field=models.CharField(max_length=100, null=True), 16 | ), 17 | migrations.AddField( 18 | model_name="orgprefectblock", 19 | name="dbt_target_schema", 20 | field=models.CharField(max_length=50, null=True), 21 | ), 22 | ] 23 | -------------------------------------------------------------------------------- /ddpui/migrations/0021_orguser_email_verified.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-07-03 07:04 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | """add email_verified field to OrgUser""" 8 | 9 | dependencies = [ 10 | ("ddpui", "0020_orgprefectblock_command_and_more"), 11 | ] 12 | 13 | operations = [ 14 | migrations.AddField( 15 | model_name="orguser", 16 | name="email_verified", 17 | field=models.BooleanField(default=False), 18 | ), 19 | ] 20 | -------------------------------------------------------------------------------- /ddpui/migrations/0022_org_viz_login_type_org_viz_url.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-08-09 07:01 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0021_orguser_email_verified"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="org", 14 | name="viz_login_type", 15 | field=models.CharField( 16 | choices=[("basic", "BASIC_AUTH"), ("google", "GOOGLE_AUTH")], 17 | max_length=50, 18 | null=True, 19 | ), 20 | ), 21 | migrations.AddField( 22 | model_name="org", 23 | name="viz_url", 24 | field=models.CharField(max_length=100, null=True), 25 | ), 26 | ] 27 | -------------------------------------------------------------------------------- /ddpui/migrations/0023_remove_orgdbt_dbt_version_orgdbt_dbt_venv.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-08-15 05:43 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0022_org_viz_login_type_org_viz_url"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RemoveField( 13 | model_name="orgdbt", 14 | name="dbt_version", 15 | ), 16 | migrations.AddField( 17 | model_name="orgdbt", 18 | name="dbt_venv", 19 | field=models.CharField(max_length=200, null=True), 20 | ), 21 | ] 22 | -------------------------------------------------------------------------------- /ddpui/migrations/0024_userattributes.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-09-01 08:16 2 | 3 | from django.conf import settings 4 | from django.db import migrations, models 5 | import django.db.models.deletion 6 | 7 | 8 | class Migration(migrations.Migration): 9 | dependencies = [ 10 | migrations.swappable_dependency(settings.AUTH_USER_MODEL), 11 | ("ddpui", "0023_remove_orgdbt_dbt_version_orgdbt_dbt_venv"), 12 | ] 13 | 14 | operations = [ 15 | migrations.CreateModel( 16 | name="UserAttributes", 17 | fields=[ 18 | ( 19 | "id", 20 | models.BigAutoField( 21 | auto_created=True, 22 | primary_key=True, 23 | serialize=False, 24 | verbose_name="ID", 25 | ), 26 | ), 27 | ("email_verified", models.BooleanField(default=False)), 28 | ("can_create_orgs", models.BooleanField(default=False)), 29 | ( 30 | "user", 
31 | models.ForeignKey( 32 | on_delete=django.db.models.deletion.CASCADE, 33 | to=settings.AUTH_USER_MODEL, 34 | ), 35 | ), 36 | ], 37 | ), 38 | ] 39 | -------------------------------------------------------------------------------- /ddpui/migrations/0025_orgwarehouse_name.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-09-05 08:58 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0024_userattributes"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgwarehouse", 14 | name="name", 15 | field=models.CharField(blank=True, default="", max_length=25), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0026_blocklock.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-09-05 12:20 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0025_orgwarehouse_name"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="BlockLock", 15 | fields=[ 16 | ( 17 | "id", 18 | models.BigAutoField( 19 | auto_created=True, 20 | primary_key=True, 21 | serialize=False, 22 | verbose_name="ID", 23 | ), 24 | ), 25 | ("locked_at", models.DateTimeField(auto_now_add=True)), 26 | ( 27 | "flow_run_id", 28 | models.TextField(blank=True, default="", max_length=36), 29 | ), 30 | ( 31 | "block", 32 | models.ForeignKey( 33 | on_delete=django.db.models.deletion.CASCADE, 34 | to="ddpui.orgprefectblock", 35 | unique=True, 36 | ), 37 | ), 38 | ( 39 | "locked_by", 40 | models.ForeignKey( 41 | on_delete=django.db.models.deletion.CASCADE, to="ddpui.orguser" 42 | ), 43 | ), 44 | ], 45 | ), 46 | ] 47 | -------------------------------------------------------------------------------- /ddpui/migrations/0028_orgwarehouse_superset_creds.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-11-07 03:20 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0027_remove_blocklock_block_blocklock_opb_dataflowblock"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgwarehouse", 14 | name="superset_creds", 15 | field=models.CharField(max_length=200, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0029_prefectflowrun.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-11-08 07:59 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0028_orgwarehouse_superset_creds"), 9 | ] 10 | 11 | operations = [ 12 | migrations.CreateModel( 13 | name="PrefectFlowRun", 14 | fields=[ 15 | ( 16 | "id", 17 | models.BigAutoField( 18 | auto_created=True, 19 | primary_key=True, 20 | serialize=False, 21 | verbose_name="ID", 22 | ), 23 | ), 24 | ("deployment_id", models.CharField(max_length=36)), 25 | ("flow_run_id", models.CharField(max_length=36)), 26 | ("name", models.CharField(max_length=255)), 27 | ("start_time", models.DateTimeField()), 28 | ("expected_start_time", models.DateTimeField()), 29 | 
("total_run_time", models.FloatField()), 30 | ("status", models.CharField(max_length=20)), 31 | ("state_name", models.CharField(max_length=20)), 32 | ], 33 | ), 34 | ] 35 | -------------------------------------------------------------------------------- /ddpui/migrations/0030_orgdataflow_dataflow_type.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-11-13 18:28 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0029_prefectflowrun"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgdataflow", 14 | name="dataflow_type", 15 | field=models.CharField( 16 | choices=[("orchestrate", "orchestrate"), ("manual", "manual")], 17 | default="orchestrate", 18 | max_length=25, 19 | ), 20 | ), 21 | ] 22 | -------------------------------------------------------------------------------- /ddpui/migrations/0032_orgprefectblockv1.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-12-05 08:28 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0031_task_orgtask_datafloworgtask"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="OrgPrefectBlockv1", 15 | fields=[ 16 | ( 17 | "id", 18 | models.BigAutoField( 19 | auto_created=True, 20 | primary_key=True, 21 | serialize=False, 22 | verbose_name="ID", 23 | ), 24 | ), 25 | ("block_type", models.CharField(max_length=25)), 26 | ("block_id", models.CharField(max_length=36, unique=True)), 27 | ("block_name", models.CharField(max_length=100, unique=True)), 28 | ( 29 | "org", 30 | models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="ddpui.org"), 31 | ), 32 | ], 33 | ), 34 | ] 35 | -------------------------------------------------------------------------------- /ddpui/migrations/0033_orgdataflowv1.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-12-05 09:11 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0032_orgprefectblockv1"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="OrgDataFlowv1", 15 | fields=[ 16 | ( 17 | "id", 18 | models.BigAutoField( 19 | auto_created=True, 20 | primary_key=True, 21 | serialize=False, 22 | verbose_name="ID", 23 | ), 24 | ), 25 | ("name", models.CharField(max_length=100)), 26 | ("deployment_name", models.CharField(max_length=100, null=True)), 27 | ( 28 | "deployment_id", 29 | models.CharField(max_length=36, null=True, unique=True), 30 | ), 31 | ("cron", models.CharField(max_length=36, null=True)), 32 | ( 33 | "dataflow_type", 34 | models.CharField( 35 | choices=[("orchestrate", "orchestrate"), ("manual", "manual")], 36 | default="orchestrate", 37 | max_length=25, 38 | ), 39 | ), 40 | ( 41 | "org", 42 | models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="ddpui.org"), 43 | ), 44 | ], 45 | ), 46 | ] 47 | -------------------------------------------------------------------------------- /ddpui/migrations/0034_alter_datafloworgtask_dataflow.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-12-05 10:02 2 | 3 | from django.db 
import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0033_orgdataflowv1"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AlterField( 14 | model_name="datafloworgtask", 15 | name="dataflow", 16 | field=models.ForeignKey( 17 | on_delete=django.db.models.deletion.CASCADE, to="ddpui.orgdataflowv1" 18 | ), 19 | ), 20 | ] 21 | -------------------------------------------------------------------------------- /ddpui/migrations/0035_tasklock.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-12-18 05:56 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0034_alter_datafloworgtask_dataflow"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="TaskLock", 15 | fields=[ 16 | ( 17 | "id", 18 | models.BigAutoField( 19 | auto_created=True, 20 | primary_key=True, 21 | serialize=False, 22 | verbose_name="ID", 23 | ), 24 | ), 25 | ( 26 | "flow_run_id", 27 | models.TextField(blank=True, default="", max_length=36), 28 | ), 29 | ("locked_at", models.DateTimeField(auto_now_add=True)), 30 | ( 31 | "locked_by", 32 | models.ForeignKey( 33 | on_delete=django.db.models.deletion.CASCADE, to="ddpui.orguser" 34 | ), 35 | ), 36 | ( 37 | "orgtask", 38 | models.OneToOneField( 39 | on_delete=django.db.models.deletion.CASCADE, to="ddpui.orgtask" 40 | ), 41 | ), 42 | ], 43 | ), 44 | ] 45 | -------------------------------------------------------------------------------- /ddpui/migrations/0036_orgtnc.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-12-13 18:07 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0035_tasklock"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="OrgTnC", 15 | fields=[ 16 | ( 17 | "id", 18 | models.BigAutoField( 19 | auto_created=True, 20 | primary_key=True, 21 | serialize=False, 22 | verbose_name="ID", 23 | ), 24 | ), 25 | ("tnc_accepted_on", models.DateField()), 26 | ( 27 | "org", 28 | models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="ddpui.org"), 29 | ), 30 | ( 31 | "tnc_accepted_by", 32 | models.ForeignKey( 33 | on_delete=django.db.models.deletion.CASCADE, to="ddpui.orguser" 34 | ), 35 | ), 36 | ], 37 | ), 38 | ] 39 | -------------------------------------------------------------------------------- /ddpui/migrations/0037_userattributes_is_consultant.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-12-23 02:54 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0036_orgtnc"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="userattributes", 14 | name="is_consultant", 15 | field=models.BooleanField(default=False), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0038_org_is_demo.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-01-08 05:52 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class 
Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0037_userattributes_is_consultant"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="org", 14 | name="is_demo", 15 | field=models.BooleanField(default=False), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0039_remove_orgwarehouse_superset_creds.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-01-09 07:44 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0038_org_is_demo"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RemoveField( 13 | model_name="orgwarehouse", 14 | name="superset_creds", 15 | ), 16 | ] 17 | -------------------------------------------------------------------------------- /ddpui/migrations/0040_tasklock_locking_dataflow.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-01-11 13:53 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0039_remove_orgwarehouse_superset_creds"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AddField( 14 | model_name="tasklock", 15 | name="locking_dataflow", 16 | field=models.ForeignKey( 17 | null=True, 18 | on_delete=django.db.models.deletion.CASCADE, 19 | to="ddpui.orgdataflowv1", 20 | ), 21 | ), 22 | ] 23 | -------------------------------------------------------------------------------- /ddpui/migrations/0041_orgtask_parameters.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-01-30 10:44 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0040_tasklock_locking_dataflow"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgtask", 14 | name="parameters", 15 | field=models.JSONField(blank=True, default=dict), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0042_orgtask_generated_by_task_is_system.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-02 10:21 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0041_orgtask_parameters"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgtask", 14 | name="generated_by", 15 | field=models.CharField( 16 | choices=[("system", "SYSTEM"), ("client", "CLIENT")], 17 | default="system", 18 | max_length=50, 19 | ), 20 | ), 21 | migrations.AddField( 22 | model_name="task", 23 | name="is_system", 24 | field=models.BooleanField(default=True), 25 | ), 26 | ] 27 | -------------------------------------------------------------------------------- /ddpui/migrations/0043_alter_orgdbt_gitrepo_url.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-12 13:33 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0042_orgtask_generated_by_task_is_system"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | 
model_name="orgdbt", 14 | name="gitrepo_url", 15 | field=models.CharField(max_length=100, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0043_orgwarehouse_bq_location.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-12 08:21 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0042_orgtask_generated_by_task_is_system"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgwarehouse", 14 | name="bq_location", 15 | field=models.CharField(max_length=100, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0044_merge_20240212_1548.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-12 15:48 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0043_alter_orgdbt_gitrepo_url"), 9 | ("ddpui", "0043_orgwarehouse_bq_location"), 10 | ] 11 | 12 | operations = [] 13 | -------------------------------------------------------------------------------- /ddpui/migrations/0045_orgdbtmodel.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-13 06:06 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0044_merge_20240212_1548"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="OrgDbtModel", 15 | fields=[ 16 | ( 17 | "id", 18 | models.BigAutoField( 19 | auto_created=True, 20 | primary_key=True, 21 | serialize=False, 22 | verbose_name="ID", 23 | ), 24 | ), 25 | ("name", models.CharField(max_length=100)), 26 | ("display_name", models.CharField(max_length=100)), 27 | ("sql_path", models.CharField(max_length=200, null=True)), 28 | ("config", models.JSONField(null=True)), 29 | ( 30 | "orgdbt", 31 | models.ForeignKey( 32 | on_delete=django.db.models.deletion.CASCADE, to="ddpui.orgdbt" 33 | ), 34 | ), 35 | ], 36 | ), 37 | ] 38 | -------------------------------------------------------------------------------- /ddpui/migrations/0046_orgdbtmodel_schema.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-14 10:43 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0045_orgdbtmodel"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgdbtmodel", 14 | name="schema", 15 | field=models.CharField(max_length=100, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0047_userattributes_is_platform_admin.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-17 06:57 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0046_orgdbtmodel_schema"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="userattributes", 14 | name="is_platform_admin", 15 | field=models.BooleanField(default=False), 
16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0048_datafloworgtask_seq_orgtask_uuid.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-19 08:14 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0047_userattributes_is_platform_admin"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="datafloworgtask", 14 | name="seq", 15 | field=models.IntegerField(default=1), 16 | ), 17 | migrations.AddField( 18 | model_name="orgtask", 19 | name="uuid", 20 | field=models.UUIDField(editable=False, null=True, unique=True), 21 | ), 22 | ] 23 | -------------------------------------------------------------------------------- /ddpui/migrations/0049_dbtedge.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-26 05:34 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0048_datafloworgtask_seq_orgtask_uuid"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="DbtEdge", 15 | fields=[ 16 | ( 17 | "id", 18 | models.BigAutoField( 19 | auto_created=True, 20 | primary_key=True, 21 | serialize=False, 22 | verbose_name="ID", 23 | ), 24 | ), 25 | ("config", models.JSONField(null=True)), 26 | ( 27 | "source", 28 | models.ForeignKey( 29 | on_delete=django.db.models.deletion.CASCADE, 30 | related_name="source", 31 | to="ddpui.orgdbtmodel", 32 | ), 33 | ), 34 | ( 35 | "target", 36 | models.ForeignKey( 37 | on_delete=django.db.models.deletion.CASCADE, 38 | related_name="target", 39 | to="ddpui.orgdbtmodel", 40 | ), 41 | ), 42 | ], 43 | ), 44 | ] 45 | -------------------------------------------------------------------------------- /ddpui/migrations/0050_remove_orgdbtmodel_config.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-26 05:37 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0049_dbtedge"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RemoveField( 13 | model_name="orgdbtmodel", 14 | name="config", 15 | ), 16 | ] 17 | -------------------------------------------------------------------------------- /ddpui/migrations/0051_orgdbtmodel_type.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-26 05:42 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0050_remove_orgdbtmodel_config"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgdbtmodel", 14 | name="type", 15 | field=models.CharField( 16 | choices=[("source", "SOURCE"), ("model", "MODEL")], 17 | default="model", 18 | max_length=50, 19 | ), 20 | ), 21 | ] 22 | -------------------------------------------------------------------------------- /ddpui/migrations/0052_orgdbtmodel_uuid.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-26 05:43 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", 
"0051_orgdbtmodel_type"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgdbtmodel", 14 | name="uuid", 15 | field=models.UUIDField(editable=False, null=True, unique=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0053_orgwarehouse_airbyte_docker_image_tag_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-28 09:06 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0052_orgdbtmodel_uuid"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgwarehouse", 14 | name="airbyte_docker_image_tag", 15 | field=models.TextField(max_length=10, null=True), 16 | ), 17 | migrations.AddField( 18 | model_name="orgwarehouse", 19 | name="airbyte_docker_repository", 20 | field=models.TextField(max_length=100, null=True), 21 | ), 22 | ] 23 | -------------------------------------------------------------------------------- /ddpui/migrations/0053_remove_dbtedge_config_remove_dbtedge_source_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-26 11:59 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0052_orgdbtmodel_uuid"), 10 | ] 11 | 12 | operations = [ 13 | migrations.RemoveField( 14 | model_name="dbtedge", 15 | name="config", 16 | ), 17 | migrations.RemoveField( 18 | model_name="dbtedge", 19 | name="source", 20 | ), 21 | migrations.RemoveField( 22 | model_name="dbtedge", 23 | name="target", 24 | ), 25 | migrations.AddField( 26 | model_name="dbtedge", 27 | name="from_node", 28 | field=models.ForeignKey( 29 | default=None, 30 | on_delete=django.db.models.deletion.CASCADE, 31 | related_name="from_node", 32 | to="ddpui.orgdbtmodel", 33 | ), 34 | ), 35 | migrations.AddField( 36 | model_name="dbtedge", 37 | name="to_node", 38 | field=models.ForeignKey( 39 | default=None, 40 | on_delete=django.db.models.deletion.CASCADE, 41 | related_name="to_node", 42 | to="ddpui.orgdbtmodel", 43 | ), 44 | ), 45 | migrations.AddField( 46 | model_name="orgdbtmodel", 47 | name="config", 48 | field=models.JSONField(null=True), 49 | ), 50 | ] 51 | -------------------------------------------------------------------------------- /ddpui/migrations/0054_orgdbtmodel_source_name.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-26 12:01 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0053_remove_dbtedge_config_remove_dbtedge_source_and_more"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgdbtmodel", 14 | name="source_name", 15 | field=models.CharField(max_length=100, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0055_merge_20240228_1312.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-28 13:12 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0053_orgwarehouse_airbyte_docker_image_tag_and_more"), 9 | ("ddpui", 
"0054_orgdbtmodel_source_name"), 10 | ] 11 | 12 | operations = [] 13 | -------------------------------------------------------------------------------- /ddpui/migrations/0056_orgdbt_transform_type.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-03-01 06:34 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0055_merge_20240228_1312"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgdbt", 14 | name="transform_type", 15 | field=models.CharField(max_length=10, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0056_remove_orgdbtmodel_config_orgdbtmodel_output_cols.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-28 15:19 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0055_merge_20240228_1312"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RemoveField( 13 | model_name="orgdbtmodel", 14 | name="config", 15 | ), 16 | migrations.AddField( 17 | model_name="orgdbtmodel", 18 | name="output_cols", 19 | field=models.JSONField(default=list), 20 | ), 21 | ] 22 | -------------------------------------------------------------------------------- /ddpui/migrations/0057_orgdbtoperation.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-28 15:25 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0056_remove_orgdbtmodel_config_orgdbtmodel_output_cols"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="OrgDbtOperation", 15 | fields=[ 16 | ( 17 | "id", 18 | models.BigAutoField( 19 | auto_created=True, 20 | primary_key=True, 21 | serialize=False, 22 | verbose_name="ID", 23 | ), 24 | ), 25 | ("uuid", models.UUIDField(editable=False, unique=True)), 26 | ("seq", models.IntegerField(default=0)), 27 | ("output_cols", models.JSONField(default=list)), 28 | ("config", models.JSONField(null=True)), 29 | ( 30 | "dbtmodel", 31 | models.ForeignKey( 32 | on_delete=django.db.models.deletion.CASCADE, 33 | to="ddpui.orgdbtmodel", 34 | ), 35 | ), 36 | ], 37 | ), 38 | ] 39 | -------------------------------------------------------------------------------- /ddpui/migrations/0058_orgdbtmodel_under_construction.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-28 15:30 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0057_orgdbtoperation"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgdbtmodel", 14 | name="under_construction", 15 | field=models.BooleanField(default=False), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0059_alter_orgdbtmodel_display_name_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-02-28 16:28 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | 
("ddpui", "0058_orgdbtmodel_under_construction"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="orgdbtmodel", 14 | name="display_name", 15 | field=models.CharField(max_length=100, null=True), 16 | ), 17 | migrations.AlterField( 18 | model_name="orgdbtmodel", 19 | name="name", 20 | field=models.CharField(max_length=100, null=True), 21 | ), 22 | ] 23 | -------------------------------------------------------------------------------- /ddpui/migrations/0060_merge_20240302_1014.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-03-02 10:14 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0056_orgdbt_transform_type"), 9 | ("ddpui", "0059_alter_orgdbtmodel_display_name_and_more"), 10 | ] 11 | 12 | operations = [] 13 | -------------------------------------------------------------------------------- /ddpui/migrations/0061_alter_orgdbtmodel_display_name_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-03-07 07:13 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0060_merge_20240302_1014"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="orgdbtmodel", 14 | name="display_name", 15 | field=models.CharField(max_length=300, null=True), 16 | ), 17 | migrations.AlterField( 18 | model_name="orgdbtmodel", 19 | name="name", 20 | field=models.CharField(max_length=300, null=True), 21 | ), 22 | migrations.AlterField( 23 | model_name="orgdbtmodel", 24 | name="sql_path", 25 | field=models.CharField(max_length=300, null=True), 26 | ), 27 | ] 28 | -------------------------------------------------------------------------------- /ddpui/migrations/0062_alter_orgdbtmodel_schema_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-03-20 14:52 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0061_alter_orgdbtmodel_display_name_and_more"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="orgdbtmodel", 14 | name="schema", 15 | field=models.CharField(max_length=300, null=True), 16 | ), 17 | migrations.AlterField( 18 | model_name="orgdbtmodel", 19 | name="source_name", 20 | field=models.CharField(max_length=300, null=True), 21 | ), 22 | ] 23 | -------------------------------------------------------------------------------- /ddpui/migrations/0064_orguser_new_role.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-04-03 05:35 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0063_permission_role_rolepermission"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AddField( 14 | model_name="orguser", 15 | name="new_role", 16 | field=models.ForeignKey( 17 | null=True, on_delete=django.db.models.deletion.SET_NULL, to="ddpui.role" 18 | ), 19 | ), 20 | ] 21 | -------------------------------------------------------------------------------- /ddpui/migrations/0065_role_level_alter_permission_uuid_alter_role_uuid.py: 
-------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-04-05 05:20 2 | 3 | from django.db import migrations, models 4 | import uuid 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0064_orguser_new_role"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AddField( 14 | model_name="role", 15 | name="level", 16 | field=models.SmallIntegerField(default=1), 17 | ), 18 | migrations.AlterField( 19 | model_name="permission", 20 | name="uuid", 21 | field=models.UUIDField(default=uuid.uuid4, editable=False, unique=True), 22 | ), 23 | migrations.AlterField( 24 | model_name="role", 25 | name="uuid", 26 | field=models.UUIDField(default=uuid.uuid4, editable=False, unique=True), 27 | ), 28 | ] 29 | -------------------------------------------------------------------------------- /ddpui/migrations/0066_invitation_invited_new_role.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-04-05 06:46 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0065_role_level_alter_permission_uuid_alter_role_uuid"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AddField( 14 | model_name="invitation", 15 | name="invited_new_role", 16 | field=models.ForeignKey( 17 | null=True, on_delete=django.db.models.deletion.CASCADE, to="ddpui.role" 18 | ), 19 | ), 20 | ] 21 | -------------------------------------------------------------------------------- /ddpui/migrations/0067_alter_orgwarehouse_credentials.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-04-21 04:11 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0066_invitation_invited_new_role"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="orgwarehouse", 14 | name="credentials", 15 | field=models.CharField(max_length=1000), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0067_canvaslock.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-04-17 17:01 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0066_invitation_invited_new_role"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="CanvasLock", 15 | fields=[ 16 | ( 17 | "id", 18 | models.BigAutoField( 19 | auto_created=True, 20 | primary_key=True, 21 | serialize=False, 22 | verbose_name="ID", 23 | ), 24 | ), 25 | ("locked_at", models.DateTimeField(auto_now_add=True)), 26 | ("lock_id", models.UUIDField(editable=False, null=True, unique=True)), 27 | ( 28 | "locked_by", 29 | models.ForeignKey( 30 | on_delete=django.db.models.deletion.CASCADE, to="ddpui.orguser" 31 | ), 32 | ), 33 | ], 34 | ), 35 | ] 36 | -------------------------------------------------------------------------------- /ddpui/migrations/0068_merge_20240422_0043.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-04-22 00:43 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 
dependencies = [ 8 | ("ddpui", "0067_alter_orgwarehouse_credentials"), 9 | ("ddpui", "0067_canvaslock"), 10 | ] 11 | 12 | operations = [] 13 | -------------------------------------------------------------------------------- /ddpui/migrations/0069_tasklock_celery_task_id.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-04-25 12:08 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0068_merge_20240422_0043"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="tasklock", 14 | name="celery_task_id", 15 | field=models.TextField(max_length=36, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0070_org_ses_whitelisted_email.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-04-30 15:51 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0069_tasklock_celery_task_id"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="org", 14 | name="ses_whitelisted_email", 15 | field=models.TextField(max_length=100, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0071_alter_orgtask_connection_id.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-05-07 07:36 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0070_org_ses_whitelisted_email"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="orgtask", 14 | name="connection_id", 15 | field=models.CharField(max_length=36, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0072_orgdataflowv1_reset_conn_dataflow.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-05-07 08:27 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0071_alter_orgtask_connection_id"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AddField( 14 | model_name="orgdataflowv1", 15 | name="reset_conn_dataflow", 16 | field=models.ForeignKey( 17 | null=True, 18 | on_delete=django.db.models.deletion.SET_NULL, 19 | to="ddpui.orgdataflowv1", 20 | ), 21 | ), 22 | ] 23 | -------------------------------------------------------------------------------- /ddpui/migrations/0073_orgschemachange.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-05-22 11:55 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0072_orgdataflowv1_reset_conn_dataflow"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="OrgSchemaChange", 15 | fields=[ 16 | ( 17 | "id", 18 | models.BigAutoField( 19 | auto_created=True, 20 | primary_key=True, 21 | serialize=False, 22 | verbose_name="ID", 23 | ), 24 | ), 25 | ("name", models.CharField(max_length=100)), 
26 | ( 27 | "connection_id", 28 | models.CharField(max_length=36, null=True, unique=True), 29 | ), 30 | ("schema_change", models.CharField(max_length=36, null=True)), 31 | ( 32 | "org", 33 | models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="ddpui.org"), 34 | ), 35 | ], 36 | ), 37 | ] 38 | -------------------------------------------------------------------------------- /ddpui/migrations/0073_remove_dataflowblock_dataflow_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-05-23 05:43 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0072_orgdataflowv1_reset_conn_dataflow"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RemoveField( 13 | model_name="dataflowblock", 14 | name="dataflow", 15 | ), 16 | migrations.RemoveField( 17 | model_name="dataflowblock", 18 | name="opb", 19 | ), 20 | migrations.RemoveField( 21 | model_name="orgdataflow", 22 | name="org", 23 | ), 24 | migrations.DeleteModel( 25 | name="BlockLock", 26 | ), 27 | migrations.DeleteModel( 28 | name="DataflowBlock", 29 | ), 30 | migrations.DeleteModel( 31 | name="OrgDataFlow", 32 | ), 33 | ] 34 | -------------------------------------------------------------------------------- /ddpui/migrations/0074_rename_schema_change_orgschemachange_change_type.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-05-22 12:28 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0073_orgschemachange"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RenameField( 13 | model_name="orgschemachange", 14 | old_name="schema_change", 15 | new_name="change_type", 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0075_remove_orgschemachange_name.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-05-22 12:30 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0074_rename_schema_change_orgschemachange_change_type"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RemoveField( 13 | model_name="orgschemachange", 14 | name="name", 15 | ), 16 | ] 17 | -------------------------------------------------------------------------------- /ddpui/migrations/0076_merge_20240526_2356.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2024-05-26 23:56 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0073_remove_dataflowblock_dataflow_and_more"), 9 | ("ddpui", "0075_remove_orgschemachange_name"), 10 | ] 11 | 12 | operations = [] 13 | -------------------------------------------------------------------------------- /ddpui/migrations/0077_delete_orgprefectblock.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 5.0.6 on 2024-05-27 08:21 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0076_merge_20240526_2356"), 9 | ] 10 | 11 | operations = [ 12 | migrations.DeleteModel( 13 | name="OrgPrefectBlock", 14 | ), 15 | ] 16 | 
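Taken together, the OrgSchemaChange migrations above (0073 creates the table, 0074 renames schema_change to change_type, 0075 drops name) leave the model in roughly the shape sketched below. This is only a reconstruction from those migration files, added here for readability; the canonical class is defined elsewhere in the ddpui app and is not reproduced in this dump.

from django.db import models

from ddpui.models.org import Org


class OrgSchemaChange(models.Model):
    """Sketch of the state implied by migrations 0073-0075, not the repo's actual source file"""

    org = models.ForeignKey(Org, on_delete=models.CASCADE)
    # 36 characters (UUID-length); unique when present, nullable
    connection_id = models.CharField(max_length=36, null=True, unique=True)
    # renamed from schema_change in 0074
    change_type = models.CharField(max_length=36, null=True)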
-------------------------------------------------------------------------------- /ddpui/migrations/0078_remove_orgwarehouse_airbyte_norm_op_id.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-06-18 05:26 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0077_delete_orgprefectblock"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RemoveField( 13 | model_name="orgwarehouse", 14 | name="airbyte_norm_op_id", 15 | ), 16 | ] 17 | -------------------------------------------------------------------------------- /ddpui/migrations/0079_assistantprompt.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-06-24 09:39 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0078_remove_orgwarehouse_airbyte_norm_op_id"), 9 | ] 10 | 11 | operations = [ 12 | migrations.CreateModel( 13 | name="AssistantPrompt", 14 | fields=[ 15 | ( 16 | "id", 17 | models.BigAutoField( 18 | auto_created=True, 19 | primary_key=True, 20 | serialize=False, 21 | verbose_name="ID", 22 | ), 23 | ), 24 | ("prompt", models.TextField()), 25 | ( 26 | "type", 27 | models.CharField( 28 | choices=[ 29 | ("log_summarization", "LOG_SUMMARIZATION"), 30 | ("long_text_summarization", "LONG_TEXT_SUMMARIZATION"), 31 | ], 32 | max_length=100, 33 | ), 34 | ), 35 | ], 36 | ), 37 | ] 38 | -------------------------------------------------------------------------------- /ddpui/migrations/0081_llmsession_airbyte_job_id.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-06-28 00:16 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0080_llmsession"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="llmsession", 14 | name="airbyte_job_id", 15 | field=models.IntegerField(null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0082_llmsession_task_id_alter_llmsession_flow_run_id_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-07-01 09:33 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0081_llmsession_airbyte_job_id"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="llmsession", 14 | name="task_id", 15 | field=models.CharField(max_length=200, null=True), 16 | ), 17 | migrations.AlterField( 18 | model_name="llmsession", 19 | name="flow_run_id", 20 | field=models.CharField(max_length=200, null=True), 21 | ), 22 | migrations.AlterField( 23 | model_name="llmsession", 24 | name="response", 25 | field=models.JSONField(null=True), 26 | ), 27 | ] 28 | -------------------------------------------------------------------------------- /ddpui/migrations/0083_merge_20240703_1732.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-07-03 17:32 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0082_llmsession_task_id_alter_llmsession_flow_run_id_and_more"), 9 | ("ddpui", 
"0082_notification_userpreferences_notificationrecipient"), 10 | ] 11 | 12 | operations = [] 13 | -------------------------------------------------------------------------------- /ddpui/migrations/0084_llmsession_session_status.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-07-10 15:33 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0083_merge_20240703_1732"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="llmsession", 14 | name="session_status", 15 | field=models.CharField(max_length=200, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0085_llmsession_session_name.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-07-14 06:09 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0084_llmsession_session_status"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="llmsession", 14 | name="session_name", 15 | field=models.CharField(max_length=500, null=True, unique=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0085_syncstats.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-07-14 09:04 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0084_llmsession_session_status"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="SyncStats", 15 | fields=[ 16 | ( 17 | "id", 18 | models.BigAutoField( 19 | auto_created=True, 20 | primary_key=True, 21 | serialize=False, 22 | verbose_name="ID", 23 | ), 24 | ), 25 | ("connection_id", models.CharField(max_length=36)), 26 | ("attempt", models.IntegerField(default=0)), 27 | ("status", models.TextField()), 28 | ( 29 | "sync_type", 30 | models.CharField( 31 | choices=[("manual", "manual"), ("orchestrate", "orchestrate")] 32 | ), 33 | ), 34 | ("sync_time", models.DateTimeField()), 35 | ("sync_duration_s", models.IntegerField(default=0)), 36 | ("sync_records", models.IntegerField(default=0)), 37 | ("sync_data_volume_b", models.IntegerField(default=0)), 38 | ( 39 | "org", 40 | models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="ddpui.org"), 41 | ), 42 | ], 43 | ), 44 | ] 45 | -------------------------------------------------------------------------------- /ddpui/migrations/0086_alter_syncstats_sync_data_volume_b_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-07-14 09:11 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0085_syncstats"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="syncstats", 14 | name="sync_data_volume_b", 15 | field=models.BigIntegerField(default=0), 16 | ), 17 | migrations.AlterField( 18 | model_name="syncstats", 19 | name="sync_duration_s", 20 | field=models.BigIntegerField(default=0), 21 | ), 22 | migrations.AlterField( 23 | model_name="syncstats", 24 | name="sync_records", 25 | 
field=models.BigIntegerField(default=0), 26 | ), 27 | ] 28 | -------------------------------------------------------------------------------- /ddpui/migrations/0088_alter_orgdbtoperation_dbtmodel.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-07-29 08:03 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0087_alter_datafloworgtask_dataflow_and_more"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AlterField( 14 | model_name="orgdbtoperation", 15 | name="dbtmodel", 16 | field=models.ForeignKey( 17 | on_delete=django.db.models.deletion.CASCADE, 18 | related_name="operations", 19 | to="ddpui.orgdbtmodel", 20 | ), 21 | ), 22 | ] 23 | -------------------------------------------------------------------------------- /ddpui/migrations/0089_alter_prefectflowrun_deployment_id.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-07-29 14:08 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0088_alter_orgdbtoperation_dbtmodel"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="prefectflowrun", 14 | name="deployment_id", 15 | field=models.CharField(max_length=36, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0090_syncstats_job_id.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-07-31 17:00 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0089_alter_prefectflowrun_deployment_id"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="syncstats", 14 | name="job_id", 15 | field=models.IntegerField(null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0091_prefectflowrun_retries.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-08-02 11:53 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0090_syncstats_job_id"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="prefectflowrun", 14 | name="retries", 15 | field=models.SmallIntegerField(default=0), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0093_remove_org_is_demo_org_type.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-08-30 05:36 2 | 3 | import ddpui.models.org 4 | from django.db import migrations, models 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0092_assistantprompt_created_at_and_more"), 10 | ] 11 | 12 | operations = [ 13 | migrations.RemoveField( 14 | model_name="org", 15 | name="is_demo", 16 | ), 17 | migrations.AddField( 18 | model_name="org", 19 | name="type", 20 | field=models.CharField( 21 | choices=[("demo", "DEMO"), ("trial", "TRIAL"), ("subscription", "SUBSCRIPTION")], 22 | default=ddpui.models.org.OrgType["SUBSCRIPTION"], 23 | max_length=50, 24 | ), 25 | ), 26 
| ] 27 | -------------------------------------------------------------------------------- /ddpui/migrations/0094_merge_20240903_0610.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-09-03 06:10 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0085_llmsession_session_name"), 9 | ("ddpui", "0093_remove_org_is_demo_org_type"), 10 | ] 11 | 12 | operations = [] 13 | -------------------------------------------------------------------------------- /ddpui/migrations/0095_alter_llmsession_session_name.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-09-03 12:22 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0094_merge_20240903_0610"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="llmsession", 14 | name="session_name", 15 | field=models.CharField(max_length=500, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0096_llmsession_request_meta_llmsession_session_type.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-09-03 12:49 2 | 3 | import ddpui.models.llm 4 | from django.db import migrations, models 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0095_alter_llmsession_session_name"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AddField( 14 | model_name="llmsession", 15 | name="request_meta", 16 | field=models.JSONField(null=True), 17 | ), 18 | migrations.AddField( 19 | model_name="llmsession", 20 | name="session_type", 21 | field=models.CharField( 22 | choices=[ 23 | ("log_summarization", "LOG_SUMMARIZATION"), 24 | ("long_text_summarization", "LONG_TEXT_SUMMARIZATION"), 25 | ], 26 | default=ddpui.models.llm.LlmAssistantType["LOG_SUMMARIZATION"], 27 | max_length=100, 28 | ), 29 | ), 30 | ] 31 | -------------------------------------------------------------------------------- /ddpui/migrations/0097_userprompt.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-09-04 05:50 2 | 3 | import ddpui.models.llm 4 | from django.db import migrations, models 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0096_llmsession_request_meta_llmsession_session_type"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="UserPrompt", 15 | fields=[ 16 | ( 17 | "id", 18 | models.BigAutoField( 19 | auto_created=True, 20 | primary_key=True, 21 | serialize=False, 22 | verbose_name="ID", 23 | ), 24 | ), 25 | ("prompt", models.TextField()), 26 | ( 27 | "type", 28 | models.CharField( 29 | choices=[ 30 | ("log_summarization", "LOG_SUMMARIZATION"), 31 | ("long_text_summarization", "LONG_TEXT_SUMMARIZATION"), 32 | ], 33 | default=ddpui.models.llm.LlmAssistantType["LONG_TEXT_SUMMARIZATION"], 34 | max_length=100, 35 | ), 36 | ), 37 | ], 38 | ), 39 | ] 40 | -------------------------------------------------------------------------------- /ddpui/migrations/0098_remove_assistantprompt_created_at_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-09-04 05:51 2 | 3 | from django.db import migrations 4 
| 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0097_userprompt"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RemoveField( 13 | model_name="assistantprompt", 14 | name="created_at", 15 | ), 16 | migrations.RemoveField( 17 | model_name="assistantprompt", 18 | name="updated_at", 19 | ), 20 | ] 21 | -------------------------------------------------------------------------------- /ddpui/migrations/0099_userprompt_label.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-09-04 05:53 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0098_remove_assistantprompt_created_at_and_more"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="userprompt", 14 | name="label", 15 | field=models.CharField(max_length=200, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0100_llmsession_feedback.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-09-13 07:31 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0099_userprompt_label"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="llmsession", 14 | name="feedback", 15 | field=models.TextField(null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0101_llmsession_updated_by.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-09-24 07:35 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0100_llmsession_feedback"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AddField( 14 | model_name="llmsession", 15 | name="updated_by", 16 | field=models.ForeignKey( 17 | null=True, 18 | on_delete=django.db.models.deletion.SET_NULL, 19 | related_name="updated_by", 20 | to="ddpui.orguser", 21 | ), 22 | ), 23 | ] 24 | -------------------------------------------------------------------------------- /ddpui/migrations/0102_orguser_llm_optin.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-09-25 08:53 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0101_llmsession_updated_by"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orguser", 14 | name="llm_optin", 15 | field=models.BooleanField(default=False), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0104_orgdataflowv1_clear_conn_dataflow.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-11-19 07:29 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0103_connectionjob_connectionmeta_and_more"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AddField( 14 | model_name="orgdataflowv1", 15 | name="clear_conn_dataflow", 16 | field=models.ForeignKey( 17 
| null=True, 18 | on_delete=django.db.models.deletion.SET_NULL, 19 | related_name="clear_connection_dataflow", 20 | to="ddpui.orgdataflowv1", 21 | ), 22 | ), 23 | ] 24 | -------------------------------------------------------------------------------- /ddpui/migrations/0104_remove_userpreferences_discord_webhook_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-11-06 15:34 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0103_connectionjob_connectionmeta_and_more"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RemoveField( 13 | model_name="userpreferences", 14 | name="discord_webhook", 15 | ), 16 | migrations.RemoveField( 17 | model_name="userpreferences", 18 | name="enable_discord_notifications", 19 | ), 20 | ] 21 | -------------------------------------------------------------------------------- /ddpui/migrations/0106_alter_orgpreferences_llm_optin_approved_by.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-11-12 08:27 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0105_orgpreferences"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AlterField( 14 | model_name="orgpreferences", 15 | name="llm_optin_approved_by", 16 | field=models.ForeignKey( 17 | blank=True, 18 | null=True, 19 | on_delete=django.db.models.deletion.CASCADE, 20 | related_name="approvedby", 21 | to="ddpui.orguser", 22 | ), 23 | ), 24 | ] 25 | -------------------------------------------------------------------------------- /ddpui/migrations/0107_orgsupersets.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-11-12 17:21 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | import django.utils.timezone 6 | 7 | 8 | class Migration(migrations.Migration): 9 | dependencies = [ 10 | ("ddpui", "0106_alter_orgpreferences_llm_optin_approved_by"), 11 | ] 12 | 13 | operations = [ 14 | migrations.CreateModel( 15 | name="OrgSupersets", 16 | fields=[ 17 | ( 18 | "id", 19 | models.BigAutoField( 20 | auto_created=True, primary_key=True, serialize=False, verbose_name="ID" 21 | ), 22 | ), 23 | ("container_name", models.CharField(blank=True, max_length=255, null=True)), 24 | ("superset_version", models.CharField(blank=True, max_length=255, null=True)), 25 | ("created_at", models.DateTimeField(default=django.utils.timezone.now)), 26 | ("updated_at", models.DateTimeField(default=django.utils.timezone.now)), 27 | ( 28 | "org", 29 | models.ForeignKey( 30 | on_delete=django.db.models.deletion.CASCADE, 31 | related_name="orgInfo", 32 | to="ddpui.org", 33 | ), 34 | ), 35 | ], 36 | ), 37 | ] 38 | -------------------------------------------------------------------------------- /ddpui/migrations/0108_userpreferences_llm_optin.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-11-12 19:51 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0107_orgsupersets"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="userpreferences", 14 | name="llm_optin", 15 | field=models.BooleanField(default=False), 16 | ), 17 | ] 18 | 
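Migration 0072_orgdataflowv1_reset_conn_dataflow (earlier in this directory) and 0104_orgdataflowv1_clear_conn_dataflow (above) each add a nullable self-referencing foreign key to OrgDataFlowV1. The sketch below shows only what those two AddField operations state; the real OrgDataFlowV1 model, with all its other fields, is defined elsewhere in the ddpui app and is not shown in this section.

from django.db import models


class OrgDataFlowV1(models.Model):
    """Sketch only: the two self-referencing FKs added by migrations 0072 and 0104"""

    # ... other OrgDataFlowV1 fields omitted; they are not part of these two migrations ...

    # added in 0072: no related_name, so the reverse accessor keeps its default name
    reset_conn_dataflow = models.ForeignKey("self", null=True, on_delete=models.SET_NULL)

    # added in 0104: explicit related_name, as in the migration
    clear_conn_dataflow = models.ForeignKey(
        "self", null=True, on_delete=models.SET_NULL, related_name="clear_connection_dataflow"
    )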
-------------------------------------------------------------------------------- /ddpui/migrations/0109_alter_orgpreferences_org_alter_orgsupersets_org.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-11-12 20:03 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0108_userpreferences_llm_optin"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AlterField( 14 | model_name="orgpreferences", 15 | name="org", 16 | field=models.OneToOneField( 17 | on_delete=django.db.models.deletion.CASCADE, 18 | related_name="preferences", 19 | to="ddpui.org", 20 | ), 21 | ), 22 | migrations.AlterField( 23 | model_name="orgsupersets", 24 | name="org", 25 | field=models.OneToOneField( 26 | on_delete=django.db.models.deletion.CASCADE, related_name="orgInfo", to="ddpui.org" 27 | ), 28 | ), 29 | ] 30 | -------------------------------------------------------------------------------- /ddpui/migrations/0110_userpreferences_discord_webhook_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-11-14 08:49 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0109_alter_orgpreferences_org_alter_orgsupersets_org"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="userpreferences", 14 | name="discord_webhook", 15 | field=models.URLField(blank=True, null=True), 16 | ), 17 | migrations.AddField( 18 | model_name="userpreferences", 19 | name="enable_discord_notifications", 20 | field=models.BooleanField(default=False), 21 | ), 22 | ] 23 | -------------------------------------------------------------------------------- /ddpui/migrations/0112_rename_llm_optin_userpreferences_disclaimer_shown.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-11-17 07:42 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0111_remove_orgpreferences_trial_end_date_and_more"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RenameField( 13 | model_name="userpreferences", 14 | old_name="llm_optin", 15 | new_name="disclaimer_shown", 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0113_orgplans_upgrade_requested_and_more.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-11-20 07:39 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0112_rename_llm_optin_userpreferences_disclaimer_shown"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AddField( 14 | model_name="orgplans", 15 | name="upgrade_requested", 16 | field=models.BooleanField(default=False), 17 | ), 18 | migrations.AddField( 19 | model_name="orgpreferences", 20 | name="enable_llm_request", 21 | field=models.BooleanField(default=False), 22 | ), 23 | migrations.AddField( 24 | model_name="orgpreferences", 25 | name="enable_llm_requested_by", 26 | field=models.ForeignKey( 27 | blank=True, 28 | null=True, 29 | on_delete=django.db.models.deletion.CASCADE, 30 | related_name="llm_request", 31 | to="ddpui.orguser", 32 | ), 
33 | ), 34 | ] 35 | -------------------------------------------------------------------------------- /ddpui/migrations/0114_merge_20241124_1514.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-11-24 15:14 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0104_orgdataflowv1_clear_conn_dataflow"), 9 | ("ddpui", "0113_orgplans_upgrade_requested_and_more"), 10 | ] 11 | 12 | operations = [] 13 | -------------------------------------------------------------------------------- /ddpui/migrations/0115_remove_org_type.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2024-11-27 02:09 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0114_merge_20241124_1514"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RemoveField( 13 | model_name="org", 14 | name="type", 15 | ), 16 | ] 17 | -------------------------------------------------------------------------------- /ddpui/migrations/0116_notification_email_subject.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2025-02-01 01:33 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0115_remove_org_type"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="notification", 14 | name="email_subject", 15 | field=models.TextField(default="Message from Dalgo"), 16 | preserve_default=False, 17 | ), 18 | ] 19 | -------------------------------------------------------------------------------- /ddpui/migrations/0117_orgwren.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2025-03-04 03:36 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | import django.utils.timezone 6 | 7 | 8 | class Migration(migrations.Migration): 9 | dependencies = [ 10 | ("ddpui", "0116_notification_email_subject"), 11 | ] 12 | 13 | operations = [ 14 | migrations.CreateModel( 15 | name="OrgWren", 16 | fields=[ 17 | ( 18 | "id", 19 | models.BigAutoField( 20 | auto_created=True, primary_key=True, serialize=False, verbose_name="ID" 21 | ), 22 | ), 23 | ("wren_url", models.CharField(max_length=255)), 24 | ("created_at", models.DateTimeField(default=django.utils.timezone.now)), 25 | ("updated_at", models.DateTimeField(default=django.utils.timezone.now)), 26 | ( 27 | "org", 28 | models.OneToOneField( 29 | on_delete=django.db.models.deletion.CASCADE, 30 | related_name="org_wren_info", 31 | to="ddpui.org", 32 | ), 33 | ), 34 | ], 35 | ), 36 | ] 37 | -------------------------------------------------------------------------------- /ddpui/migrations/0118_orgdataflowv1_meta.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2025-04-01 12:37 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0117_orgwren"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="orgdataflowv1", 14 | name="meta", 15 | field=models.JSONField(null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- 
/ddpui/migrations/0119_prefectflowrun_orguser.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2025-05-06 19:59 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0118_orgdataflowv1_meta"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AddField( 14 | model_name="prefectflowrun", 15 | name="orguser", 16 | field=models.ForeignKey( 17 | blank=True, 18 | null=True, 19 | on_delete=django.db.models.deletion.SET_NULL, 20 | to="ddpui.orguser", 21 | ), 22 | ), 23 | ] 24 | -------------------------------------------------------------------------------- /ddpui/migrations/0119_remove_connectionmeta_schedule_large_jobs.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2025-05-06 22:14 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0118_orgdataflowv1_meta"), 9 | ] 10 | 11 | operations = [ 12 | migrations.RemoveField( 13 | model_name="connectionmeta", 14 | name="schedule_large_jobs", 15 | ), 16 | ] 17 | -------------------------------------------------------------------------------- /ddpui/migrations/0120_alter_prefectflowrun_start_time.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2025-05-06 21:22 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0119_prefectflowrun_orguser"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AlterField( 13 | model_name="prefectflowrun", 14 | name="start_time", 15 | field=models.DateTimeField(null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/migrations/0121_merge_20250513_1951.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2025-05-13 19:51 2 | 3 | from django.db import migrations 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0119_remove_connectionmeta_schedule_large_jobs"), 9 | ("ddpui", "0120_alter_prefectflowrun_start_time"), 10 | ] 11 | 12 | operations = [] 13 | -------------------------------------------------------------------------------- /ddpui/migrations/0122_set_uuid_in_orgtask.py: -------------------------------------------------------------------------------- 1 | # ddpui/migrations/0122_set_uuid_in_orgtask 2 | # courtesy of chatgpt 3 | 4 | import uuid 5 | from django.db import migrations 6 | 7 | 8 | def assign_uuids(apps, schema_editor): 9 | OrgTask = apps.get_model("ddpui", "OrgTask") 10 | for row in OrgTask.objects.filter(uuid__isnull=True): 11 | row.uuid = uuid.uuid4() 12 | row.save(update_fields=["uuid"]) 13 | 14 | 15 | class Migration(migrations.Migration): 16 | dependencies = [ 17 | ("ddpui", "0121_merge_20250513_1951"), 18 | ] 19 | 20 | operations = [ 21 | migrations.RunPython(assign_uuids), 22 | ] 23 | -------------------------------------------------------------------------------- /ddpui/migrations/0123_alter_orgtask_uuid.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.2 on 2025-06-03 11:13 2 | 3 | from django.db import migrations, models 4 | import uuid 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies 
= [ 9 | ("ddpui", "0122_set_uuid_in_orgtask"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AlterField( 14 | model_name="orgtask", 15 | name="uuid", 16 | field=models.UUIDField(default=uuid.uuid4, editable=False, unique=True), 17 | ), 18 | ] 19 | -------------------------------------------------------------------------------- /ddpui/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/migrations/__init__.py -------------------------------------------------------------------------------- /ddpui/models/__init__.py: -------------------------------------------------------------------------------- 1 | from ddpui.models.tasks import Task 2 | from ddpui.models.llm import AssistantPrompt 3 | from ddpui.models.syncstats import SyncStats 4 | from ddpui.models.org_preferences import OrgPreferences 5 | from ddpui.models.org_supersets import OrgSupersets 6 | from ddpui.models.org_wren import OrgWren 7 | -------------------------------------------------------------------------------- /ddpui/models/admin_user.py: -------------------------------------------------------------------------------- 1 | from ninja import Schema 2 | from django.db import models 3 | from django.contrib.auth.models import User 4 | 5 | 6 | class AdminUser(models.Model): 7 | """Docstring""" 8 | 9 | user = models.ForeignKey(User, on_delete=models.CASCADE) 10 | 11 | 12 | class AdminUserResponse(Schema): 13 | """Docstring""" 14 | 15 | email: str 16 | active: str 17 | 18 | @staticmethod 19 | def fromadminuser(adminuser): 20 | """helper""" 21 | return AdminUserResponse( 22 | email=adminuser.user.email, 23 | active=adminuser.user.is_active, 24 | ) 25 | -------------------------------------------------------------------------------- /ddpui/models/canvaslock.py: -------------------------------------------------------------------------------- 1 | """ Lock class for UI4T Workflow Canvas """ 2 | 3 | from django.db import models 4 | from django.utils import timezone 5 | from ddpui.models.org_user import OrgUser 6 | 7 | 8 | class CanvasLock(models.Model): 9 | """Lock object, one per org""" 10 | 11 | locked_by = models.ForeignKey(OrgUser, on_delete=models.CASCADE) 12 | locked_at = models.DateTimeField(auto_now_add=True) 13 | lock_id = models.UUIDField(editable=False, unique=True, null=True) 14 | created_at = models.DateTimeField(auto_created=True, default=timezone.now) 15 | updated_at = models.DateTimeField(auto_now=True) 16 | 17 | def __repr__(self) -> str: 18 | return f"CanvasLock[{self.locked_by.org.slug} | {self.locked_by.user.email}]" 19 | -------------------------------------------------------------------------------- /ddpui/models/notifications.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | from ddpui.models.org_user import OrgUser 3 | 4 | 5 | class Notification(models.Model): 6 | """Model to store notifications for users""" 7 | 8 | author = models.EmailField() 9 | message = models.TextField() 10 | email_subject = models.TextField() 11 | timestamp = models.DateTimeField(auto_now_add=True) 12 | urgent = models.BooleanField(default=False) 13 | scheduled_time = models.DateTimeField(null=True, blank=True) 14 | sent_time = models.DateTimeField(null=True, blank=True) 15 | 16 | 17 | class NotificationRecipient(models.Model): 18 | """Model to store notification recipients and their read status""" 19 | 20 | notification 
= models.ForeignKey( 21 | Notification, on_delete=models.CASCADE, related_name="notifications_received" 22 | ) 23 | recipient = models.ForeignKey(OrgUser, on_delete=models.CASCADE, related_name="recipients") 24 | read_status = models.BooleanField(default=False) 25 | task_id = models.TextField() 26 | -------------------------------------------------------------------------------- /ddpui/models/org_supersets.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | from django.utils import timezone 3 | from ddpui.models.org import Org 4 | 5 | 6 | class OrgSupersets(models.Model): 7 | """Model to store org superset details for settings panel""" 8 | 9 | org = models.OneToOneField(Org, on_delete=models.CASCADE, related_name="orgInfo") 10 | container_name = models.CharField(max_length=255, blank=True, null=True) 11 | superset_version = models.CharField(max_length=255, blank=True, null=True) 12 | created_at = models.DateTimeField(default=timezone.now) 13 | updated_at = models.DateTimeField(default=timezone.now) 14 | -------------------------------------------------------------------------------- /ddpui/models/org_wren.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | from django.utils import timezone 3 | from ddpui.models.org import Org 4 | 5 | 6 | class OrgWren(models.Model): 7 | """Model to store org's wrenai details""" 8 | 9 | org = models.OneToOneField(Org, on_delete=models.CASCADE, related_name="org_wren_info") 10 | wren_url = models.CharField(max_length=255, blank=False, null=False) 11 | created_at = models.DateTimeField(default=timezone.now) 12 | updated_at = models.DateTimeField(default=timezone.now) 13 | -------------------------------------------------------------------------------- /ddpui/models/orgtnc.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | 3 | from ddpui.models.org import Org 4 | from ddpui.models.org_user import OrgUser 5 | 6 | 7 | class OrgTnC(models.Model): 8 | """Records an org's acceptance of the terms and conditions""" 9 | 10 | org = models.ForeignKey(Org, on_delete=models.CASCADE, related_name="orgtncs") 11 | tnc_accepted_on = models.DateField(null=False) 12 | tnc_accepted_by = models.ForeignKey(OrgUser, on_delete=models.CASCADE, null=False) 13 | 14 | def __str__(self) -> str: 15 | return f"OrgTnC[{self.org.slug}|{self.tnc_accepted_on}|{self.tnc_accepted_by.user.email}]" 16 | -------------------------------------------------------------------------------- /ddpui/models/role_based_access.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | import uuid 3 | 4 | 5 | class Role(models.Model): 6 | """Roles for an orguser""" 7 | 8 | uuid = models.UUIDField(editable=False, unique=True, default=uuid.uuid4) 9 | slug = models.CharField(max_length=255, unique=True) 10 | name = models.CharField(max_length=255) 11 | level = models.SmallIntegerField(default=1) # keep the lowest role as default 12 | 13 | def __str__(self): 14 | return f"{self.name} | {self.slug} | {self.level}" 15 | 16 | 17 | class Permission(models.Model): 18 | """List of permissions to be assigned to roles""" 19 | 20 | uuid = models.UUIDField(editable=False, unique=True, default=uuid.uuid4) 21 | slug = models.CharField(max_length=255, unique=True) 22 | name = models.CharField(max_length=255) 23 | 24 | def __str__(self): 25 | return f"{self.name} | {self.slug}" 26 | 27 | 28 | class
RolePermission(models.Model): 29 | """Mapping of roles to permissions""" 30 | 31 | role = models.ForeignKey(Role, on_delete=models.CASCADE, related_name="rolepermissions") 32 | permission = models.ForeignKey( 33 | Permission, 34 | on_delete=models.CASCADE, 35 | ) 36 | 37 | def __str__(self): 38 | return f"{self.role.slug} | {self.permission.slug}" 39 | -------------------------------------------------------------------------------- /ddpui/models/syncstats.py: -------------------------------------------------------------------------------- 1 | """ track sync times, number of records, volume of data by client and connection """ 2 | 3 | from django.db import models 4 | from ddpui.models.org import Org 5 | 6 | 7 | class SyncStats(models.Model): 8 | """single table to track connection sync stats""" 9 | 10 | org = models.ForeignKey(Org, on_delete=models.CASCADE) 11 | connection_id = models.CharField(max_length=36) 12 | job_id = models.IntegerField(null=True) 13 | attempt = models.IntegerField(default=0) 14 | status = models.TextField() 15 | sync_type = models.CharField(choices=[("manual", "manual"), ("orchestrate", "orchestrate")]) 16 | sync_time = models.DateTimeField() 17 | sync_duration_s = models.BigIntegerField(default=0) 18 | sync_records = models.BigIntegerField(default=0) 19 | sync_data_volume_b = models.BigIntegerField(default=0) 20 | 21 | def __str__(self) -> str: 22 | return f"SyncStats[{self.org.name}|{self.connection_id}]" 23 | 24 | def to_json(self) -> dict: 25 | return { 26 | "org": self.org.slug, 27 | "connection_id": self.connection_id, 28 | "attempt": self.attempt, 29 | "status": self.status, 30 | "sync_time": self.sync_time, 31 | "sync_duration_s": self.sync_duration_s, 32 | "sync_records": self.sync_records, 33 | "sync_data_volume_b": self.sync_data_volume_b, 34 | } 35 | -------------------------------------------------------------------------------- /ddpui/models/userpreferences.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | from django.utils import timezone 3 | from ddpui.models.org_user import OrgUser 4 | 5 | 6 | class UserPreferences(models.Model): 7 | """Model to store user preferences for notifications""" 8 | 9 | orguser = models.OneToOneField(OrgUser, on_delete=models.CASCADE, related_name="preferences") 10 | enable_discord_notifications = models.BooleanField(default=False) # deprecated 11 | discord_webhook = models.URLField(blank=True, null=True) # deprecated 12 | enable_email_notifications = models.BooleanField(default=False) 13 | disclaimer_shown = models.BooleanField(default=False) 14 | created_at = models.DateTimeField(default=timezone.now) 15 | updated_at = models.DateTimeField(default=timezone.now) 16 | 17 | def to_json(self) -> dict: 18 | """Return a dict representation of the model""" 19 | return { 20 | "enable_email_notifications": self.enable_email_notifications, 21 | "disclaimer_shown": self.disclaimer_shown, 22 | } 23 | -------------------------------------------------------------------------------- /ddpui/oldmigrations/0002_invitation.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-03-14 17:58 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0001_initial"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="Invitation", 15 | fields=[ 16 | ( 17 | "id", 18 | 
models.BigAutoField( 19 | auto_created=True, 20 | primary_key=True, 21 | serialize=False, 22 | verbose_name="ID", 23 | ), 24 | ), 25 | ("invited_email", models.CharField(max_length=50)), 26 | ("invited_on", models.DateTimeField()), 27 | ("invite_code", models.CharField(max_length=36)), 28 | ( 29 | "invited_by", 30 | models.ForeignKey( 31 | on_delete=django.db.models.deletion.CASCADE, 32 | to="ddpui.clientuser", 33 | ), 34 | ), 35 | ], 36 | ), 37 | ] 38 | -------------------------------------------------------------------------------- /ddpui/oldmigrations/0004_clientorg_slug.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-03-28 12:12 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | dependencies = [ 8 | ("ddpui", "0003_clientdbt_remove_org_dbt_repo_url_and_more"), 9 | ] 10 | 11 | operations = [ 12 | migrations.AddField( 13 | model_name="org", 14 | name="slug", 15 | field=models.CharField(max_length=20, null=True), 16 | ), 17 | ] 18 | -------------------------------------------------------------------------------- /ddpui/oldmigrations/0005_clientprefectblock.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-03-31 00:16 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0004_org_slug"), 10 | ] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="OrgPrefectBlock", 15 | fields=[ 16 | ( 17 | "id", 18 | models.BigAutoField( 19 | auto_created=True, 20 | primary_key=True, 21 | serialize=False, 22 | verbose_name="ID", 23 | ), 24 | ), 25 | ("blocktype", models.CharField(max_length=25)), 26 | ("blockid", models.CharField(max_length=36, unique=True)), 27 | ("blockname", models.CharField(max_length=100, unique=True)), 28 | ( 29 | "org", 30 | models.ForeignKey( 31 | on_delete=django.db.models.deletion.CASCADE, 32 | to="ddpui.org", 33 | ), 34 | ), 35 | ], 36 | ), 37 | ] 38 | -------------------------------------------------------------------------------- /ddpui/oldmigrations/0006_alter_clientorg_dbt.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 4.1.7 on 2023-03-31 10:56 2 | 3 | from django.db import migrations, models 4 | import django.db.models.deletion 5 | 6 | 7 | class Migration(migrations.Migration): 8 | dependencies = [ 9 | ("ddpui", "0005_clientprefectblock"), 10 | ] 11 | 12 | operations = [ 13 | migrations.AlterField( 14 | model_name="org", 15 | name="dbt", 16 | field=models.ForeignKey( 17 | null=True, 18 | on_delete=django.db.models.deletion.CASCADE, 19 | to="ddpui.orgdbt", 20 | ), 21 | ), 22 | ] 23 | -------------------------------------------------------------------------------- /ddpui/schemas/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/schemas/__init__.py -------------------------------------------------------------------------------- /ddpui/schemas/notifications_api_schemas.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional 2 | from datetime import datetime 3 | from pydantic import BaseModel 4 | from enum import Enum 5 | from ninja import Schema 6 | 7 | 8 | class 
SentToEnum(str, Enum): 9 | """ 10 | Schema for sent_to field in create notification 11 | api payload. 12 | """ 13 | 14 | ALL_USERS = "all_users" 15 | ALL_ORG_USERS = "all_org_users" 16 | SINGLE_USER = "single_user" 17 | 18 | 19 | class CreateNotificationPayloadSchema(BaseModel): 20 | """Schema for creating a new notification api.""" 21 | 22 | author: str 23 | message: str 24 | sent_to: SentToEnum 25 | urgent: Optional[bool] = False 26 | scheduled_time: Optional[datetime] = None 27 | user_email: Optional[str] = None 28 | manager_or_above: Optional[bool] = False 29 | org_slug: Optional[str] = None 30 | 31 | class Config: 32 | use_enum_values = True 33 | 34 | 35 | class UpdateReadStatusSchema(Schema): 36 | """Schema for updating the read status of a notification.""" 37 | 38 | notification_id: int 39 | read_status: bool 40 | 41 | 42 | class UpdateReadStatusSchemav1(Schema): 43 | """Schema for updating the read status of a notification.""" 44 | 45 | notification_ids: list[int] 46 | read_status: bool 47 | 48 | 49 | class NotificationDataSchema(Schema): 50 | """Schema use to call the notification service function for creating a notification""" 51 | 52 | author: str 53 | message: str 54 | email_subject: str 55 | urgent: Optional[bool] = False 56 | scheduled_time: Optional[datetime] = None 57 | recipients: List[int] # list of orguser ids 58 | -------------------------------------------------------------------------------- /ddpui/schemas/org_preferences_schema.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | from datetime import datetime 3 | from ninja import Schema 4 | 5 | 6 | class CreateOrgPreferencesSchema(Schema): 7 | """Schema for creating organization preferences.""" 8 | 9 | org: Optional[int] 10 | trial_start_date: Optional[datetime] 11 | trial_end_date: Optional[datetime] 12 | llm_optin: Optional[bool] = False 13 | llm_optin_approved_by: Optional[int] 14 | llm_optin_date: Optional[datetime] 15 | enable_discord_notifications: Optional[bool] = False 16 | discord_webhook: Optional[str] 17 | 18 | 19 | class UpdateOrgPreferencesSchema(Schema): 20 | """Schema for updating organization preferences.""" 21 | 22 | trial_start_date: Optional[datetime] = None 23 | trial_end_date: Optional[datetime] = None 24 | llm_optin: Optional[bool] = None 25 | llm_optin_approved_by: Optional[int] = None 26 | llm_optin_date: Optional[datetime] = None 27 | enable_discord_notifications: Optional[bool] = None 28 | discord_webhook: Optional[str] = None 29 | 30 | 31 | class UpdateLLMOptinSchema(Schema): 32 | """Schema for updating organization LLM approval preference.""" 33 | 34 | llm_optin: bool 35 | 36 | 37 | class UpdateDiscordNotificationsSchema(Schema): 38 | """Schema for updating organization discord notification settings.""" 39 | 40 | enable_discord_notifications: bool 41 | discord_webhook: Optional[str] 42 | 43 | 44 | class CreateOrgSupersetDetailsSchema(Schema): 45 | """Schema for creating organization superset details.""" 46 | 47 | superset_version: Optional[str] 48 | container_name: Optional[str] 49 | -------------------------------------------------------------------------------- /ddpui/schemas/org_task_schema.py: -------------------------------------------------------------------------------- 1 | from ninja import Schema 2 | 3 | 4 | class TaskParameters(Schema): 5 | """Schema to parameterize Task Runs""" 6 | 7 | flags: list | None 8 | options: dict | None 9 | 10 | 11 | class CreateOrgTaskPayload(Schema): 12 | """ 13 | schema to define 
the payload required to create a custom org task 14 | """ 15 | 16 | task_slug: str 17 | flags: list | None 18 | options: dict | None 19 | 20 | 21 | class DbtProjectSchema(Schema): 22 | default_schema: str 23 | -------------------------------------------------------------------------------- /ddpui/schemas/userpreferences_schema.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | from ninja import Schema 3 | 4 | 5 | class CreateUserPreferencesSchema(Schema): 6 | """Schema for creating user preferences for the user.""" 7 | 8 | enable_email_notifications: bool 9 | disclaimer_shown: Optional[bool] = None 10 | 11 | 12 | class UpdateUserPreferencesSchema(Schema): 13 | """Schema for updating user preferences for the user.""" 14 | 15 | enable_email_notifications: Optional[bool] = None 16 | disclaimer_shown: Optional[bool] = None 17 | -------------------------------------------------------------------------------- /ddpui/schemas/warehouse_api_schemas.py: -------------------------------------------------------------------------------- 1 | from ninja import Field, Schema 2 | 3 | 4 | class RequestorColumnSchema(Schema): 5 | """ 6 | schema to query on insights for a column 7 | """ 8 | 9 | db_schema: str 10 | db_table: str 11 | column_name: str 12 | filter: dict = None 13 | refresh: bool = False 14 | 15 | 16 | class AskWarehouseRequest(Schema): 17 | """ 18 | Payload to ask warehouse a question to be responded via llm 19 | """ 20 | 21 | sql: str 22 | user_prompt: str 23 | 24 | 25 | class SaveLlmSessionRequest(Schema): 26 | """ 27 | Payload to save the llm analysis session for future reference 28 | """ 29 | 30 | session_name: str 31 | overwrite: bool = False 32 | old_session_id: str = None # if overwrite is True, then this is required 33 | 34 | 35 | class LlmSessionFeedbackRequest(Schema): 36 | """ 37 | Payload to give feedback for llm session 38 | """ 39 | 40 | feedback: str 41 | -------------------------------------------------------------------------------- /ddpui/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/tests/__init__.py -------------------------------------------------------------------------------- /ddpui/tests/api_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/tests/api_tests/__init__.py -------------------------------------------------------------------------------- /ddpui/tests/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/tests/core/__init__.py -------------------------------------------------------------------------------- /ddpui/tests/core/datainsights/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/tests/core/datainsights/__init__.py -------------------------------------------------------------------------------- /ddpui/tests/core/datainsights/factories/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/tests/core/datainsights/factories/__init__.py -------------------------------------------------------------------------------- /ddpui/tests/core/datainsights/insights/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/tests/core/datainsights/insights/__init__.py -------------------------------------------------------------------------------- /ddpui/tests/core/datainsights/interfaces/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/tests/core/datainsights/interfaces/__init__.py -------------------------------------------------------------------------------- /ddpui/tests/core/datainsights/interfaces/test_warehouse_interface.py: -------------------------------------------------------------------------------- 1 | import os 2 | import django 3 | from django.core.management import call_command 4 | from django.apps import apps 5 | 6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ddpui.settings") 7 | os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true" 8 | django.setup() 9 | 10 | 11 | import pytest 12 | 13 | pytestmark = pytest.mark.django_db 14 | 15 | from ddpui.datainsights.warehouse.warehouse_interface import Warehouse 16 | 17 | 18 | class UnimplementedMethodWarehouse(Warehouse): 19 | def __init__(self): 20 | pass 21 | 22 | 23 | class DummyWarehouse(Warehouse): 24 | def __init__(self): 25 | pass 26 | 27 | def execute(self, sql_statement: str): 28 | pass 29 | 30 | def get_table_columns(self, db_schema: str, db_table: str) -> dict: 31 | pass 32 | 33 | def get_col_python_type(self, db_schema: str, db_table: str, column_name: str): 34 | pass 35 | 36 | def get_wtype(self): 37 | pass 38 | 39 | 40 | def test_unimplemented_methods_warehouse_interface(): 41 | """Each warehouse client should implement all abstract methods in Warehouse interface""" 42 | 43 | with pytest.raises(TypeError): 44 | UnimplementedMethodWarehouse() 45 | 46 | 47 | def test_dummy_warehouse_with_all_methods_implemented(): 48 | """Success test with all abstract methods implemented in Warehouse interface""" 49 | obj = DummyWarehouse() 50 | 51 | assert "execute" in dir(obj) 52 | assert "get_col_python_type" in dir(obj) 53 | assert "get_table_columns" in dir(obj) 54 | assert "get_wtype" in dir(obj) 55 | -------------------------------------------------------------------------------- /ddpui/tests/core/dbt_automation/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/tests/core/dbt_automation/__init__.py -------------------------------------------------------------------------------- /ddpui/tests/core/dbt_automation/test_dbtconfigs.py: -------------------------------------------------------------------------------- 1 | from ddpui.dbt_automation.utils.dbtconfigs import mk_model_config, get_columns_from_model 2 | 3 | 4 | def test_mk_model_config(): 5 | """test mk_model_config""" 6 | schemaname = "schemaname" 7 | modelname = "modelname" 8 | columnspec = ["column1", "column2"] 9 | model_config = mk_model_config(schemaname, modelname, columnspec) 10 | assert model_config["name"] == modelname 11 | assert 
model_config["columns"][0]["name"] == "_airbyte_ab_id" 12 | assert model_config["columns"][1]["name"] == "column1" 13 | assert model_config["columns"][2]["name"] == "column2" 14 | 15 | 16 | def test_get_columns_from_model(): 17 | """test get_columns_from_model""" 18 | models = { 19 | "models": [ 20 | { 21 | "name": "modelname", 22 | "description": "", 23 | "+schema": "schemaname", 24 | "columns": [ 25 | { 26 | "name": "_airbyte_ab_id", 27 | "description": "", 28 | "tests": ["unique", "not_null"], 29 | }, 30 | { 31 | "name": "column1", 32 | "description": "", 33 | }, 34 | { 35 | "name": "column2", 36 | "description": "", 37 | }, 38 | ], 39 | } 40 | ] 41 | } 42 | columns = get_columns_from_model(models, "modelname") 43 | assert columns == ["_airbyte_ab_id", "column1", "column2"] 44 | columns = get_columns_from_model(models, "modelname2") 45 | assert columns is None 46 | -------------------------------------------------------------------------------- /ddpui/tests/core/dbt_automation/test_tableutils.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from ddpui.dbt_automation.utils.tableutils import source_or_ref 3 | 4 | 5 | @pytest.fixture 6 | def source(): 7 | return { 8 | "input_type": "source", 9 | "input_name": "src_table", 10 | "source_name": "src_name", 11 | } 12 | 13 | 14 | @pytest.fixture 15 | def model_ref(): 16 | return { 17 | "input_type": "model", 18 | "input_name": "src_table", 19 | "source_name": "src_name", 20 | } 21 | 22 | 23 | def test_correct_source(source): 24 | assert "input_type" in source 25 | assert "input_name" in source 26 | assert "source_name" in source 27 | assert source["input_type"] == "source" 28 | src_name = source["source_name"] 29 | inp_name = source["input_name"] 30 | assert source_or_ref(**source) == f"source('{src_name}', '{inp_name}')" 31 | 32 | 33 | def test_correct_model(model_ref): 34 | assert "input_type" in model_ref 35 | assert "input_name" in model_ref 36 | assert "source_name" in model_ref 37 | assert model_ref["input_type"] == "model" 38 | inp_name = model_ref["input_name"] 39 | assert source_or_ref(**model_ref) == f"ref('{inp_name}')" 40 | -------------------------------------------------------------------------------- /ddpui/tests/core/dbt_automation/test_warehouseclient.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from unittest.mock import patch, Mock 3 | 4 | from ddpui.dbt_automation.utils.warehouseclient import get_client 5 | 6 | 7 | @patch("ddpui.dbt_automation.utils.warehouseclient.BigQueryClient") 8 | @patch("ddpui.dbt_automation.utils.warehouseclient.PostgresClient") 9 | def test_dbt_automation_warehouse_client(MockPostgresClient: Mock, MockBigqueryClient: Mock): 10 | """test the warehouse client""" 11 | conn_info = { 12 | "host": "localhost", 13 | "port": 5432, 14 | "user": "test", 15 | "password": "test", 16 | "database": "test", 17 | } 18 | 19 | warehouse_type = "postgres" 20 | get_client(warehouse_type, conn_info) 21 | MockPostgresClient.assert_called_once_with(conn_info) 22 | 23 | warehouse_type = "bigquery" 24 | location = "US" 25 | get_client(warehouse_type, conn_info, location) 26 | MockBigqueryClient.assert_called_once_with(conn_info, location) 27 | 28 | warehouse_type = "unknown" 29 | with pytest.raises(ValueError, match="unknown warehouse"): 30 | get_client(warehouse_type, conn_info) 31 | -------------------------------------------------------------------------------- /ddpui/tests/helper/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/tests/helper/__init__.py -------------------------------------------------------------------------------- /ddpui/tests/integration_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/tests/integration_tests/__init__.py -------------------------------------------------------------------------------- /ddpui/tests/integration_tests/dbt_automation/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/tests/integration_tests/dbt_automation/__init__.py -------------------------------------------------------------------------------- /ddpui/tests/services/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/tests/services/__init__.py -------------------------------------------------------------------------------- /ddpui/tests/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/tests/utils/__init__.py -------------------------------------------------------------------------------- /ddpui/urls.py: -------------------------------------------------------------------------------- 1 | # main urls 2 | from django.contrib import admin 3 | from django.urls import include, path 4 | from django.http import HttpResponse 5 | 6 | from ddpui.routes import src_api 7 | from ddpui.html.docs import get_dbt_docs 8 | from ddpui.html.elementary import get_elementary_report 9 | 10 | from ddpui.datainsights.generate_result import DataInsightsConsumer 11 | from ddpui.websockets.airbyte_consumer import SchemaCatalogConsumer, SourceCheckConnectionConsumer 12 | from ddpui.websockets.airbyte_consumer import DestinationCheckConnectionConsumer 13 | 14 | 15 | def trigger_error(request): # pylint: disable=unused-argument # skipcq PYK-W0612 16 | """endpoint to test sentry""" 17 | division_by_zero = 1 / 0 # pylint: disable=unused-variable 18 | 19 | 20 | def healthcheck(request): # pylint:disable=unused-argument 21 | """Healthcheck endpoint for load balancers""" 22 | return HttpResponse("OK") 23 | 24 | 25 | urlpatterns = [ 26 | path("admin/", admin.site.urls), 27 | path("healthcheck", healthcheck), 28 | path("docs//", get_dbt_docs), 29 | path("elementary//", get_elementary_report), 30 | path("prometheus/", include("django_prometheus.urls")), 31 | path("sentry-debug/", trigger_error), 32 | path("", src_api.urls), 33 | ] 34 | 35 | # socket endpoints 36 | ws_urlpatterns = [ 37 | path("wss/data_insights/", DataInsightsConsumer.as_asgi()), 38 | path("wss/airbyte/source/check_connection", SourceCheckConnectionConsumer.as_asgi()), 39 | path( 40 | "wss/airbyte/destination/check_connection", 41 | DestinationCheckConnectionConsumer.as_asgi(), 42 | ), 43 | path("wss/airbyte/connection/schema_catalog", SchemaCatalogConsumer.as_asgi()), 44 | ] 45 | -------------------------------------------------------------------------------- /ddpui/utils/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/DalgoT4D/DDP_backend/68df124354912bbe9ee567c7a952a9529987e74d/ddpui/utils/__init__.py -------------------------------------------------------------------------------- /ddpui/utils/ab_logger.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | from logging.handlers import RotatingFileHandler 4 | from ddpui import settings 5 | from ddpui.utils.timezone import ist_time 6 | 7 | logger = logging.getLogger("airbyte") 8 | 9 | 10 | def setup_logger(): 11 | """setup the airbyte api logger""" 12 | logfilename = settings.BASE_DIR / "ddpui/logs/airbyte.log" 13 | logger.setLevel(logging.INFO) 14 | 15 | # log to stdout 16 | handler = logging.StreamHandler(sys.stdout) 17 | handler.setLevel(logging.DEBUG) 18 | logging.Formatter.converter = ist_time 19 | formatter = logging.Formatter( 20 | "%(levelname)s - %(asctime)s - %(name)s - %(filename)s - %(caller_name)s - %(orgname)s: %(message)s" 21 | ) 22 | handler.setFormatter(formatter) 23 | logger.addHandler(handler) 24 | 25 | # log to file 26 | handler = RotatingFileHandler(logfilename, maxBytes=1048576, backupCount=5) 27 | handler.setLevel(logging.INFO) 28 | logging.Formatter.converter = ist_time 29 | formatter = logging.Formatter( 30 | "%(levelname)s - %(asctime)s - %(name)s - %(filename)s - %(caller_name)s - %(orgname)s: %(message)s" 31 | ) 32 | handler.setFormatter(formatter) 33 | logger.addHandler(handler) 34 | -------------------------------------------------------------------------------- /ddpui/utils/awsses.py: -------------------------------------------------------------------------------- 1 | """send emails using SES""" 2 | 3 | import os 4 | import boto3 5 | 6 | ses = boto3.client( 7 | "ses", 8 | "ap-south-1", 9 | aws_access_key_id=os.getenv("SES_ACCESS_KEY_ID"), 10 | aws_secret_access_key=os.getenv("SES_SECRET_ACCESS_KEY"), 11 | ) 12 | 13 | 14 | def send_text_message(to_email, subject, message): 15 | """ 16 | send a plain-text email using ses 17 | """ 18 | response = ses.send_email( 19 | Destination={"ToAddresses": [to_email]}, 20 | Message={ 21 | "Body": {"Text": {"Charset": "UTF-8", "Data": message}}, 22 | "Subject": {"Charset": "UTF-8", "Data": subject}, 23 | }, 24 | Source=os.getenv("SES_SENDER_EMAIL"), 25 | ) 26 | return response 27 | -------------------------------------------------------------------------------- /ddpui/utils/ddp_logger.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | from logging.handlers import RotatingFileHandler 4 | from ddpui import settings 5 | from ddpui.utils.timezone import ist_time 6 | 7 | logger = logging.getLogger("ddpui") 8 | 9 | 10 | def setup_logger(): 11 | """setup the ddpui logger""" 12 | logfilename = settings.BASE_DIR / "ddpui/logs/ddpui.log" 13 | logger.setLevel(logging.INFO) 14 | 15 | # log to stdout 16 | handler = logging.StreamHandler(sys.stdout) 17 | handler.setLevel(logging.DEBUG) 18 | logging.Formatter.converter = ist_time 19 | formatter = logging.Formatter( 20 | "%(levelname)s - %(asctime)s - %(name)s - %(filename)s - %(caller_name)s - %(orgname)s: %(message)s" 21 | ) 22 | handler.setFormatter(formatter) 23 | logger.addHandler(handler) 24 | 25 | handler = RotatingFileHandler(logfilename, maxBytes=1048576, backupCount=5) 26 | handler.setLevel(logging.INFO) 27 | logging.Formatter.converter = ist_time 28 | formatter = logging.Formatter( 29 | 
"%(levelname)s - %(asctime)s - %(name)s - %(filename)s - %(caller_name)s - %(orgname)s: %(message)s" 30 | ) 31 | handler.setFormatter(formatter) 32 | logger.addHandler(handler) 33 | -------------------------------------------------------------------------------- /ddpui/utils/discord.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | 4 | def send_discord_notification(webhook_url, message): 5 | """sends a message to a discord webhook""" 6 | data = {"content": message} 7 | 8 | response = requests.post(webhook_url, json=data, timeout=10) 9 | 10 | if response.status_code != 204: 11 | raise Exception( 12 | f"Failed to send notification. Status code: {response.status_code}, Response: {response.text}" 13 | ) 14 | -------------------------------------------------------------------------------- /ddpui/utils/flags.py: -------------------------------------------------------------------------------- 1 | from flags import conditions 2 | 3 | 4 | @conditions.register("org_slug") 5 | def org_condition(org_slug, request_org_slug=None): 6 | """Def""" 7 | return request_org_slug == org_slug 8 | -------------------------------------------------------------------------------- /ddpui/utils/redis_client.py: -------------------------------------------------------------------------------- 1 | import os 2 | import threading 3 | 4 | from redis import Redis 5 | 6 | 7 | class RedisClient: 8 | """ 9 | Singleton Class to instantiate a Redis client. 10 | Use this class anywhere in the code to interact with the Redis server 11 | """ 12 | 13 | lock = threading.Lock() 14 | _redis_instance = None 15 | 16 | @classmethod 17 | def get_instance(cls) -> Redis: 18 | """ 19 | Returns the Redis instance. 20 | If the Redis instance is not already created, it creates a new instance using the 21 | host and port specified in the environment variables REDIS_HOST and REDIS_PORT. 22 | If the environment variables are not set, it defaults to using "localhost" as the host 23 | and "6379" as the port. 24 | To prevent multiple Redis objects being created we make use of locks 25 | Returns: 26 | Redis: The Redis instance. 
27 | """ 28 | if cls._redis_instance is None: 29 | if cls.lock.acquire(timeout=10): 30 | if cls._redis_instance is None: 31 | host = os.getenv("REDIS_HOST", "localhost") 32 | port = int(os.getenv("REDIS_PORT", "6379")) 33 | cls._redis_instance = Redis(host=host, port=port) 34 | cls.lock.release() 35 | return cls._redis_instance 36 | 37 | @classmethod 38 | def reset_instance(cls) -> None: 39 | """ 40 | Reset the instance to None 41 | """ 42 | cls._redis_instance = None 43 | -------------------------------------------------------------------------------- /ddpui/utils/thread.py: -------------------------------------------------------------------------------- 1 | import threading 2 | 3 | # Thread-local storage 4 | _thread_locals = threading.local() 5 | 6 | 7 | def get_current_request(): 8 | """get the current request object from the thread""" 9 | return getattr(_thread_locals, "request", None) 10 | 11 | 12 | def set_current_request(request): 13 | """set the current request object in the thread""" 14 | _thread_locals.request = request 15 | -------------------------------------------------------------------------------- /ddpui/utils/timezone.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import pytz 3 | 4 | IST = pytz.IST = pytz.timezone("Asia/Kolkata") 5 | UTC = pytz.utc 6 | 7 | 8 | def as_utc(timestamp: datetime.datetime): 9 | """Return time in UTC""" 10 | return timestamp.astimezone(UTC) if timestamp.tzinfo else UTC.localize(timestamp) 11 | 12 | 13 | def as_ist(timestamp: datetime.datetime): 14 | """Return time in IST""" 15 | return timestamp.astimezone(IST) if timestamp.tzinfo else IST.localize(timestamp) 16 | 17 | 18 | def ist_time(*args): 19 | """set ist time""" 20 | utc_dt = pytz.utc.localize(datetime.datetime.utcnow()) 21 | converted = utc_dt.astimezone(IST) 22 | return converted.timetuple() 23 | -------------------------------------------------------------------------------- /ddpui/utils/transform_workflow_helpers.py: -------------------------------------------------------------------------------- 1 | """ 2 | Helpers related to UI for transformation feature 3 | """ 4 | 5 | from ddpui.models.dbt_workflow import ( 6 | OrgDbtModel, 7 | OrgDbtOperation, 8 | DbtEdge, 9 | OrgDbtNodeType, 10 | ) 11 | 12 | 13 | def from_orgdbtoperation(orgdbt_op: OrgDbtOperation, chain_length: int = None, **kwargs): 14 | """Helper to turn an OrgDbtOperation into a dict""" 15 | dbtop = { 16 | "id": orgdbt_op.uuid, 17 | "output_cols": orgdbt_op.output_cols, 18 | "config": orgdbt_op.config, 19 | "type": OrgDbtNodeType.OPERATION_NODE, 20 | "target_model_id": orgdbt_op.dbtmodel.uuid, 21 | "target_model_name": orgdbt_op.dbtmodel.name, 22 | "target_model_schema": orgdbt_op.dbtmodel.schema, 23 | "seq": orgdbt_op.seq, 24 | "chain_length": chain_length, 25 | } 26 | 27 | if not dbtop["chain_length"]: 28 | dbtop["chain_length"] = OrgDbtOperation.objects.filter(dbtmodel=orgdbt_op.dbtmodel).count() 29 | 30 | dbtop["is_last_in_chain"] = dbtop["seq"] == dbtop["chain_length"] 31 | 32 | dbtop.update(kwargs) 33 | 34 | return dbtop 35 | 36 | 37 | def from_orgdbtmodel(orgdbt_model: OrgDbtModel): 38 | """ 39 | Helper to turn an OrgDbtModel into a dict 40 | """ 41 | 42 | return { 43 | "id": orgdbt_model.uuid, 44 | "source_name": orgdbt_model.source_name, 45 | "input_name": orgdbt_model.name, 46 | "input_type": orgdbt_model.type, 47 | "schema": orgdbt_model.schema, 48 | "type": OrgDbtNodeType.SRC_MODEL_NODE, 49 | } 50 | 
-------------------------------------------------------------------------------- /ddpui/websockets/__init__.py: -------------------------------------------------------------------------------- 1 | import json 2 | from channels.generic.websocket import WebsocketConsumer 3 | from rest_framework.authtoken.models import Token 4 | from urllib.parse import parse_qs 5 | 6 | from ddpui.websockets.schemas import WebsocketResponse 7 | from ddpui.models.org_user import OrgUser 8 | from ddpui.utils.custom_logger import CustomLogger 9 | 10 | logger = CustomLogger("ddpui") 11 | 12 | 13 | class BaseConsumer(WebsocketConsumer): 14 | def authenticate_user(self, token: str, orgslug: str): 15 | self.orguser = None 16 | self.user = None 17 | tokenrecord = Token.objects.filter(key=token).first() 18 | if tokenrecord and tokenrecord.user: 19 | self.user = tokenrecord.user 20 | q_orguser = OrgUser.objects.filter(user=self.user) 21 | if orgslug: 22 | q_orguser = q_orguser.filter(org__slug=orgslug) 23 | orguser = q_orguser.first() 24 | if orguser is not None: 25 | self.orguser = orguser 26 | return True 27 | return False 28 | 29 | def respond(self, message: WebsocketResponse): 30 | self.send(text_data=json.dumps(message.dict())) 31 | 32 | def connect(self): 33 | query_string = parse_qs(self.scope["query_string"].decode()) 34 | token = query_string.get("token", [None])[0] 35 | orgslug = query_string.get("orgslug", [None])[0] 36 | 37 | if self.authenticate_user(token, orgslug): 38 | logger.info("User authenticated, establishing connection") 39 | self.accept() 40 | else: 41 | logger.info("Authentication failed, closing connection") 42 | self.close() 43 | -------------------------------------------------------------------------------- /ddpui/websockets/schemas.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | from ninja import Schema 3 | from enum import Enum 4 | 5 | 6 | class WebsocketResponseStatus(str, Enum): 7 | SUCCESS = "success" 8 | ERROR = "error" 9 | 10 | 11 | class WebsocketResponse(Schema): 12 | """ 13 | Generic schema for all responses sent back via websockets 14 | """ 15 | 16 | message: str 17 | status: WebsocketResponseStatus 18 | data: dict = {} 19 | 20 | 21 | # ======================================================================== 22 | -------------------------------------------------------------------------------- /ddpui/wsgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | WSGI config for ddpui project. 3 | 4 | It exposes the WSGI callable as a module-level variable named ``application``. 
5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/4.1/howto/deployment/wsgi/ 8 | """ 9 | 10 | import os 11 | 12 | from django.core.wsgi import get_wsgi_application 13 | 14 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ddpui.settings") 15 | 16 | application = get_wsgi_application() 17 | -------------------------------------------------------------------------------- /gunicorn-log.conf: -------------------------------------------------------------------------------- 1 | [loggers] 2 | keys=root, gunicorn.error, gunicorn.access 3 | 4 | [handlers] 5 | keys=console, error_file, access_file 6 | 7 | [formatters] 8 | keys=generic, access 9 | 10 | [logger_root] 11 | level=INFO 12 | handlers=console 13 | 14 | [logger_gunicorn.error] 15 | level=INFO 16 | handlers=error_file 17 | propagate=1 18 | qualname=gunicorn.error 19 | 20 | [logger_gunicorn.access] 21 | level=INFO 22 | handlers=access_file 23 | propagate=0 24 | qualname=gunicorn.access 25 | 26 | [handler_console] 27 | class=StreamHandler 28 | formatter=generic 29 | args=(sys.stdout, ) 30 | 31 | [handler_error_file] 32 | class=logging.handlers.TimedRotatingFileHandler 33 | formatter=generic 34 | args=('ddpui/logs/gunicorn-error.log', 'midnight', 1, 90, 'utf-8') 35 | 36 | [handler_access_file] 37 | class=logging.handlers.TimedRotatingFileHandler 38 | formatter=access 39 | args=('ddpui/logs/gunicorn-access.log', 'midnight', 1, 90, 'utf-8') 40 | 41 | [formatter_generic] 42 | format=%(asctime)s [%(process)d] [%(levelname)s] %(message)s 43 | datefmt=%Y-%m-%d %H:%M:%S 44 | class=logging.Formatter 45 | 46 | [formatter_access] 47 | format=%(message)s 48 | class=logging.Formatter -------------------------------------------------------------------------------- /kill.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | ps aux | grep "ddpui.wsgi" | grep -v grep | awk '{print $2}' | xargs kill 4 | ps aux | grep "celery -A ddpui worker" | grep -v grep | awk '{print $2}' | xargs kill -9 5 | 6 | -------------------------------------------------------------------------------- /manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Django's command-line utility for administrative tasks.""" 3 | import os 4 | import sys 5 | 6 | 7 | def main(): 8 | """Run administrative tasks.""" 9 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ddpui.settings") 10 | try: 11 | from django.core.management import execute_from_command_line 12 | except ImportError as exc: 13 | raise ImportError( 14 | "Couldn't import Django. Are you sure it's installed and " 15 | "available on your PYTHONPATH environment variable? Did you " 16 | "forget to activate a virtual environment?" 
17 | ) from exc 18 | execute_from_command_line(sys.argv) 19 | 20 | 21 | if __name__ == "__main__": 22 | main() 23 | -------------------------------------------------------------------------------- /pyproject_deprecated.toml: -------------------------------------------------------------------------------- 1 | [tool.black] 2 | line-length = 100 3 | target-version = ["py38", "py39", "py310", "py311"] 4 | include = 'ddpui\/.*\.pyi?$' 5 | 6 | [tool.pytest.ini_options] 7 | DJANGO_SETTINGS_MODULE="ddpui.settings" 8 | testpaths = [ 9 | "ddpui/tests" 10 | ] 11 | 12 | [tool.pylint.messages_control] 13 | max-line-length = 200 14 | fail-under = 6.5 15 | disable = [ 16 | "missing-module-docstring", 17 | "broad-exception-raised", 18 | "broad-exception-caught", 19 | "too-few-public-methods", 20 | "logging-not-lazy", 21 | "logging-fstring-interpolation" 22 | ] 23 | 24 | [tool.coverage.run] 25 | source = [ 26 | "ddpui" 27 | ] 28 | omit = [ 29 | "ddpui/utils/sendgrid.py", 30 | "ddpui/utils/dbtdocs.py", 31 | "ddpui/celeryworkers/*.py", 32 | "ddpui/tests/*", 33 | "ddpui/migrations/*", 34 | "ddpui/management/*", 35 | "ddpui/models/*", 36 | "ddpui/celery.py", 37 | "ddpui/settings.py", 38 | "ddpui/asgi.py", 39 | "ddpui/wsgi.py", 40 | "ddpui/urls.py", 41 | "ddpui/routes.py", 42 | "*/schema.py", 43 | "ddpui/*/__init__.py", 44 | "ddpui/schemas/*", 45 | "ddpui/dbt_automation/assets/*", 46 | "ddpui/dbt_automation/seeds/*", 47 | "ddpui/dbt_automation/operations/*", 48 | ] -------------------------------------------------------------------------------- /refresh-dbt-automation.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | yes Y | pip uninstall dbt_automation 4 | yes Y | pip install git+https://github.com/DalgoT4D/dbt-automation.git 5 | 6 | -------------------------------------------------------------------------------- /scripts/README.createsources.md: -------------------------------------------------------------------------------- 1 | The `createsources.py` script bulk-creates sources and connections in a Dalgo workspace. 2 | 3 | Prerequisites: 4 | - a Dalgo workspace with a warehouse 5 | 6 | Usage: 7 | ``` 8 | PYTHONPATH= python scripts/createsources.py \ 9 | --env \ 10 | --file 11 | ``` 12 | 13 | The `ENV_FILE` needs to contain the following variables 14 | 15 | ``` 16 | APP_HOST="staging-api.dalgo.org" 17 | APP_PORT=443 18 | EMAIL= 19 | PASSWORD= 20 | ORG= 21 | ``` 22 | 23 | Make sure to change the `APP-HOST` to `api.dalgo.org` if you are doing this in production. The `org slug` is the lowercased version of the Org name, with spaces replaced by `-`. 24 | 25 | The configuration YAML has the following structure: 26 | 27 | ``` 28 | sources: 29 | - name: NAME 30 | stype: SOURCE TYPE 31 | config: 32 | parameter: value 33 | parameter: value 34 | parameter: value 35 | ``` 36 | 37 | You have to know the `SOURCE_TYPE` as well as the configuration structure for your source. 38 | 39 | After the `sources` you will list the `connections` in the same YAML file: 40 | 41 | ``` 42 | connections: 43 | - name: NAME 44 | source: SOURCE_NAME_FROM_ABOVE 45 | destinationSchema: staging // or whatever schema you want 46 | streams: 47 | - name: NAME_OF_TABLE_IN_DESTINATION 48 | syncMode: overwrite // or append or whatever 49 | ``` 50 | 51 | Running the script should then create the sources in your Dalgo workspace. 
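
For reference, here is a complete (hypothetical) configuration putting the two sections together. The source type and its `config` keys are taken from the HTTPS-CSV example in this repository's `testclient/config.yaml`; substitute the source type and parameters that match your own data.

```
sources:
  - name: covid19data
    stype: "File (CSV, JSON, Excel, Feather, Parquet)"
    config:
      url: "https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv"
      format: csv
      provider:
        storage: "HTTPS"
        user_agent: false
      dataset_name: covid19data

connections:
  - name: covid19data
    source: covid19data
    destinationSchema: staging
    streams:
      - name: covid19data
        syncMode: overwrite
```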
52 | -------------------------------------------------------------------------------- /scripts/README.generatecreatesourcesyml.md: -------------------------------------------------------------------------------- 1 | This is a convenience script to generate the YAML file for `createsources.py` specifically for CSV sources in S3 2 | 3 | Usage: 4 | ``` 5 | python generatecreatesourcesyml.py \ 6 | --csvs \ 7 | --aws-access-key-id \ 8 | --aws-secret-access-key \ 9 | --output 10 | --destination-schema 11 | ``` 12 | 13 | The AWS credentials are not used by this script, they are put into the YAML for use by Airbyte. 14 | 15 | The input CSV must have the following columns: 16 | 17 | `s3location,dataset,source,connection` 18 | 19 | - `s3location` is an S3 url of the form `s3://...` 20 | - `dataset` is the name of the destination table to write to 21 | - `source` is the name of the Dalgo source to be created 22 | - `connection` is the name of the Dalgo connection to be created 23 | 24 | The `dataset`, `source` and `connection` will often all be the same. 25 | 26 | Running this script will generate the YAML which can be passed to `createsources.py` to create the corresponding sources and their connections to the Dalgo warehouse. 27 | -------------------------------------------------------------------------------- /scripts/createsources.example.env: -------------------------------------------------------------------------------- 1 | APP_HOST="staging-api.dalgo.org" 2 | APP_PORT=443 3 | EMAIL= 4 | PASSWORD= 5 | ORG= -------------------------------------------------------------------------------- /scripts/dbt_automation/dropcolumnfromcsv.py: -------------------------------------------------------------------------------- 1 | """drops a column from a CSV file""" 2 | #!env python 3 | 4 | import argparse 5 | import pandas as pd 6 | 7 | parser = argparse.ArgumentParser(description="Drop a column from a CSV file") 8 | parser.add_argument("inputcsvfile", help="CSV file to drop column from") 9 | parser.add_argument("outputcsvfile", help="CSV file to drop column from") 10 | parser.add_argument("column", help="column to drop") 11 | args = parser.parse_args() 12 | 13 | df = pd.read_csv(args.inputcsvfile) 14 | if args.column in df.columns: 15 | df.drop(columns=[args.column], inplace=True) 16 | df.to_csv(args.outputcsvfile, index=False) 17 | -------------------------------------------------------------------------------- /scripts/dbt_automation/operations.yaml: -------------------------------------------------------------------------------- 1 | version: 1 2 | description: "Yaml template to get you started on automating your dbt work. 
DO NOT EDIT this, make a copy and use" 3 | warehouse: postgres 4 | operations: 5 | - type: mergeoperations 6 | config: 7 | dest_schema: intermediate 8 | output_name: pivot_merge_op 9 | input: 10 | input_type: source 11 | input_name: sheet2 12 | source_name: staging 13 | operations: 14 | - type: renamecolumns 15 | config: 16 | source_columns: 17 | - ngo 18 | - spoc 19 | - _airbyte_ab_id 20 | - _airbyte_emitted_at 21 | columns: 22 | ngo: NGO 23 | Indicator: indicator 24 | - type: pivot 25 | config: 26 | source_columns: 27 | - spoc 28 | pivot_column_name: Month 29 | pivot_column_values: 30 | - "IMAGE" 31 | - "CRC" 32 | - "CWFD" 33 | -------------------------------------------------------------------------------- /scripts/dbt_automation/operations1.yaml: -------------------------------------------------------------------------------- 1 | version: 1 2 | description: "Yaml template to get you started on automating your dbt work. DO NOT EDIT this, make a copy and use" 3 | warehouse: postgres 4 | operations: 5 | - type: unpivot 6 | config: 7 | input: 8 | input_type: source 9 | input_name: sheet2 10 | source_name: staging 11 | source_columns: 12 | - Month 13 | - ngo 14 | - spoc 15 | - measure 16 | - Indicator 17 | - _airbyte_ab_id 18 | - _airbyte_emitted_at 19 | - _airbyte_normalized_at 20 | - _airbyte_sheet2_hashid 21 | exclude_columns: 22 | - Month 23 | unpivot_columns: 24 | - ngo 25 | - spoc 26 | dest_schema: intermediate 27 | output_name: unpivot_op 28 | # - type: pivot 29 | # config: 30 | # input: 31 | # input_type: source 32 | # input_name: sheet2 33 | # source_name: staging 34 | # source_columns: 35 | # - spoc 36 | # pivot_column_name: Month 37 | # pivot_column_values: 38 | # - "IMAGE" 39 | # - "CRC" 40 | # - "CWFD" 41 | # dest_schema: intermediate 42 | # output_name: pivot_merge_op 43 | -------------------------------------------------------------------------------- /scripts/dbt_automation/showcolumnfromtables.py: -------------------------------------------------------------------------------- 1 | """given a list of tables, show the unique values of the specified column""" 2 | 3 | import os 4 | import argparse 5 | from logging import basicConfig, getLogger, INFO 6 | from dotenv import load_dotenv 7 | from ddpui.dbt_automation.utils.warehouseclient import get_client 8 | 9 | load_dotenv("dbconnection.env") 10 | 11 | basicConfig(level=INFO) 12 | logger = getLogger() 13 | 14 | parser = argparse.ArgumentParser() 15 | parser.add_argument("--warehouse", required=True, choices=["postgres", "bigquery"]) 16 | parser.add_argument("--schema", required=True) 17 | parser.add_argument("--column", required=True) 18 | parser.add_argument("--tables", nargs="+", required=True) 19 | args = parser.parse_args() 20 | 21 | warehouse = args.warehouse 22 | schema = args.schema 23 | column = args.column 24 | 25 | # -- start 26 | conn_info = { 27 | "host": os.getenv("DBHOST"), 28 | "port": os.getenv("DBPORT"), 29 | "username": os.getenv("DBUSER"), 30 | "password": os.getenv("DBPASSWORD"), 31 | "database": os.getenv("DBNAME"), 32 | } 33 | client = get_client(warehouse, conn_info) 34 | 35 | for tablename in args.tables: 36 | QUERY = f"SELECT DISTINCT {column} FROM {schema}.{tablename}" 37 | resultset = client.execute(QUERY) 38 | for result in resultset: 39 | if warehouse == "bigquery": 40 | print(result[column]) 41 | else: 42 | print(result[0]) 43 | -------------------------------------------------------------------------------- /scripts/parseprefectlogs.py: 
-------------------------------------------------------------------------------- 1 | """fetches the logs from the prefect database""" 2 | import os 3 | import json 4 | import argparse 5 | import logging 6 | from dotenv import load_dotenv 7 | 8 | from ddpui.utils.prefectlogs import parse_prefect_logs 9 | from ddpui.utils.helpers import remove_nested_attribute 10 | 11 | parser = argparse.ArgumentParser(description="Parse the logs from a flow run") 12 | parser.add_argument("flowrun", help="flow run id") 13 | parser.add_argument("--show-logs", help="show logs", action="store_true") 14 | args = parser.parse_args() 15 | 16 | logger = logging.getLogger() 17 | logging.basicConfig( 18 | level=logging.DEBUG, 19 | format="%(asctime)s %(levelname)s %(message)s", 20 | handlers=[logging.StreamHandler()], 21 | ) 22 | 23 | 24 | if __name__ == "__main__": 25 | # 3b5473c2-f164-4fee-ad6a-2030d3a3deb3 26 | # 9755ec98-db63-40c7-8e52-eccf6b220d12 27 | # 55beb129-ba43-48a9-8139-14765c0b26fc 28 | # ed16d9ff-3fba-4bf3-bbe9-04ee51a22092 29 | # be98ad56-b17c-4dc8-a732-f6fe552a50f1 30 | # a88759e9-e393-44be-a623-7cd9667b8872 31 | load_dotenv("scripts/parseprefectlogs.env", verbose=True, override=True) 32 | connection_info = { 33 | "host": os.getenv("POSTGRES_HOST"), 34 | "port": os.getenv("POSTGRES_PORT"), 35 | "database": os.getenv("POSTGRES_DB"), 36 | "user": os.getenv("POSTGRES_USER"), 37 | "password": os.getenv("POSTGRES_PASSWORD"), 38 | } 39 | result = parse_prefect_logs(connection_info, args.flowrun) 40 | if args.show_logs: 41 | print(json.dumps(result, indent=2)) 42 | else: 43 | for task in result: 44 | print(json.dumps(remove_nested_attribute(task, "log_lines"), indent=2)) 45 | -------------------------------------------------------------------------------- /seed/001_roles.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "model": "ddpui.Role", 4 | "pk": 1, 5 | "fields": { 6 | "name": "Super User", 7 | "slug": "super-admin", 8 | "level": 5 9 | } 10 | }, 11 | { 12 | "model": "ddpui.Role", 13 | "pk": 2, 14 | "fields": { 15 | "name": "Account Manager", 16 | "slug": "account-manager", 17 | "level": 4 18 | } 19 | }, 20 | { 21 | "model": "ddpui.Role", 22 | "pk": 3, 23 | "fields": { 24 | "name": "Pipeline Manager", 25 | "slug": "pipeline-manager", 26 | "level": 3 27 | } 28 | }, 29 | { 30 | "model": "ddpui.Role", 31 | "pk": 4, 32 | "fields": { 33 | "name": "Analyst", 34 | "slug": "analyst", 35 | "level": 2 36 | } 37 | }, 38 | { 39 | "model": "ddpui.Role", 40 | "pk": 5, 41 | "fields": { 42 | "name": "Guest", 43 | "slug": "guest", 44 | "level": 1 45 | } 46 | } 47 | ] -------------------------------------------------------------------------------- /seed/assistant_prompts.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "model": "ddpui.AssistantPrompt", 4 | "pk": 1, 5 | "fields": { 6 | "type": "log_summarization", 7 | "prompt": "You are a member of the Dalgo development team. Your expertise is in analyzing log messages from the platform and providing meaningful insights.\n\nThe file attached to this assistant is a log from $pipeline. Read the log carefully. You will be asked questions about it that you are expected to answer accurately." 8 | } 9 | }, 10 | { 11 | "model": "ddpui.AssistantPrompt", 12 | "pk": 2, 13 | "fields": { 14 | "type": "long_text_summarization", 15 | "prompt": "You are very good at summarizing what you read." 
16 | } 17 | } 18 | ] -------------------------------------------------------------------------------- /seed/user_prompts.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "model": "ddpui.UserPrompt", 4 | "pk": 1, 5 | "fields": { 6 | "type": "long_text_summarization", 7 | "prompt": "Summarize the results of the query", 8 | "label": "Summarize" 9 | } 10 | } 11 | ] -------------------------------------------------------------------------------- /start-celery-worker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | /home/ddp/DDP_backend/venv/bin/celery -A ddpui worker -n ddpui --pidfile celeryworker.pid -------------------------------------------------------------------------------- /start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # /home/ddp/DDP_backend/venv/bin/gunicorn -b localhost:8002 ddpui.wsgi \ 4 | # --capture-output \ 5 | # --error-logfile /home/ddp/DDP_backend/ddpui/logs/gunicorn-error.log \ 6 | # --access-logfile /home/ddp/DDP_backend/ddpui/logs/gunicorn-access.log 7 | 8 | /home/ddp/DDP_backend/venv/bin/gunicorn -b localhost:8002 ddpui.wsgi \ 9 | --capture-output \ 10 | --log-config /home/ddp/DDP_backend/gunicorn-log.conf 11 | -------------------------------------------------------------------------------- /testclient/.env.test.template: -------------------------------------------------------------------------------- 1 | PREFECT_PROXY_API_URL= 2 | DALGO_USER= 3 | DALGO_PASSWORD= 4 | SIGNUPCODE= 5 | 6 | # WAREHOUSETYPE=bigquery 7 | BQ_WAREHOUSE_SERVICE_ACCOUNT_CREDSFILE= 8 | BQ_PROJECTID= 9 | BQ_DATASETID= 10 | BQ_DATASETLOCATION= 11 | 12 | GCS_BUCKET= 13 | GCS_PATH= 14 | GCS_HMAC_KEY= 15 | GCS_HMAC_SECRET= 16 | 17 | # WAREHOUSETYPE=postgres 18 | PG_WAREHOUSE_HOST= 19 | PG_WAREHOUSE_PORT= 20 | PG_WAREHOUSE_DATABASE= 21 | PG_WAREHOUSE_USERNAME= 22 | PG_WAREHOUSE_PASSWORD= 23 | 24 | DBT_PROFILE= 25 | DBT_TARGET= 26 | 27 | DBT_TARGETCONFIGS_SCHEMA= 28 | 29 | 30 | DBT_TEST_REPO= 31 | DBT_TEST_REPO_ACCESSTOKEN= 32 | 33 | -------------------------------------------------------------------------------- /testclient/config.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | user: 4 | create_new: false 5 | 6 | org: 7 | create_new: false 8 | name: testclient 9 | 10 | warehouse: 11 | delete_existing: false 12 | wtype: bigquery 13 | 14 | airbyte: 15 | sources: 16 | - name: covid19data 17 | stype: "File (CSV, JSON, Excel, Feather, Parquet)" 18 | config: 19 | url: "https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv" 20 | format: csv 21 | provider: 22 | storage: "HTTPS" 23 | user_agent: false 24 | dataset_name: covid19data 25 | 26 | connections: 27 | - name: covid19data 28 | source: covid19data 29 | streams: 30 | - name: covid19data 31 | syncMode: overwrite 32 | destinationSchema: csv_schema 33 | 34 | dbt_workspace: 35 | setup_new: false 36 | git_pull: false 37 | 38 | prefect: 39 | dbt: 40 | delete_blocks: false 41 | delete_tasks: false 42 | create_tasks: false 43 | run_flow_dbtrun: false 44 | create_flow: false 45 | delete_flows: false --------------------------------------------------------------------------------