├── .dockerignore ├── .env.example ├── .firebaserc ├── .github └── workflows │ ├── backend_deploy_workflow.yaml │ ├── backend_test_workflow.yaml │ ├── build_and_test.yaml │ ├── deploy_production.yaml │ └── deploy_staging.yaml ├── .gitignore ├── .golangci.yml ├── .jwt-key.example.pem ├── .vscode ├── .user-settings.sample.json ├── extensions.json ├── launch.json └── settings.json ├── Dockerfile ├── LICENSE ├── Makefile ├── README.md ├── api-clients └── convoy │ ├── api.yaml │ ├── cfg.yaml │ ├── client.gen.go │ └── generate.go ├── api ├── configuration.go ├── dependencies.go ├── handle_analytics.go ├── handle_apikey.go ├── handle_cases.go ├── handle_client_data.go ├── handle_custom_list.go ├── handle_data_model_options.go ├── handle_datamodel.go ├── handle_datamodel_pivot.go ├── handle_decision.go ├── handle_editor.go ├── handle_entity_annotations.go ├── handle_inboxes.go ├── handle_ingestion.go ├── handle_license.go ├── handle_liveness_probe.go ├── handle_organization.go ├── handle_partner.go ├── handle_post_firebase_id_token.go ├── handle_post_firebase_id_token_test.go ├── handle_rules.go ├── handle_sanction_checks.go ├── handle_scenario_iterations.go ├── handle_scenario_publications.go ├── handle_scenario_test_run.go ├── handle_scenarios.go ├── handle_scheduled_execution.go ├── handle_signup_status.go ├── handle_snoozes.go ├── handle_suspicious_activity_report.go ├── handle_tags.go ├── handle_transfer_alerts.go ├── handle_transfer_check.go ├── handle_users.go ├── handle_version.go ├── handle_webhooks.go ├── middleware │ └── logging.go ├── parse_authorization_bearer_header.go ├── parse_authorization_bearer_header_test.go ├── present_error.go ├── router.go ├── routes.go ├── server.go └── usecases_with_context_credentials.go ├── cmd ├── batch_ingestion.go ├── config.go ├── migrations.go ├── scheduled_executor.go ├── send_pending_webhook_events.go ├── server.go └── worker.go ├── contrib ├── firebase-local-data.example │ ├── auth_export │ │ ├── accounts.json │ │ └── 
config.json │ └── firebase-export-metadata.json └── yente-datasets.yml ├── docker-compose.yml ├── dto ├── analytics_dto.go ├── api_error.go ├── api_key_dto.go ├── ast_validation_dto.go ├── case_contributor_dto.go ├── case_dto.go ├── case_event_dto.go ├── case_files_dto.go ├── case_tag_dto.go ├── client_data_object.go ├── credentials_dto.go ├── custom_list_dto.go ├── data_model_dto.go ├── data_model_pivot.go ├── decision_dto.go ├── dto_ast_node.go ├── dto_ast_node_test.go ├── entity_annotation.go ├── entity_annotation_test.go ├── inbox_dto.go ├── license_dto.go ├── openapi.go ├── organization_dto.go ├── organization_feature_access_dto.go ├── pagination.go ├── partner_dto.go ├── rule_dto.go ├── rule_snoozes.go ├── sanction_check_config_dto.go ├── sanction_check_dataset_dto.go ├── sanction_check_dto.go ├── sanction_check_refine_query_dto.go ├── sanction_check_refine_query_dto_test.go ├── scenario_iterations.go ├── scenario_publications.go ├── scenario_testrun.go ├── scenario_validation_dto.go ├── scenarios.go ├── scheduled_scenario_executions.go ├── string_or_number.go ├── suspicious_activity_report.go ├── tag_dto.go ├── transfer_alerts.go ├── transfer_check.go ├── upload_log_dto.go ├── user_dto.go └── webhooks.go ├── firebase.json ├── go.mod ├── go.sum ├── infra ├── config.go ├── convoy.go ├── firebase_client.go ├── firebase_client_mock.go ├── metabase.go ├── offloading.go ├── opensanctions.go ├── parse_signing_key.go ├── postgres_connections_pool.go ├── sentry.go ├── tracing.go └── verify_license.go ├── integration_test ├── api_end_to_end_test.go ├── batch_ingestion_and_execution_test.go ├── generate_usecases.go ├── init_test.go ├── river_helpers.go └── scenario_flow_test.go ├── jobs ├── execute_with_monitoring.go ├── generate_usecases_with_creds.go ├── ingest_data_from_csv.go ├── middlewares.go ├── scheduled_scenarios.go ├── scheduler.go └── send_pending_webhook_events.go ├── main.go ├── mise.toml ├── mocks ├── api_key_repository.go ├── 
case_contributor_repository.go ├── case_event_repository.go ├── case_repository.go ├── client_db_index_editor.go ├── custom_list_repository.go ├── data_model_repository.go ├── enforce_security.go ├── export_decisions_mock.go ├── feature_access_reader.go ├── firebase_token_verifier.go ├── inboxes_repository.go ├── ingested_data_indexes_repository.go ├── jwt_encoder_validator.go ├── organization_repository.go ├── organization_schema_repository.go ├── postgres.go ├── sanction_check_config_repository.go ├── scenario_fetcher.go ├── scenario_iteration_read_repository.go ├── scenario_iteration_write_repository.go ├── scenario_list_repository.go ├── scenario_publication_repository.go ├── scenario_publisher.go ├── scenario_publisher_repository_mock.go ├── scenario_repository_mock.go ├── scenario_testrun_repository.go ├── scheduled_execution_usecase_repository_mock.go ├── tag_repository.go ├── task_queue_repository.go ├── transaction.go ├── transaction_factory_mock.go ├── upload_log_repository.go └── user_repository.go ├── models ├── aggregate_filter.go ├── aggregate_query.go ├── analytics.go ├── api_key.go ├── ast │ ├── argument_err.go │ ├── ast_aggregator.go │ ├── ast_arguments.go │ ├── ast_custom_list_attr.go │ ├── ast_filter.go │ ├── ast_function.go │ ├── ast_function_test.go │ ├── ast_node.go │ ├── ast_node_evaluation.go │ ├── ast_node_weight_test.go │ ├── ast_sample_expression.go │ ├── evaluation_error_dto.go │ ├── evaluation_error_dto_test.go │ ├── evaluation_errors.go │ ├── execution_error_dto.go │ ├── node_evaluation_dto.go │ └── node_evaluation_dto_test.go ├── ast_validation.go ├── blob.go ├── case.go ├── case_contributor.go ├── case_event.go ├── case_files.go ├── case_tag.go ├── client_data_object.go ├── concrete_index.go ├── concrete_index_test.go ├── credentials.go ├── custom_list.go ├── data_model.go ├── data_model_options.go ├── data_model_pivot.go ├── data_model_test.go ├── databases.go ├── decision.go ├── decision_phantom.go ├── entity_annotation.go ├── 
entity_annotation_payload.go ├── errors.go ├── events.go ├── feature_access.go ├── firebase_identity.go ├── inbox.go ├── inbox_users.go ├── index_family.go ├── license.go ├── metabase_configuration.go ├── offloading_watermark.go ├── opensanctions.go ├── opensanctions_test.go ├── organization.go ├── organization_feature_access.go ├── organization_feature_access_test.go ├── organization_short_id.go ├── organization_short_id_test.go ├── outcome.go ├── pagination_and_sorting.go ├── partner.go ├── payload.go ├── permission.go ├── river_job.go ├── role.go ├── role_permission.go ├── rule.go ├── rule_snoozes.go ├── sanction_check.go ├── scenario_iterations.go ├── scenario_publications.go ├── scenario_test_run_summary.go ├── scenario_testrun.go ├── scenario_validation.go ├── scenarios.go ├── scheduled_scenario_execution_test.go ├── scheduled_scenario_executions.go ├── suspicious_activity_report.go ├── tag.go ├── transfer_alerts.go ├── transfer_check.go ├── upload_log.go ├── user.go └── webhook.go ├── pubapi ├── constants.go ├── features.go ├── pagination.go ├── params.go ├── pubapi.go ├── response.go ├── response_test.go ├── tests │ ├── e2e_test.go │ ├── fixtures │ │ ├── base │ │ │ └── base.yml │ │ ├── cases.yml │ │ ├── decision_rules.yml │ │ ├── decisions.yml │ │ ├── sanction_check_configs.yml │ │ ├── sanction_check_matches.yml │ │ ├── sanction_checks.yml │ │ ├── scenario_iteration_rules.yml │ │ ├── scenario_iterations.yml │ │ └── scheduled_executions.yml │ ├── setup_test.go │ └── specs │ │ └── v1 │ │ ├── batch_executions.go │ │ ├── decisions.go │ │ ├── sanction_checks.go │ │ ├── v1.go │ │ └── whitelists.go ├── usecases.go ├── v1 │ ├── decisions.go │ ├── dto │ │ ├── batch_executions.go │ │ ├── case.go │ │ ├── decision.go │ │ └── sanction_check.go │ ├── openapi.yml │ ├── params │ │ ├── batch_execution.go │ │ └── decision.go │ ├── routes.go │ ├── rule_snoozing.go │ ├── sanction_checks.go │ └── scheduled_executions.go ├── validator.go └── validator_test.go ├── pure_utils ├── 
clean_bom.go ├── currencies.go ├── database_ids.go ├── database_ids_test.go ├── duration │ ├── duration.go │ └── duration_test.go ├── map.go ├── map_test.go ├── slices.go ├── slices_test.go ├── strings.go └── strings_test.go ├── repositories ├── analytics_repository.go ├── api_key_repository.go ├── blob_repository.go ├── case_and_decisions.go ├── case_contributor_repository.go ├── case_event_repository.go ├── case_repository.go ├── client_db_repository.go ├── clock │ └── clock.go ├── convoy_repository.go ├── convoy_repository_test.go ├── custom_list_repository.go ├── data_model_options.go ├── data_model_repository.go ├── db_executor.go ├── db_executor_getter.go ├── dbmodels │ ├── db_api_key.go │ ├── db_ast_evaluation.go │ ├── db_ast_expression.go │ ├── db_case.go │ ├── db_case_contributor.go │ ├── db_case_event.go │ ├── db_case_files.go │ ├── db_case_tag.go │ ├── db_custom_list.go │ ├── db_custom_list_value.go │ ├── db_data_model.go │ ├── db_data_model_options.go │ ├── db_data_model_pivot.go │ ├── db_decision.go │ ├── db_decision_rule.go │ ├── db_decisions_to_create.go │ ├── db_entity_annotation.go │ ├── db_inbox.go │ ├── db_license.go │ ├── db_offloading_watermark.go │ ├── db_organization.go │ ├── db_organization_feature_access.go │ ├── db_partner.go │ ├── db_phantom_decision.go │ ├── db_rule.go │ ├── db_rule_snooze.go │ ├── db_sanction_check.go │ ├── db_sanction_check_config.go │ ├── db_sanction_check_file.go │ ├── db_sanction_check_match.go │ ├── db_sanction_check_whitelist.go │ ├── db_sanction_match_comment.go │ ├── db_scenario.go │ ├── db_scenario_iterations.go │ ├── db_scenario_publications.go │ ├── db_scenario_test_run_summary.go │ ├── db_scenario_testrun.go │ ├── db_scheduled_executions.go │ ├── db_suspicious_activity_report.go │ ├── db_tag.go │ ├── db_transfer_alerts.go │ ├── db_transfer_mappings.go │ ├── db_upload_logs.go │ ├── db_user.go │ └── db_webhook_events.go ├── decision_phantoms_repository.go ├── decisions_repository.go ├── 
entity_annotation_repository.go ├── errors.go ├── eval_scenario_testrun.go ├── firebase │ ├── client.go │ └── client_test.go ├── firebase_token_repository.go ├── fixtures │ └── opensanctions │ │ ├── response_full.json │ │ └── response_partial.json ├── httpmodels │ ├── http_name_recognition.go │ ├── http_opensanctions_dataset.go │ ├── http_opensanctions_dataset_freshness.go │ └── http_opensanctions_result.go ├── inbox_users_repository.go ├── inboxes_repository.go ├── ingested_data_indexes_repository.go ├── ingested_data_read_repository.go ├── ingested_data_read_repository_test.go ├── ingestion_repository.go ├── ingestion_repository_test.go ├── license_repository.go ├── liveness.go ├── marble_db_repository.go ├── marble_jwt_repository.go ├── migrations.go ├── migrations │ ├── 20230515205456_init_schema.sql │ ├── 20230515205457_init_marble.sql │ ├── 20230515205458_init_marble_ingestion_db.sql │ ├── 20230522211012_decisions_index.sql │ ├── 20230524173849_transaction_currency_nullable.sql │ ├── 20230530164800_add_users.sql │ ├── 20230606090000_add_role_to_api_key.sql │ ├── 20230606154759_create_client_tables.sql │ ├── 20230607180012_remove_ingestion_tables.sql │ ├── 20230615091308_rename_clienttables_organization_schema.sql │ ├── 20230615103735_add_export_shedulded_execution_s3.sql │ ├── 20230616120000_batch_executions.sql │ ├── 20230616133343_decision_scheduled.sql │ ├── 20230622150504_batch_trigger_sql.sql │ ├── 20230622155555_add_lists.sql │ ├── 20230712141856_add_custom_list_composite_key.sql │ ├── 20230717175911_add_ast_expression_to_rule.sql │ ├── 20230726153023_add_trigger_condition_ast_expression.sql │ ├── 20230728164957_remove_formula_and_trigger_condition.sql │ ├── 20230921150013_add_index_on_decision_rules.sql │ ├── 20230921173346_add_data_model.sql │ ├── 20230926112219_add_number_of_created_decisions.sql │ ├── 20230927110030_add_upload_logs.sql │ ├── 20230928104726_add_constraint_on_data_model.sql │ ├── 
20230929112024_add_table_name_and_index_to_upload_logs.sql │ ├── 20231010091555_add_manual_to_scheduled_executions.sql │ ├── 20231011110218_add_data_model_enums.sql │ ├── 20231102181134_add_indexes_to_decisions.sql │ ├── 20231103160436_add_decision_indexes.sql │ ├── 20231113160835_create_case_table.sql │ ├── 20231113160850_add_case_id_to_decisions.sql │ ├── 20231115143600_add_data_model_float_enums.sql │ ├── 20231116141112_add_filtered_unique_index_on_list_names.sql │ ├── 20231117100514_create_case_events_table.sql │ ├── 20231117100515_add_name_to_users.sql │ ├── 20231117100516_remove_case_description.sql │ ├── 20231120110742_create_case_contributors.sql │ ├── 20231122111904_change_user_id_foreign_key.sql │ ├── 20231123141501_add_inbox_tables.sql │ ├── 20231127120900_add_inbox_id_on_cases.sql │ ├── 20231129180209_create_tags_table.sql │ ├── 20231201101404_remove_inbox_from_tags.sql │ ├── 20231215152328_add_deleted_at_to_users.sql │ ├── 20231218220000_create_case_files_table.sql │ ├── 20231222101032_change_user_email_index.sql │ ├── 20240112114607_fix_user_email_index.sql │ ├── 20240124111516_create_analytics_schema.sql │ ├── 20240201142144_remove_firebase_uid.sql │ ├── 20240201155650_api_key_description.sql │ ├── 20240202145805_remove_default_apikey_role.sql │ ├── 20240223090700_data_model_link_delete_cascade.sql │ ├── 20240223094300_drop_data_models.sql │ ├── 20240223095300_drop_organization_database_name.sql │ ├── 20240301225900_case_event_userid_nullable.sql │ ├── 20240304105230_scenario_decision_to_case_settings.sql │ ├── 20240304155400_hash_apikeys.sql │ ├── 20240312104400_organization_unique_name.sql │ ├── 20240312153600_iterationId_on_decisions_and_ruleId_on_decisionRules.sql │ ├── 20240313141600_apikeys_keyHash_index.sql │ ├── 20240314104400_iterationId_on_decisions_and_ruleId_on_decisionRules_backfill.sql │ ├── 20240322141900_transfer_check_feature.sql │ ├── 20240325133100_store_api_key_prefix.sql │ ├── 20240325142444_evaluation_on_decisionrules.sql │ ├── 
20240325143700_introduce_partners.sql │ ├── 20240327153500_ingested_tables_primary_keys.sql │ ├── 20240402153800_audit_trail_trigger.sql │ ├── 20240403105200_audit_table_schema_change.sql │ ├── 20240419115400_data_model_pivot.sql │ ├── 20240423160700_pivot_value_on_decisions.sql │ ├── 20240425140600_decisions_pivot_index.sql │ ├── 20240425161100_rationalize_decisions_indexes.sql │ ├── 20240502162700_partner_id_on_users.sql │ ├── 20240507140800_scenario_workflow_type.sql │ ├── 20240521171800_add_to_case_workflow_indexes.sql │ ├── 20240607115200_add_indexes_for_iteration_deletion.sql │ ├── 20240607153000_field_rule_group_on_rules.sql │ ├── 20240620154045_introduce_licenses.sql │ ├── 20240627115100_partner_bic.sql │ ├── 20240628170500_introduce_transfer_alerts.sql │ ├── 20240708141735_create_webhook_events_table.sql │ ├── 20240717170435_webhooks_events_rename_retry_count.sql │ ├── 20240725140200_licences_webhooks_entitlement.sql │ ├── 20240731141800_introduce_rule_snoozes.sql │ ├── 20240806171200_rule_execution_outcome.sql │ ├── 20240806173300_licences_rule_snoozes.sql │ ├── 20240808165800_rule_snoozes_store_decision_id.sql │ ├── 20240812112700_rule_snoozes_store_rule_id.sql │ ├── 20240829121800_upload_log_rows_ingested.sql │ ├── 20240909134500_new_outcome_block_and_review.sql │ ├── 20240909164400_workflows_migrate_new_outcome.sql │ ├── 20240909165500_decisions_review_status.sql │ ├── 20240909173500_decisions_drop_error_code.sql │ ├── 20240910221600_decision_rules_org_id_idx.sql │ ├── 20240923141200_remove_batch_trigger_sql.sql │ ├── 20240925120000_decision_rules_toast_setting.sql │ ├── 20241002123300_reworked_batch_execution.sql │ ├── 20241002223100_unique_batch_per_scenario.sql │ ├── 20241015171200_fields_and_constraint_cleanup.sql │ ├── 20241018170100_index_for_async_decision_creation.sql │ ├── 20241104103800_drop_constraint_enum_values.sql │ ├── 20241105131500_add_use_marble_db_schema_as_default.sql │ ├── 20241105143100_drop_analytics_schema.sql │ ├── 
20241105151948_decision_phantom.sql │ ├── 20241105152000_scenario_testrun.sql │ ├── 20241105152100_decision_phantom_continued.sql │ ├── 20241203135200_scenario_rules_stable_id.sql │ ├── 20241204215200_phantom_decisions_index.sql │ ├── 20241209154100_org_default_timezone.sql │ ├── 20241210134900_licences_test_run.sql │ ├── 20241218115400_improve_decisions_indexes.sql │ ├── 20250102151657_case_name_template.sql │ ├── 20250108102844_add_organization_feature_access_table.sql │ ├── 20250117095221_fuzzy_match_on_case_name.sql │ ├── 20250117125000_add_sanction_check_config.sql │ ├── 20250120101100_sanction_check.sql │ ├── 20250210144400_create_sanctions_whitelist.sql │ ├── 20250217152700_drop_sanction_check_score_modif.sql │ ├── 20250217162300_change_sanction_check_status_check_and_fk_cascade.sql │ ├── 20250218103800_add_sanction_check_error_codes.sql │ ├── 20250226112700_add_sanction_check_match_enriched.sql │ ├── 20250303102000_create_test_run_summaries.sql │ ├── 20250303102400_add_sanction_check_stable_id.sql │ ├── 20250306160800_add_updated_time_to_test_runs.sql │ ├── 20250307145800_add_case_snoozes.sql │ ├── 20250310143900_create_case_assignment.sql │ ├── 20250317155200_add_index_on_whitelist_entity_id.sql │ ├── 20250319174700_remove_whitelist_user_non_null.sql │ ├── 20250321221500_set_snooze_user_nullable.sql │ ├── 20250325084800_create_entity_annotations.sql │ ├── 20250416141100_create_tag_types.sql │ ├── 20250424120000_add_fuzzystrmatch_extension.sql │ ├── 20250428080000_cases_new_statuses.sql │ ├── 20250428080100_add_suspicious_activity_reports.sql │ ├── 20250428080200_add_case_waiting_for_action.sql │ ├── 20250428080300_add_inbox_escalation.sql │ ├── 20250428083000_create_data_model_table_options.sql │ ├── 20250506113500_remove_snooze_groups_cascade.sql │ ├── 20250512144400_change_case_index.sql │ ├── 20250516140900_decision_rules_offloading.sql │ └── 20250604153400_user_ai_assist_setting.sql ├── name_recognition_repository.go ├── 
name_recognition_repository_test.go ├── offloading_repository.go ├── opensanctions_repository.go ├── opensanctions_repository_test.go ├── organization_repository.go ├── organization_schema_repository.go ├── pg_indexes │ ├── pg_indexes.go │ └── pg_indexes_test.go ├── postgres │ ├── api_keys.go │ ├── organizations.go │ ├── postgres.go │ └── users.go ├── repositories.go ├── rule_snoozes.go ├── rules.go ├── sanction_check_config_repository.go ├── sanction_check_repository.go ├── sanction_check_whitelist_repository.go ├── scenario_iterations_read.go ├── scenario_iterations_write.go ├── scenario_publications.go ├── scenario_testrun.go ├── scenarios_read.go ├── scenarios_write.go ├── scheduled_executions.go ├── sql_to_model.go ├── sql_to_row.go ├── suspicious_activity_report_repository.go ├── table_name_with_schema.go ├── tag_repository.go ├── task_queue_repository.go ├── transfer_alerts.go ├── transfer_mappings.go ├── transfercheck_enrichment.go ├── transfercheck_partners.go ├── upload_log_repository.go ├── user_repository.go ├── utils.go └── webhook_events_repository.go ├── specs ├── public_api.yaml ├── readme.md ├── transfercheck_api.yaml └── v1_public_api.yaml ├── tools.go ├── usecases ├── analytics_usecase.go ├── api_key_usecase.go ├── api_key_usecase_test.go ├── ast_eval │ ├── evaluate │ │ ├── adaptArgument.go │ │ ├── adaptArgument_test.go │ │ ├── dry_run_fake_value.go │ │ ├── eval_comparaison.go │ │ ├── eval_comparaison_test.go │ │ ├── eval_contains_any.go │ │ ├── eval_contains_any_test.go │ │ ├── eval_equal.go │ │ ├── eval_equal_test.go │ │ ├── eval_fuzzy_match.go │ │ ├── eval_fuzzy_match_test.go │ │ ├── eval_is_empty.go │ │ ├── eval_is_empty_test.go │ │ ├── eval_is_multiple_of.go │ │ ├── eval_is_multiple_of_test.go │ │ ├── eval_not.go │ │ ├── eval_not_equal.go │ │ ├── eval_not_equal_test.go │ │ ├── eval_not_test.go │ │ ├── eval_string_concat.go │ │ ├── eval_string_concat_test.go │ │ ├── eval_string_contains.go │ │ ├── eval_string_contains_test.go │ │ ├── 
eval_string_in_list.go │ │ ├── eval_string_in_list_test.go │ │ ├── eval_string_starts_ends_with.go │ │ ├── eval_string_template.go │ │ ├── eval_string_template_test.go │ │ ├── evaluate_aggregator.go │ │ ├── evaluate_arithmetic.go │ │ ├── evaluate_arithmetic_divide.go │ │ ├── evaluate_arithmetic_divide_test.go │ │ ├── evaluate_arithmetic_test.go │ │ ├── evaluate_boolean_arithmetic.go │ │ ├── evaluate_boolean_arithmetic_test.go │ │ ├── evaluate_custom_list_values.go │ │ ├── evaluate_custom_list_values_test.go │ │ ├── evaluate_database_access.go │ │ ├── evaluate_database_access_test.go │ │ ├── evaluate_filter.go │ │ ├── evaluate_filter_test.go │ │ ├── evaluate_fuzzy_match_options.go │ │ ├── evaluate_fuzzy_match_options_test.go │ │ ├── evaluate_list.go │ │ ├── evaluate_list_test.go │ │ ├── evaluate_read_payload.go │ │ ├── evaluate_time.go │ │ ├── evaluate_time_arithmetic.go │ │ ├── evaluate_time_arithmetic_test.go │ │ ├── evaluate_time_test.go │ │ ├── evaluate_timestamp_extract.go │ │ ├── evaluate_timestamp_extract_test.go │ │ ├── evaluate_undefined.go │ │ ├── evaluate_undefined_test.go │ │ ├── evaluator.go │ │ ├── helpers.go │ │ ├── to_float64.go │ │ ├── to_float64_test.go │ │ ├── to_int64.go │ │ └── to_int64_test.go │ ├── evaluate_ast.go │ ├── evaluate_ast_expression.go │ ├── evaluate_ast_test.go │ ├── evaluate_environment.go │ ├── evaluate_environment_factory.go │ └── weighted_nodes.go ├── ast_expression_usecase.go ├── ast_expression_usecase_test.go ├── case_usecase.go ├── custom_list_usecase.go ├── data_model_helpers_test.go ├── data_model_usecase.go ├── data_model_usecase_test.go ├── decision_phantom │ └── decision_phantom.go ├── decision_usecase.go ├── decision_workflows │ └── decision_workflows.go ├── entity_annotations_usecase.go ├── evaluate_scenario │ ├── data_accessor.go │ ├── evaluate_sanction_check.go │ ├── evaluate_sanction_check_test.go │ └── evaluate_scenario.go ├── executor_factory │ ├── executor_factory.go │ ├── executor_factory_stub.go │ ├── 
helpers.go │ └── interfaces.go ├── feature_access │ └── feature_access_reader.go ├── inboxes │ ├── inbox_users.go │ ├── inboxes.go │ └── inboxes_test.go ├── inboxes_usecase.go ├── inboxes_usecase_test.go ├── indexes │ ├── aggregate_query.go │ ├── aggregate_query_test.go │ ├── aggregate_query_to_idx_test.go │ ├── concrete_index.go │ ├── index_editor.go │ ├── index_editor_test.go │ ├── index_family.go │ └── index_family_test.go ├── ingested_data_reader_usecase.go ├── ingestion_usecase.go ├── ingestion_usecase_test.go ├── license_usecase.go ├── liveness.go ├── offloaded_reader.go ├── organization │ └── organization_creator.go ├── organization_usecase.go ├── partner_usecase.go ├── payload_parser │ ├── payload.go │ └── payload_test.go ├── rule_snoozes.go ├── rules_usecase.go ├── sanction_check_config_usecase.go ├── sanction_check_usecase.go ├── sanction_check_usecase_mock_test.go ├── sanction_check_usecase_test.go ├── scenario_iterations_usecase.go ├── scenario_publication_usecase_test.go ├── scenario_publications_usecase.go ├── scenario_test_run_usecase.go ├── scenario_test_run_usecase_test.go ├── scenario_usecase.go ├── scenario_usecase_test.go ├── scenarios │ ├── scenario_and_iteration.go │ ├── scenario_and_iteration_test.go │ ├── scenario_publisher.go │ ├── scenario_publisher_test.go │ ├── scenario_validation.go │ └── scenario_validation_test.go ├── scheduled_execution │ ├── async_decision_job.go │ ├── async_scheduled_exec_status_job.go │ ├── batch_filtering.go │ ├── batch_filtering_test.go │ ├── export_schedule_execution.go │ ├── index_cleanup_job.go │ ├── index_creation_job.go │ ├── match_enrichment_job.go │ ├── offloading_job.go │ ├── run_scheduled_execution.go │ ├── schedule_scenarios.go │ └── test_run_summary_job.go ├── scheduled_execution_usecase.go ├── scheduled_execution_usecase_test.go ├── security │ ├── enforce_security.go │ ├── enforce_security_analytics.go │ ├── enforce_security_annotation.go │ ├── enforce_security_api_key.go │ ├── 
enforce_security_case.go │ ├── enforce_security_custom_list.go │ ├── enforce_security_decision.go │ ├── enforce_security_decision_phantom.go │ ├── enforce_security_inboxes.go │ ├── enforce_security_inboxes_test.go │ ├── enforce_security_ingestion.go │ ├── enforce_security_license.go │ ├── enforce_security_organization.go │ ├── enforce_security_partner.go │ ├── enforce_security_sanction_checks.go │ ├── enforce_security_scenario.go │ ├── enforce_security_snoozes.go │ ├── enforce_security_tags.go │ ├── enforce_security_testrun.go │ ├── enforce_security_transfer_check.go │ ├── enforce_security_user.go │ ├── enforce_security_user_test.go │ └── enforce_security_webhook.go ├── seed_usecase.go ├── signup_usecase.go ├── suspicious_activity_report_usecase.go ├── tag_usecase.go ├── task_queue.go ├── testing_helpers.go ├── token │ ├── generator.go │ ├── generator_test.go │ ├── validator.go │ └── validator_test.go ├── tracking │ └── track.go ├── transfer_alerts.go ├── transfer_check_usecase.go ├── transfers_data_read │ └── transfers_data_read.go ├── usecases.go ├── usecases_with_creds.go ├── user_usecase.go ├── version_usecase.go ├── webhook_events_usecase.go └── webhooks_usecase.go └── utils ├── auth.go ├── auth_test.go ├── blanks.go ├── context_credentials.go ├── context_keys.go ├── context_logging.go ├── context_segment.go ├── context_tracing.go ├── db_column_value_map.go ├── db_column_value_map_test.go ├── dummy_datamodel.go ├── env_vars.go ├── filter.go ├── logging.go ├── organization_security.go ├── organization_security_test.go ├── pg_utils.go ├── pointers.go ├── sentry.go ├── testing.go └── uuid.go /.dockerignore: -------------------------------------------------------------------------------- 1 | # ignore already-built apps 2 | output 3 | output/* 4 | -------------------------------------------------------------------------------- /.firebaserc: -------------------------------------------------------------------------------- 1 | {} 2 | 
-------------------------------------------------------------------------------- /.github/workflows/backend_test_workflow.yaml: -------------------------------------------------------------------------------- 1 | name: Test back-end 2 | 3 | on: [workflow_call] 4 | 5 | permissions: 6 | contents: read 7 | pull-requests: read 8 | checks: write 9 | 10 | jobs: 11 | test_backend: 12 | name: Test back-end 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v4 16 | 17 | - name: Set up Go 18 | uses: actions/setup-go@v5 19 | with: 20 | go-version-file: ./go.mod 21 | 22 | - name: golangci-lint 23 | uses: golangci/golangci-lint-action@v7 24 | with: 25 | version: v2.1 26 | args: --timeout=2m 27 | 28 | - name: Build 29 | run: go build -v ./... 30 | 31 | - name: Test 32 | run: go test ./... 33 | -------------------------------------------------------------------------------- /.github/workflows/build_and_test.yaml: -------------------------------------------------------------------------------- 1 | name: Build and test back-end repo 2 | 3 | on: [pull_request] 4 | 5 | concurrency: 6 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} 7 | cancel-in-progress: true 8 | 9 | jobs: 10 | test_backend: 11 | permissions: 12 | contents: read 13 | pull-requests: read 14 | checks: write 15 | uses: ./.github/workflows/backend_test_workflow.yaml 16 | -------------------------------------------------------------------------------- /.github/workflows/deploy_production.yaml: -------------------------------------------------------------------------------- 1 | name: Deploy Production 2 | 3 | on: 4 | push: 5 | tags: 6 | - "v[0-9]+.[0-9]+.[0-9]+" 7 | 8 | concurrency: 9 | group: ${{ github.workflow }}-${{ github.ref }} 10 | cancel-in-progress: true 11 | 12 | jobs: 13 | test_backend: 14 | permissions: 15 | contents: read 16 | pull-requests: read 17 | checks: write 18 | uses: ./.github/workflows/backend_test_workflow.yaml 19 | 20 | 
build_and_deploy_backend: 21 | needs: test_backend 22 | uses: ./.github/workflows/backend_deploy_workflow.yaml 23 | permissions: 24 | contents: read 25 | id-token: "write" # needed for using open id token to authenticate with GCP services 26 | with: 27 | environment: "production" 28 | version: ${{ github.ref_name }} 29 | secrets: 30 | segment_write_key_opensource: ${{ secrets.SEGMENT_WRITE_KEY_OPENSOURCE }} 31 | -------------------------------------------------------------------------------- /.github/workflows/deploy_staging.yaml: -------------------------------------------------------------------------------- 1 | name: Deploy Staging 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | concurrency: 9 | group: ${{ github.workflow }}-${{ github.ref }} 10 | cancel-in-progress: true 11 | 12 | jobs: 13 | test_backend: 14 | permissions: 15 | contents: read 16 | pull-requests: read 17 | checks: write 18 | uses: ./.github/workflows/backend_test_workflow.yaml 19 | 20 | build_and_deploy_backend: 21 | needs: test_backend 22 | uses: ./.github/workflows/backend_deploy_workflow.yaml 23 | permissions: 24 | contents: read 25 | id-token: "write" # needed for using open id token to authenticate with GCP services 26 | with: 27 | environment: "staging" 28 | version: latest 29 | secrets: 30 | segment_write_key_opensource: ${{ secrets.SEGMENT_WRITE_KEY_OPENSOURCE }} 31 | -------------------------------------------------------------------------------- /.vscode/.user-settings.sample.json: -------------------------------------------------------------------------------- 1 | { 2 | "mise.configureExtensionsUseSymLinks": true, 3 | "go.lintTool": "golangci-lint-v2", 4 | "go.goroot": "${workspaceFolder}/.vscode/mise-tools/goRoot", 5 | "debug.javascript.defaultRuntimeExecutable": { 6 | "pwa-node": "${workspaceFolder}/.vscode/mise-tools/node" 7 | }, 8 | "go.alternateTools": { 9 | "go": "${workspaceFolder}/.vscode/mise-tools/go", 10 | "dlv": "${workspaceFolder}/.vscode/mise-tools/dlv", 11 | 
"gopls": "${workspaceFolder}/.vscode/mise-tools/gopls" 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": [ 3 | "golang.go", 4 | "inferrinizzard.prettier-sql-vscode", 5 | "hverlin.mise-vscode" 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM golang:1.24 AS build 2 | 3 | ARG MARBLE_VERSION=dev 4 | ARG SEGMENT_WRITE_KEY= 5 | 6 | WORKDIR /go/src/app 7 | COPY . . 8 | 9 | RUN go get 10 | 11 | RUN CGO_ENABLED=0 go build -o /go/bin/app -ldflags="-X 'main.apiVersion=${MARBLE_VERSION}' -X 'main.segmentWriteKey=${SEGMENT_WRITE_KEY}'" 12 | 13 | FROM alpine:3.19 14 | 15 | COPY --from=build /go/bin/app / 16 | COPY --from=build /usr/local/go/lib/time/zoneinfo.zip / 17 | ENV ZONEINFO=/zoneinfo.zip 18 | 19 | ENV PORT=${PORT:-8080} 20 | EXPOSE $PORT 21 | 22 | ENTRYPOINT ["/app"] -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | help: ## Display this help 2 | @egrep -h '\s##\s' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m %-30s\033[0m %s\n", $$1, $$2}' 3 | 4 | generate_migration: ## Generate a new migration 5 | @read -p "Enter migration name: " name; \ 6 | goose -dir repositories/migrations/ create $$name sql 7 | 8 | generate_api_clients: ## Generate API clients 9 | go generate ./api-clients/convoy/generate.go 10 | -------------------------------------------------------------------------------- /api-clients/convoy/cfg.yaml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: 
$schema=https://raw.githubusercontent.com/deepmap/oapi-codegen/HEAD/configuration-schema.json 2 | package: convoy 3 | output: client.gen.go 4 | generate: 5 | models: true 6 | client: true 7 | output-options: 8 | include-operation-ids: 9 | - CreateEndpointFanoutEvent 10 | - CreateEndpoint 11 | - CreateSubscription 12 | - GetEndpoint 13 | - GetEndpoints 14 | - GetSubscriptions 15 | - DeleteEndpoint 16 | - UpdateEndpoint 17 | - UpdateSubscription 18 | -------------------------------------------------------------------------------- /api-clients/convoy/generate.go: -------------------------------------------------------------------------------- 1 | package convoy 2 | 3 | //go:generate go run github.com/oapi-codegen/oapi-codegen/v2/cmd/oapi-codegen -config cfg.yaml api.yaml 4 | -------------------------------------------------------------------------------- /api/configuration.go: -------------------------------------------------------------------------------- 1 | package api 2 | 3 | import "time" 4 | 5 | type Configuration struct { 6 | Env string 7 | AppName string 8 | Port string 9 | MarbleAppUrl string 10 | MarbleBackofficeUrl string 11 | RequestLoggingLevel string 12 | TokenLifetimeMinute int 13 | SegmentWriteKey string 14 | DisableSegment bool 15 | BatchTimeout time.Duration 16 | DecisionTimeout time.Duration 17 | DefaultTimeout time.Duration 18 | } 19 | -------------------------------------------------------------------------------- /api/handle_analytics.go: -------------------------------------------------------------------------------- 1 | package api 2 | 3 | import ( 4 | "net/http" 5 | 6 | "github.com/gin-gonic/gin" 7 | 8 | "github.com/checkmarble/marble-backend/dto" 9 | "github.com/checkmarble/marble-backend/pure_utils" 10 | "github.com/checkmarble/marble-backend/usecases" 11 | "github.com/checkmarble/marble-backend/utils" 12 | ) 13 | 14 | func handleListAnalytics(uc usecases.Usecases) func(c *gin.Context) { 15 | return func(c *gin.Context) { 16 | ctx := 
c.Request.Context() 17 | organizationId, err := utils.OrganizationIdFromRequest(c.Request) 18 | if presentError(ctx, c, err) { 19 | return 20 | } 21 | 22 | usecase := usecasesWithCreds(ctx, uc).NewAnalyticsUseCase() 23 | analytics, err := usecase.ListAnalytics(ctx, organizationId) 24 | if presentError(ctx, c, err) { 25 | return 26 | } 27 | 28 | c.JSON(http.StatusOK, gin.H{ 29 | "analytics": pure_utils.Map(analytics, dto.AdaptAnalyticsDto), 30 | }) 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /api/handle_editor.go: -------------------------------------------------------------------------------- 1 | package api 2 | 3 | import ( 4 | "net/http" 5 | 6 | "github.com/gin-gonic/gin" 7 | 8 | "github.com/checkmarble/marble-backend/dto" 9 | "github.com/checkmarble/marble-backend/pure_utils" 10 | "github.com/checkmarble/marble-backend/usecases" 11 | ) 12 | 13 | func handleGetEditorIdentifiers(uc usecases.Usecases) func(c *gin.Context) { 14 | return func(c *gin.Context) { 15 | ctx := c.Request.Context() 16 | scenarioID := c.Param("scenario_id") 17 | 18 | usecase := usecasesWithCreds(ctx, uc).AstExpressionUsecase() 19 | result, err := usecase.EditorIdentifiers(ctx, scenarioID) 20 | 21 | if presentError(ctx, c, err) { 22 | return 23 | } 24 | 25 | databaseNodes, err := pure_utils.MapErr(result.DatabaseAccessors, dto.AdaptNodeDto) 26 | if presentError(ctx, c, err) { 27 | return 28 | } 29 | payloadbaseNodes, err := pure_utils.MapErr(result.PayloadAccessors, dto.AdaptNodeDto) 30 | if presentError(ctx, c, err) { 31 | return 32 | } 33 | 34 | c.JSON(http.StatusOK, gin.H{ 35 | "database_accessors": databaseNodes, 36 | "payload_accessors": payloadbaseNodes, 37 | }) 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /api/handle_liveness_probe.go: -------------------------------------------------------------------------------- 1 | package api 2 | 3 | import ( 4 | "net/http" 5 | 6 | 
"github.com/checkmarble/marble-backend/usecases" 7 | "github.com/gin-gonic/gin" 8 | ) 9 | 10 | func handleLivenessProbe(uc usecases.Usecases) func(c *gin.Context) { 11 | return func(c *gin.Context) { 12 | ctx := c.Request.Context() 13 | usecase := uc.NewLivenessUsecase() 14 | err := usecase.Liveness(ctx) 15 | if presentError(ctx, c, err) { 16 | return 17 | } 18 | 19 | c.JSON(http.StatusOK, gin.H{ 20 | "mood": "Feu flammes !", 21 | }) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /api/handle_signup_status.go: -------------------------------------------------------------------------------- 1 | package api 2 | 3 | import ( 4 | "net/http" 5 | 6 | "github.com/checkmarble/marble-backend/usecases" 7 | "github.com/gin-gonic/gin" 8 | ) 9 | 10 | func handleSignupStatus(uc usecases.Usecases) func(c *gin.Context) { 11 | return func(c *gin.Context) { 12 | ctx := c.Request.Context() 13 | signupUc := usecases.NewSignupUsecase(uc.NewExecutorFactory(), 14 | &uc.Repositories.MarbleDbRepository, 15 | &uc.Repositories.MarbleDbRepository, 16 | ) 17 | 18 | migrationsRunForOrgs, hasAnOrganization, err := signupUc.HasAnOrganization(ctx) 19 | if presentError(ctx, c, err) { 20 | return 21 | } 22 | 23 | migrationsRunForUsers, hasAUser, err := signupUc.HasAUser(ctx) 24 | if presentError(ctx, c, err) { 25 | return 26 | } 27 | 28 | c.JSON(http.StatusOK, gin.H{ 29 | "migrations_run": migrationsRunForOrgs && migrationsRunForUsers, 30 | "has_an_organization": hasAnOrganization, 31 | "has_a_user": hasAUser, 32 | }) 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /api/handle_version.go: -------------------------------------------------------------------------------- 1 | package api 2 | 3 | import ( 4 | "net/http" 5 | 6 | "github.com/checkmarble/marble-backend/usecases" 7 | "github.com/gin-gonic/gin" 8 | ) 9 | 10 | func handleVersion(uc usecases.Usecases) func(c *gin.Context) { 11 | return func(c 
*gin.Context) { 12 | uc := uc.NewVersionUsecase() 13 | apiVersion := uc.GetApiVersion() 14 | 15 | c.JSON(http.StatusOK, gin.H{ 16 | "version": apiVersion, 17 | }) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /api/parse_authorization_bearer_header.go: -------------------------------------------------------------------------------- 1 | package api 2 | 3 | import ( 4 | "fmt" 5 | "net/http" 6 | "strings" 7 | 8 | "github.com/checkmarble/marble-backend/models" 9 | ) 10 | 11 | func ParseAuthorizationBearerHeader(header http.Header) (string, error) { 12 | authorization := header.Get("Authorization") 13 | if authorization == "" { 14 | return "", nil 15 | } 16 | 17 | authHeader := strings.Split(header.Get("Authorization"), "Bearer ") 18 | if len(authHeader) != 2 { 19 | return "", fmt.Errorf("malformed token: %w", models.UnAuthorizedError) 20 | } 21 | return authHeader[1], nil 22 | } 23 | -------------------------------------------------------------------------------- /api/parse_authorization_bearer_header_test.go: -------------------------------------------------------------------------------- 1 | package api 2 | 3 | import ( 4 | "net/http" 5 | "testing" 6 | 7 | "github.com/stretchr/testify/assert" 8 | 9 | "github.com/checkmarble/marble-backend/models" 10 | ) 11 | 12 | func TestParseAuthorizationBearerHeader_Norminal(t *testing.T) { 13 | header := http.Header{} 14 | header.Add("Authorization", "Bearer TOKEN") 15 | 16 | authorization, err := ParseAuthorizationBearerHeader(header) 17 | assert.NoError(t, err) 18 | assert.Equal(t, authorization, "TOKEN") 19 | } 20 | 21 | func TestParseAuthorizationBearerHeader_EmptyHeader(t *testing.T) { 22 | authorization, err := ParseAuthorizationBearerHeader(http.Header{}) 23 | assert.NoError(t, err) 24 | assert.Empty(t, authorization) 25 | } 26 | 27 | func TestParseAuthorizationBearerHeader_BadBearerFormat(t *testing.T) { 28 | header := http.Header{} 29 | header.Add("Authorization", 
"MalformedBearer") 30 | 31 | _, err := ParseAuthorizationBearerHeader(header) 32 | assert.ErrorIs(t, err, models.UnAuthorizedError) 33 | } 34 | -------------------------------------------------------------------------------- /api/usecases_with_context_credentials.go: -------------------------------------------------------------------------------- 1 | package api 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/checkmarble/marble-backend/usecases" 7 | "github.com/checkmarble/marble-backend/utils" 8 | ) 9 | 10 | func usecasesWithCreds(ctx context.Context, uc usecases.Usecases) *usecases.UsecasesWithCreds { 11 | creds, found := utils.CredentialsFromCtx(ctx) 12 | if !found { 13 | panic("no credentials in context") 14 | } 15 | 16 | return &usecases.UsecasesWithCreds{ 17 | Usecases: uc, 18 | Credentials: creds, 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /cmd/config.go: -------------------------------------------------------------------------------- 1 | package cmd 2 | 3 | type CompiledConfig struct { 4 | Version string 5 | SegmentWriteKey string 6 | } 7 | -------------------------------------------------------------------------------- /contrib/firebase-local-data.example/auth_export/config.json: -------------------------------------------------------------------------------- 1 | {"signIn":{"allowDuplicateEmails":false},"emailPrivacyConfig":{"enableImprovedEmailPrivacy":false}} -------------------------------------------------------------------------------- /contrib/firebase-local-data.example/firebase-export-metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "13.2.1", 3 | "auth": { 4 | "version": "13.2.1", 5 | "path": "auth_export" 6 | } 7 | } -------------------------------------------------------------------------------- /contrib/yente-datasets.yml: -------------------------------------------------------------------------------- 1 | catalogs: 2 | 
- url: "https://data.opensanctions.org/datasets/latest/index.json" 3 | scope: us_sanctions 4 | resource_name: entities.ftm.json 5 | datasets: [] 6 | 7 | -------------------------------------------------------------------------------- /dto/analytics_dto.go: -------------------------------------------------------------------------------- 1 | package dto 2 | 3 | import "github.com/checkmarble/marble-backend/models" 4 | 5 | type Analytics struct { 6 | EmbeddingType string `json:"embedding_type"` 7 | SignedEmbeddingURL string `json:"signed_embedding_url"` 8 | } 9 | 10 | func AdaptAnalyticsDto(analytics models.Analytics) Analytics { 11 | return Analytics{ 12 | EmbeddingType: analytics.EmbeddingType.String(), 13 | SignedEmbeddingURL: analytics.SignedEmbeddingURL, 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /dto/ast_validation_dto.go: -------------------------------------------------------------------------------- 1 | package dto 2 | 3 | import ( 4 | "github.com/checkmarble/marble-backend/models" 5 | "github.com/checkmarble/marble-backend/models/ast" 6 | "github.com/checkmarble/marble-backend/pure_utils" 7 | ) 8 | 9 | type AstValidationDto struct { 10 | Errors []ScenarioValidationErrorDto `json:"errors"` 11 | Evaluation ast.NodeEvaluationDto `json:"evaluation"` 12 | } 13 | 14 | func AdaptAstValidationDto(s models.AstValidation) AstValidationDto { 15 | return AstValidationDto{ 16 | Errors: pure_utils.Map(s.Errors, AdaptScenarioValidationErrorDto), 17 | Evaluation: ast.AdaptNodeEvaluationDto(s.Evaluation), 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /dto/case_contributor_dto.go: -------------------------------------------------------------------------------- 1 | package dto 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | ) 8 | 9 | type APICaseContributor struct { 10 | Id string `json:"id"` 11 | CaseId string `json:"case_id"` 
12 | UserId string `json:"user_id"` 13 | CreatedAt time.Time `json:"created_at"` 14 | } 15 | 16 | func NewAPICaseContributor(caseContributor models.CaseContributor) APICaseContributor { 17 | return APICaseContributor{ 18 | Id: caseContributor.Id, 19 | CaseId: caseContributor.CaseId, 20 | UserId: caseContributor.UserId, 21 | CreatedAt: caseContributor.CreatedAt, 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /dto/case_event_dto.go: -------------------------------------------------------------------------------- 1 | package dto 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/guregu/null/v5" 8 | ) 9 | 10 | type APICaseEvent struct { 11 | Id string `json:"id"` 12 | CaseId string `json:"case_id"` 13 | UserId null.String `json:"user_id"` 14 | CreatedAt time.Time `json:"created_at"` 15 | EventType string `json:"event_type"` 16 | AdditionalNote string `json:"additional_note"` 17 | NewValue string `json:"new_value"` 18 | ResourceType string `json:"resource_type"` 19 | ResourceId string `json:"resource_id"` 20 | } 21 | 22 | func NewAPICaseEvent(caseEvent models.CaseEvent) APICaseEvent { 23 | return APICaseEvent{ 24 | Id: caseEvent.Id, 25 | CaseId: caseEvent.CaseId, 26 | UserId: caseEvent.UserId, 27 | CreatedAt: caseEvent.CreatedAt, 28 | EventType: string(caseEvent.EventType), 29 | AdditionalNote: caseEvent.AdditionalNote, 30 | NewValue: caseEvent.NewValue, 31 | ResourceType: string(caseEvent.ResourceType), 32 | ResourceId: caseEvent.ResourceId, 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /dto/case_files_dto.go: -------------------------------------------------------------------------------- 1 | package dto 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | ) 8 | 9 | type APICaseFile struct { 10 | Id string `json:"id"` 11 | CaseId string `json:"case_id"` 12 | CreatedAt time.Time 
`json:"created_at"` 13 | FileName string `json:"file_name"` 14 | } 15 | 16 | func NewAPICaseFile(caseFile models.CaseFile) APICaseFile { 17 | return APICaseFile{ 18 | Id: caseFile.Id, 19 | CaseId: caseFile.CaseId, 20 | CreatedAt: caseFile.CreatedAt, 21 | FileName: caseFile.FileName, 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /dto/case_tag_dto.go: -------------------------------------------------------------------------------- 1 | package dto 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | ) 8 | 9 | type APICaseTag struct { 10 | Id string `json:"id"` 11 | CaseId string `json:"case_id"` 12 | TagId string `json:"tag_id"` 13 | CreatedAt time.Time `json:"created_at"` 14 | } 15 | 16 | func NewAPICaseTag(t models.CaseTag) APICaseTag { 17 | apiCaseTag := APICaseTag{ 18 | Id: t.Id, 19 | CaseId: t.CaseId, 20 | TagId: t.TagId, 21 | CreatedAt: t.CreatedAt, 22 | } 23 | 24 | return apiCaseTag 25 | } 26 | 27 | type CreateCaseTagBody struct { 28 | TagIds []string `json:"tag_ids" binding:"required"` 29 | } 30 | -------------------------------------------------------------------------------- /dto/pagination.go: -------------------------------------------------------------------------------- 1 | package dto 2 | 3 | import "github.com/checkmarble/marble-backend/models" 4 | 5 | type PaginationAndSorting struct { 6 | OffsetId string `form:"offset_id"` 7 | Sorting string `form:"sorting"` 8 | Order string `form:"order"` 9 | Limit int `form:"limit" binding:"max=100"` 10 | } 11 | 12 | func AdaptPaginationAndSorting(input PaginationAndSorting) models.PaginationAndSorting { 13 | return models.PaginationAndSorting{ 14 | OffsetId: input.OffsetId, 15 | Sorting: models.SortingFieldFrom(input.Sorting), 16 | Order: models.SortingOrderFrom(input.Order), 17 | Limit: input.Limit, 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /dto/partner_dto.go: 
-------------------------------------------------------------------------------- 1 | package dto 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/guregu/null/v5" 8 | ) 9 | 10 | type Partner struct { 11 | Id string `json:"id"` 12 | CreatedAt time.Time `json:"created_at"` 13 | Name string `json:"name"` 14 | Bic string `json:"bic"` 15 | } 16 | 17 | func AdaptPartnerDto(partner models.Partner) Partner { 18 | return Partner{ 19 | Id: partner.Id, 20 | CreatedAt: partner.CreatedAt, 21 | Name: partner.Name, 22 | Bic: partner.Bic, 23 | } 24 | } 25 | 26 | type PartnerCreateBody struct { 27 | Name string `json:"name"` 28 | Bic string `json:"bic"` 29 | } 30 | 31 | func AdaptPartnerCreateInput(dto PartnerCreateBody) models.PartnerCreateInput { 32 | return models.PartnerCreateInput{ 33 | Name: dto.Name, 34 | Bic: dto.Bic, 35 | } 36 | } 37 | 38 | type PartnerUpdateBody struct { 39 | Name null.String `json:"name"` 40 | Bic null.String `json:"bic"` 41 | } 42 | 43 | func AdaptPartnerUpdateInput(dto PartnerUpdateBody) models.PartnerUpdateInput { 44 | return models.PartnerUpdateInput{ 45 | Name: dto.Name, 46 | Bic: dto.Bic, 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /dto/suspicious_activity_report.go: -------------------------------------------------------------------------------- 1 | package dto 2 | 3 | import ( 4 | "mime/multipart" 5 | "time" 6 | 7 | "github.com/checkmarble/marble-backend/models" 8 | ) 9 | 10 | type SuspiciousActivityReportDto struct { 11 | ReportId string `json:"id"` //nolint:tagliatelle 12 | Status string `json:"status"` 13 | HasFile bool `json:"has_file"` 14 | CreatedBy string `json:"created_by"` 15 | UploadedBy *string `json:"uploaded_by,omitempty"` 16 | CreatedAt time.Time `json:"created_at"` 17 | } 18 | 19 | type SuspiciousActivityReportParams struct { 20 | Status *string `form:"status" binding:"omitempty,oneof=pending completed"` 21 | File 
*multipart.FileHeader `form:"file"` 22 | } 23 | 24 | func AdaptSuspiciousActivityReportDto(model models.SuspiciousActivityReport) SuspiciousActivityReportDto { 25 | return SuspiciousActivityReportDto{ 26 | ReportId: model.ReportId, 27 | Status: model.Status.String(), 28 | HasFile: model.UploadedBy != nil, 29 | CreatedBy: model.CreatedBy, 30 | UploadedBy: model.UploadedBy, 31 | CreatedAt: model.CreatedAt, 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /dto/tag_dto.go: -------------------------------------------------------------------------------- 1 | package dto 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | ) 8 | 9 | type APITag struct { 10 | Id string `json:"id"` 11 | Name string `json:"name"` 12 | Color string `json:"color"` 13 | OrganizationId string `json:"organization_id"` 14 | CreatedAt time.Time `json:"created_at"` 15 | CasesCount *int `json:"cases_count"` 16 | Target string `json:"target"` 17 | } 18 | 19 | func AdaptTagDto(t models.Tag) APITag { 20 | return APITag{ 21 | Id: t.Id, 22 | Name: t.Name, 23 | Color: t.Color, 24 | OrganizationId: t.OrganizationId, 25 | CreatedAt: t.CreatedAt, 26 | CasesCount: t.CasesCount, 27 | Target: string(t.Target), 28 | } 29 | } 30 | 31 | type CreateTagBody struct { 32 | Target string `json:"target" binding:"omitempty,oneof=case object"` 33 | Name string `json:"name" binding:"required"` 34 | Color string `json:"color" binding:"required,hexcolor"` 35 | } 36 | 37 | type UpdateTagBody struct { 38 | Name string `json:"name"` 39 | Color string `json:"color" binding:"hexcolor"` 40 | } 41 | -------------------------------------------------------------------------------- /dto/upload_log_dto.go: -------------------------------------------------------------------------------- 1 | package dto 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | ) 8 | 9 | type UploadLogDto struct { 10 | Status string 
`json:"status"` 11 | StartedAt time.Time `json:"started_at"` 12 | FinishedAt *time.Time `json:"finished_at"` 13 | LinesProcessed int `json:"lines_processed"` 14 | NumRowsIngested int `json:"num_rows_ingested"` 15 | } 16 | 17 | func AdaptUploadLogDto(log models.UploadLog) UploadLogDto { 18 | return UploadLogDto{ 19 | Status: string(log.UploadStatus), 20 | StartedAt: log.StartedAt, 21 | FinishedAt: log.FinishedAt, 22 | LinesProcessed: log.LinesProcessed, 23 | NumRowsIngested: log.RowsIngested, 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /firebase.json: -------------------------------------------------------------------------------- 1 | { 2 | "emulators": { 3 | "auth": { 4 | "port": 9099 5 | }, 6 | "ui": { 7 | "enabled": true 8 | }, 9 | "singleProjectMode": true 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /infra/firebase_client.go: -------------------------------------------------------------------------------- 1 | package infra 2 | 3 | import ( 4 | "context" 5 | 6 | firebase "firebase.google.com/go/v4" 7 | "firebase.google.com/go/v4/auth" 8 | "github.com/cockroachdb/errors" 9 | ) 10 | 11 | func InitializeFirebase(ctx context.Context) *auth.Client { 12 | app, err := firebase.NewApp(ctx, nil) 13 | if err != nil { 14 | panic(errors.Wrap(err, "error initializing app")) 15 | } 16 | 17 | client, err := app.Auth(ctx) 18 | if err != nil { 19 | panic(errors.Wrap(err, "error getting Auth client")) 20 | } 21 | 22 | return client 23 | } 24 | -------------------------------------------------------------------------------- /infra/firebase_client_mock.go: -------------------------------------------------------------------------------- 1 | package infra 2 | 3 | import ( 4 | "context" 5 | 6 | "firebase.google.com/go/v4/auth" 7 | ) 8 | 9 | type mockedTokenVerifier struct{} 10 | 11 | func (m mockedTokenVerifier) VerifyIDToken(_ context.Context, email string) (*auth.Token, 
error) { 12 | return &auth.Token{ 13 | Firebase: auth.FirebaseInfo{ 14 | Identities: map[string]any{"email": []any{email}}, 15 | SignInProvider: "password", 16 | }, 17 | Claims: map[string]any{ 18 | "email_verified": true, 19 | }, 20 | }, nil 21 | } 22 | 23 | func NewMockedFirebaseTokenVerifier() mockedTokenVerifier { //nolint:revive 24 | return mockedTokenVerifier{} 25 | } 26 | -------------------------------------------------------------------------------- /integration_test/generate_usecases.go: -------------------------------------------------------------------------------- 1 | package integration 2 | 3 | import ( 4 | "github.com/checkmarble/marble-backend/models" 5 | "github.com/checkmarble/marble-backend/usecases" 6 | ) 7 | 8 | func generateUsecaseWithCredForMarbleAdmin(testUsecases usecases.Usecases) usecases.UsecasesWithCreds { 9 | creds := models.Credentials{Role: models.MARBLE_ADMIN} 10 | return usecases.UsecasesWithCreds{ 11 | Usecases: testUsecases, 12 | Credentials: creds, 13 | } 14 | } 15 | 16 | func generateUsecaseWithCreds( 17 | testUsecases usecases.Usecases, 18 | creds models.Credentials, 19 | ) usecases.UsecasesWithCreds { 20 | return usecases.UsecasesWithCreds{ 21 | Usecases: testUsecases, 22 | Credentials: creds, 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /jobs/generate_usecases_with_creds.go: -------------------------------------------------------------------------------- 1 | package jobs 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/usecases" 8 | ) 9 | 10 | func GenerateUsecaseWithCredForMarbleAdmin(ctx context.Context, jobUsecases usecases.Usecases) usecases.UsecasesWithCreds { 11 | creds := models.Credentials{ 12 | Role: models.MARBLE_ADMIN, 13 | OrganizationId: "", 14 | } 15 | return usecases.UsecasesWithCreds{ 16 | Usecases: jobUsecases, 17 | Credentials: creds, 18 | } 19 | } 20 | 
-------------------------------------------------------------------------------- /jobs/ingest_data_from_csv.go: -------------------------------------------------------------------------------- 1 | package jobs 2 | 3 | import ( 4 | "context" 5 | "time" 6 | 7 | "github.com/checkmarble/marble-backend/usecases" 8 | ) 9 | 10 | const csvIngestionTimeout = 1 * time.Hour 11 | 12 | func IngestDataFromCsv(ctx context.Context, uc usecases.Usecases) { 13 | executeWithMonitoring( 14 | ctx, 15 | uc, 16 | "batch-ingestion", 17 | func( 18 | ctx context.Context, usecases usecases.Usecases, 19 | ) error { 20 | usecasesWithCreds := GenerateUsecaseWithCredForMarbleAdmin(ctx, usecases) 21 | usecase := usecasesWithCreds.NewIngestionUseCase() 22 | ctx, cancel := context.WithTimeout(ctx, csvIngestionTimeout) 23 | defer cancel() 24 | return usecase.IngestDataFromCsv(ctx) 25 | }, 26 | ) 27 | } 28 | -------------------------------------------------------------------------------- /jobs/send_pending_webhook_events.go: -------------------------------------------------------------------------------- 1 | package jobs 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/checkmarble/marble-backend/usecases" 7 | ) 8 | 9 | func SendPendingWebhookEvents(ctx context.Context, uc usecases.Usecases) { 10 | executeWithMonitoring( 11 | ctx, 12 | uc, 13 | "send-webhook-events", 14 | func( 15 | ctx context.Context, usecases usecases.Usecases, 16 | ) error { 17 | usecasesWithCreds := GenerateUsecaseWithCredForMarbleAdmin(ctx, usecases) 18 | webhooksUsecase := usecasesWithCreds.NewWebhookEventsUsecase() 19 | return webhooksUsecase.RetrySendWebhookEvents(ctx) 20 | }, 21 | ) 22 | } 23 | -------------------------------------------------------------------------------- /mise.toml: -------------------------------------------------------------------------------- 1 | [tools] 2 | go = "1.24" 3 | 4 | [env] 5 | _.file = '.env' -------------------------------------------------------------------------------- 
/mocks/case_contributor_repository.go: -------------------------------------------------------------------------------- 1 | package mocks 2 | 3 | import ( 4 | "github.com/stretchr/testify/mock" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/repositories" 8 | ) 9 | 10 | type CaseContributorRepository struct { 11 | mock.Mock 12 | } 13 | 14 | func (r *CaseContributorRepository) GetCaseContributor(exec repositories.Executor, caseId, userId string) (models.CaseContributor, error) { 15 | args := r.Called(exec, caseId, userId) 16 | return args.Get(0).(models.CaseContributor), args.Error(1) 17 | } 18 | 19 | func (r *CaseContributorRepository) CreateCaseContributor(exec repositories.Executor, caseId, userId string) error { 20 | args := r.Called(exec, caseId, userId) 21 | return args.Error(0) 22 | } 23 | -------------------------------------------------------------------------------- /mocks/case_event_repository.go: -------------------------------------------------------------------------------- 1 | package mocks 2 | 3 | import ( 4 | "github.com/stretchr/testify/mock" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/repositories" 8 | ) 9 | 10 | type CaseEventRepository struct { 11 | mock.Mock 12 | } 13 | 14 | func (r *CaseEventRepository) ListCaseEvents(exec repositories.Executor, caseId string) ([]models.CaseEvent, error) { 15 | args := r.Called(exec, caseId) 16 | return args.Get(0).([]models.CaseEvent), args.Error(1) 17 | } 18 | 19 | func (r *CaseEventRepository) CreateCaseEvent(exec repositories.Executor, 20 | createCaseEventAttributes models.CreateCaseEventAttributes, newCaseEventId string, 21 | ) error { 22 | args := r.Called(exec, createCaseEventAttributes, newCaseEventId) 23 | return args.Error(0) 24 | } 25 | 26 | func (r *CaseEventRepository) BatchCreateCaseEvents(exec repositories.Executor, 27 | createCaseEventAttributes []models.CreateCaseEventAttributes, 28 | ) 
error { 29 | args := r.Called(exec, createCaseEventAttributes) 30 | return args.Error(0) 31 | } 32 | -------------------------------------------------------------------------------- /mocks/export_decisions_mock.go: -------------------------------------------------------------------------------- 1 | package mocks 2 | 3 | import ( 4 | "context" 5 | "io" 6 | 7 | "github.com/stretchr/testify/mock" 8 | ) 9 | 10 | type ExportDecisionsMock struct { 11 | mock.Mock 12 | } 13 | 14 | func (e *ExportDecisionsMock) ExportDecisions(ctx context.Context, organizationId string, scheduledExecutionId string, dest io.Writer) (int, error) { 15 | args := e.Called(ctx, organizationId, scheduledExecutionId, dest) 16 | return args.Int(0), args.Error(1) 17 | } 18 | -------------------------------------------------------------------------------- /mocks/feature_access_reader.go: -------------------------------------------------------------------------------- 1 | package mocks 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/stretchr/testify/mock" 8 | ) 9 | 10 | type FeatureAccessReader struct { 11 | mock.Mock 12 | } 13 | 14 | func (r *FeatureAccessReader) GetOrganizationFeatureAccess( 15 | ctx context.Context, 16 | organizationId string, 17 | userId *models.UserId, 18 | ) (models.OrganizationFeatureAccess, error) { 19 | args := r.Called(ctx, organizationId, userId) 20 | return args.Get(0).(models.OrganizationFeatureAccess), args.Error(1) 21 | } 22 | -------------------------------------------------------------------------------- /mocks/firebase_token_verifier.go: -------------------------------------------------------------------------------- 1 | package mocks 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/stretchr/testify/mock" 7 | 8 | "github.com/checkmarble/marble-backend/models" 9 | ) 10 | 11 | type FirebaseTokenVerifier struct { 12 | mock.Mock 13 | } 14 | 15 | func (m *FirebaseTokenVerifier) VerifyFirebaseToken(ctx context.Context, 
firebaseToken string) (models.FirebaseIdentity, error) { 16 | args := m.Called(ctx, firebaseToken) 17 | return args.Get(0).(models.FirebaseIdentity), args.Error(1) 18 | } 19 | -------------------------------------------------------------------------------- /mocks/jwt_encoder_validator.go: -------------------------------------------------------------------------------- 1 | package mocks 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/stretchr/testify/mock" 7 | 8 | "github.com/checkmarble/marble-backend/models" 9 | ) 10 | 11 | type JWTEncoderValidator struct { 12 | mock.Mock 13 | } 14 | 15 | func (m *JWTEncoderValidator) EncodeMarbleToken(expirationTime time.Time, creds models.Credentials) (string, error) { 16 | args := m.Called(expirationTime, creds) 17 | return args.String(0), args.Error(1) 18 | } 19 | 20 | func (m *JWTEncoderValidator) ValidateMarbleToken(marbleToken string) (models.Credentials, error) { 21 | args := m.Called(marbleToken) 22 | return args.Get(0).(models.Credentials), args.Error(1) 23 | } 24 | -------------------------------------------------------------------------------- /mocks/organization_repository.go: -------------------------------------------------------------------------------- 1 | package mocks 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/repositories" 8 | "github.com/stretchr/testify/mock" 9 | ) 10 | 11 | type OrganizationRepository struct { 12 | mock.Mock 13 | } 14 | 15 | func (m *OrganizationRepository) GetOrganizationById( 16 | ctx context.Context, 17 | exec repositories.Executor, 18 | organizationId string, 19 | ) (models.Organization, error) { 20 | args := m.Called(ctx, exec, organizationId) 21 | return args.Get(0).(models.Organization), args.Error(1) 22 | } 23 | -------------------------------------------------------------------------------- /mocks/postgres.go: -------------------------------------------------------------------------------- 1 | 
package mocks 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/stretchr/testify/mock" 7 | 8 | "github.com/checkmarble/marble-backend/models" 9 | ) 10 | 11 | type Database struct { 12 | mock.Mock 13 | } 14 | 15 | func (m *Database) UserByEmail(ctx context.Context, email string) (models.User, error) { 16 | args := m.Called(ctx, email) 17 | return args.Get(0).(models.User), args.Error(1) 18 | } 19 | 20 | func (m *Database) GetOrganizationByID(ctx context.Context, organizationID string) (models.Organization, error) { 21 | args := m.Called(ctx, organizationID) 22 | return args.Get(0).(models.Organization), args.Error(1) 23 | } 24 | 25 | func (m *Database) GetApiKeyByHash(ctx context.Context, hash []byte) (models.ApiKey, error) { 26 | args := m.Called(ctx, hash) 27 | return args.Get(0).(models.ApiKey), args.Error(1) 28 | } 29 | -------------------------------------------------------------------------------- /mocks/scenario_fetcher.go: -------------------------------------------------------------------------------- 1 | package mocks 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/stretchr/testify/mock" 7 | 8 | "github.com/checkmarble/marble-backend/models" 9 | "github.com/checkmarble/marble-backend/repositories" 10 | ) 11 | 12 | type ScenarioFetcher struct { 13 | mock.Mock 14 | } 15 | 16 | func (m *ScenarioFetcher) FetchScenarioAndIteration( 17 | ctx context.Context, 18 | exec repositories.Executor, 19 | scenarioIterationId string, 20 | ) (models.ScenarioAndIteration, error) { 21 | args := m.Called(ctx, exec, scenarioIterationId) 22 | return args.Get(0).(models.ScenarioAndIteration), args.Error(1) 23 | } 24 | -------------------------------------------------------------------------------- /mocks/scenario_iteration_read_repository.go: -------------------------------------------------------------------------------- 1 | package mocks 2 | 3 | import ( 4 | "github.com/stretchr/testify/mock" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | 
"github.com/checkmarble/marble-backend/repositories" 8 | ) 9 | 10 | type ScenarioIterationReadRepository struct { 11 | mock.Mock 12 | } 13 | 14 | func (s *ScenarioIterationReadRepository) GetScenarioIteration(exec repositories.Executor, 15 | scenarioIterationId string, 16 | ) (models.ScenarioIteration, error) { 17 | args := s.Called(exec, scenarioIterationId) 18 | return args.Get(0).(models.ScenarioIteration), args.Error(1) 19 | } 20 | 21 | func (s *ScenarioIterationReadRepository) ListScenarioIterations(exec repositories.Executor, 22 | organizationId string, filters models.GetScenarioIterationFilters, 23 | ) ([]models.ScenarioIteration, error) { 24 | args := s.Called(exec, organizationId, filters) 25 | return args.Get(0).([]models.ScenarioIteration), args.Error(1) 26 | } 27 | -------------------------------------------------------------------------------- /mocks/scenario_list_repository.go: -------------------------------------------------------------------------------- 1 | package mocks 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/stretchr/testify/mock" 7 | 8 | "github.com/checkmarble/marble-backend/models" 9 | "github.com/checkmarble/marble-backend/repositories" 10 | ) 11 | 12 | type ScenarioListRepository struct { 13 | mock.Mock 14 | } 15 | 16 | func (m *ScenarioListRepository) ListScenariosOfOrganization(ctx context.Context, 17 | exec repositories.Executor, organizationId string, 18 | ) ([]models.Scenario, error) { 19 | args := m.Called(ctx, exec, organizationId) 20 | return args.Get(0).([]models.Scenario), args.Error(1) 21 | } 22 | -------------------------------------------------------------------------------- /mocks/scenario_publisher.go: -------------------------------------------------------------------------------- 1 | package mocks 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/stretchr/testify/mock" 7 | 8 | "github.com/checkmarble/marble-backend/models" 9 | "github.com/checkmarble/marble-backend/repositories" 10 | ) 11 | 12 | type 
// ArgumentError pinpoints which argument of an AST function call caused an
// error: by position when ArgumentIndex >= 0, by name otherwise.
type ArgumentError struct {
	ArgumentIndex int
	ArgumentName  string
}

// Error renders the argument reference; a negative index means the error
// refers to a named argument.
func (e ArgumentError) Error() string {
	if e.ArgumentIndex < 0 {
		return fmt.Sprintf("named argument: %s", e.ArgumentName)
	}
	return fmt.Sprintf("argument: %d", e.ArgumentIndex)
}

// NewArgumentError builds an ArgumentError for a positional argument.
func NewArgumentError(argumentIndex int) ArgumentError {
	return ArgumentError{ArgumentIndex: argumentIndex}
}

// NewNamedArgumentError builds an ArgumentError for a named argument; the
// index is set to -1 to mark the name as authoritative.
func NewNamedArgumentError(argumentName string) ArgumentError {
	return ArgumentError{ArgumentIndex: -1, ArgumentName: argumentName}
}
"customListId", 14 | }, 15 | Cost: 30, 16 | }, 17 | ArgumentCustomListId: "customListId", 18 | } 19 | 20 | func NewNodeCustomListAccess(customListId string) Node { 21 | return Node{Function: FUNC_CUSTOM_LIST_ACCESS}. 22 | AddNamedChild(AttributeFuncCustomListAccess.ArgumentCustomListId, NewNodeConstant(customListId)) 23 | } 24 | -------------------------------------------------------------------------------- /models/ast/ast_function_test.go: -------------------------------------------------------------------------------- 1 | package ast 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stretchr/testify/assert" 7 | ) 8 | 9 | func TestFunction(t *testing.T) { 10 | // The stability of int values of function are not critical, they are never serialized, 11 | // but it is nice to have them in order 12 | assert.Equal(t, int(FUNC_UNKNOWN), -2) 13 | assert.Equal(t, int(FUNC_UNDEFINED), -1) 14 | assert.Equal(t, int(FUNC_CONSTANT), 0) 15 | assert.Equal(t, int(FUNC_ADD), 1) 16 | } 17 | -------------------------------------------------------------------------------- /models/ast/ast_node_weight_test.go: -------------------------------------------------------------------------------- 1 | package ast 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stretchr/testify/assert" 7 | ) 8 | 9 | func TestNodeWeights(t *testing.T) { 10 | tts := []struct { 11 | n Node 12 | c int 13 | }{ 14 | {Node{Function: FUNC_AND, Children: []Node{{Function: FUNC_DB_ACCESS}, {Function: FUNC_PAYLOAD}}}, 30}, 15 | {Node{Function: FUNC_AND, Children: []Node{{Function: FUNC_DB_ACCESS}, { 16 | Function: FUNC_ADD, Children: []Node{{ 17 | Function: FUNC_AGGREGATOR, 18 | Children: []Node{{Function: FUNC_CUSTOM_LIST_ACCESS}, {Function: FUNC_PAYLOAD}}, 19 | }}, 20 | }}}, 110}, 21 | } 22 | 23 | for _, tt := range tts { 24 | assert.Equal(t, tt.c, tt.n.Cost()) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /models/ast/ast_sample_expression.go: 
-------------------------------------------------------------------------------- 1 | package ast 2 | 3 | func NewAstCompareBalance() Node { 4 | // (50 + 51) > 100 5 | return Node{Function: FUNC_GREATER}. 6 | AddChild(Node{Function: FUNC_ADD}. 7 | AddChild(Node{Constant: 51}). 8 | AddChild(Node{Constant: 50}), 9 | ). 10 | AddChild(Node{Constant: 100}) 11 | } 12 | -------------------------------------------------------------------------------- /models/ast/node_evaluation_dto_test.go: -------------------------------------------------------------------------------- 1 | package ast 2 | 3 | import ( 4 | "encoding/json" 5 | "testing" 6 | 7 | "github.com/stretchr/testify/assert" 8 | ) 9 | 10 | func encodeDecodeNodeEvaluation(t *testing.T, evaluation NodeEvaluation) NodeEvaluationDto { 11 | jsonData, err := json.Marshal(AdaptNodeEvaluationDto(evaluation)) 12 | assert.NoError(t, err) 13 | 14 | var result NodeEvaluationDto 15 | err = json.Unmarshal(jsonData, &result) 16 | assert.NoError(t, err) 17 | 18 | return result 19 | } 20 | 21 | func TestAdaptAdaptNodeEvaluationDto_noerror(t *testing.T) { 22 | // evaluation succeded -> errors is encoded as en empty array 23 | result := encodeDecodeNodeEvaluation(t, NodeEvaluation{ 24 | Errors: []error{}, 25 | }) 26 | 27 | assert.NotNil(t, result.Errors) 28 | assert.Len(t, result.Errors, 0) 29 | } 30 | 31 | func TestAdaptAdaptNodeEvaluationDto_noevaluation(t *testing.T) { 32 | // no evaluation -> errors is encoded as nil 33 | result := encodeDecodeNodeEvaluation(t, NodeEvaluation{ 34 | Errors: nil, 35 | }) 36 | assert.Empty(t, result.Errors) 37 | } 38 | -------------------------------------------------------------------------------- /models/ast_validation.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | import "github.com/checkmarble/marble-backend/models/ast" 4 | 5 | type AstValidation struct { 6 | Errors []ScenarioValidationError 7 | Evaluation ast.NodeEvaluation 8 | } 9 | 
// CaseFile is the metadata record for a file attached to a case; the file
// contents themselves live in a blob-storage bucket.
type CaseFile struct {
	Id            string
	CaseId        string
	CreatedAt     time.Time
	BucketName    string // bucket holding the file contents
	FileReference string // object key of the file inside the bucket
	FileName      string // original name of the uploaded file
}

// CreateCaseFilesInput is the request to attach uploaded files to a case.
type CreateCaseFilesInput struct {
	CaseId string
	Files  []multipart.FileHeader
}

// CreateDbCaseFileInput carries the fields needed to insert one case-file
// row once the file has been written to storage.
type CreateDbCaseFileInput struct {
	Id            string
	BucketName    string
	CaseId        string
	FileName      string
	FileReference string
}
-------------------------------------------------------------------------------- /models/credentials.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | type Identity struct { 4 | UserId UserId 5 | Email string 6 | FirstName string 7 | LastName string 8 | ApiKeyName string 9 | } 10 | 11 | type Credentials struct { 12 | ActorIdentity Identity // email or api key, for audit log 13 | OrganizationId string 14 | PartnerId *string 15 | Role Role 16 | } 17 | 18 | func NewCredentialWithUser(user User) Credentials { 19 | return Credentials{ 20 | ActorIdentity: Identity{ 21 | UserId: user.UserId, 22 | Email: user.Email, 23 | FirstName: user.FirstName, 24 | LastName: user.LastName, 25 | }, 26 | OrganizationId: user.OrganizationId, 27 | PartnerId: user.PartnerId, 28 | Role: user.Role, 29 | } 30 | } 31 | 32 | func NewCredentialWithApiKey(organizationId string, partnerId *string, role Role, apiKeyName string) Credentials { 33 | return Credentials{ 34 | ActorIdentity: Identity{ 35 | ApiKeyName: apiKeyName, 36 | }, 37 | OrganizationId: organizationId, 38 | PartnerId: partnerId, 39 | Role: role, 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /models/data_model_options.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | type DataModelOptions struct { 4 | Id string 5 | TableId string 6 | DisplayedFields []string 7 | FieldOrder []string 8 | } 9 | 10 | type UpdateDataModelOptionsRequest struct { 11 | TableId string 12 | DisplayedFields []string 13 | FieldOrder []string 14 | } 15 | -------------------------------------------------------------------------------- /models/data_model_test.go: -------------------------------------------------------------------------------- 1 | package models 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stretchr/testify/assert" 7 | ) 8 | 9 | func TestDataType(t *testing.T) { 10 | // 
// FeatureAccess encodes an organization's entitlement level for a feature.
type FeatureAccess int

const (
	Restricted FeatureAccess = iota
	Allowed
	Test
	MissingConfiguration
	UnknownFeatureAccess
)

// ValidFeaturesAccess lists the values that may be assigned to a feature.
var ValidFeaturesAccess = []FeatureAccess{Allowed, Restricted, Test}

// featureAccessLabels maps each representable access level to its stable
// string form used for (de)serialization.
var featureAccessLabels = map[FeatureAccess]string{
	Allowed:              "allowed",
	Restricted:           "restricted",
	Test:                 "test",
	MissingConfiguration: "missing_configuration",
}

// String returns the serialized form of the access level, or "unknown" for
// values outside the known set.
func (f FeatureAccess) String() string {
	if label, ok := featureAccessLabels[f]; ok {
		return label
	}
	return "unknown"
}

// FeatureAccessFrom parses a serialized access level; unrecognized input
// yields UnknownFeatureAccess.
func FeatureAccessFrom(s string) FeatureAccess {
	for access, label := range featureAccessLabels {
		if label == s {
			return access
		}
	}
	return UnknownFeatureAccess
}

// IsAllowed reports whether the feature can actually be used, i.e. it is
// enabled for real ("allowed") or in test mode.
func (f FeatureAccess) IsAllowed() bool {
	return f == Allowed || f == Test
}
// OffloadingWatermark records, per organization and table, how far the
// offloading job has progressed. Rows at or before the watermark position
// are presumably already offloaded — confirm against the offloading job.
type OffloadingWatermark struct {
	OrgId         string
	TableName     string
	WatermarkTime time.Time // timestamp component of the watermark position
	WatermarkId   string    // id component; likely a tie-breaker at equal timestamps — TODO confirm
	CreatedAt     time.Time
	UpdatedAt     time.Time
}
// Outcome is the final decision outcome computed for a scenario execution.
type Outcome int

const (
	Approve Outcome = iota
	Review
	BlockAndReview
	Decline
	UnknownOutcome
)

var (
	// ValidOutcomes lists every outcome a decision may legitimately carry.
	ValidOutcomes = []Outcome{Approve, Review, BlockAndReview, Decline}
	// ValidForcedOutcome lists the outcomes that may be forced on a decision.
	ValidForcedOutcome = []Outcome{Review, BlockAndReview, Decline}
)

// outcomeLabels maps each known outcome to its stable string form.
var outcomeLabels = map[Outcome]string{
	Approve:        "approve",
	Review:         "review",
	BlockAndReview: "block_and_review",
	Decline:        "decline",
}

// String returns the serialized form of the outcome, or "unknown" for
// values outside the known set.
func (o Outcome) String() string {
	if label, ok := outcomeLabels[o]; ok {
		return label
	}
	return "unknown"
}

// OutcomeFrom parses a serialized outcome string; unrecognized input
// yields UnknownOutcome.
func OutcomeFrom(s string) Outcome {
	for outcome, label := range outcomeLabels {
		if label == s {
			return outcome
		}
	}
	return UnknownOutcome
}
// Tag is a label that an organization can attach to cases or objects.
type Tag struct {
	Id             string
	Target         TagTarget
	Name           string
	Color          string
	OrganizationId string
	CreatedAt      time.Time
	UpdatedAt      time.Time
	DeletedAt      *time.Time // nil unless the tag has been soft-deleted
	CasesCount     *int       // optionally populated count of cases using the tag
}

// CreateTagAttributes carries the fields needed to create a tag.
type CreateTagAttributes struct {
	Color          string
	OrganizationId string
	Target         TagTarget
	Name           string
}

// UpdateTagAttributes carries the mutable fields of an existing tag.
type UpdateTagAttributes struct {
	Color string
	Name  string
	TagId string
}

// TagTarget says which kind of entity a tag applies to.
type TagTarget string

const (
	TagTargetCase    TagTarget = "case"
	TagTargetObject  TagTarget = "object"
	TagTargetUnknown TagTarget = "unknown"
)

// TagTargetFromString parses a tag target; unrecognized input yields
// TagTargetUnknown.
func TagTargetFromString(s string) TagTarget {
	switch target := TagTarget(s); target {
	case TagTargetCase, TagTargetObject:
		return target
	default:
		return TagTargetUnknown
	}
}
-------------------------------------------------------------------------------- 1 | package pubapi 2 | 3 | import ( 4 | "errors" 5 | ) 6 | 7 | const ( 8 | LinkDecisions = "decisions" 9 | LinkSanctionChecks = "sanction_checks" 10 | LinkSanctionCheckMatches = "sanction_check_matches" 11 | ) 12 | 13 | var ( 14 | ErrInternalServerError = errors.New("server_error") 15 | 16 | ErrFeatureDisabled = errors.New("feature_disabled") 17 | ErrNotConfigured = errors.New("feature_not_configured") 18 | 19 | ErrForbidden = errors.New("forbidden") 20 | ErrNotFound = errors.New("not_found") 21 | ErrInvalidPayload = errors.New("invalid_payload") 22 | ErrConflict = errors.New("conflict") 23 | ErrUnprocessableEntity = errors.New("unprocessable_entity") 24 | ErrTimeout = errors.New("timeout") 25 | ) 26 | -------------------------------------------------------------------------------- /pubapi/features.go: -------------------------------------------------------------------------------- 1 | package pubapi 2 | 3 | import ( 4 | "github.com/checkmarble/marble-backend/models" 5 | "github.com/checkmarble/marble-backend/usecases" 6 | "github.com/gin-gonic/gin" 7 | ) 8 | 9 | func CheckFeatureAccess(c *gin.Context, uc *usecases.UsecasesWithCreds) bool { 10 | featureAccessReader := uc.NewFeatureAccessReader() 11 | 12 | // Does not take into account access to AI features that are per-user - any per-user permissions do not make sense in the context of public API 13 | features, err := featureAccessReader.GetOrganizationFeatureAccess(c.Request.Context(), uc.Credentials.OrganizationId, nil) 14 | if err != nil { 15 | NewErrorResponse().WithError(err).Serve(c) 16 | return false 17 | } 18 | 19 | if !features.Sanctions.IsAllowed() { 20 | if features.Sanctions == models.MissingConfiguration { 21 | NewErrorResponse().WithError(ErrNotConfigured).Serve(c) 22 | return false 23 | } 24 | 25 | NewErrorResponse().WithError(ErrFeatureDisabled).Serve(c) 26 | return false 27 | } 28 | 29 | return true 30 | } 31 | 
-------------------------------------------------------------------------------- /pubapi/pagination.go: -------------------------------------------------------------------------------- 1 | package pubapi 2 | 3 | import "github.com/checkmarble/marble-backend/models" 4 | 5 | type PaginationParams struct { 6 | After string `form:"after" binding:"omitempty,uuid"` 7 | Order string `form:"order" binding:"omitempty,oneof=ASC DESC"` 8 | Limit int `form:"limit" binding:"omitempty,min=1,max=100"` 9 | } 10 | 11 | func (p PaginationParams) ToModel(defaults models.PaginationDefaults) models.PaginationAndSorting { 12 | return models.WithPaginationDefaults(models.PaginationAndSorting{ 13 | OffsetId: p.After, 14 | Order: models.SortingOrderFrom(p.Order), 15 | Limit: p.Limit, 16 | }, defaults) 17 | } 18 | -------------------------------------------------------------------------------- /pubapi/params.go: -------------------------------------------------------------------------------- 1 | package pubapi 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/cockroachdb/errors" 7 | "github.com/gin-gonic/gin" 8 | "github.com/google/uuid" 9 | ) 10 | 11 | func UuidParam(c *gin.Context, param string) (*uuid.UUID, error) { 12 | parsed, err := uuid.Parse(c.Param(param)) 13 | if err != nil { 14 | return nil, errors.WithDetail(ErrInvalidPayload, err.Error()) 15 | } 16 | 17 | return &parsed, nil 18 | } 19 | 20 | var dateFormats = []string{ 21 | "2006-01-02T15:04:05Z", 22 | "2006-01-02T15:04:05-0700", 23 | "2006-01-02T15:04:05-07:00", 24 | } 25 | 26 | type DateTime time.Time 27 | 28 | func (b *DateTime) UnmarshalParam(param string) error { 29 | for _, df := range dateFormats { 30 | dt, err := time.Parse(df, param) 31 | if err != nil { 32 | continue 33 | } 34 | 35 | *b = DateTime(dt) 36 | 37 | return nil 38 | } 39 | 40 | return errors.WithDetailf(ErrInvalidPayload, "invalid datetime format, use yyyy-mm-ddThh:mm:ss+zz:zz") 41 | } 42 | 43 | func (b *DateTime) IsZero() bool { 44 | if b == nil { 45 | 
return true 46 | } 47 | return time.Time(*b).IsZero() 48 | } 49 | -------------------------------------------------------------------------------- /pubapi/pubapi.go: -------------------------------------------------------------------------------- 1 | package pubapi 2 | 3 | import ( 4 | "net/url" 5 | "reflect" 6 | "strings" 7 | 8 | "github.com/gin-gonic/gin/binding" 9 | "github.com/go-playground/validator/v10" 10 | ) 11 | 12 | type Config struct { 13 | MarbleAppUrl *url.URL 14 | } 15 | 16 | func InitPublicApi() { 17 | if validator, ok := binding.Validator.Engine().(*validator.Validate); ok { 18 | validator.RegisterTagNameFunc(fieldNameFromTag) 19 | } 20 | } 21 | 22 | func fieldNameFromTag(fld reflect.StructField) string { 23 | name := strings.SplitN(fld.Tag.Get("json"), ",", 2)[0] 24 | if len(name) > 0 { 25 | if name == "-" { 26 | return "" 27 | } 28 | return name 29 | } 30 | 31 | name = strings.SplitN(fld.Tag.Get("form"), ",", 2)[0] 32 | if len(name) > 0 { 33 | return name 34 | } 35 | 36 | return "" 37 | } 38 | -------------------------------------------------------------------------------- /pubapi/tests/e2e_test.go: -------------------------------------------------------------------------------- 1 | package tests 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | "log/slog" 7 | "net/http" 8 | "testing" 9 | 10 | v1 "github.com/checkmarble/marble-backend/pubapi/tests/specs/v1" 11 | "github.com/checkmarble/marble-backend/utils" 12 | ) 13 | 14 | func TestPublicApi(t *testing.T) { 15 | for _, version := range []string{"v1beta"} { 16 | t.Run(fmt.Sprintf("Public API %s integration tests", version), func(it *testing.T) { 17 | ctx := context.Background() 18 | ctx = utils.StoreLoggerInContext(ctx, slog.New(slog.DiscardHandler)) 19 | 20 | pg := setupPostgres(it, ctx) 21 | sock := setupApi(it, ctx, pg.MustConnectionString(ctx)) 22 | 23 | client(t, sock, "", "").GET("/liveness").Expect().Status(http.StatusOK) 24 | client(t, sock, version, 
"invalidkey").GET("/example").Expect().Status(http.StatusUnauthorized) 25 | 26 | v1.PublicApiV1(t, client(t, sock, version, "testapikey")) 27 | }) 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /pubapi/tests/fixtures/base/base.yml: -------------------------------------------------------------------------------- 1 | organizations: 2 | - id: 00000000-0000-0000-0000-000000000000 3 | name: ACME 4 | 5 | organization_feature_access: 6 | - org_id: 00000000-0000-0000-0000-000000000000 7 | 8 | api_keys: 9 | - org_id: 00000000-0000-0000-0000-000000000000 10 | prefix: abc 11 | role: 5 12 | key_hash: RAW=sha256('testapikey') 13 | 14 | scenarios: 15 | - id: 11111111-1111-1111-1111-111111111111 16 | org_id: 00000000-0000-0000-0000-000000000000 17 | name: Scenario name 18 | description: Scenario description 19 | trigger_object_type: transactions 20 | - id: 22222222-2222-2222-2222-222222222222 21 | org_id: 00000000-0000-0000-0000-000000000000 22 | name: Scenario 2 name 23 | description: Scenario 2 description 24 | trigger_object_type: transactions 25 | -------------------------------------------------------------------------------- /pubapi/tests/fixtures/cases.yml: -------------------------------------------------------------------------------- 1 | - id: 00000000-0000-0000-0000-000000000000 2 | org_id: 00000000-0000-0000-0000-000000000000 3 | name: Case name -------------------------------------------------------------------------------- /pubapi/tests/fixtures/decision_rules.yml: -------------------------------------------------------------------------------- 1 | # Rules for decision 11111111-1111-1111-1111-111111111111 2 | 3 | - id: 11111111-1111-1111-1111-111111111111 4 | org_id: 00000000-0000-0000-0000-000000000000 5 | decision_id: 11111111-1111-1111-1111-111111111111 6 | score_modifier: 10 7 | result: true 8 | error_code: 0 9 | rule_id: 11111111-1111-1111-1111-111111111111 10 | rule_evaluation: 11 | return_value: 12 | value: 
42 13 | outcome: no_hit 14 | 15 | - id: 22222222-2222-2222-2222-222222222222 16 | org_id: 00000000-0000-0000-0000-000000000000 17 | decision_id: 11111111-1111-1111-1111-111111111111 18 | score_modifier: 20 19 | result: true 20 | error_code: 100 21 | rule_id: 11111111-1111-1111-1111-111111111111 22 | rule_evaluation: null 23 | outcome: hit -------------------------------------------------------------------------------- /pubapi/tests/fixtures/sanction_check_configs.yml: -------------------------------------------------------------------------------- 1 | - scenario_iteration_id: 11111111-1111-1111-1111-111111111111 2 | name: Sanction check config 3 | forced_outcome: approve 4 | stable_id: 00000000-0000-0000-0000-000000000000 -------------------------------------------------------------------------------- /pubapi/tests/fixtures/sanction_check_matches.yml: -------------------------------------------------------------------------------- 1 | - id: 11111111-1111-1111-1111-111111111111 2 | sanction_check_id: 11111111-1111-1111-1111-111111111111 3 | opensanction_entity_id: ABC123 4 | status: pending 5 | query_ids: RAW=array['one', 'two'] 6 | payload: 7 | lorem: ipsum 8 | 9 | - id: 22222222-2222-2222-2222-222222222222 10 | sanction_check_id: 11111111-1111-1111-1111-111111111111 11 | opensanction_entity_id: ABC123 12 | status: no_hit 13 | query_ids: RAW=array['one', 'two'] 14 | payload: 15 | lorem: ipsum 16 | 17 | - id: 33333333-3333-3333-3333-333333333333 18 | sanction_check_id: 11111111-1111-1111-1111-111111111111 19 | opensanction_entity_id: ABC123 20 | status: no_hit 21 | query_ids: RAW=array['one', 'two'] 22 | payload: 23 | lorem: ipsum -------------------------------------------------------------------------------- /pubapi/tests/fixtures/sanction_checks.yml: -------------------------------------------------------------------------------- 1 | - id: 11111111-1111-1111-1111-111111111111 2 | decision_id: 11111111-1111-1111-1111-111111111111 3 | status: in_review 4 | 
search_input: 5 | lorem: ipsum 6 | search_datasets: RAW=array['one', 'two'] 7 | match_threshold: 0.9 8 | match_limit: 20 -------------------------------------------------------------------------------- /pubapi/tests/fixtures/scenario_iteration_rules.yml: -------------------------------------------------------------------------------- 1 | - id: 11111111-1111-1111-1111-111111111111 2 | org_id: 00000000-0000-0000-0000-000000000000 3 | scenario_iteration_id: 11111111-1111-1111-1111-111111111111 4 | display_order: 1 5 | name: The Rule 6 | description: The Description 7 | score_modifier: +9000 8 | formula_ast_expression: 9 | ok: true 10 | rule_group: The Group 11 | stable_rule_id: 11111111-1111-1111-1111-111111111111 -------------------------------------------------------------------------------- /pubapi/tests/fixtures/scenario_iterations.yml: -------------------------------------------------------------------------------- 1 | - id: 11111111-1111-1111-1111-111111111111 2 | org_id: 00000000-0000-0000-0000-000000000000 3 | scenario_id: 11111111-1111-1111-1111-111111111111 4 | version: "42" 5 | 6 | - id: 22222222-2222-2222-2222-222222222222 7 | org_id: 00000000-0000-0000-0000-000000000000 8 | scenario_id: 22222222-2222-2222-2222-222222222222 9 | version: "42" -------------------------------------------------------------------------------- /pubapi/tests/fixtures/scheduled_executions.yml: -------------------------------------------------------------------------------- 1 | - id: 11111111-1111-1111-1111-111111111111 2 | organization_id: 00000000-0000-0000-0000-000000000000 3 | scenario_id: 11111111-1111-1111-1111-111111111111 4 | scenario_iteration_id: 11111111-1111-1111-1111-111111111111 5 | status: processing 6 | manual: true 7 | started_at: 2025-01-01T10:00:00Z 8 | 9 | - id: 22222222-2222-2222-2222-222222222222 10 | organization_id: 00000000-0000-0000-0000-000000000000 11 | scenario_id: 22222222-2222-2222-2222-222222222222 12 | scenario_iteration_id: 
22222222-2222-2222-2222-222222222222 13 | status: success 14 | manual: false 15 | number_of_created_decisions: 42 16 | started_at: 2025-01-01T08:00:00Z 17 | finished_at: 2025-01-01T09:00:00Z 18 | -------------------------------------------------------------------------------- /pubapi/tests/specs/v1/v1.go: -------------------------------------------------------------------------------- 1 | package v1 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/gavv/httpexpect/v2" 7 | ) 8 | 9 | func PublicApiV1(t *testing.T, e *httpexpect.Expect) { 10 | sanctionChecks(t, e) 11 | whitelists(t, e) 12 | decisions(t, e) 13 | batchExecutions(t, e) 14 | } 15 | -------------------------------------------------------------------------------- /pubapi/usecases.go: -------------------------------------------------------------------------------- 1 | package pubapi 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/checkmarble/marble-backend/usecases" 7 | "github.com/checkmarble/marble-backend/utils" 8 | ) 9 | 10 | func UsecasesWithCreds(ctx context.Context, uc usecases.Usecases) *usecases.UsecasesWithCreds { 11 | creds, found := utils.CredentialsFromCtx(ctx) 12 | if !found { 13 | panic("no credentials in context") 14 | } 15 | 16 | return &usecases.UsecasesWithCreds{ 17 | Usecases: uc, 18 | Credentials: creds, 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /pubapi/v1/dto/batch_executions.go: -------------------------------------------------------------------------------- 1 | package dto 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | ) 8 | 9 | type ScheduledExecution struct { 10 | Id string `json:"id"` 11 | Scenario DecisionScenario `json:"scenario"` 12 | Manual bool `json:"manual"` 13 | Status string `json:"status"` 14 | DecisionsCreated int `json:"decisions_created"` 15 | CreatedAt time.Time `json:"created_at"` 16 | FinishedAt *time.Time `json:"finished_at"` 17 | } 18 | 19 | func 
AdaptScheduledExecution(model models.ScheduledExecution) ScheduledExecution { 20 | return ScheduledExecution{ 21 | Id: model.Id, 22 | Scenario: DecisionScenario{ 23 | Id: model.ScenarioId, 24 | IterationId: model.ScenarioIterationId, 25 | Version: model.ScenarioVersion, 26 | }, 27 | Manual: model.Manual, 28 | Status: model.Status.String(), 29 | DecisionsCreated: model.NumberOfCreatedDecisions, 30 | CreatedAt: model.StartedAt, 31 | FinishedAt: model.FinishedAt, 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /pubapi/v1/dto/case.go: -------------------------------------------------------------------------------- 1 | package dto 2 | 3 | import ( 4 | "github.com/checkmarble/marble-backend/models" 5 | ) 6 | 7 | type Case struct { 8 | Id string `json:"id"` 9 | } 10 | 11 | func AdaptCase(c models.Case) Case { 12 | return Case{ 13 | Id: c.Id, 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /pubapi/v1/params/batch_execution.go: -------------------------------------------------------------------------------- 1 | package params 2 | 3 | import ( 4 | "github.com/checkmarble/marble-backend/models" 5 | "github.com/checkmarble/marble-backend/pubapi" 6 | "github.com/checkmarble/marble-backend/utils" 7 | ) 8 | 9 | type ListBatchExecutionsParams struct { 10 | pubapi.PaginationParams 11 | 12 | ScenarioId *string `form:"scenario_id" binding:"omitzero,uuid"` 13 | } 14 | 15 | func (p ListBatchExecutionsParams) ToFilters(orgId string) models.ListScheduledExecutionsFilters { 16 | filters := models.ListScheduledExecutionsFilters{ 17 | OrganizationId: orgId, 18 | } 19 | 20 | if !utils.NilOrZero(p.ScenarioId) { 21 | filters.ScenarioId = *p.ScenarioId 22 | } 23 | 24 | return filters 25 | } 26 | -------------------------------------------------------------------------------- /pure_utils/clean_bom.go: -------------------------------------------------------------------------------- 1 | package 
pure_utils 2 | 3 | import ( 4 | "bufio" 5 | "io" 6 | ) 7 | 8 | const ( 9 | bom0 = 0xef 10 | bom1 = 0xbb 11 | bom2 = 0xbf 12 | ) 13 | 14 | func NewReaderWithoutBom(r io.Reader) io.Reader { 15 | buf := bufio.NewReader(r) 16 | b, err := buf.Peek(3) 17 | if err != nil { 18 | // not enough bytes 19 | return buf 20 | } 21 | if b[0] == bom0 && b[1] == bom1 && b[2] == bom2 { 22 | _, _ = buf.Discard(3) 23 | } 24 | return buf 25 | } 26 | -------------------------------------------------------------------------------- /pure_utils/database_ids.go: -------------------------------------------------------------------------------- 1 | package pure_utils 2 | 3 | import ( 4 | "github.com/google/uuid" 5 | ) 6 | 7 | func NewPrimaryKey(organizationId string) string { 8 | // Output first 32 bits from the organizationId uuid, and the rest is random from a new uuid v4 9 | newUuid := uuid.New() 10 | orgIdAsUuid := uuid.MustParse(organizationId) 11 | 12 | var output uuid.UUID 13 | copy(output[:4], orgIdAsUuid[:4]) 14 | copy(output[4:], newUuid[4:]) 15 | 16 | return output.String() 17 | } 18 | -------------------------------------------------------------------------------- /pure_utils/database_ids_test.go: -------------------------------------------------------------------------------- 1 | package pure_utils 2 | 3 | import ( 4 | "log" 5 | "testing" 6 | 7 | "github.com/stretchr/testify/assert" 8 | ) 9 | 10 | func TestNewPrimaryKey(t *testing.T) { 11 | organizationId := "86d9b92d-e654-4de3-8d3f-81830246c891" 12 | 13 | newId := NewPrimaryKey(organizationId) 14 | 15 | log.Println(organizationId) 16 | log.Println(newId) 17 | 18 | asserts := assert.New(t) 19 | asserts.Equal(organizationId[:8], newId[:8]) 20 | asserts.NotEqual(organizationId, newId) 21 | } 22 | 23 | func TestNewUUIDStartWithOrgId(t *testing.T) { 24 | newId := NewPrimaryKey("12345678-ffff-ffff-ffff-ffffffffffff") 25 | 26 | // first 8 characters are the org id 27 | assert.Equal(t, newId[:8], "12345678") 28 | // the rest is diffenrent 
29 | assert.NotEqual(t, newId[8:], "-ffff-ffff-ffff-ffffffffffff") 30 | } 31 | -------------------------------------------------------------------------------- /pure_utils/slices.go: -------------------------------------------------------------------------------- 1 | package pure_utils 2 | 3 | import ( 4 | "github.com/hashicorp/go-set/v2" 5 | ) 6 | 7 | func ContainsSameElements[T comparable](a, b []T) bool { 8 | return set.From(a).Equal(set.From(b)) 9 | } 10 | -------------------------------------------------------------------------------- /pure_utils/slices_test.go: -------------------------------------------------------------------------------- 1 | package pure_utils 2 | 3 | import "testing" 4 | 5 | func TestSlicesEqual(t *testing.T) { 6 | tests := []struct { 7 | name string 8 | a, b []string 9 | want bool 10 | }{ 11 | {"same elements, different order", []string{"a", "b", "c"}, []string{"c", "b", "a"}, true}, 12 | {"same elements, with repetitions", []string{"a", "b", "c"}, []string{"a", "b", "b", "c"}, true}, 13 | {"different lengths", []string{"a", "b"}, []string{"a", "b", "c"}, false}, 14 | {"different elements", []string{"a", "b", "c"}, []string{"a", "b", "d"}, false}, 15 | {"empty slices", []string{}, []string{}, true}, 16 | {"one empty slice", []string{"a", "b", "c"}, []string{}, false}, 17 | } 18 | 19 | for _, tt := range tests { 20 | t.Run(tt.name, func(t *testing.T) { 21 | if got := ContainsSameElements(tt.a, tt.b); got != tt.want { 22 | t.Errorf("SlicesEqual() = %v, want %v", got, tt.want) 23 | } 24 | }) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /repositories/client_db_repository.go: -------------------------------------------------------------------------------- 1 | package repositories 2 | 3 | type ClientDbRepository struct{} 4 | -------------------------------------------------------------------------------- /repositories/clock/clock.go: 
-------------------------------------------------------------------------------- 1 | package clock 2 | 3 | import "time" 4 | 5 | type Clock interface { 6 | Now() time.Time 7 | } 8 | 9 | type clock struct{} 10 | 11 | func (c *clock) Now() time.Time { 12 | return time.Now() 13 | } 14 | 15 | func New() Clock { 16 | return &clock{} 17 | } 18 | 19 | type Mock struct { 20 | now time.Time 21 | } 22 | 23 | func NewMock(now time.Time) *Mock { 24 | return &Mock{ 25 | now: now, 26 | } 27 | } 28 | 29 | func (m *Mock) Now() time.Time { 30 | return m.now 31 | } 32 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_ast_evaluation.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "encoding/json" 5 | 6 | "github.com/checkmarble/marble-backend/models/ast" 7 | ) 8 | 9 | func SerializeNodeEvaluationDto(nodeEvaluation *ast.NodeEvaluationDto) ([]byte, error) { 10 | if nodeEvaluation == nil { 11 | return nil, nil 12 | } 13 | 14 | return json.Marshal(&nodeEvaluation) 15 | } 16 | 17 | func DeserializeNodeEvaluationDto(serializedNodeEvaluationDto []byte) (*ast.NodeEvaluationDto, error) { 18 | if len(serializedNodeEvaluationDto) == 0 { 19 | return nil, nil 20 | } 21 | 22 | var nodeEvaluationDto ast.NodeEvaluationDto 23 | if err := json.Unmarshal(serializedNodeEvaluationDto, &nodeEvaluationDto); err != nil { 24 | return nil, err 25 | } 26 | 27 | return &nodeEvaluationDto, nil 28 | } 29 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_ast_expression.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | 7 | "github.com/checkmarble/marble-backend/dto" 8 | "github.com/checkmarble/marble-backend/models/ast" 9 | ) 10 | 11 | func SerializeFormulaAstExpression(formulaAstExpression *ast.Node) (*[]byte, error) 
{ 12 | if formulaAstExpression == nil { 13 | return nil, nil 14 | } 15 | 16 | nodeDto, err := dto.AdaptNodeDto(*formulaAstExpression) 17 | if err != nil { 18 | return nil, fmt.Errorf("unable to marshal rule formula ast expression: %w", err) 19 | } 20 | 21 | serialized, err := json.Marshal(nodeDto) 22 | return &serialized, err 23 | } 24 | 25 | func AdaptSerializedAstExpression(serializedAstExpression []byte) (*ast.Node, error) { 26 | if len(serializedAstExpression) == 0 { 27 | return nil, nil 28 | } 29 | 30 | var nodeDto dto.NodeDto 31 | if err := json.Unmarshal(serializedAstExpression, &nodeDto); err != nil { 32 | return nil, err 33 | } 34 | 35 | node, err := dto.AdaptASTNode(nodeDto) 36 | if err != nil { 37 | return nil, err 38 | } 39 | return &node, nil 40 | } 41 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_case_contributor.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/utils" 8 | ) 9 | 10 | type DBCaseContributor struct { 11 | Id string `db:"id"` 12 | CaseId string `db:"case_id"` 13 | UserId string `db:"user_id"` 14 | CreatedAt time.Time `db:"created_at"` 15 | } 16 | 17 | const TABLE_CASE_CONTRIBUTORS = "case_contributors" 18 | 19 | var SelectCaseContributorColumn = utils.ColumnList[DBCaseContributor]() 20 | 21 | func AdaptCaseContributor(caseContributor DBCaseContributor) (models.CaseContributor, error) { 22 | return models.CaseContributor{ 23 | Id: caseContributor.Id, 24 | CaseId: caseContributor.CaseId, 25 | UserId: caseContributor.UserId, 26 | CreatedAt: caseContributor.CreatedAt, 27 | }, nil 28 | } 29 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_case_files.go: -------------------------------------------------------------------------------- 1 | 
package dbmodels 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/utils" 8 | ) 9 | 10 | type DBCaseFile struct { 11 | Id string `db:"id"` 12 | CreatedAt time.Time `db:"created_at"` 13 | CaseId string `db:"case_id"` 14 | BucketName string `db:"bucket_name"` 15 | FileReference string `db:"file_reference"` 16 | FileName string `db:"file_name"` 17 | } 18 | 19 | const TABLE_CASE_FILES = "case_files" 20 | 21 | var SelectCaseFileColumn = utils.ColumnList[DBCaseFile]() 22 | 23 | func AdaptCaseFile(db DBCaseFile) (models.CaseFile, error) { 24 | return models.CaseFile{ 25 | Id: db.Id, 26 | CaseId: db.CaseId, 27 | CreatedAt: db.CreatedAt, 28 | BucketName: db.BucketName, 29 | FileName: db.FileName, 30 | FileReference: db.FileReference, 31 | }, nil 32 | } 33 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_case_tag.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/utils" 8 | "github.com/jackc/pgx/v5/pgtype" 9 | ) 10 | 11 | type DBCaseTag struct { 12 | Id string `db:"id"` 13 | CaseId string `db:"case_id"` 14 | TagId string `db:"tag_id"` 15 | CreatedAt time.Time `db:"created_at"` 16 | DeletedAt pgtype.Timestamp `db:"deleted_at"` 17 | } 18 | 19 | const TABLE_CASE_TAGS = "case_tags" 20 | 21 | var SelectCaseTagColumn = utils.ColumnList[DBCaseTag]() 22 | 23 | func AdaptCaseTag(db DBCaseTag) (models.CaseTag, error) { 24 | return models.CaseTag{ 25 | Id: db.Id, 26 | CaseId: db.CaseId, 27 | TagId: db.TagId, 28 | CreatedAt: db.CreatedAt, 29 | }, nil 30 | } 31 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_custom_list_value.go: -------------------------------------------------------------------------------- 1 | 
package dbmodels 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/utils" 8 | ) 9 | 10 | type DBCustomListValueResult struct { 11 | Id string `db:"id"` 12 | CustomListId string `db:"custom_list_id"` 13 | Value string `db:"value"` 14 | CreatedAt time.Time `db:"created_at"` 15 | DeletedAt *time.Time `db:"deleted_at"` 16 | } 17 | 18 | const TABLE_CUSTOM_LIST_VALUE = "custom_list_values" 19 | 20 | var ColumnsSelectCustomListValue = utils.ColumnList[DBCustomListValueResult]() 21 | 22 | func AdaptCustomListValue(db DBCustomListValueResult) (models.CustomListValue, error) { 23 | return models.CustomListValue{ 24 | Id: db.Id, 25 | CustomListId: db.CustomListId, 26 | Value: db.Value, 27 | CreatedAt: db.CreatedAt, 28 | DeletedAt: db.DeletedAt, 29 | }, nil 30 | } 31 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_data_model_options.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "github.com/checkmarble/marble-backend/models" 5 | "github.com/checkmarble/marble-backend/utils" 6 | ) 7 | 8 | type DbDataModelOptions struct { 9 | Id string `db:"id"` 10 | TableId string `db:"table_id"` 11 | DisplayedFields []string `db:"displayed_fields"` 12 | FieldOrder []string `db:"field_order"` 13 | } 14 | 15 | const TABLE_DATA_MODEL_OPTIONS = "data_model_options" 16 | 17 | var SelectDataModelOptionsColumns = utils.ColumnList[DbDataModelOptions]() 18 | 19 | func AdaptDataModelOptions(db DbDataModelOptions) (models.DataModelOptions, error) { 20 | return models.DataModelOptions{ 21 | Id: db.Id, 22 | TableId: db.TableId, 23 | DisplayedFields: db.DisplayedFields, 24 | FieldOrder: db.FieldOrder, 25 | }, nil 26 | } 27 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_data_model_pivot.go: 
-------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/utils" 8 | "github.com/jackc/pgx/v5/pgtype" 9 | ) 10 | 11 | type DbPivot struct { 12 | Id string `db:"id"` 13 | BaseTableId string `db:"base_table_id"` 14 | CreatedAt time.Time `db:"created_at"` 15 | FieldId pgtype.Text `db:"field_id"` 16 | OrganizationId string `db:"organization_id"` 17 | PathLinkIds []string `db:"path_link_ids"` 18 | } 19 | 20 | const TABLE_DATA_MODEL_PIVOTS = "data_model_pivots" 21 | 22 | var SelectPivotColumns = utils.ColumnList[DbPivot]() 23 | 24 | func AdaptPivotMetadata(dbPivot DbPivot) (models.PivotMetadata, error) { 25 | pivot := models.PivotMetadata{ 26 | Id: dbPivot.Id, 27 | OrganizationId: dbPivot.OrganizationId, 28 | CreatedAt: dbPivot.CreatedAt, 29 | 30 | BaseTableId: dbPivot.BaseTableId, 31 | PathLinkIds: dbPivot.PathLinkIds, 32 | } 33 | if dbPivot.FieldId.Valid { 34 | pivot.FieldId = &dbPivot.FieldId.String 35 | } 36 | 37 | return pivot, nil 38 | } 39 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_decisions_to_create.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/utils" 8 | ) 9 | 10 | type DecisionToCreate struct { 11 | Id string `db:"id"` 12 | ScheduledExecutionId string `db:"scheduled_execution_id"` 13 | ObjectId string `db:"object_id"` 14 | Status string `db:"status"` 15 | CreatedAt time.Time `db:"created_at"` 16 | UpdateAt time.Time `db:"updated_at"` 17 | } 18 | 19 | const TABLE_DECISIONS_TO_CREATE = "decisions_to_create" 20 | 21 | var DecisionToCreateFields = utils.ColumnList[DecisionToCreate]() 22 | 23 | func AdaptDecisionToCreate(db DecisionToCreate) 
(models.DecisionToCreate, error) { 24 | return models.DecisionToCreate{ 25 | Id: db.Id, 26 | ScheduledExecutionId: db.ScheduledExecutionId, 27 | ObjectId: db.ObjectId, 28 | Status: models.DecisionToCreateStatus(db.Status), 29 | CreatedAt: db.CreatedAt, 30 | UpdateAt: db.UpdateAt, 31 | }, nil 32 | } 33 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_offloading_watermark.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/utils" 8 | ) 9 | 10 | type DbOffloadingWatermark struct { 11 | OrgId string `db:"org_id"` 12 | TableName string `db:"table_name"` 13 | WatermarkTime time.Time `db:"watermark_time"` 14 | WatermarkId string `db:"watermark_id"` 15 | CreatedAt time.Time `db:"created_at"` 16 | UpdatedAt time.Time `db:"updated_at"` 17 | } 18 | 19 | const TABLE_OFFLOADING_WATERMARKS = "offloading_watermarks" 20 | 21 | var SelectOffloadingWatermarkColumn = utils.ColumnList[DbOffloadingWatermark]() 22 | 23 | func AdaptOffloadingWatermark(db DbOffloadingWatermark) (models.OffloadingWatermark, error) { 24 | return models.OffloadingWatermark{ 25 | OrgId: db.OrgId, 26 | TableName: db.TableName, 27 | WatermarkTime: db.WatermarkTime, 28 | WatermarkId: db.WatermarkId, 29 | CreatedAt: db.CreatedAt, 30 | UpdatedAt: db.UpdatedAt, 31 | }, nil 32 | } 33 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_partner.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/utils" 8 | ) 9 | 10 | type DBPartner struct { 11 | Id string `db:"id"` 12 | CreatedAt time.Time `db:"created_at"` 13 | Name string `db:"name"` 14 | Bic 
string `db:"bic"` 15 | } 16 | 17 | const TABLE_PARTNERS = "partners" 18 | 19 | var PartnerFields = utils.ColumnList[DBPartner]() 20 | 21 | func AdaptPartner(db DBPartner) (models.Partner, error) { 22 | return models.Partner{ 23 | Id: db.Id, 24 | CreatedAt: db.CreatedAt, 25 | Name: db.Name, 26 | Bic: db.Bic, 27 | }, nil 28 | } 29 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_phantom_decision.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import "time" 4 | 5 | const TABLE_PHANTOM_DECISIONS = "phantom_decisions" 6 | 7 | type DbPhantomDecision struct { 8 | Id string `db:"id"` 9 | OrganizationId string `db:"org_id"` 10 | CreatedAt time.Time `db:"created_at"` 11 | Outcome string `db:"outcome"` 12 | ScenarioId string `db:"scenario_id"` 13 | Score int `db:"score"` 14 | ScenarioIterationId string `db:"scenario_iteration_id"` 15 | TestRunId string `db:"test_run_id"` 16 | ScenarioVersion string `db:"scenario_version"` 17 | } 18 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_sanction_check_file.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/utils" 8 | ) 9 | 10 | type DBSanctionCheckFile struct { 11 | Id string `db:"id"` 12 | SanctionCheckId string `db:"sanction_check_id"` 13 | BucketName string `db:"bucket_name"` 14 | FileReference string `db:"file_reference"` 15 | FileName string `db:"file_name"` 16 | CreatedAt time.Time `db:"created_at"` 17 | } 18 | 19 | const TABLE_SANCTION_CHECK_FILES = "sanction_check_files" 20 | 21 | var SelectSanctionCheckFileColumn = utils.ColumnList[DBSanctionCheckFile]() 22 | 23 | func AdaptSanctionCheckFile(db DBSanctionCheckFile) (models.SanctionCheckFile, error) { 24 | return 
models.SanctionCheckFile{ 25 | Id: db.Id, 26 | SanctionCheckId: db.SanctionCheckId, 27 | CreatedAt: db.CreatedAt, 28 | BucketName: db.BucketName, 29 | FileName: db.FileName, 30 | FileReference: db.FileReference, 31 | }, nil 32 | } 33 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_sanction_check_whitelist.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/utils" 8 | ) 9 | 10 | const TABLE_SANCTION_CHECK_WHITELISTS = "sanction_check_whitelists" 11 | 12 | type DBSanctionCheckWhitelists struct { 13 | Id string `db:"id"` 14 | OrgId string `db:"org_id"` 15 | CounterpartyId string `db:"counterparty_id"` 16 | EntityId string `db:"entity_id"` 17 | WhitelistedBy *string `db:"whitelisted_by"` 18 | CreatedAt time.Time `db:"created_at"` 19 | } 20 | 21 | var SanctionCheckWhitelistColumnList = utils.ColumnList[DBSanctionCheckWhitelists]() 22 | 23 | func AdaptSanctionCheckWhitelist(db DBSanctionCheckWhitelists) (models.SanctionCheckWhitelist, error) { 24 | return models.SanctionCheckWhitelist{ 25 | Id: db.Id, 26 | OrgId: db.OrgId, 27 | CounterpartyId: db.CounterpartyId, 28 | EntityId: db.EntityId, 29 | WhitelistedBy: db.WhitelistedBy, 30 | CreatedAt: db.CreatedAt, 31 | }, nil 32 | } 33 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_sanction_match_comment.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/utils" 8 | ) 9 | 10 | const TABLE_SANCTION_CHECK_MATCH_COMMENTS = "sanction_check_match_comments" 11 | 12 | var SelectSanctionCheckMatchCommentsColumn = utils.ColumnList[DBSanctionCheckMatchComment]() 13 
| 14 | type DBSanctionCheckMatchComment struct { 15 | Id string `db:"id"` 16 | SanctionCheckMatchId string `db:"sanction_check_match_id"` 17 | CommentedBy string `db:"commented_by"` 18 | Comment string `db:"comment"` 19 | CreatedAt time.Time `db:"created_at"` 20 | } 21 | 22 | func AdaptSanctionCheckMatchComment(dto DBSanctionCheckMatchComment) (models.SanctionCheckMatchComment, error) { 23 | return models.SanctionCheckMatchComment{ 24 | Id: dto.Id, 25 | MatchId: dto.SanctionCheckMatchId, 26 | CommenterId: models.UserId(dto.CommentedBy), 27 | Comment: dto.Comment, 28 | CreatedAt: dto.CreatedAt, 29 | }, nil 30 | } 31 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_transfer_alerts.go: -------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/utils" 8 | ) 9 | 10 | type DBTransferAlert struct { 11 | Id string `db:"id"` 12 | TransferId string `db:"transfer_id"` 13 | OrganizationId string `db:"organization_id"` 14 | SenderPartnerId string `db:"sender_partner_id"` 15 | BeneficiaryPartnerId string `db:"beneficiary_partner_id"` 16 | CreatedAt time.Time `db:"created_at"` 17 | Status string `db:"status"` 18 | 19 | Message string `db:"message"` 20 | TransferEndToEndId string `db:"transfer_end_to_end_id"` 21 | BeneficiaryIban string `db:"beneficiary_iban"` 22 | SenderIban string `db:"sender_iban"` 23 | } 24 | 25 | const TABLE_TRANSFER_ALERTS = "transfer_alerts" 26 | 27 | var SelectTransferAlertsColumn = utils.ColumnList[DBTransferAlert]() 28 | 29 | func AdaptTransferAlert(db DBTransferAlert) (models.TransferAlert, error) { 30 | return models.TransferAlert(db), nil 31 | } 32 | -------------------------------------------------------------------------------- /repositories/dbmodels/db_transfer_mappings.go: 
-------------------------------------------------------------------------------- 1 | package dbmodels 2 | 3 | import ( 4 | "time" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/utils" 8 | ) 9 | 10 | type DBTransferMapping struct { 11 | Id string `db:"id"` 12 | ClientTransferId string `db:"client_transfer_id"` 13 | CreatedAt time.Time `db:"created_at"` 14 | OrganizationId string `db:"organization_id"` 15 | PartnerId string `db:"partner_id"` 16 | } 17 | 18 | const TABLE_TRANSFER_MAPPINGS = "transfer_mappings" 19 | 20 | var SelectTransferMappingsColumn = utils.ColumnList[DBTransferMapping]() 21 | 22 | func AdaptTransferMapping(db DBTransferMapping) (models.TransferMapping, error) { 23 | return models.TransferMapping(db), nil 24 | } 25 | -------------------------------------------------------------------------------- /repositories/errors.go: -------------------------------------------------------------------------------- 1 | package repositories 2 | 3 | import ( 4 | "github.com/jackc/pgerrcode" 5 | "github.com/jackc/pgx/v5/pgconn" 6 | "github.com/pkg/errors" 7 | ) 8 | 9 | func IsUniqueViolationError(err error) bool { 10 | var pgxErr *pgconn.PgError 11 | return errors.As(err, &pgxErr) && pgxErr.Code == pgerrcode.UniqueViolation 12 | } 13 | -------------------------------------------------------------------------------- /repositories/eval_scenario_testrun.go: -------------------------------------------------------------------------------- 1 | package repositories 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | ) 8 | 9 | type EvalScenarioRepository interface { 10 | GetScenarioIteration(ctx context.Context, exec Executor, scenarioIterationId string) (models.ScenarioIteration, error) 11 | } 12 | 13 | type EvalSanctionCheckConfigRepository interface { 14 | GetSanctionCheckConfig(ctx context.Context, exec Executor, scenarioIterationId string) (*models.SanctionCheckConfig, error) 15 | 
} 16 | -------------------------------------------------------------------------------- /repositories/firebase_token_repository.go: -------------------------------------------------------------------------------- 1 | package repositories 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | ) 8 | 9 | type FireBaseTokenRepository interface { 10 | VerifyFirebaseToken(ctx context.Context, firebaseToken string) (models.FirebaseIdentity, error) 11 | } 12 | -------------------------------------------------------------------------------- /repositories/fixtures/opensanctions/response_full.json: -------------------------------------------------------------------------------- 1 | { 2 | "responses": { 3 | "test": { 4 | "status": 200, 5 | "results": [ 6 | { 7 | "id": "UNIQUEID", 8 | "schema": "Person", 9 | "properties": { 10 | "name": ["Bob", "Joe"] 11 | }, 12 | "match": true 13 | }, 14 | { 15 | "id": "UNIQUEID2", 16 | "schema": "Business", 17 | "properties": { 18 | "name": ["ACME Inc."] 19 | }, 20 | "match": true 21 | } 22 | ], 23 | "total": { 24 | "value": 2 25 | } 26 | } 27 | }, 28 | "limit": 1 29 | } 30 | -------------------------------------------------------------------------------- /repositories/fixtures/opensanctions/response_partial.json: -------------------------------------------------------------------------------- 1 | { 2 | "responses": { 3 | "test": { 4 | "status": 200, 5 | "results": [ 6 | { 7 | "id": "UNIQUEID", 8 | "schema": "Person", 9 | "properties": { 10 | "name": ["Bob", "Joe"] 11 | }, 12 | "match": true 13 | } 14 | ], 15 | "total": { 16 | "value": 10 17 | } 18 | } 19 | }, 20 | "limit": 1 21 | } 22 | -------------------------------------------------------------------------------- /repositories/httpmodels/http_name_recognition.go: -------------------------------------------------------------------------------- 1 | package httpmodels 2 | 3 | type HTTPNameRecognitionMatch struct { 4 | Type string `json:"type"` 5 | Text 
string `json:"text"` 6 | } 7 | -------------------------------------------------------------------------------- /repositories/liveness.go: -------------------------------------------------------------------------------- 1 | package repositories 2 | 3 | import "context" 4 | 5 | func (repo *MarbleDbRepository) Liveness(ctx context.Context, exec Executor) error { 6 | sql := "SELECT 1" 7 | row := exec.QueryRow(ctx, sql) 8 | var result int 9 | if err := row.Scan(&result); err != nil { 10 | return err 11 | } 12 | return nil 13 | } 14 | -------------------------------------------------------------------------------- /repositories/marble_db_repository.go: -------------------------------------------------------------------------------- 1 | package repositories 2 | 3 | type MarbleDbRepository struct{} 4 | -------------------------------------------------------------------------------- /repositories/migrations/20230515205456_init_schema.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | -- create and make default the marble schema 4 | CREATE SCHEMA IF NOT EXISTS marble; 5 | 6 | do $$ 7 | begin 8 | execute 'GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA marble TO ' || current_user; 9 | end 10 | $$; 11 | 12 | DO $$ 13 | BEGIN 14 | EXECUTE 'ALTER DATABASE ' || current_database() || ' SET search_path TO marble, public'; 15 | END 16 | $$; 17 | 18 | DO $$ 19 | BEGIN 20 | EXECUTE format('ALTER ROLE %I SET search_path = marble, public;', current_user); 21 | END 22 | $$; 23 | 24 | -- also set it for the current session 25 | SET 26 | SEARCH_PATH = marble, 27 | public; 28 | 29 | -- add UUID extension 30 | CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; 31 | 32 | -- +goose StatementEnd 33 | -- +goose Down 34 | -- +goose StatementBegin 35 | DROP SCHEMA IF EXISTS marble CASCADE; 36 | 37 | -- +goose StatementEnd -------------------------------------------------------------------------------- 
/repositories/migrations/20230522211012_decisions_index.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE decisions ADD COLUMN trigger_object_type VARCHAR; 4 | ALTER TABLE decisions ADD COLUMN trigger_object json; 5 | CREATE INDEX decisions_org_id_idx ON decisions(org_id, created_at DESC); 6 | -- +goose StatementEnd 7 | 8 | -- +goose Down 9 | -- +goose StatementBegin 10 | ALTER TABLE decisions DROP COLUMN trigger_object_type; 11 | ALTER TABLE decisions DROP COLUMN trigger_object; 12 | DROP INDEX decisions_org_id_idx; 13 | -- +goose StatementEnd 14 | -------------------------------------------------------------------------------- /repositories/migrations/20230524173849_transaction_currency_nullable.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE accounts ALTER COLUMN currency DROP NOT NULL; 4 | -- +goose StatementEnd 5 | 6 | -- +goose Down 7 | -- +goose StatementBegin 8 | ALTER TABLE accounts ALTER COLUMN currency SET NOT NULL; 9 | -- +goose StatementEnd 10 | -------------------------------------------------------------------------------- /repositories/migrations/20230530164800_add_users.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | -- users 4 | CREATE TABLE users( 5 | id uuid DEFAULT uuid_generate_v4(), 6 | email VARCHAR NOT NULL, 7 | firebase_uid VARCHAR NOT NULL, 8 | role INTEGER NOT NULL, 9 | organization_id uuid, 10 | PRIMARY KEY(id) 11 | ); 12 | 13 | CREATE UNIQUE INDEX users_email_idx ON users(email); 14 | CREATE INDEX users_firebase_idx ON users(firebase_uid); 15 | CREATE INDEX users_organizationid_idx ON users(organization_id); 16 | -- +goose StatementEnd 17 | 18 | -- +goose Down 19 | -- +goose StatementBegin 20 | DROP INDEX users_email_idx; 21 | DROP INDEX 
users_firebase_idx; 22 | DROP INDEX users_organizationid_idx; 23 | DROP TABLE users; 24 | -- +goose StatementEnd 25 | -------------------------------------------------------------------------------- /repositories/migrations/20230606090000_add_role_to_api_key.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | -- add default role to tokens 4 | -- default is API_CLIENT=5 5 | ALTER TABLE tokens RENAME TO apikeys; 6 | ALTER TABLE apikeys RENAME COLUMN Token TO key; 7 | ALTER TABLE apikeys ADD COLUMN role INTEGER NOT NULL DEFAULT 5; 8 | CREATE UNIQUE INDEX apikey_key_idx ON apikeys(key); 9 | 10 | -- +goose StatementEnd 11 | 12 | -- +goose Down 13 | -- +goose StatementBegin 14 | DROP INDEX apikey_key_idx; 15 | ALTER TABLE apiKeys DROP COLUMN role; 16 | ALTER TABLE apikeys RENAME COLUMN key TO Token; 17 | ALTER TABLE apikeys RENAME TO tokens; 18 | 19 | -- +goose StatementEnd 20 | -------------------------------------------------------------------------------- /repositories/migrations/20230606154759_create_client_tables.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE TABLE 4 | client_tables ( 5 | id uuid DEFAULT uuid_generate_v4 (), 6 | org_id uuid, 7 | schema_name VARCHAR(255) NOT NULL, 8 | PRIMARY KEY (id), 9 | CONSTRAINT fk_client_tables_organization FOREIGN KEY (org_id) REFERENCES organizations (id) ON DELETE CASCADE 10 | ); 11 | 12 | CREATE UNIQUE INDEX client_tables_org_id_idx ON client_tables (org_id); 13 | 14 | -- +goose StatementEnd 15 | -- +goose Down 16 | -- +goose StatementBegin 17 | DROP INDEX client_tables_org_id_idx; 18 | 19 | DROP TABLE client_tables; 20 | 21 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20230615091308_rename_clienttables_organization_schema.sql: 
-------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | 4 | ALTER TABLE client_tables RENAME TO organizations_schema; 5 | ALTER INDEX client_tables_org_id_idx RENAME TO organization_schema_org_id_idx; 6 | ALTER TABLE organizations_schema RENAME CONSTRAINT fk_client_tables_organization TO fk_organization_schema_organization; 7 | 8 | -- +goose StatementEnd 9 | 10 | -- +goose Down 11 | -- +goose StatementBegin 12 | 13 | ALTER TABLE organizations_schema RENAME CONSTRAINT fk_organization_schema_organization TO fk_client_tables_organization; 14 | ALTER INDEX organization_schema_org_id_idx RENAME TO client_tables_org_id_idx; 15 | ALTER TABLE organizations_schema RENAME TO client_tables; 16 | 17 | -- +goose StatementEnd 18 | -------------------------------------------------------------------------------- /repositories/migrations/20230615103735_add_export_shedulded_execution_s3.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE organizations ADD COLUMN export_scheduled_execution_s3 VARCHAR DEFAULT ''; 4 | -- +goose StatementEnd 5 | 6 | -- +goose Down 7 | -- +goose StatementBegin 8 | ALTER TABLE organizations DROP COLUMN export_scheduled_execution_s3; 9 | -- +goose StatementEnd 10 | -------------------------------------------------------------------------------- /repositories/migrations/20230616120000_batch_executions.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE scenario_iterations ADD COLUMN batch_trigger_sql VARCHAR; 4 | ALTER TABLE scenario_iterations ADD COLUMN schedule VARCHAR; 5 | 6 | 7 | CREATE TABLE scheduled_executions ( 8 | id uuid DEFAULT uuid_generate_v4(), 9 | organization_id uuid NOT NULL, 10 | scenario_id uuid NOT NULL, 11 | scenario_iteration_id uuid NOT NULL, 12 | status VARCHAR NOT NULL, 
13 | started_at TIMESTAMP NOT NULL DEFAULT NOW(), 14 | finished_at TIMESTAMP, 15 | PRIMARY KEY(id) 16 | ); 17 | -- +goose StatementEnd 18 | 19 | -- +goose Down 20 | -- +goose StatementBegin 21 | ALTER TABLE scenario_iterations DROP COLUMN batch_trigger_sql; 22 | ALTER TABLE scenario_iterations DROP COLUMN schedule; 23 | DROP TABLE scheduled_executions; 24 | -- +goose StatementEnd 25 | -------------------------------------------------------------------------------- /repositories/migrations/20230616133343_decision_scheduled.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE decisions ADD COLUMN scheduled_execution_id uuid; 4 | CREATE INDEX decisions_scheduled_execution_id_idx ON decisions(scheduled_execution_id, created_at DESC); 5 | -- +goose StatementEnd 6 | 7 | -- +goose Down 8 | -- +goose StatementBegin 9 | DROP INDEX decisions_scheduled_execution_id_idx; 10 | ALTER TABLE decisions DROP COLUMN scheduled_execution_id; 11 | -- +goose StatementEnd 12 | -------------------------------------------------------------------------------- /repositories/migrations/20230622150504_batch_trigger_sql.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE scenario_iterations ALTER COLUMN batch_trigger_sql set default ''; 4 | ALTER TABLE scenario_iterations ALTER COLUMN schedule set default ''; 5 | 6 | -- +goose StatementEnd 7 | 8 | -- +goose Down 9 | -- +goose StatementBegin 10 | ALTER TABLE scenario_iterations ALTER COLUMN batch_trigger_sql DROP DEFAULT; 11 | ALTER TABLE scenario_iterations ALTER COLUMN schedule DROP DEFAULT; 12 | -- +goose StatementEnd 13 | -------------------------------------------------------------------------------- /repositories/migrations/20230712141856_add_custom_list_composite_key.sql: -------------------------------------------------------------------------------- 
1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE custom_lists ADD CONSTRAINT custom_lists_organization_id_name_key UNIQUE (organization_id, name) 4 | -- +goose StatementEnd 5 | 6 | -- +goose Down 7 | -- +goose StatementBegin 8 | ALTER TABLE custom_lists DROP CONSTRAINT custom_lists_organization_id_name_key; 9 | -- +goose StatementEnd 10 | -------------------------------------------------------------------------------- /repositories/migrations/20230717175911_add_ast_expression_to_rule.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE scenario_iteration_rules ADD COLUMN formula_ast_expression json; 4 | -- +goose StatementEnd 5 | 6 | -- +goose Down 7 | -- +goose StatementBegin 8 | ALTER TABLE scenario_iteration_rules DROP COLUMN formula_ast_expression; 9 | -- +goose StatementEnd 10 | -------------------------------------------------------------------------------- /repositories/migrations/20230726153023_add_trigger_condition_ast_expression.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE scenario_iterations ADD COLUMN trigger_condition_ast_expression json; 4 | -- +goose StatementEnd 5 | 6 | -- +goose Down 7 | -- +goose StatementBegin 8 | ALTER TABLE scenario_iterations DROP COLUMN trigger_condition_ast_expression; 9 | -- +goose StatementEnd 10 | -------------------------------------------------------------------------------- /repositories/migrations/20230728164957_remove_formula_and_trigger_condition.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE scenario_iterations DROP COLUMN trigger_condition; 4 | ALTER TABLE scenario_iteration_rules DROP COLUMN formula; 5 | -- +goose StatementEnd 6 | 7 | -- +goose Down 8 | -- +goose StatementBegin 9 | ALTER TABLE 
scenario_iteration_rules ADD COLUMN formula json NOT NULL; 10 | ALTER TABLE scenario_iterations ADD COLUMN trigger_condition json; 11 | -- +goose StatementEnd 12 | -------------------------------------------------------------------------------- /repositories/migrations/20230921150013_add_index_on_decision_rules.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE INDEX decision_rules_decisionId_idx ON decision_rules(decision_id); 4 | -- +goose StatementEnd 5 | 6 | -- +goose Down 7 | -- +goose StatementBegin 8 | DROP INDEX decision_rules_decisionId_idx; 9 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20230926112219_add_number_of_created_decisions.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE scheduled_executions ADD COLUMN number_of_created_decisions INTEGER NOT NULL DEFAULT -1; 4 | -- +goose StatementEnd 5 | 6 | -- +goose Down 7 | -- +goose StatementBegin 8 | ALTER TABLE scheduled_executions DROP COLUMN number_of_created_decisions; 9 | -- +goose StatementEnd 10 | -------------------------------------------------------------------------------- /repositories/migrations/20230927110030_add_upload_logs.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE TABLE upload_logs ( 4 | id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), 5 | org_id UUID REFERENCES organizations(id) ON DELETE CASCADE NOT NULL, 6 | user_id UUID REFERENCES users NOT NULL, 7 | file_name VARCHAR NOT NULL, 8 | status VARCHAR NOT NULL, 9 | started_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), 10 | finished_at TIMESTAMP WITH TIME ZONE, 11 | lines_processed INTEGER NOT NULL DEFAULT 0 12 | ) 13 | -- +goose StatementEnd 14 | 15 | -- +goose Down 
16 | -- +goose StatementBegin 17 | DROP TABLE upload_logs; 18 | -- +goose StatementEnd 19 | -------------------------------------------------------------------------------- /repositories/migrations/20230928104726_add_constraint_on_data_model.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | 4 | ALTER TABLE data_model_tables 5 | ADD CONSTRAINT unique_data_model_tables_name 6 | UNIQUE (organization_id, name); 7 | 8 | ALTER TABLE data_model_fields 9 | ADD CONSTRAINT unique_data_model_fields_name 10 | UNIQUE (table_id, name); 11 | 12 | ALTER TABLE data_model_links 13 | ADD CONSTRAINT unique_data_model_links 14 | UNIQUE (parent_table_id, parent_field_id, child_table_id, child_field_id); 15 | 16 | -- +goose StatementEnd 17 | 18 | -- +goose Down 19 | -- +goose StatementBegin 20 | 21 | ALTER TABLE data_model_tables DROP CONSTRAINT unique_data_model_tables_name; 22 | ALTER TABLE data_model_fields DROP CONSTRAINT unique_data_model_fields_name; 23 | ALTER TABLE data_model_links DROP CONSTRAINT unique_data_model_links; 24 | 25 | -- +goose StatementEnd 26 | -------------------------------------------------------------------------------- /repositories/migrations/20230929112024_add_table_name_and_index_to_upload_logs.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE upload_logs ADD COLUMN table_name VARCHAR NOT NULL DEFAULT ''; 4 | CREATE INDEX idx_table_name_org_id ON upload_logs (table_name, org_id); 5 | -- +goose StatementEnd 6 | 7 | -- +goose Down 8 | -- +goose StatementBegin 9 | ALTER TABLE upload_logs DROP COLUMN table_name; 10 | DROP INDEX idx_table_name_org_id; 11 | -- +goose StatementEnd 12 | -------------------------------------------------------------------------------- /repositories/migrations/20231010091555_add_manual_to_scheduled_executions.sql: 
-------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE scheduled_executions ADD COLUMN manual boolean NOT NULL DEFAULT false; 4 | UPDATE scheduled_executions SET manual = false; 5 | CREATE INDEX scheduled_executions_scenario_id_idx ON scheduled_executions(scenario_id); 6 | CREATE INDEX scheduled_executions_organization_id_idx ON scheduled_executions(organization_id); 7 | -- +goose StatementEnd 8 | 9 | -- +goose Down 10 | -- +goose StatementBegin 11 | ALTER TABLE scheduled_executions DROP COLUMN manual; 12 | DROP INDEX scheduled_executions_scenario_id_idx; 13 | DROP INDEX scheduled_executions_organization_id_idx; 14 | -- +goose StatementEnd 15 | -------------------------------------------------------------------------------- /repositories/migrations/20231011110218_add_data_model_enums.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | 4 | ALTER TABLE data_model_fields 5 | ADD COLUMN is_enum BOOLEAN NOT NULL DEFAULT FALSE; 6 | 7 | CREATE TABLE data_model_enum_values ( 8 | field_id UUID REFERENCES data_model_fields ON DELETE CASCADE NOT NULL, 9 | value TEXT NOT NULL, 10 | created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), 11 | last_seen TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW() 12 | ); 13 | 14 | ALTER TABLE data_model_enum_values 15 | ADD CONSTRAINT unique_data_model_enum_values_field_id_value 16 | UNIQUE (field_id, value); 17 | 18 | CREATE INDEX data_model_enum_values_field_id_last_seen ON data_model_enum_values(field_id, last_seen DESC); 19 | 20 | -- +goose StatementEnd 21 | 22 | -- +goose Down 23 | -- +goose StatementBegin 24 | 25 | ALTER TABLE data_model_fields DROP COLUMN is_enum; 26 | DROP TABLE data_model_enum_values; 27 | 28 | -- +goose StatementEnd 29 | -------------------------------------------------------------------------------- 
/repositories/migrations/20231102181134_add_indexes_to_decisions.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE INDEX scenario_id_idx ON decisions(scenario_id); 4 | CREATE INDEX created_at_idx ON decisions(created_at); 5 | CREATE INDEX outcome_idx ON decisions(outcome); 6 | CREATE INDEX trigger_object_type_idx ON decisions(trigger_object_type); 7 | -- +goose StatementEnd 8 | 9 | -- +goose Down 10 | -- +goose StatementBegin 11 | DROP INDEX scenario_id_idx; 12 | DROP INDEX created_at_idx; 13 | DROP INDEX outcome_idx; 14 | DROP INDEX trigger_object_type_idx; 15 | -- +goose StatementEnd 16 | -------------------------------------------------------------------------------- /repositories/migrations/20231103160436_add_decision_indexes.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | DROP INDEX scenario_id_idx; 4 | DROP INDEX created_at_idx; 5 | DROP INDEX outcome_idx; 6 | DROP INDEX trigger_object_type_idx; 7 | CREATE INDEX scenario_id_idx ON decisions(org_id, scenario_id, created_at DESC); 8 | CREATE INDEX outcome_idx ON decisions(org_id, outcome, created_at DESC); 9 | CREATE INDEX trigger_object_type_idx ON decisions(org_id, trigger_object_type, created_at DESC); 10 | -- +goose StatementEnd 11 | 12 | -- +goose Down 13 | -- +goose StatementBegin 14 | -- Drop the composite indexes first (after the Up they own these names), then recreate the original single-column indexes, including created_at_idx, which the Up dropped but was previously never restored on Down. 15 | DROP INDEX scenario_id_idx; 16 | DROP INDEX outcome_idx; 17 | DROP INDEX trigger_object_type_idx; 18 | CREATE INDEX scenario_id_idx ON decisions(scenario_id); 19 | CREATE INDEX created_at_idx ON decisions(created_at); 20 | CREATE INDEX outcome_idx ON decisions(outcome); 21 | CREATE INDEX trigger_object_type_idx ON decisions(trigger_object_type); 22 | -- +goose StatementEnd 23 | -------------------------------------------------------------------------------- /repositories/migrations/20231113160835_create_case_table.sql:
-------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE TABLE cases ( 4 | id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), 5 | org_id UUID REFERENCES organizations(id) ON DELETE CASCADE NOT NULL, 6 | name text NOT NULL, 7 | status VARCHAR NOT NULL DEFAULT 'open', 8 | description text, 9 | created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW() 10 | ); 11 | 12 | CREATE INDEX case_org_id_idx ON cases(org_id, created_at DESC); 13 | CREATE INDEX case_status_idx ON cases(org_id, status, created_at DESC); 14 | -- +goose StatementEnd 15 | 16 | -- +goose Down 17 | -- +goose StatementBegin 18 | DROP INDEX case_org_id_idx; 19 | DROP INDEX case_status_idx; 20 | DROP TABLE cases; 21 | -- +goose StatementEnd 22 | -------------------------------------------------------------------------------- /repositories/migrations/20231113160850_add_case_id_to_decisions.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE decisions 4 | ADD COLUMN case_id UUID; 5 | CREATE INDEX decisions_case_id_idx ON decisions(org_id, case_id); 6 | -- +goose StatementEnd 7 | 8 | -- +goose Down 9 | -- +goose StatementBegin 10 | DROP INDEX decisions_case_id_idx; 11 | ALTER TABLE decisions DROP COLUMN case_id; 12 | -- +goose StatementEnd 13 | -------------------------------------------------------------------------------- /repositories/migrations/20231116141112_add_filtered_unique_index_on_list_names.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE UNIQUE INDEX custom_list_unique_name_idx ON custom_lists (organization_id, name) WHERE deleted_at IS NULL; 4 | ALTER TABLE custom_lists DROP CONSTRAINT custom_lists_organization_id_name_key; 5 | -- +goose StatementEnd 6 | 7 | -- +goose Down 8 | -- +goose StatementBegin 9 | DROP INDEX 
custom_list_unique_name_idx; 10 | ALTER TABLE custom_lists ADD CONSTRAINT custom_lists_organization_id_name_key UNIQUE (organization_id, name) 11 | -- +goose StatementEnd 12 | -------------------------------------------------------------------------------- /repositories/migrations/20231117100514_create_case_events_table.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE TABLE case_events ( 4 | id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), 5 | case_id UUID REFERENCES cases(id) ON DELETE CASCADE NOT NULL, 6 | user_id UUID NOT NULL, 7 | event_type VARCHAR NOT NULL, 8 | created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), 9 | additional_note text, 10 | resource_id UUID, 11 | resource_type text, 12 | new_value VARCHAR, 13 | previous_value VARCHAR 14 | ); 15 | 16 | CREATE INDEX case_event_case_id_idx ON case_events(case_id, created_at DESC); 17 | -- +goose StatementEnd 18 | 19 | -- +goose Down 20 | -- +goose StatementBegin 21 | DROP INDEX case_event_case_id_idx; 22 | DROP TABLE case_events; 23 | -- +goose StatementEnd 24 | -------------------------------------------------------------------------------- /repositories/migrations/20231117100515_add_name_to_users.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE users ADD COLUMN first_name VARCHAR; 4 | ALTER TABLE users ADD COLUMN last_name VARCHAR; 5 | -- +goose StatementEnd 6 | 7 | -- +goose Down 8 | -- +goose StatementBegin 9 | ALTER TABLE users DROP COLUMN first_name; 10 | ALTER TABLE users DROP COLUMN last_name; 11 | -- +goose StatementEnd 12 | -------------------------------------------------------------------------------- /repositories/migrations/20231117100516_remove_case_description.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | 
ALTER TABLE cases DROP COLUMN description; 4 | ALTER TABLE case_events ALTER COLUMN new_value TYPE text; 5 | ALTER TABLE case_events ALTER COLUMN previous_value TYPE text; 6 | -- +goose StatementEnd 7 | 8 | -- +goose Down 9 | -- +goose StatementBegin 10 | ALTER TABLE cases ADD COLUMN description text; 11 | ALTER TABLE case_events ALTER COLUMN new_value TYPE varchar; 12 | ALTER TABLE case_events ALTER COLUMN previous_value TYPE varchar; 13 | -- +goose StatementEnd 14 | -------------------------------------------------------------------------------- /repositories/migrations/20231120110742_create_case_contributors.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE TABLE case_contributors ( 4 | id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), 5 | case_id UUID REFERENCES cases(id) ON DELETE CASCADE NOT NULL, 6 | user_id UUID NOT NULL, 7 | created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW() 8 | ); 9 | 10 | CREATE UNIQUE INDEX case_contributors_case_id_user_id_idx on case_contributors(case_id, user_id); 11 | -- +goose StatementEnd 12 | 13 | -- +goose Down 14 | -- +goose StatementBegin 15 | DROP INDEX case_contributors_case_id_user_id_idx; 16 | DROP TABLE case_contributors; 17 | -- +goose StatementEnd 18 | -------------------------------------------------------------------------------- /repositories/migrations/20231122111904_change_user_id_foreign_key.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE case_contributors ADD CONSTRAINT case_contributors_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; 4 | ALTER TABLE upload_logs DROP CONSTRAINT upload_logs_user_id_fkey; 5 | -- +goose StatementEnd 6 | 7 | -- +goose Down 8 | -- +goose StatementBegin 9 | ALTER TABLE case_contributors DROP CONSTRAINT case_contributors_user_id_fkey; 10 | ALTER TABLE upload_logs 
ADD CONSTRAINT upload_logs_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; 11 | -- +goose StatementEnd 12 | -------------------------------------------------------------------------------- /repositories/migrations/20231127120900_add_inbox_id_on_cases.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | 4 | ALTER TABLE cases ADD COLUMN inbox_id UUID REFERENCES inboxes ON DELETE CASCADE; 5 | 6 | -- +goose StatementEnd 7 | 8 | -- +goose Down 9 | -- +goose StatementBegin 10 | 11 | ALTER TABLE cases DROP COLUMN inbox_id; 12 | 13 | -- +goose StatementEnd 14 | -------------------------------------------------------------------------------- /repositories/migrations/20231201101404_remove_inbox_from_tags.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE tags DROP COLUMN inbox_id; 4 | ALTER TABLE tags ADD COLUMN org_id UUID NOT NULL REFERENCES organizations(id) ON DELETE CASCADE; 5 | DROP INDEX IF EXISTS tags_unique_name_inbox_id; 6 | CREATE UNIQUE INDEX tags_unique_name_org_id ON tags (name, org_id) WHERE deleted_at IS NULL; 7 | -- +goose StatementEnd 8 | 9 | -- +goose Down 10 | -- +goose StatementBegin 11 | DROP INDEX tags_unique_name_org_id; 12 | ALTER TABLE tags DROP COLUMN org_id; 13 | ALTER TABLE tags ADD COLUMN inbox_id UUID NOT NULL REFERENCES inboxes(id) ON DELETE CASCADE; 14 | CREATE UNIQUE INDEX tags_unique_name_inbox_id ON tags (name, inbox_id) WHERE deleted_at IS NULL; 15 | -- +goose StatementEnd 16 | -------------------------------------------------------------------------------- /repositories/migrations/20231215152328_add_deleted_at_to_users.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE users ADD COLUMN deleted_at TIMESTAMP WITH TIME ZONE; 4 | -- +goose 
StatementEnd 5 | 6 | -- +goose Down 7 | -- +goose StatementBegin 8 | ALTER TABLE users DROP COLUMN deleted_at; 9 | -- +goose StatementEnd 10 | -------------------------------------------------------------------------------- /repositories/migrations/20231218220000_create_case_files_table.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE TABLE IF NOT EXISTS case_files ( 4 | id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), 5 | created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), 6 | case_id UUID NOT NULL REFERENCES cases(id) ON DELETE CASCADE, 7 | bucket_name VARCHAR(255) NOT NULL, 8 | file_reference VARCHAR(255) NOT NULL, 9 | file_name VARCHAR(255) NOT NULL 10 | ); 11 | CREATE UNIQUE INDEX case_files_unique_case_id_file_name ON case_files (case_id, bucket_name, file_reference); 12 | -- +goose StatementEnd 13 | 14 | -- +goose Down 15 | -- +goose StatementBegin 16 | DROP TABLE case_files; 17 | -- +goose StatementEnd 18 | -------------------------------------------------------------------------------- /repositories/migrations/20231222101032_change_user_email_index.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | DROP INDEX users_email_idx; 4 | CREATE UNIQUE INDEX users_email_idx ON users (email) WHERE deleted_at IS NOT NULL; 5 | 6 | -- +goose StatementEnd 7 | 8 | -- +goose Down 9 | -- +goose StatementBegin 10 | DROP INDEX users_email_idx; 11 | CREATE UNIQUE INDEX users_email_idx ON users (email); 12 | -- +goose StatementEnd 13 | -------------------------------------------------------------------------------- /repositories/migrations/20240112114607_fix_user_email_index.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | DROP INDEX users_email_idx; 4 | 5 | CREATE UNIQUE INDEX users_email_idx ON users 
(email) 6 | WHERE deleted_at IS NULL; 7 | 8 | -- +goose StatementEnd 9 | -- +goose Down 10 | -- +goose StatementBegin 11 | DROP INDEX users_email_idx; 12 | 13 | CREATE UNIQUE INDEX users_email_idx ON users (email) 14 | WHERE deleted_at IS NOT NULL; 15 | 16 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240124111516_create_analytics_schema.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | -- create the analytics schema and an analytics user 4 | CREATE SCHEMA IF NOT EXISTS analytics; 5 | 6 | do $$ 7 | begin 8 | execute 'GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA analytics TO ' || current_user; 9 | end 10 | $$; 11 | 12 | -- +goose StatementEnd 13 | -- +goose Down 14 | -- +goose StatementBegin 15 | DROP SCHEMA IF EXISTS analytics CASCADE; 16 | 17 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240201142144_remove_firebase_uid.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | DROP INDEX users_firebase_idx; 4 | ALTER TABLE users DROP COLUMN firebase_uid; 5 | -- +goose StatementEnd 6 | 7 | -- +goose Down 8 | -- +goose StatementBegin 9 | ALTER TABLE users ADD COLUMN firebase_uid VARCHAR NOT NULL; 10 | CREATE INDEX users_firebase_idx ON users(firebase_uid); 11 | -- +goose StatementEnd 12 | -------------------------------------------------------------------------------- /repositories/migrations/20240201155650_api_key_description.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE apikeys ADD COLUMN description VARCHAR(255) NOT NULL DEFAULT ''; 4 | -- +goose StatementEnd 5 | 6 | -- +goose Down 7 | -- +goose StatementBegin 8 | ALTER TABLE 
apikeys DROP COLUMN description; 9 | -- +goose StatementEnd 10 | -------------------------------------------------------------------------------- /repositories/migrations/20240202145805_remove_default_apikey_role.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE apikeys ALTER COLUMN role DROP DEFAULT; 4 | -- +goose StatementEnd 5 | 6 | -- +goose Down 7 | -- +goose StatementBegin 8 | ALTER TABLE apikeys ALTER COLUMN role SET DEFAULT 5; 9 | -- +goose StatementEnd 10 | -------------------------------------------------------------------------------- /repositories/migrations/20240223090700_data_model_link_delete_cascade.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE data_model_links 4 | DROP CONSTRAINT data_model_links_organization_id_fkey; 5 | 6 | ALTER TABLE data_model_links 7 | ADD CONSTRAINT data_model_links_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations ON DELETE CASCADE; 8 | 9 | -- +goose StatementEnd 10 | -- +goose Down 11 | -- +goose StatementBegin 12 | ALTER TABLE data_model_links 13 | DROP CONSTRAINT data_model_links_organization_id_fkey; 14 | 15 | ALTER TABLE data_model_links 16 | ADD CONSTRAINT data_model_links_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations; 17 | 18 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240223094300_drop_data_models.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | DROP TABLE data_models; 4 | 5 | DROP TYPE data_models_status; 6 | 7 | -- +goose StatementEnd 8 | -- +goose Down 9 | -- +goose StatementBegin 10 | CREATE TYPE data_models_status AS ENUM('validated', 'live', 'deprecated'); 11 | 12 | CREATE TABLE 13 |
data_models ( 14 | id uuid DEFAULT uuid_generate_v4 () PRIMARY KEY, 15 | org_id uuid REFERENCES organizations ON DELETE CASCADE NOT NULL, 16 | version VARCHAR NOT NULL, 17 | status data_models_status NOT NULL, 18 | tables json NOT NULL, 19 | deleted_at TIMESTAMP WITH TIME ZONE 20 | ); 21 | 22 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240223095300_drop_organization_database_name.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | --- those views are created outside if this migration file, but we need to drop it here becauses it uses the database_name column 4 | -- (it wil be recreated as the migrations script is run) 5 | DROP VIEW IF EXISTS analytics.organizations; 6 | 7 | ALTER TABLE organizations 8 | DROP COLUMN database_name; 9 | 10 | -- +goose StatementEnd 11 | -- +goose Down 12 | -- +goose StatementBegin 13 | ALTER TABLE organizations 14 | ADD COLUMN database_name VARCHAR NOT NULL; 15 | 16 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240301225900_case_event_userid_nullable.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE case_events 4 | ALTER COLUMN user_id 5 | DROP NOT NULL; 6 | 7 | -- +goose StatementEnd 8 | -- +goose Down 9 | -- +goose StatementBegin 10 | ALTER TABLE case_events 11 | ALTER COLUMN user_id 12 | SET NOT NULL; 13 | 14 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240304105230_scenario_decision_to_case_settings.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE scenarios 4 | ADD COLUMN decision_to_case_inbox_id 
UUID REFERENCES inboxes (id) ON DELETE SET NULL ON UPDATE CASCADE; 5 | 6 | ALTER TABLE scenarios 7 | ADD COLUMN decision_to_case_outcomes varchar(50) []; 8 | 9 | -- +goose StatementEnd 10 | -- +goose Down 11 | -- +goose StatementBegin 12 | ALTER TABLE scenarios 13 | DROP COLUMN decision_to_case_inbox_id; 14 | 15 | ALTER TABLE scenarios 16 | DROP COLUMN decision_to_case_outcomes; 17 | 18 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240304155400_hash_apikeys.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE apikeys 4 | ADD COLUMN key_hash bytea; 5 | 6 | ALTER TABLE apikeys 7 | ADD COLUMN created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(); 8 | 9 | UPDATE apikeys 10 | SET 11 | key_hash = sha256(key::bytea) 12 | where 13 | key_hash is null; 14 | 15 | -- +goose StatementEnd 16 | -- +goose Down 17 | -- +goose StatementBegin 18 | ALTER TABLE apikeys 19 | DROP COLUMN key_hash; 20 | 21 | ALTER TABLE apikeys 22 | DROP COLUMN created_at; 23 | 24 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240312104400_organization_unique_name.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE UNIQUE INDEX organization_name_unique_idx ON organizations (name) 4 | WHERE 5 | deleted_at IS NULL; 6 | 7 | -- +goose StatementEnd 8 | -- +goose Down 9 | -- +goose StatementBegin 10 | DROP INDEX organization_name_unique_idx; 11 | 12 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240312153600_iterationId_on_decisions_and_ruleId_on_decisionRules.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- 
+goose StatementBegin 3 | ALTER TABLE decisions 4 | ADD COLUMN scenario_iteration_id UUID REFERENCES scenario_iterations (id); 5 | 6 | ALTER TABLE decision_rules 7 | ADD COLUMN rule_id UUID REFERENCES scenario_iteration_rules (id); 8 | 9 | -- +goose StatementEnd 10 | -- +goose Down 11 | -- +goose StatementBegin 12 | ALTER TABLE decisions 13 | DROP COLUMN scenario_iteration_id; 14 | 15 | ALTER TABLE decision_rules 16 | DROP COLUMN rule_id; 17 | 18 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240313141600_apikeys_keyHash_index.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE apikeys 4 | ALTER COLUMN key_hash 5 | SET NOT NULL; 6 | 7 | CREATE UNIQUE INDEX apikeys_key_hash_index ON apikeys (key_hash) 8 | WHERE 9 | deleted_at IS NULL; 10 | 11 | -- +goose StatementEnd 12 | -- +goose Down 13 | -- +goose StatementBegin 14 | DROP INDEX apikeys_key_hash_index; 15 | 16 | ALTER TABLE apikeys 17 | ALTER COLUMN key_hash 18 | DROP NOT NULL; 19 | 20 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240325133100_store_api_key_prefix.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | --- those views are created outside if this migration file, but we need to drop it here becauses it uses the table name 4 | -- (it wil be recreated as the migrations script is run) 5 | DROP VIEW IF EXISTS analytics.apikeys; 6 | 7 | DROP INDEX apikey_key_idx; 8 | 9 | UPDATE apikeys 10 | SET 11 | key = SUBSTRING(key, 1, 3) 12 | where 13 | true; 14 | 15 | ALTER TABLE apikeys 16 | RENAME COLUMN key TO prefix; 17 | 18 | ALTER TABLE apikeys 19 | RENAME TO api_keys; 20 | 21 | -- +goose StatementEnd 22 | -- +goose Down 23 | -- +goose StatementBegin 24 | ALTER 
TABLE api_keys 25 | RENAME TO apikeys; 26 | 27 | ALTER TABLE apikeys 28 | RENAME COLUMN prefix TO key; 29 | 30 | CREATE UNIQUE INDEX apikey_key_idx ON apikeys (key); 31 | 32 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240325142444_evaluation_on_decisionrules.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE decision_rules 4 | ADD COLUMN rule_evaluation jsonb; 5 | 6 | -- +goose StatementEnd 7 | 8 | -- +goose Down 9 | -- +goose StatementBegin 10 | ALTER TABLE decision_rules 11 | DROP COLUMN rule_evaluation; 12 | -- +goose StatementEnd 13 | -------------------------------------------------------------------------------- /repositories/migrations/20240325143700_introduce_partners.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE TABLE IF NOT EXISTS 4 | partners ( 5 | id uuid DEFAULT uuid_generate_v4 () PRIMARY KEY, 6 | created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL, 7 | name varchar(255) NOT NULL 8 | ); 9 | 10 | ALTER TABLE api_keys 11 | ADD COLUMN IF NOT EXISTS partner_id uuid REFERENCES partners (id) ON DELETE SET NULL; 12 | 13 | DELETE FROM transfer_mappings 14 | WHERE 15 | TRUE; 16 | 17 | ALTER TABLE transfer_mappings 18 | ADD COLUMN IF NOT EXISTS partner_id uuid NOT NULL REFERENCES partners (id) ON DELETE SET NULL; 19 | 20 | CREATE UNIQUE INDEX IF NOT EXISTS transfer_mappings_client_transfer_id_idx ON transfer_mappings (organization_id, partner_id, client_transfer_id); 21 | 22 | -- +goose StatementEnd 23 | -- +goose Down 24 | -- +goose StatementBegin 25 | ALTER TABLE api_keys 26 | DROP COLUMN partner_id; 27 | 28 | DROP INDEX IF EXISTS transfer_mappings_client_transfer_id_idx; 29 | 30 | ALTER TABLE transfer_mappings 31 | DROP COLUMN partner_id; 32 | 33 | DROP TABLE
partners; 34 | 35 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240419115400_data_model_pivot.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE TABLE IF NOT EXISTS 4 | data_model_pivots ( 5 | id uuid DEFAULT uuid_generate_v4 () PRIMARY KEY, 6 | base_table_id uuid NOT NULL REFERENCES data_model_tables (id) ON DELETE CASCADE, 7 | created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL, 8 | field_id uuid REFERENCES data_model_fields (id) ON DELETE CASCADE, 9 | organization_id uuid NOT NULL REFERENCES organizations (id) ON DELETE CASCADE, 10 | path_link_ids uuid[] NOT NULL DEFAULT ARRAY[]::uuid[] 11 | ); 12 | 13 | CREATE UNIQUE INDEX data_model_pivots_base_table_id_idx ON data_model_pivots (organization_id, base_table_id); 14 | 15 | -- +goose StatementEnd 16 | -- +goose Down 17 | -- +goose StatementBegin 18 | DROP INDEX data_model_pivots_base_table_id_idx; 19 | 20 | DROP TABLE data_model_pivots; 21 | 22 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240423160700_pivot_value_on_decisions.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE decisions 4 | ADD COLUMN pivot_id uuid REFERENCES data_model_pivots (id) ON DELETE SET NULL; 5 | 6 | ALTER TABLE decisions 7 | ADD COLUMN pivot_value text; 8 | 9 | -- +goose StatementEnd 10 | -- +goose Down 11 | -- +goose StatementBegin 12 | ALTER TABLE decisions 13 | DROP COLUMN pivot_id; 14 | 15 | ALTER TABLE decisions 16 | DROP COLUMN pivot_value; 17 | 18 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240425140600_decisions_pivot_index.sql: 
-------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE INDEX decisions_pivot_value_index ON decisions (org_id, pivot_value, created_at DESC); 4 | 5 | -- +goose StatementEnd 6 | -- +goose Down 7 | -- +goose StatementBegin 8 | DROP INDEX decisions_pivot_value_index; 9 | 10 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240502162700_partner_id_on_users.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE users 4 | ADD COLUMN partner_id UUID REFERENCES partners (id) ON DELETE CASCADE; 5 | 6 | -- +goose StatementEnd 7 | -- +goose Down 8 | -- +goose StatementBegin 9 | ALTER TABLE users 10 | DROP COLUMN partner_id; 11 | 12 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240507140800_scenario_workflow_type.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE scenarios 4 | ADD COLUMN decision_to_case_workflow_type VARCHAR(255) NOT NULL DEFAULT 'DISABLED'; 5 | 6 | UPDATE scenarios 7 | SET 8 | decision_to_case_workflow_type = 'CREATE_CASE' 9 | WHERE 10 | decision_to_case_inbox_id IS NOT NULL; 11 | 12 | -- +goose StatementEnd 13 | -- +goose Down 14 | -- +goose StatementBegin 15 | ALTER TABLE scenarios 16 | DROP COLUMN decision_to_case_workflow_type; 17 | 18 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240521171800_add_to_case_workflow_indexes.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE INDEX decisions_add_to_case_idx ON decisions (org_id, pivot_value, 
case_id) 4 | WHERE 5 | pivot_value IS NOT NULL 6 | AND case_id IS NOT NULL; 7 | 8 | CREATE INDEX cases_add_to_case_workflow_idx ON cases (org_id, inbox_id, id) 9 | WHERE 10 | status IN ('open', 'investigating'); 11 | 12 | DROP INDEX decisions_case_id_idx; 13 | 14 | CREATE INDEX decisions_case_id_idx ON decisions (org_id, case_id) INCLUDE (pivot_value); 15 | 16 | -- +goose StatementEnd 17 | -- +goose Down 18 | -- +goose StatementBegin 19 | DROP INDEX decisions_case_id_idx; 20 | 21 | DROP INDEX cases_add_to_case_workflow_idx; 22 | 23 | DROP INDEX decisions_add_to_case_idx; 24 | 25 | CREATE INDEX decisions_case_id_idx ON decisions (org_id, case_id); 26 | 27 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240607115200_add_indexes_for_iteration_deletion.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE INDEX decisions_scenario_iteration_id_idx ON decisions (scenario_iteration_id); 4 | 5 | CREATE INDEX decision_rules_rule_id_idx ON decision_rules (rule_id); 6 | 7 | -- +goose StatementEnd 8 | -- +goose Down 9 | -- +goose StatementBegin 10 | DROP INDEX decisions_scenario_iteration_id_idx; 11 | 12 | DROP INDEX decision_rules_rule_id_idx; 13 | 14 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240607153000_field_rule_group_on_rules.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE scenario_iteration_rules 4 | ADD COLUMN rule_group VARCHAR(255) NOT NULL DEFAULT ''; 5 | 6 | -- +goose StatementEnd 7 | -- +goose Down 8 | -- +goose StatementBegin 9 | ALTER TABLE scenario_iteration_rules 10 | DROP COLUMN rule_group; 11 | 12 | -- +goose StatementEnd 
-------------------------------------------------------------------------------- /repositories/migrations/20240620154045_introduce_licenses.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE TABLE IF NOT EXISTS 4 | licenses ( 5 | id uuid DEFAULT uuid_generate_v4 (), 6 | key VARCHAR NOT NULL, 7 | created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL, 8 | suspended_at TIMESTAMP WITH TIME ZONE, 9 | expiration_date TIMESTAMP WITH TIME ZONE NOT NULL, 10 | name VARCHAR NOT NULL, 11 | description VARCHAR NOT NULL, 12 | sso_entitlement BOOLEAN NOT NULL, 13 | workflows_entitlement BOOLEAN NOT NULL, 14 | analytics_entitlement BOOLEAN NOT NULL, 15 | data_enrichment BOOLEAN NOT NULL, 16 | user_roles BOOLEAN NOT NULL, 17 | PRIMARY KEY(id) 18 | ); 19 | 20 | CREATE UNIQUE INDEX IF NOT EXISTS idx_key ON licenses(key); 21 | 22 | -- +goose StatementEnd 23 | -- +goose Down 24 | -- +goose StatementBegin 25 | 26 | DROP INDEX IF EXISTS idx_key; 27 | 28 | DROP TABLE licenses; 29 | 30 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240627115100_partner_bic.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE partners 4 | ADD COLUMN bic VARCHAR NOT NULL DEFAULT ''; 5 | 6 | CREATE INDEX partners_bic_idx ON partners (UPPER(bic)); 7 | 8 | -- +goose StatementEnd 9 | -- +goose Down 10 | -- +goose StatementBegin 11 | ALTER TABLE partners 12 | DROP COLUMN bic; 13 | 14 | DROP INDEX partners_bic_idx; 15 | 16 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240717170435_webhooks_events_rename_retry_count.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER 
TABLE webhook_events 4 | RENAME COLUMN send_attempt_count TO retry_count; 5 | 6 | -- +goose StatementEnd 7 | -- +goose Down 8 | -- +goose StatementBegin 9 | ALTER TABLE webhook_events 10 | RENAME COLUMN retry_count TO send_attempt_count; 11 | 12 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240725140200_licences_webhooks_entitlement.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE licenses 4 | ADD COLUMN webhooks BOOL NOT NULL DEFAULT FALSE; 5 | 6 | -- +goose StatementEnd 7 | -- +goose Down 8 | ALTER TABLE licenses 9 | DROP COLUMN webhooks; 10 | 11 | -- +goose StatementBegin 12 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240806171200_rule_execution_outcome.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE decision_rules 4 | ADD COLUMN outcome VARCHAR(10) NOT NULL DEFAULT ''; 5 | 6 | -- +goose StatementEnd 7 | -- +goose Down 8 | ALTER TABLE decision_rules 9 | DROP COLUMN outcome; 10 | 11 | -- +goose StatementBegin 12 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240806173300_licences_rule_snoozes.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE licenses 4 | ADD COLUMN rule_snoozes BOOL NOT NULL DEFAULT FALSE; 5 | 6 | -- +goose StatementEnd 7 | -- +goose Down 8 | ALTER TABLE licenses 9 | DROP COLUMN rule_snoozes; 10 | 11 | -- +goose StatementBegin 12 | -- +goose StatementEnd -------------------------------------------------------------------------------- 
/repositories/migrations/20240808165800_rule_snoozes_store_decision_id.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE rule_snoozes 4 | ADD COLUMN created_from_decision_id UUID REFERENCES decisions (id) ON DELETE SET NULL; 5 | 6 | -- +goose StatementEnd 7 | -- +goose Down 8 | ALTER TABLE rule_snoozes 9 | DROP COLUMN created_from_decision_id; 10 | 11 | -- +goose StatementBegin 12 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240812112700_rule_snoozes_store_rule_id.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | TRUNCATE rule_snoozes; 4 | 5 | ALTER TABLE rule_snoozes 6 | ADD COLUMN created_from_rule_id UUID NOT NULL REFERENCES scenario_iteration_rules (id) ON DELETE CASCADE; 7 | 8 | -- +goose StatementEnd 9 | -- +goose Down 10 | ALTER TABLE rule_snoozes 11 | DROP COLUMN created_from_rule_id; 12 | 13 | -- +goose StatementBegin 14 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240829121800_upload_log_rows_ingested.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE upload_logs 4 | ADD COLUMN num_rows_ingested INTEGER NOT NULL DEFAULT 0; 5 | 6 | UPDATE upload_logs 7 | SET 8 | num_rows_ingested = lines_processed 9 | WHERE 10 | status = 'success'; 11 | 12 | -- +goose StatementEnd 13 | -- +goose Down 14 | ALTER TABLE upload_logs 15 | DROP COLUMN num_rows_ingested; 16 | 17 | -- +goose StatementBegin 18 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240909134500_new_outcome_block_and_review.sql: 
-------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE scenario_iterations 4 | ADD COLUMN score_block_and_review_threshold INT2; 5 | 6 | UPDATE scenario_iterations 7 | SET 8 | score_block_and_review_threshold = score_reject_threshold 9 | WHERE 10 | score_block_and_review_threshold IS NULL; 11 | 12 | ALTER TYPE decision_outcome 13 | RENAME VALUE 'null' TO 'block_and_review'; 14 | 15 | -- +goose StatementEnd 16 | -- +goose Down 17 | ALTER TABLE scenario_iterations 18 | DROP COLUMN score_block_and_review_threshold; 19 | 20 | ALTER TYPE decision_outcome 21 | RENAME VALUE 'block_and_review' TO 'null'; 22 | 23 | -- +goose StatementBegin 24 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240909164400_workflows_migrate_new_outcome.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | UPDATE scenarios 4 | SET 5 | decision_to_case_outcomes = array_append(decision_to_case_outcomes, 'block_and_review') 6 | WHERE 7 | 'review' = ANY (decision_to_case_outcomes); 8 | 9 | -- +goose StatementEnd 10 | -- +goose Down 11 | UPDATE scenarios 12 | SET 13 | decision_to_case_outcomes = array_remove(decision_to_case_outcomes, 'block_and_review') 14 | WHERE 15 | 'block_and_review' = ANY (decision_to_case_outcomes); 16 | 17 | -- +goose StatementBegin 18 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240909165500_decisions_review_status.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE decisions 4 | ADD COLUMN review_status VARCHAR(10); 5 | 6 | -- +goose StatementEnd 7 | -- +goose Down 8 | ALTER TABLE decisions 9 | DROP COLUMN review_status; 10 | 11 | -- +goose 
StatementBegin 12 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240909173500_decisions_drop_error_code.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE decisions 4 | ALTER COLUMN error_code 5 | DROP NOT NULL; 6 | 7 | -- +goose StatementEnd 8 | -- +goose Down 9 | ALTER TABLE decisions 10 | ALTER COLUMN error_code 11 | SET NOT NULL; 12 | 13 | -- +goose StatementBegin 14 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240910221600_decision_rules_org_id_idx.sql: -------------------------------------------------------------------------------- 1 | -- +goose NO TRANSACTION 2 | -- +goose Up 3 | CREATE INDEX CONCURRENTLY IF NOT EXISTS decision_rules_org_id_idx ON decision_rules (org_id); 4 | 5 | CREATE INDEX CONCURRENTLY IF NOT EXISTS decision_pivot_id_idx ON decisions (pivot_id); 6 | 7 | -- +goose Down 8 | DROP INDEX decision_rules_org_id_idx; 9 | 10 | DROP INDEX decision_pivot_id_idx; 11 | 12 | -- +goose StatementBegin 13 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240923141200_remove_batch_trigger_sql.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE scenario_iterations 4 | DROP COLUMN batch_trigger_sql; 5 | 6 | -- +goose StatementEnd 7 | -- +goose Down 8 | -- +goose StatementBegin 9 | ALTER TABLE scenario_iterations 10 | ADD COLUMN batch_trigger_sql VARCHAR DEFAULT ''; 11 | 12 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20240925120000_decision_rules_toast_setting.sql: 
-------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE decision_rules 4 | SET 5 | (TOAST_TUPLE_TARGET = 128); 6 | 7 | ALTER TABLE decision_rules 8 | ALTER COLUMN name 9 | DROP NOT NULL; 10 | 11 | ALTER TABLE decision_rules 12 | ALTER COLUMN description 13 | DROP NOT NULL; 14 | 15 | -- +goose StatementEnd 16 | -- +goose Down 17 | -- +goose StatementBegin 18 | ALTER TABLE decision_rules 19 | SET 20 | (TOAST_TUPLE_TARGET = 2048); 21 | 22 | ALTER TABLE decision_rules 23 | ALTER COLUMN name 24 | SET NOT NULL; 25 | 26 | ALTER TABLE decision_rules 27 | ALTER COLUMN description 28 | SET NOT NULL; 29 | 30 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20241002223100_unique_batch_per_scenario.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE INDEX unique_scheduled_per_scenario_idx ON scheduled_executions (scenario_id) 4 | WHERE 5 | (status IN ('pending', 'processing')); 6 | 7 | -- +goose StatementEnd 8 | -- +goose Down 9 | -- +goose StatementBegin 10 | DROP INDEX unique_scheduled_per_scenario_idx; 11 | 12 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20241018170100_index_for_async_decision_creation.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE INDEX decisions_to_create_query_pending_idx ON decisions_to_create (scheduled_execution_id) 4 | WHERE 5 | status IN ('pending', 'failed'); 6 | 7 | -- +goose StatementEnd 8 | -- +goose Down 9 | -- +goose StatementBegin 10 | DROP INDEX decisions_to_create_query_pending_idx; 11 | 12 | -- +goose StatementEnd -------------------------------------------------------------------------------- 
/repositories/migrations/20241104103800_drop_constraint_enum_values.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE data_model_enum_values 4 | DROP CONSTRAINT IF EXISTS data_model_enum_values_field_id_fkey; 5 | 6 | ALTER TABLE data_model_enum_values 7 | DROP COLUMN last_seen; 8 | 9 | -- +goose StatementEnd 10 | -- +goose Down 11 | -- +goose StatementBegin 12 | ALTER TABLE data_model_enum_values 13 | ADD CONSTRAINT data_model_enum_values_field_id_fkey FOREIGN KEY (field_id) REFERENCES data_model_fields (id) ON DELETE CASCADE; 14 | 15 | ALTER TABLE data_model_enum_values 16 | ADD COLUMN last_seen TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(); 17 | 18 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20241105131500_add_use_marble_db_schema_as_default.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE organizations 4 | ADD COLUMN IF NOT EXISTS use_marble_db_schema_as_default BOOLEAN NOT NULL DEFAULT FALSE; 5 | 6 | -- +goose StatementEnd 7 | -- +goose Down 8 | -- +goose StatementBegin 9 | ALTER TABLE organizations 10 | DROP COLUMN use_marble_db_schema_as_default; 11 | 12 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20241105143100_drop_analytics_schema.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | DROP SCHEMA IF EXISTS analytics CASCADE; 4 | 5 | -- +goose StatementEnd 6 | -- +goose Down 7 | -- +goose StatementBegin 8 | CREATE SCHEMA IF NOT EXISTS analytics; 9 | 10 | do $$ 11 | begin 12 | execute 'GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA analytics TO ' || current_user; 13 | end 14 | $$; 15 | 16 | -- +goose StatementEnd 
-------------------------------------------------------------------------------- /repositories/migrations/20241105151948_decision_phantom.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE TABLE 4 | phantom_decisions ( 5 | id uuid DEFAULT uuid_generate_v4 (), 6 | org_id uuid NOT NULL, 7 | created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), 8 | outcome decision_outcome NOT NULL, 9 | scenario_id uuid NOT NULL, 10 | score INT NOT NULL, 11 | scenario_iteration_id uuid NOT NULL, 12 | PRIMARY KEY (id), 13 | CONSTRAINT fk_phantom_decisions_org FOREIGN KEY (org_id) REFERENCES organizations (id) ON DELETE CASCADE, 14 | CONSTRAINT fk_phantom_decisions_scenario_ite_id FOREIGN KEY (scenario_iteration_id) REFERENCES scenario_iterations (id) ON DELETE CASCADE 15 | ); 16 | 17 | -- +goose StatementEnd 18 | -- +goose Down 19 | -- +goose StatementBegin 20 | DROP TABLE phantom_decisions; 21 | 22 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20241105152000_scenario_testrun.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE TABLE 4 | scenario_test_run ( 5 | id uuid DEFAULT uuid_generate_v4 (), 6 | scenario_iteration_id uuid NOT NULL, 7 | live_scenario_iteration_id uuid NOT NULL, 8 | created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), 9 | expires_at TIMESTAMP WITH TIME ZONE NOT NULL, 10 | status VARCHAR NOT NULL, 11 | PRIMARY KEY (id), 12 | CONSTRAINT fk_scenario_publications_scenario_iterations FOREIGN KEY (scenario_iteration_id) REFERENCES scenario_iterations (id) ON DELETE CASCADE 13 | ); 14 | 15 | -- +goose StatementEnd 16 | -- +goose Down 17 | -- +goose StatementBegin 18 | DROP TABLE scenario_test_run; 19 | 20 | -- +goose StatementEnd 
-------------------------------------------------------------------------------- /repositories/migrations/20241105152100_decision_phantom_continued.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE phantom_decisions 4 | ADD COLUMN scenario_version INT NOT NULL; 5 | 6 | ALTER TABLE phantom_decisions 7 | ADD COLUMN test_run_id uuid NOT NULL REFERENCES scenario_test_run (id) ON DELETE CASCADE; 8 | 9 | -- +goose StatementEnd 10 | -- +goose Down 11 | -- +goose StatementBegin 12 | ALTER TABLE phantom_decisions 13 | DROP COLUMN scenario_version; 14 | 15 | ALTER TABLE phantom_decisions 16 | DROP COLUMN test_run_id; 17 | 18 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20241203135200_scenario_rules_stable_id.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | ALTER TABLE scenario_iteration_rules 4 | ADD COLUMN stable_rule_id uuid; 5 | 6 | -- +goose StatementEnd 7 | -- +goose Down 8 | -- +goose StatementBegin 9 | ALTER TABLE scenario_iteration_rules 10 | DROP COLUMN stable_rule_id; 11 | 12 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20241204215200_phantom_decisions_index.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | CREATE INDEX phantom_decisions_org_idx ON phantom_decisions (org_id, created_at DESC); 4 | 5 | -- +goose StatementEnd 6 | -- +goose Down 7 | -- +goose StatementBegin 8 | DROP INDEX phantom_decisions_org_idx; 9 | 10 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20241209154100_org_default_timezone.sql: 
--------------------------------------------------------------------------------
-- +goose Up
-- +goose StatementBegin
ALTER TABLE organizations
ADD COLUMN default_scenario_timezone text;

ALTER TABLE organizations
DROP COLUMN export_scheduled_execution_s3;

-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
ALTER TABLE organizations
DROP COLUMN default_scenario_timezone;

ALTER TABLE organizations
ADD COLUMN export_scheduled_execution_s3 VARCHAR DEFAULT '';

-- +goose StatementEnd
--------------------------------------------------------------------------------
/repositories/migrations/20241210134900_licences_test_run.sql:
--------------------------------------------------------------------------------
-- +goose Up
-- +goose StatementBegin
ALTER TABLE licenses
ADD COLUMN test_run BOOL NOT NULL DEFAULT FALSE;

-- +goose StatementEnd
-- +goose Down
-- Note: the StatementBegin/StatementEnd pair previously appeared, empty, AFTER
-- the statement below, leaving the down migration outside any statement block;
-- the directives must wrap the statement they delimit.
-- +goose StatementBegin
ALTER TABLE licenses
DROP COLUMN test_run;

-- +goose StatementEnd
--------------------------------------------------------------------------------
/repositories/migrations/20250102151657_case_name_template.sql:
--------------------------------------------------------------------------------
-- +goose Up
-- +goose StatementBegin
ALTER TABLE scenarios
ADD COLUMN decision_to_case_name_template JSON;
-- +goose StatementEnd

-- +goose Down
-- +goose StatementBegin
ALTER TABLE scenarios
DROP COLUMN decision_to_case_name_template;
-- +goose StatementEnd

--------------------------------------------------------------------------------
/repositories/migrations/20250117095221_fuzzy_match_on_case_name.sql:
--------------------------------------------------------------------------------
-- +goose NO TRANSACTION
-- +goose Up

CREATE EXTENSION IF NOT EXISTS pg_trgm;

-- +goose StatementBegin

DO $$
9 | BEGIN 10 | EXECUTE format('ALTER ROLE %I SET pg_trgm.similarity_threshold = 0.1', current_user); 11 | END 12 | $$; 13 | 14 | -- +goose StatementEnd 15 | 16 | CREATE INDEX CONCURRENTLY trgm_cases_on_name ON cases USING GIN (name gin_trgm_ops); 17 | 18 | CREATE INDEX CONCURRENTLY case_org_id_idx_2 ON cases (org_id, created_at DESC) INCLUDE (inbox_id, status, name); 19 | 20 | DROP INDEX CONCURRENTLY IF EXISTS case_org_id_idx; 21 | 22 | DROP INDEX CONCURRENTLY IF EXISTS case_status_idx; 23 | 24 | -- +goose Down 25 | CREATE INDEX CONCURRENTLY case_status_idx ON cases (org_id, status, created_at DESC); 26 | 27 | CREATE INDEX CONCURRENTLY case_org_id_idx ON cases (org_id, created_at DESC); 28 | 29 | DROP INDEX CONCURRENTLY IF EXISTS case_org_id_idx_2; 30 | 31 | DROP INDEX CONCURRENTLY IF EXISTS trgm_cases_on_name; 32 | -------------------------------------------------------------------------------- /repositories/migrations/20250217152700_drop_sanction_check_score_modif.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | alter table sanction_check_configs 4 | drop column score_modifier; 5 | 6 | update sanction_check_configs 7 | set 8 | forced_outcome = 'review' 9 | where 10 | forced_outcome is null; 11 | 12 | alter table sanction_check_configs 13 | alter column forced_outcome 14 | set not null; 15 | 16 | -- +goose StatementEnd 17 | -- +goose Down 18 | -- +goose StatementBegin 19 | alter table sanction_check_configs 20 | add column score_modifier integer default 0; 21 | 22 | alter table sanction_check_configs 23 | alter column forced_outcome 24 | drop not null; 25 | 26 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20250218103800_add_sanction_check_error_codes.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | 3 | alter table sanction_checks 4 | add 
column error_codes text[]; 5 | 6 | -- +goose Down 7 | 8 | alter table sanction_checks 9 | drop column error_codes; -------------------------------------------------------------------------------- /repositories/migrations/20250226112700_add_sanction_check_match_enriched.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | 3 | alter table sanction_check_matches 4 | add column enriched bool default false; 5 | 6 | -- +goose Down 7 | 8 | alter table sanction_check_matches 9 | drop column enriched; 10 | -------------------------------------------------------------------------------- /repositories/migrations/20250303102000_create_test_run_summaries.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | 4 | create table scenario_test_run_summaries ( 5 | id uuid default gen_random_uuid(), 6 | test_run_id uuid not null, 7 | version int not null, 8 | rule_stable_id text, 9 | rule_name text, 10 | watermark timestamp with time zone not null, 11 | outcome text not null, 12 | total int not null default 0, 13 | 14 | primary key (id), 15 | 16 | constraint fk_scenario_test_run 17 | foreign key (test_run_id) 18 | references scenario_test_run (id), 19 | 20 | unique nulls not distinct (test_run_id, version, rule_stable_id, outcome) 21 | ); 22 | 23 | alter table scenario_test_run 24 | add column summarized bool not null default false; 25 | 26 | create index idx_scenario_test_summaries_test_run on scenario_test_run_summaries (test_run_id); 27 | 28 | -- +goose StatementEnd 29 | 30 | -- +goose Down 31 | -- +goose StatementBegin 32 | 33 | drop table scenario_test_run_summaries; 34 | 35 | alter table scenario_test_run 36 | drop column summarized; 37 | 38 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20250303102400_add_sanction_check_stable_id.sql: 
-------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | 4 | alter table sanction_check_configs 5 | add column stable_id uuid null; 6 | 7 | update sanction_check_configs scc 8 | set stable_id = sci.scenario_id 9 | from scenario_iterations sci 10 | where 11 | scc.scenario_iteration_id = sci.id and 12 | stable_id is null; 13 | 14 | alter table sanction_check_configs 15 | alter column stable_id set not null; 16 | 17 | -- +goose StatementEnd 18 | 19 | -- +goose Down 20 | 21 | alter table sanction_check_configs 22 | drop column stable_id; -------------------------------------------------------------------------------- /repositories/migrations/20250306160800_add_updated_time_to_test_runs.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | 4 | alter table scenario_test_run 5 | add column updated_at timestamp with time zone null default now(); 6 | 7 | update scenario_test_run 8 | set updated_at = now(); 9 | 10 | alter table scenario_test_run 11 | alter column updated_at set not null; 12 | 13 | -- +goose StatementEnd 14 | 15 | -- +goose Down 16 | 17 | alter table scenario_test_run 18 | drop column updated_at; -------------------------------------------------------------------------------- /repositories/migrations/20250307145800_add_case_snoozes.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | 3 | alter table cases 4 | add column snoozed_until timestamp with time zone null; 5 | 6 | -- +goose Down 7 | 8 | alter table cases 9 | drop column snoozed_until; -------------------------------------------------------------------------------- /repositories/migrations/20250310143900_create_case_assignment.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | 3 | alter table cases 4 | add column assigned_to uuid null 
default null, 5 | add constraint fk_assigned_to_user 6 | foreign key (assigned_to) references users (id) 7 | on delete set null; 8 | 9 | -- +goose Down 10 | 11 | alter table cases 12 | drop column assigned_to; -------------------------------------------------------------------------------- /repositories/migrations/20250317155200_add_index_on_whitelist_entity_id.sql: -------------------------------------------------------------------------------- 1 | -- +goose NO TRANSACTION 2 | -- +goose Up 3 | 4 | create index concurrently idx_sanction_check_whitelists_entity_id on sanction_check_whitelists (org_id, entity_id); 5 | 6 | -- +goose Down 7 | 8 | drop index idx_sanction_check_whitelists_entity_id; -------------------------------------------------------------------------------- /repositories/migrations/20250319174700_remove_whitelist_user_non_null.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | 3 | alter table sanction_check_whitelists 4 | alter column whitelisted_by drop not null; 5 | 6 | -- +goose Down 7 | 8 | alter table sanction_check_whitelists 9 | alter column whitelisted_by set not null; 10 | -------------------------------------------------------------------------------- /repositories/migrations/20250321221500_set_snooze_user_nullable.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | 3 | alter table rule_snoozes 4 | alter column created_by_user drop not null; 5 | 6 | -- +goose Down 7 | 8 | alter table rule_snoozes 9 | alter column created_by_user set not null; -------------------------------------------------------------------------------- /repositories/migrations/20250325084800_create_entity_annotations.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | 3 | create table entity_annotations ( 4 | id uuid default uuid_generate_v4(), 5 | org_id uuid not null, 6 | object_type text not null, 7 | 
object_id text not null, 8 | case_id uuid, 9 | annotation_type text not null, 10 | payload jsonb not null, 11 | annotated_by uuid, 12 | created_at timestamp with time zone not null default now(), 13 | deleted_at timestamp with time zone default null, 14 | 15 | primary key (id), 16 | foreign key (org_id) references organizations (id), 17 | foreign key (case_id) references cases (id) on delete set null, 18 | foreign key (annotated_by) references users (id) on delete set null 19 | ); 20 | 21 | create index idx_entity_annotations 22 | on entity_annotations (org_id, object_type, object_id, annotation_type) 23 | where deleted_at is null; 24 | 25 | create index idx_entity_annotations_case_id 26 | on entity_annotations (org_id, case_id) 27 | where deleted_at is null; 28 | 29 | -- +goose Down 30 | 31 | drop table entity_annotations; -------------------------------------------------------------------------------- /repositories/migrations/20250416141100_create_tag_types.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | 3 | alter table tags 4 | add column target text not null default 'case', 5 | add constraint tag_kind_check check (target in ('case', 'object')); 6 | 7 | -- +goose Down 8 | 9 | alter table tags 10 | drop column target; 11 | -------------------------------------------------------------------------------- /repositories/migrations/20250424120000_add_fuzzystrmatch_extension.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | 3 | CREATE EXTENSION if not exists fuzzystrmatch SCHEMA public; 4 | 5 | -- +goose Down 6 | -------------------------------------------------------------------------------- /repositories/migrations/20250428080100_add_suspicious_activity_reports.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | 3 | create table suspicious_activity_reports ( 4 | id uuid primary key default 
gen_random_uuid(), 5 | report_id uuid not null default gen_random_uuid(), 6 | case_id uuid not null, 7 | status text not null check (status in ('pending', 'completed')), 8 | bucket text, 9 | blob_key text, 10 | created_by uuid not null, 11 | uploaded_by uuid, 12 | created_at timestamp with time zone not null default now(), 13 | deleted_at timestamp with time zone, 14 | 15 | constraint fk_case_id foreign key (case_id) references cases (id) on delete cascade, 16 | constraint fk_created_by foreign key (created_by) references users (id), 17 | constraint fk_uploaded_by foreign key (uploaded_by) references users (id) 18 | ); 19 | 20 | create unique index idx_live_suspicious_activity_reports on suspicious_activity_reports (case_id, report_id) where (deleted_at is null); 21 | 22 | -- +goose Down 23 | 24 | drop table suspicious_activity_reports; -------------------------------------------------------------------------------- /repositories/migrations/20250428080200_add_case_waiting_for_action.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | 3 | alter table cases 4 | add column boost text default null; 5 | 6 | create index idx_cases_add_to_case_workflow 7 | on cases (org_id, inbox_id, id) 8 | where (status in ('pending', 'investigating')); 9 | 10 | drop index cases_add_to_case_workflow_idx; 11 | 12 | create index idx_inbox_cases 13 | on cases (org_id, inbox_id, (boost is null), created_at desc, id desc); 14 | 15 | -- +goose Down 16 | 17 | create index cases_add_to_case_workflow_idx 18 | on cases (org_id, inbox_id, id) 19 | where (status IN ('open', 'investigating')); 20 | 21 | drop index idx_inbox_cases; 22 | drop index idx_cases_add_to_case_workflow; 23 | 24 | alter table cases 25 | drop column boost; -------------------------------------------------------------------------------- /repositories/migrations/20250428080300_add_inbox_escalation.sql: 
-------------------------------------------------------------------------------- 1 | -- +goose Up 2 | 3 | alter table inboxes 4 | add column escalation_inbox_id uuid, 5 | add constraint fk_escalation_inbox_id 6 | foreign key (escalation_inbox_id) references inboxes (id) 7 | on delete set null; 8 | 9 | -- +goose Down 10 | 11 | alter table inboxes 12 | drop column escalation_inbox_id; -------------------------------------------------------------------------------- /repositories/migrations/20250428083000_create_data_model_table_options.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | 4 | create table data_model_options ( 5 | id uuid default gen_random_uuid(), 6 | table_id uuid not null unique, 7 | displayed_fields uuid[] not null default '{}', 8 | field_order uuid[] not null default '{}', 9 | 10 | primary key (id), 11 | constraint fk_data_model_table 12 | foreign key (table_id) references data_model_tables (id) 13 | on delete cascade 14 | ); 15 | 16 | -- +goose StatementEnd 17 | 18 | -- +goose Down 19 | -- +goose StatementBegin 20 | 21 | drop table data_model_options; 22 | 23 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20250506113500_remove_snooze_groups_cascade.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | 4 | alter table scenario_iteration_rules 5 | drop constraint scenario_iteration_rules_snooze_group_id_fkey, 6 | add constraint scenario_iteration_rules_snooze_group_id_fkey 7 | foreign key (scenario_iteration_id) references scenario_iterations 8 | on delete set null; 9 | 10 | -- +goose StatementEnd 11 | 12 | -- +goose Down 13 | 14 | -- Omited because we really don't want this. 
-------------------------------------------------------------------------------- /repositories/migrations/20250512144400_change_case_index.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | 4 | alter index idx_inbox_cases rename to idx_inbox_cases_2; 5 | 6 | create index idx_inbox_cases 7 | on cases (org_id, inbox_id, (boost is null), (assigned_to is not null), created_at desc, id desc); 8 | 9 | drop index idx_inbox_cases_2; 10 | 11 | -- +goose StatementEnd 12 | 13 | -- +goose Down 14 | -- +goose StatementBegin 15 | 16 | alter index idx_inbox_cases rename to idx_inbox_cases_2; 17 | 18 | create index idx_inbox_cases 19 | on cases (org_id, inbox_id, (boost is null), created_at desc, id desc); 20 | 21 | drop index idx_inbox_cases_2; 22 | 23 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20250516140900_decision_rules_offloading.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | 4 | create table offloading_watermarks ( 5 | org_id uuid, 6 | table_name text, 7 | watermark_time timestamp with time zone not null, 8 | watermark_id uuid not null, 9 | created_at timestamp with time zone, 10 | updated_at timestamp with time zone, 11 | 12 | primary key (org_id, table_name), 13 | constraint fk_org_id 14 | foreign key (org_id) references organizations (id) 15 | on delete cascade 16 | ); 17 | 18 | -- +goose StatementEnd 19 | 20 | -- +goose Down 21 | -- +goose StatementBegin 22 | 23 | drop table offloading_watermarks; 24 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/migrations/20250604153400_user_ai_assist_setting.sql: -------------------------------------------------------------------------------- 1 | -- +goose Up 2 | -- +goose StatementBegin 3 | 4 | ALTER 
TABLE users ADD COLUMN ai_assist_enabled BOOLEAN NOT NULL DEFAULT FALSE; 5 | -- +goose StatementEnd 6 | 7 | -- +goose Down 8 | -- +goose StatementBegin 9 | 10 | ALTER TABLE 11 | users DROP COLUMN ai_assist_enabled; 12 | 13 | -- +goose StatementEnd -------------------------------------------------------------------------------- /repositories/postgres/api_keys.go: -------------------------------------------------------------------------------- 1 | package postgres 2 | 3 | import ( 4 | "context" 5 | "errors" 6 | "fmt" 7 | 8 | "github.com/jackc/pgx/v5" 9 | 10 | "github.com/checkmarble/marble-backend/models" 11 | "github.com/checkmarble/marble-backend/repositories/dbmodels" 12 | ) 13 | 14 | func (db *Database) GetApiKeyByHash(ctx context.Context, hash []byte) (models.ApiKey, error) { 15 | query := ` 16 | SELECT id, org_id, prefix, description, partner_id, role 17 | FROM api_keys 18 | WHERE key_hash = $1 19 | AND deleted_at IS NULL 20 | ` 21 | 22 | var apiKey dbmodels.DBApiKey 23 | err := db.pool.QueryRow(ctx, query, hash).Scan( 24 | &apiKey.Id, 25 | &apiKey.OrganizationId, 26 | &apiKey.Prefix, 27 | &apiKey.Description, 28 | &apiKey.PartnerId, 29 | &apiKey.Role, 30 | ) 31 | if errors.Is(err, pgx.ErrNoRows) { 32 | return models.ApiKey{}, models.NotFoundError 33 | } 34 | if err != nil { 35 | return models.ApiKey{}, fmt.Errorf("pool.QueryRow error: %w", err) 36 | } 37 | return dbmodels.AdaptApikey(apiKey) 38 | } 39 | -------------------------------------------------------------------------------- /repositories/postgres/organizations.go: -------------------------------------------------------------------------------- 1 | package postgres 2 | 3 | import ( 4 | "context" 5 | "errors" 6 | "fmt" 7 | 8 | "github.com/jackc/pgx/v5" 9 | 10 | "github.com/checkmarble/marble-backend/models" 11 | ) 12 | 13 | func (db *Database) GetOrganizationByID(ctx context.Context, organizationID string) (models.Organization, error) { 14 | query := ` 15 | SELECT id, name 16 | FROM organizations 17 | 
WHERE id = $1 18 | ` 19 | 20 | var organization models.Organization 21 | err := db.pool.QueryRow(ctx, query, organizationID).Scan( 22 | &organization.Id, 23 | &organization.Name, 24 | ) 25 | if errors.Is(err, pgx.ErrNoRows) { 26 | return models.Organization{}, models.NotFoundError 27 | } 28 | if err != nil { 29 | return models.Organization{}, fmt.Errorf("pool.QueryRow error: %w", err) 30 | } 31 | return organization, nil 32 | } 33 | -------------------------------------------------------------------------------- /repositories/postgres/postgres.go: -------------------------------------------------------------------------------- 1 | package postgres 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | 7 | "github.com/Masterminds/squirrel" 8 | "github.com/jackc/pgx/v5" 9 | "github.com/jackc/pgx/v5/pgxpool" 10 | ) 11 | 12 | type Database struct { 13 | pool *pgxpool.Pool 14 | } 15 | 16 | type Transaction struct { 17 | pgx.Tx 18 | } 19 | 20 | func (tx *Transaction) Rollback(ctx context.Context) { 21 | _ = tx.Tx.Rollback(ctx) 22 | } 23 | 24 | func (db *Database) Begin(ctx context.Context) (*Transaction, error) { 25 | tx, err := db.pool.Begin(ctx) 26 | if err != nil { 27 | return nil, fmt.Errorf("db.pool.Begin error: %w", err) 28 | } 29 | return &Transaction{ 30 | Tx: tx, 31 | }, nil 32 | } 33 | 34 | func New(pool *pgxpool.Pool) *Database { 35 | return &Database{ 36 | pool: pool, 37 | } 38 | } 39 | 40 | func NewQueryBuilder() squirrel.StatementBuilderType { 41 | return squirrel.StatementBuilder.PlaceholderFormat(squirrel.Dollar) 42 | } 43 | -------------------------------------------------------------------------------- /repositories/table_name_with_schema.go: -------------------------------------------------------------------------------- 1 | package repositories 2 | 3 | import ( 4 | "github.com/jackc/pgx/v5" 5 | ) 6 | 7 | func pgIdentifierWithSchema(exec Executor, tableName string, field ...string) string { 8 | input := []string{exec.DatabaseSchema().Schema, tableName} 9 | if 
len(field) > 0 { 10 | input = append(input, field[0]) 11 | } 12 | return pgx.Identifier.Sanitize(input) 13 | } 14 | -------------------------------------------------------------------------------- /repositories/utils.go: -------------------------------------------------------------------------------- 1 | package repositories 2 | 3 | import ( 4 | "context" 5 | "fmt" 6 | 7 | "github.com/checkmarble/marble-backend/models" 8 | "github.com/checkmarble/marble-backend/pure_utils" 9 | ) 10 | 11 | const postgres_audit_user_id_parameter = "custom.current_user_id" 12 | 13 | func setCurrentUserIdContext(ctx context.Context, exec Executor, userId *models.UserId) error { 14 | if userId != nil { 15 | _, err := exec.Exec( 16 | ctx, 17 | fmt.Sprintf("SELECT SET_CONFIG('%s', $1, false)", postgres_audit_user_id_parameter), 18 | *userId, 19 | ) 20 | return err 21 | } 22 | return nil 23 | } 24 | 25 | func columnsNames(tablename string, fields []string) []string { 26 | return pure_utils.Map(fields, func(f string) string { 27 | return fmt.Sprintf("%s.%s", tablename, f) 28 | }) 29 | } 30 | -------------------------------------------------------------------------------- /specs/readme.md: -------------------------------------------------------------------------------- 1 | Syncronize the openapi spec to the webpage's API documentation with readme: 2 | 3 | ``` 4 | npm install rdme@latest -g 5 | rdme openapi 6 | ``` 7 | 8 | This will require you to : 9 | 10 | 1. login with readme if you have a user/password. 11 | 2. enter the subdomain used (= checkmarble) 12 | 3. select the API definition you want to push 13 | 4. select "Update an existing OAS file" 14 | 5. 
select the desired file to update (**be careful to select the right one**) 15 | 16 | > **⚠️ Ensure you sync the selected OAS with the corresponding API Reference or you will lose any existing manual edits in the process ⚠️** 17 | 18 | For more information on the readme openapi extension, see: 19 | 20 | -------------------------------------------------------------------------------- /tools.go: -------------------------------------------------------------------------------- 1 | //go:build tools 2 | 3 | package main 4 | 5 | import ( 6 | _ "github.com/oapi-codegen/oapi-codegen/v2/cmd/oapi-codegen" 7 | ) 8 | -------------------------------------------------------------------------------- /usecases/analytics_usecase.go: -------------------------------------------------------------------------------- 1 | package usecases 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | ) 8 | 9 | type AnalyticsRepository interface { 10 | ListAnalytics(ctx context.Context, organizationId string) ([]models.Analytics, error) 11 | } 12 | 13 | type EnforceSecurityAnalytics interface { 14 | ReadAnalytics(analytics models.Analytics) error 15 | } 16 | 17 | type AnalyticsUseCase struct { 18 | enforceSecurity EnforceSecurityAnalytics 19 | analyticsRepository AnalyticsRepository 20 | } 21 | 22 | func (usecase *AnalyticsUseCase) ListAnalytics(ctx context.Context, organizationId string) ([]models.Analytics, error) { 23 | analyticsList, err := usecase.analyticsRepository.ListAnalytics(ctx, organizationId) 24 | if err != nil { 25 | return []models.Analytics{}, err 26 | } 27 | for _, analytics := range analyticsList { 28 | if err := usecase.enforceSecurity.ReadAnalytics(analytics); err != nil { 29 | return []models.Analytics{}, err 30 | } 31 | } 32 | return analyticsList, nil 33 | } 34 | -------------------------------------------------------------------------------- /usecases/ast_eval/evaluate/eval_is_empty.go: 
-------------------------------------------------------------------------------- 1 | package evaluate 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/checkmarble/marble-backend/models/ast" 7 | ) 8 | 9 | type IsEmpty struct{} 10 | 11 | func (f IsEmpty) Evaluate(ctx context.Context, arguments ast.Arguments) (any, []error) { 12 | if err := verifyNumberOfArguments(arguments.Args, 1); err != nil { 13 | return MakeEvaluateError(err) 14 | } 15 | if arguments.Args[0] == nil || arguments.Args[0] == "" { 16 | return true, nil 17 | } 18 | 19 | return false, nil 20 | } 21 | 22 | type IsNotEmpty struct{} 23 | 24 | func (f IsNotEmpty) Evaluate(ctx context.Context, arguments ast.Arguments) (any, []error) { 25 | if err := verifyNumberOfArguments(arguments.Args, 1); err != nil { 26 | return MakeEvaluateError(err) 27 | } 28 | if arguments.Args[0] == nil || arguments.Args[0] == "" { 29 | return false, nil 30 | } 31 | 32 | return true, nil 33 | } 34 | -------------------------------------------------------------------------------- /usecases/ast_eval/evaluate/eval_not.go: -------------------------------------------------------------------------------- 1 | package evaluate 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/checkmarble/marble-backend/models/ast" 7 | ) 8 | 9 | type Not struct{} 10 | 11 | func (f Not) Evaluate(ctx context.Context, arguments ast.Arguments) (any, []error) { 12 | if err := verifyNumberOfArguments(arguments.Args, 1); err != nil { 13 | return MakeEvaluateError(err) 14 | } 15 | if arguments.Args[0] == nil { 16 | return nil, nil 17 | } 18 | 19 | v, err := adaptArgumentToBool(arguments.Args[0]) 20 | errs := MakeAdaptedArgsErrors([]error{err}) 21 | if len(errs) > 0 { 22 | return nil, errs 23 | } 24 | 25 | return !v, nil 26 | } 27 | -------------------------------------------------------------------------------- /usecases/ast_eval/evaluate/eval_not_test.go: -------------------------------------------------------------------------------- 1 | package evaluate 2 | 3 
| import ( 4 | "context" 5 | "testing" 6 | 7 | "github.com/checkmarble/marble-backend/models/ast" 8 | 9 | "github.com/stretchr/testify/assert" 10 | ) 11 | 12 | func TestNot_Evaluate_true(t *testing.T) { 13 | result, errs := Not{}.Evaluate(context.TODO(), ast.Arguments{Args: []any{true}}) 14 | assert.Empty(t, errs) 15 | assert.Equal(t, false, result) 16 | } 17 | 18 | func TestNot_Evaluate_false(t *testing.T) { 19 | result, errs := Not{}.Evaluate(context.TODO(), ast.Arguments{Args: []any{false}}) 20 | assert.Empty(t, errs) 21 | assert.Equal(t, true, result) 22 | } 23 | -------------------------------------------------------------------------------- /usecases/ast_eval/evaluate/evaluate_arithmetic_divide.go: -------------------------------------------------------------------------------- 1 | package evaluate 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/cockroachdb/errors" 7 | 8 | "github.com/checkmarble/marble-backend/models/ast" 9 | ) 10 | 11 | type ArithmeticDivide struct{} 12 | 13 | func (f ArithmeticDivide) Evaluate(ctx context.Context, arguments ast.Arguments) (any, []error) { 14 | leftAny, rightAny, err := leftAndRight(arguments.Args) 15 | if err != nil { 16 | return MakeEvaluateError(errors.Wrap(err, "Error in Evaluate function Divide")) 17 | } 18 | if leftAny == nil || rightAny == nil { 19 | return nil, nil 20 | } 21 | 22 | // promote to float64 23 | left, right, errs := adaptLeftAndRight(leftAny, rightAny, promoteArgumentToFloat64) 24 | if len(errs) > 0 { 25 | return nil, errs 26 | } 27 | 28 | if right == 0.0 { 29 | return MakeEvaluateError(errors.Wrap(ast.ErrDivisionByZero, 30 | "Zero division error in Evaluate function Divide")) 31 | } 32 | 33 | return MakeEvaluateResult(left / right) 34 | } 35 | -------------------------------------------------------------------------------- /usecases/ast_eval/evaluate/evaluate_arithmetic_divide_test.go: -------------------------------------------------------------------------------- 1 | package evaluate 2 | 3 | 
import ( 4 | "context" 5 | "testing" 6 | 7 | "github.com/checkmarble/marble-backend/models/ast" 8 | 9 | "github.com/stretchr/testify/assert" 10 | ) 11 | 12 | const TEN_DIVIDE_BY_THREE = float64(3.3333333333333335) 13 | 14 | func TestNewArithmetic_divide_float64(t *testing.T) { 15 | r, errs := ArithmeticDivide{}.Evaluate(context.TODO(), ast.Arguments{Args: []any{10.0, 3}}) 16 | assert.Empty(t, errs) 17 | assert.Equal(t, r, TEN_DIVIDE_BY_THREE) 18 | } 19 | 20 | func TestNewArithmetic_divide_int(t *testing.T) { 21 | // check that no integer division is performed 22 | r, errs := ArithmeticDivide{}.Evaluate(context.TODO(), ast.Arguments{Args: []any{10, 3}}) 23 | assert.Empty(t, errs) 24 | assert.Equal(t, r, TEN_DIVIDE_BY_THREE) 25 | } 26 | 27 | func TestNewArithmeticFunction_float_divide_by_zero(t *testing.T) { 28 | _, errs := ArithmeticDivide{}.Evaluate(context.TODO(), ast.Arguments{Args: []any{1.0, 0.0}}) 29 | if assert.Len(t, errs, 1) { 30 | assert.ErrorIs(t, errs[0], ast.ErrDivisionByZero) 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /usecases/ast_eval/evaluate/evaluate_list.go: -------------------------------------------------------------------------------- 1 | package evaluate 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/checkmarble/marble-backend/models/ast" 7 | ) 8 | 9 | type List struct{} 10 | 11 | func (l List) Evaluate(ctx context.Context, arguments ast.Arguments) (any, []error) { 12 | return arguments.Args, nil 13 | } 14 | -------------------------------------------------------------------------------- /usecases/ast_eval/evaluate/evaluate_list_test.go: -------------------------------------------------------------------------------- 1 | package evaluate 2 | 3 | import ( 4 | "context" 5 | "testing" 6 | 7 | "github.com/checkmarble/marble-backend/models/ast" 8 | 9 | "github.com/stretchr/testify/assert" 10 | ) 11 | 12 | var list = List{} 13 | 14 | func TestList(t *testing.T) { 15 | arguments := 
ast.Arguments{ 16 | Args: []any{1, 2, 3}, 17 | } 18 | expectedResult := []int{1, 2, 3} 19 | result, errs := list.Evaluate(context.TODO(), arguments) 20 | assert.Empty(t, errs) 21 | assert.ObjectsAreEqualValues(expectedResult, result) 22 | } 23 | -------------------------------------------------------------------------------- /usecases/ast_eval/evaluate/evaluate_read_payload.go: -------------------------------------------------------------------------------- 1 | package evaluate 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/checkmarble/marble-backend/models" 7 | "github.com/checkmarble/marble-backend/models/ast" 8 | "github.com/checkmarble/marble-backend/pure_utils" 9 | ) 10 | 11 | type Payload struct { 12 | Function ast.Function 13 | ClientObject models.ClientObject 14 | } 15 | 16 | func NewPayload(f ast.Function, payload models.ClientObject) Payload { 17 | return Payload{ 18 | Function: ast.FUNC_PAYLOAD, 19 | ClientObject: payload, 20 | } 21 | } 22 | 23 | func (p Payload) Evaluate(ctx context.Context, arguments ast.Arguments) (any, []error) { 24 | payloadFieldName, err := adaptArgumentToString(arguments.Args[0]) 25 | if err != nil { 26 | return nil, MakeAdaptedArgsErrors([]error{err}) 27 | } 28 | 29 | value := p.ClientObject.Data[payloadFieldName] 30 | 31 | valueStr, ok := value.(string) 32 | if ok { 33 | return pure_utils.Normalize(valueStr), nil 34 | } 35 | 36 | return value, nil 37 | } 38 | -------------------------------------------------------------------------------- /usecases/ast_eval/evaluate/evaluate_time_test.go: -------------------------------------------------------------------------------- 1 | package evaluate 2 | 3 | import ( 4 | "context" 5 | "testing" 6 | "time" 7 | 8 | "github.com/checkmarble/marble-backend/models/ast" 9 | 10 | "github.com/stretchr/testify/assert" 11 | ) 12 | 13 | func TestTimeNow(t *testing.T) { 14 | result, errs := TimeFunctions{ast.FUNC_TIME_NOW}.Evaluate(context.TODO(), ast.Arguments{}) 15 | assert.Empty(t, errs) 16 | 
assert.WithinDuration(t, time.Now(), result.(time.Time), 1*time.Millisecond) 17 | } 18 | 19 | func TestParseTime(t *testing.T) { 20 | result, errs := TimeFunctions{ast.FUNC_PARSE_TIME}.Evaluate(context.TODO(), ast.Arguments{ 21 | Args: []any{"2021-07-07T00:00:00Z"}, 22 | }) 23 | assert.Empty(t, errs) 24 | assert.Equal(t, time.Date(2021, 7, 7, 0, 0, 0, 0, time.UTC), result.(time.Time)) 25 | } 26 | 27 | func TestParseTime_fail(t *testing.T) { 28 | _, errs := TimeFunctions{ast.FUNC_PARSE_TIME}.Evaluate(context.TODO(), ast.Arguments{ 29 | Args: []any{"2021-07-07 00:00:00Z"}, 30 | }) 31 | if assert.Len(t, errs, 1) { 32 | assert.Error(t, errs[0]) 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /usecases/ast_eval/evaluate/evaluate_undefined.go: -------------------------------------------------------------------------------- 1 | package evaluate 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/cockroachdb/errors" 7 | 8 | "github.com/checkmarble/marble-backend/models/ast" 9 | ) 10 | 11 | type Undefined struct{} 12 | 13 | func (f Undefined) Evaluate(ctx context.Context, arguments ast.Arguments) (any, []error) { 14 | return MakeEvaluateError(errors.Wrap(ast.ErrUndefinedFunction, "Evaluate function Undefined")) 15 | } 16 | -------------------------------------------------------------------------------- /usecases/ast_eval/evaluate/evaluate_undefined_test.go: -------------------------------------------------------------------------------- 1 | package evaluate 2 | 3 | import ( 4 | "context" 5 | "testing" 6 | 7 | "github.com/checkmarble/marble-backend/models/ast" 8 | 9 | "github.com/stretchr/testify/assert" 10 | ) 11 | 12 | func TestUndefined(t *testing.T) { 13 | _, errs := Undefined{}.Evaluate(context.TODO(), ast.Arguments{}) 14 | if assert.Len(t, errs, 1) { 15 | assert.ErrorIs(t, errs[0], ast.ErrUndefinedFunction) 16 | } 17 | } 18 | -------------------------------------------------------------------------------- 
/usecases/ast_eval/evaluate/evaluator.go: -------------------------------------------------------------------------------- 1 | package evaluate 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/checkmarble/marble-backend/models/ast" 7 | ) 8 | 9 | type Evaluator interface { 10 | Evaluate(ctx context.Context, arguments ast.Arguments) (any, []error) 11 | } 12 | -------------------------------------------------------------------------------- /usecases/ast_eval/evaluate/to_float64.go: -------------------------------------------------------------------------------- 1 | package evaluate 2 | 3 | import ( 4 | "fmt" 5 | 6 | "github.com/cockroachdb/errors" 7 | ) 8 | 9 | func ToFloat64(v any) (float64, error) { 10 | switch v := v.(type) { 11 | 12 | case int: 13 | return float64(v), nil 14 | case int8: 15 | return float64(v), nil 16 | case int16: 17 | return float64(v), nil 18 | case int32: 19 | return float64(v), nil 20 | case int64: 21 | return float64(v), nil 22 | 23 | case uint: 24 | return float64(v), nil 25 | case uint8: 26 | return float64(v), nil 27 | case uint16: 28 | return float64(v), nil 29 | case uint32: 30 | return float64(v), nil 31 | case uint64: 32 | return float64(v), nil 33 | 34 | case float32: 35 | return float64(v), nil 36 | case float64: 37 | return v, nil 38 | 39 | default: 40 | return 0, errors.New(fmt.Sprintf("value %v cannot be converted to float64", v)) 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /usecases/ast_eval/evaluate/to_float64_test.go: -------------------------------------------------------------------------------- 1 | package evaluate 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stretchr/testify/assert" 7 | ) 8 | 9 | func TestToFloat64(t *testing.T) { 10 | expected := float64(13) 11 | 12 | check := func(v any) { 13 | result, err := ToFloat64(v) 14 | assert.NoError(t, err) 15 | assert.Equal(t, expected, result) 16 | } 17 | 18 | check(int8(13)) 19 | check(int16(13)) 20 | 
check(int32(13)) 21 | check(int64(13)) 22 | 23 | check(int(13)) 24 | check(uint(13)) 25 | 26 | check(uint8(13)) 27 | check(uint16(13)) 28 | check(uint32(13)) 29 | check(uint64(13)) 30 | 31 | check(float32(13)) 32 | check(float64(13)) 33 | } 34 | -------------------------------------------------------------------------------- /usecases/ast_eval/evaluate/to_int64.go: -------------------------------------------------------------------------------- 1 | package evaluate 2 | 3 | import ( 4 | "fmt" 5 | "math" 6 | 7 | "github.com/cockroachdb/errors" 8 | ) 9 | 10 | func ToInt64(v any) (int64, error) { 11 | switch v := v.(type) { 12 | case int: 13 | return int64(v), nil 14 | case int8: 15 | return int64(v), nil 16 | case int16: 17 | return int64(v), nil 18 | case int32: 19 | return int64(v), nil 20 | case int64: 21 | return v, nil 22 | 23 | case uint: 24 | return int64(v), nil 25 | case uint8: 26 | return int64(v), nil 27 | case uint16: 28 | return int64(v), nil 29 | case uint32: 30 | return int64(v), nil 31 | case uint64: 32 | if v > math.MaxInt64 { 33 | return 0, errors.New(fmt.Sprintf("uint64 value %d is too large to be converted to int64", v)) 34 | } 35 | return int64(v), nil 36 | default: 37 | return 0, errors.New(fmt.Sprintf("value '%v' cannot be converted to int64", v)) 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /usecases/ast_eval/evaluate/to_int64_test.go: -------------------------------------------------------------------------------- 1 | package evaluate 2 | 3 | import ( 4 | "math" 5 | "testing" 6 | 7 | "github.com/stretchr/testify/assert" 8 | ) 9 | 10 | func TestToInt64(t *testing.T) { 11 | expected := int64(13) 12 | 13 | check := func(v any) { 14 | result, err := ToInt64(v) 15 | assert.NoError(t, err) 16 | assert.Equal(t, expected, result) 17 | } 18 | 19 | check(int8(13)) 20 | check(int16(13)) 21 | check(int32(13)) 22 | check(int64(13)) 23 | 24 | check(int(13)) 25 | check(uint(13)) 26 | 27 | 
check(uint8(13)) 28 | check(uint16(13)) 29 | check(uint32(13)) 30 | check(uint64(13)) 31 | } 32 | 33 | func TestInvalidNumbers(t *testing.T) { 34 | checkErr := func(v interface{}) { 35 | _, err := ToInt64(v) 36 | assert.Error(t, err) 37 | } 38 | 39 | // to big 40 | checkErr(uint64(math.MaxUint64)) 41 | 42 | // checkErr 43 | checkErr(float32(0)) 44 | checkErr(float64(0)) 45 | checkErr("0") 46 | } 47 | -------------------------------------------------------------------------------- /usecases/ast_eval/evaluate_ast_expression.go: -------------------------------------------------------------------------------- 1 | package ast_eval 2 | 3 | import ( 4 | "context" 5 | 6 | "github.com/cockroachdb/errors" 7 | 8 | "github.com/checkmarble/marble-backend/models" 9 | "github.com/checkmarble/marble-backend/models/ast" 10 | ) 11 | 12 | type EvaluateAstExpression struct { 13 | AstEvaluationEnvironmentFactory AstEvaluationEnvironmentFactory 14 | } 15 | 16 | func (evaluator EvaluateAstExpression) EvaluateAstExpression( 17 | ctx context.Context, 18 | cache *EvaluationCache, 19 | ruleAstExpression ast.Node, 20 | organizationId string, 21 | payload models.ClientObject, 22 | dataModel models.DataModel, 23 | ) (ast.NodeEvaluation, error) { 24 | environment := evaluator.AstEvaluationEnvironmentFactory(EvaluationEnvironmentFactoryParams{ 25 | OrganizationId: organizationId, 26 | ClientObject: payload, 27 | DataModel: dataModel, 28 | DatabaseAccessReturnFakeValue: false, 29 | }) 30 | 31 | evaluation, ok := EvaluateAst(ctx, cache, environment, ruleAstExpression) 32 | if !ok { 33 | return evaluation, errors.Join(evaluation.FlattenErrors()...) 
	}

	return evaluation, nil
}

// -----------------------------------------------------------------------------
// usecases/ast_eval/evaluate_environment_factory.go
// -----------------------------------------------------------------------------
package ast_eval

import "github.com/checkmarble/marble-backend/models"

// EvaluationEnvironmentFactoryParams carries everything needed to build an
// evaluation environment scoped to one organization and one client object.
type EvaluationEnvironmentFactoryParams struct {
	OrganizationId                string
	ClientObject                  models.ClientObject
	DataModel                     models.DataModel
	DatabaseAccessReturnFakeValue bool
}

// AstEvaluationEnvironmentFactory builds an AstEvaluationEnvironment from the
// given parameters.
type AstEvaluationEnvironmentFactory func(params EvaluationEnvironmentFactoryParams) AstEvaluationEnvironment

// -----------------------------------------------------------------------------
// usecases/executor_factory/interfaces.go
// -----------------------------------------------------------------------------
package executor_factory

import (
	"context"

	"github.com/checkmarble/marble-backend/repositories"
)

// TransactionFactory runs a callback inside a database transaction, either in
// an organization-specific schema or in the default (marble) one.
type TransactionFactory interface {
	TransactionInOrgSchema(ctx context.Context, organizationId string,
		f func(tx repositories.Transaction) error) error
	Transaction(ctx context.Context, fn func(tx repositories.Transaction) error) error
}

// Interface to be used in usecases, implemented by the DbExecutorFactory class in the usecases/db_executor_factory package
// which itself has the ExecutorGetter repository class injected in it.
type ExecutorFactory interface {
	NewClientDbExecutor(ctx context.Context, organizationId string) (repositories.Executor, error)
	NewExecutor() repositories.Executor
}

// -----------------------------------------------------------------------------
// usecases/liveness.go
// -----------------------------------------------------------------------------
package usecases

import (
	"context"

	"github.com/checkmarble/marble-backend/repositories"
	"github.com/checkmarble/marble-backend/usecases/executor_factory"
)

// livenessRepository checks that the database answers a trivial query.
type livenessRepository interface {
	Liveness(ctx context.Context, exec repositories.Executor) error
}

// LivenessUsecase backs the liveness probe endpoint.
type LivenessUsecase struct {
	executorFactory    executor_factory.ExecutorFactory
	livenessRepository livenessRepository
}

// Liveness returns nil when the database is reachable.
func (u *LivenessUsecase) Liveness(ctx context.Context) error {
	return u.livenessRepository.Liveness(ctx, u.executorFactory.NewExecutor())
}

// -----------------------------------------------------------------------------
// usecases/security/enforce_security_analytics.go
// -----------------------------------------------------------------------------
package security

import (
	"errors"

	"github.com/checkmarble/marble-backend/models"
)

// EnforceSecurityAnalyticsImpl gates access to analytics data.
type EnforceSecurityAnalyticsImpl struct {
	EnforceSecurity
	Credentials models.Credentials
}

// ReadAnalytics requires the ANALYTICS_READ permission and read access to the
// analytics' organization.
func (e *EnforceSecurityAnalyticsImpl) ReadAnalytics(analytics models.Analytics) error {
	return errors.Join(
		e.Permission(models.ANALYTICS_READ), e.ReadOrganization(analytics.OrganizationId),
	)
}

// -----------------------------------------------------------------------------
// usecases/security/enforce_security_annotation.go
// -----------------------------------------------------------------------------
package security

import (
	"github.com/checkmarble/marble-backend/models"
)

type
EnforceSecurityAnnotation interface {
	DeleteAnnotation() error
}

// EnforceSecurityAnnotationImpl gates annotation operations.
type EnforceSecurityAnnotationImpl struct {
	EnforceSecurity
	Credentials models.Credentials
}

// DeleteAnnotation requires the ANNOTATION_DELETE permission.
func (e *EnforceSecurityAnnotationImpl) DeleteAnnotation() error {
	return e.Permission(models.ANNOTATION_DELETE)
}

// -----------------------------------------------------------------------------
// usecases/security/enforce_security_api_key.go
// -----------------------------------------------------------------------------
package security

import (
	"errors"

	"github.com/checkmarble/marble-backend/models"
)

// EnforceSecurityApiKeyImpl gates API key management.
type EnforceSecurityApiKeyImpl struct {
	EnforceSecurity
	Credentials models.Credentials
}

// ReadApiKey requires APIKEY_READ and read access to the key's organization.
func (e *EnforceSecurityApiKeyImpl) ReadApiKey(apiKey models.ApiKey) error {
	return errors.Join(
		e.Permission(models.APIKEY_READ), e.ReadOrganization(apiKey.OrganizationId),
	)
}

// CreateApiKey requires APIKEY_CREATE and read access to the organization.
func (e *EnforceSecurityApiKeyImpl) CreateApiKey(organizationId string) error {
	return errors.Join(
		e.Permission(models.APIKEY_CREATE), e.ReadOrganization(organizationId),
	)
}

func (e *EnforceSecurityApiKeyImpl) DeleteApiKey(apiKey models.ApiKey) error {
	// For now, we don't have any specific permission for deleting an API key
	return e.CreateApiKey(apiKey.OrganizationId)
}

// -----------------------------------------------------------------------------
// usecases/security/enforce_security_custom_list.go
// -----------------------------------------------------------------------------
package security

import (
	"errors"

	"github.com/checkmarble/marble-backend/models"
)

// EnforceSecurityCustomList gates read/write access to custom lists.
type EnforceSecurityCustomList interface {
	EnforceSecurity
	ReadCustomList(customList models.CustomList) error
	ModifyCustomList(customList models.CustomList) error
	CreateCustomList() error
}

type EnforceSecurityCustomListImpl struct {
	EnforceSecurity
	Credentials models.Credentials
}

func (e *EnforceSecurityCustomListImpl) ReadCustomList(customList models.CustomList) error {
	return errors.Join(
		e.Permission(models.CUSTOM_LISTS_READ),
		e.ReadOrganization(customList.OrganizationId),
	)
}

// CreateCustomList only checks the permission: the list does not exist yet,
// so there is no organization id to check against.
func (e *EnforceSecurityCustomListImpl) CreateCustomList() error {
	return errors.Join(
		e.Permission(models.CUSTOM_LISTS_EDIT),
	)
}

func (e *EnforceSecurityCustomListImpl) ModifyCustomList(customList models.CustomList) error {
	return errors.Join(
		e.Permission(models.CUSTOM_LISTS_EDIT),
		e.ReadOrganization(customList.OrganizationId),
	)
}

// -----------------------------------------------------------------------------
// usecases/security/enforce_security_decision_phantom.go
// -----------------------------------------------------------------------------
package security

import (
	"github.com/checkmarble/marble-backend/models"
	"github.com/cockroachdb/errors"
)

// EnforceSecurityPhantomDecision gates the creation of phantom (test-run)
// decisions.
type EnforceSecurityPhantomDecision interface {
	EnforceSecurity
	CreatePhantomDecision(organizationId string) error
}

type EnforceSecurityPhantomDecisionImpl struct {
	EnforceSecurity
	Credentials models.Credentials
}

func (e *EnforceSecurityPhantomDecisionImpl) CreatePhantomDecision(organizationId string) error {
	return errors.Join(
		e.Permission(models.PHANTOM_DECISION_CREATE),
		e.ReadOrganization(organizationId),
	)
}

// -----------------------------------------------------------------------------
// usecases/security/enforce_security_ingestion.go
// -----------------------------------------------------------------------------
package security

import (
	"errors"

	"github.com/checkmarble/marble-backend/models"
)

// EnforceSecurityIngestion gates data ingestion.
type EnforceSecurityIngestion interface {
	EnforceSecurity
	CanIngest(organizationId string) error
}

type EnforceSecurityIngestionImpl struct {
	EnforceSecurity
	Credentials models.Credentials
}

func (e *EnforceSecurityIngestionImpl) CanIngest(organizationId string) error {
	return errors.Join(
		e.Permission(models.INGESTION),
		e.ReadOrganization(organizationId),
	)
}

// -----------------------------------------------------------------------------
// usecases/security/enforce_security_license.go
// -----------------------------------------------------------------------------
package security

import (
	"context"
	"errors"

	"github.com/checkmarble/marble-backend/models"
)

// ListLicenses requires the LICENSE_LIST permission.
func (e *EnforceSecurityImpl) ListLicenses(ctx context.Context) error {
	return errors.Join(
		e.Permission(models.LICENSE_LIST),
	)
}

// CreateLicense requires the LICENSE_CREATE permission.
func (e *EnforceSecurityImpl) CreateLicense(ctx context.Context) error {
	return errors.Join(
		e.Permission(models.LICENSE_CREATE),
	)
}

// UpdateLicense requires the LICENSE_UPDATE permission.
func (e *EnforceSecurityImpl) UpdateLicense(ctx context.Context) error {
	return errors.Join(
		e.Permission(models.LICENSE_UPDATE),
	)
}

// -----------------------------------------------------------------------------
// usecases/security/enforce_security_partner.go
// -----------------------------------------------------------------------------
package security

import (
	"context"

	"github.com/cockroachdb/errors"

	"github.com/checkmarble/marble-backend/models"
	"github.com/checkmarble/marble-backend/utils"
)

// ListPartners requires the PARTNER_LIST permission.
func (e *EnforceSecurityImpl) ListPartners(ctx context.Context) error {
	return errors.Join(
		e.Permission(models.PARTNER_LIST),
	)
}

// CreatePartner requires the PARTNER_CREATE permission.
func (e *EnforceSecurityImpl) CreatePartner(ctx context.Context) error {
	return errors.Join(
		e.Permission(models.PARTNER_CREATE),
	)
}

// ReadPartner grants access either through the global PARTNER_LIST permission,
// or through PARTNER_READ combined with credentials scoped to this partner.
func (e *EnforceSecurityImpl) ReadPartner(ctx context.Context, partnerId string) error {
	err := e.Permission(models.PARTNER_LIST)
	if err == nil {
		return nil
	}

	return errors.Join(
		e.Permission(models.PARTNER_READ),
		utils.EnforcePartnerAccess(e.Credentials, partnerId),
	)
}

// UpdatePartner requires the PARTNER_UPDATE permission.
func (e *EnforceSecurityImpl) UpdatePartner(ctx context.Context) error {
	return errors.Join(
		e.Permission(models.PARTNER_UPDATE),
	)
}

// -----------------------------------------------------------------------------
// usecases/security/enforce_security_sanction_checks.go
// -----------------------------------------------------------------------------
package security

import (
	"context"

	"github.com/checkmarble/marble-backend/models"
)

// EnforceSecuritySanctionCheck gates sanction-check whitelists and freeform
// search.
type EnforceSecuritySanctionCheck interface {
	EnforceSecurity

	ReadWhitelist(ctx context.Context) error
	WriteWhitelist(ctx context.Context) error
	PerformFreeformSearch(ctx context.Context) error
}

func (e *EnforceSecurityImpl) ReadWhitelist(ctx context.Context) error {
	return e.Permission(models.SANCTION_CHECK_WHITELIST_READ)
}

func (e *EnforceSecurityImpl) WriteWhitelist(ctx context.Context) error {
	return e.Permission(models.SANCTION_CHECK_WHITELIST_WRITE)
}

func (e *EnforceSecurityImpl) PerformFreeformSearch(ctx context.Context) error {
	return e.Permission(models.SANCTION_CHECK_FREEFORM_SEARCH)
}

// -----------------------------------------------------------------------------
// usecases/security/enforce_security_tags.go
// -----------------------------------------------------------------------------
package security

import (
	"errors"

	"github.com/checkmarble/marble-backend/models"
)

// EnforceSecurityTags gates CRUD operations on tags.
type EnforceSecurityTags interface {
	EnforceSecurity
	ReadTag(tag models.Tag) error
	CreateTag(organizationId string) error
	UpdateTag(tag models.Tag) error
	DeleteTag(tag models.Tag) error
}

func (e *EnforceSecurityImpl) ReadTag(tag models.Tag) error {
	return errors.Join(
		e.Permission(models.TAG_READ),
		e.ReadOrganization(tag.OrganizationId),
	)
}

func (e *EnforceSecurityImpl) CreateTag(organizationId string) error {
	return errors.Join(
		e.Permission(models.TAG_CREATE),
		e.ReadOrganization(organizationId),
	)
}

func (e *EnforceSecurityImpl) UpdateTag(tag models.Tag) error {
	return errors.Join(
		e.Permission(models.TAG_UPDATE),
		e.ReadOrganization(tag.OrganizationId),
	)
}

func (e *EnforceSecurityImpl) DeleteTag(tag models.Tag) error {
	return errors.Join(
		e.Permission(models.TAG_DELETE),
		e.ReadOrganization(tag.OrganizationId),
	)
}

// -----------------------------------------------------------------------------
// usecases/testing_helpers.go
// -----------------------------------------------------------------------------
package usecases

import (
	"context"
	"strings"

	"github.com/checkmarble/marble-backend/repositories"
	"github.com/google/uuid"
)

// pipe composes the given functions, applying them left-to-right.
func pipe[T any](fns ...func(t T) T) func(T) T {
	return func(t T) T {
		for _, fn := range fns {
			t = fn(t)
		}
		return t
	}
}

// escapeSql escapes regex-significant characters so a SQL string can be used
// as a literal pattern (e.g. in mock query expectations).
func escapeSql(str string) string {
	// replace all (,),$ by the escaped equivalent
	return pipe(
		func(s string) string { return strings.ReplaceAll(s, "(", "\\(") },
		func(s string) string { return strings.ReplaceAll(s, ")", "\\)") },
		func(s string) string { return strings.ReplaceAll(s, "$", "\\$") },
	)(str)
}

// anyUuid matches any string that parses as a UUID (test argument matcher).
type anyUuid struct{}

func (a anyUuid) Match(v any) bool {
	str, ok := v.(string)
	if !ok {
		return false
	}
	_, err := uuid.Parse(str)
	return err == nil
}

// matchContext accepts any context (test argument matcher).
func matchContext(ctx context.Context) bool { return true }

// matchExec accepts any executor (test argument matcher).
func
matchExec(exec repositories.Executor) bool { return true }

// -----------------------------------------------------------------------------
// usecases/version_usecase.go
// -----------------------------------------------------------------------------
package usecases

// VersionUsecase exposes the configured API version string.
type VersionUsecase struct {
	ApiVersion string
}

// GetApiVersion returns the configured API version.
func (uc VersionUsecase) GetApiVersion() string {
	return uc.ApiVersion
}

// -----------------------------------------------------------------------------
// utils/blanks.go
// -----------------------------------------------------------------------------
package utils

// Or dereferences value, falling back to or when value is nil.
func Or[T any](value *T, or T) T {
	if value != nil {
		return *value
	}
	return or
}

// NilOrZero reports whether value is nil or points to the zero value of T.
func NilOrZero[T comparable](value *T) bool {
	if value == nil {
		return true
	}
	if *value == *new(T) {
		return true
	}
	return false
}

// -----------------------------------------------------------------------------
// utils/context_keys.go
// -----------------------------------------------------------------------------
package utils

// ContextKey is a dedicated key type for values stored in context.Context,
// avoiding collisions with keys from other packages.
type ContextKey int

const (
	ContextKeyCredentials ContextKey = iota
	ContextKeyLogger
	ContextKeySegmentClient
	ContextKeyOpenTelemetryTracer
)

// -----------------------------------------------------------------------------
// utils/context_segment.go
// -----------------------------------------------------------------------------
package utils

import (
	"context"

	"github.com/gin-gonic/gin"
	"github.com/segmentio/analytics-go/v3"
)

// SegmentClientFromContext retrieves the Segment analytics client stored in
// ctx; found is false when none was stored.
func SegmentClientFromContext(ctx context.Context) (analytics.Client, bool) {
	client, found := ctx.Value(ContextKeySegmentClient).(analytics.Client)
	return client, found
}

// StoreSegmentClientInContext returns a child context carrying client.
func StoreSegmentClientInContext(ctx context.Context, client analytics.Client) context.Context {
	return context.WithValue(ctx, ContextKeySegmentClient, client)
}

// StoreSegmentClientInContextMiddleware attaches the Segment client to every
// incoming request's context.
func StoreSegmentClientInContextMiddleware(client analytics.Client) gin.HandlerFunc {
	return func(c *gin.Context) {
		ctxWithSegment := StoreSegmentClientInContext(c.Request.Context(), client)
		c.Request = c.Request.WithContext(ctxWithSegment)
		c.Next()
	}
}

// -----------------------------------------------------------------------------
// utils/context_tracing.go
// -----------------------------------------------------------------------------
package utils

import (
	"context"

	"github.com/gin-gonic/gin"
	"go.opentelemetry.io/otel/trace"
	"go.opentelemetry.io/otel/trace/noop"
)

// OpenTelemetryTracerFromContext retrieves the tracer stored in ctx, falling
// back to a no-op tracer (traces silently dropped) when none is present.
func OpenTelemetryTracerFromContext(ctx context.Context) trace.Tracer {
	tracer, found := ctx.Value(ContextKeyOpenTelemetryTracer).(trace.Tracer)

	if !found {
		LoggerFromContext(ctx).DebugContext(ctx, "OpenTelemetryTracer not found in context, using NoopTracer: traces will be dismissed")
		return &noop.Tracer{}
	}

	return tracer
}

// StoreOpenTelemetryTracerInContext returns a child context carrying tracer.
func StoreOpenTelemetryTracerInContext(ctx context.Context, tracer trace.Tracer) context.Context {
	return context.WithValue(ctx, ContextKeyOpenTelemetryTracer, tracer)
}

// StoreOpenTelemetryTracerInContextMiddleware attaches the tracer to every
// incoming request's context.
func StoreOpenTelemetryTracerInContextMiddleware(tracer trace.Tracer) gin.HandlerFunc {
	return func(c *gin.Context) {
		ctxWithTracer := StoreOpenTelemetryTracerInContext(c.Request.Context(), tracer)
		c.Request = c.Request.WithContext(ctxWithTracer)
		c.Next()
	}
}

// -----------------------------------------------------------------------------
// utils/filter.go
// -----------------------------------------------------------------------------
package utils

// Filter returns a new slice containing only the elements of src for which f returns true.
func Filter[T any](src []T, f func(T) bool) []T {
	us := make([]T, 0, len(src))
	for i := range src {
		if f(src[i]) {
			us = append(us, src[i])
		}
	}
	return us
}

// -----------------------------------------------------------------------------
// utils/organization_security_test.go
// -----------------------------------------------------------------------------
package utils

import (
	"testing"

	"github.com/checkmarble/marble-backend/models"

	"github.com/stretchr/testify/assert"
)

func TestEnforceOrganizationAccess(t *testing.T) {
	err := EnforceOrganizationAccess(models.Credentials{
		OrganizationId: "1234",
		Role:           models.NO_ROLE,
	}, "1234")
	assert.NoError(t, err)
}

func TestEnforceOrganizationAccess_EmptyCredential(t *testing.T) {
	err := EnforceOrganizationAccess(models.Credentials{}, "1234")
	assert.ErrorIs(t, err, models.ForbiddenError)
}

func TestEnforceOrganizationAccess_Fail(t *testing.T) {
	err := EnforceOrganizationAccess(models.Credentials{OrganizationId: "not 1234"}, "1234")
	assert.ErrorIs(t, err, models.ForbiddenError)
}

func TestEnforceOrganizationAccess_marble_admin_override(t *testing.T) {
	err := EnforceOrganizationAccess(models.Credentials{Role: models.MARBLE_ADMIN}, "1234")
	assert.NoError(t, err)
}

// -----------------------------------------------------------------------------
// utils/pg_utils.go
// -----------------------------------------------------------------------------
package utils

import (
	"reflect"
	"strings"
)

// Return a []string of columns based on db tag
// Optional prefixes are joined with "." in front of each column name,
// e.g. ColumnList[Row]("t") yields "t.col".
func ColumnList[T any](prefixes ...string) []string {
	var value T
	var result []string

	inputElemValue := reflect.Indirect(reflect.ValueOf(value))
	inputElemType := inputElemValue.Type()

	for _, sf := range reflect.VisibleFields(inputElemType) {
		if !sf.IsExported() {
			continue
		}
		dbTag, dbTagPresent := sf.Tag.Lookup("db")
		if !dbTagPresent {
			continue
		}
		// Only the part before the first comma is the column name.
		colName := strings.Split(dbTag, ",")[0]
		// NOTE(review): only a bare `db:"-"` tag is skipped here; a tag such
		// as `db:"-,opt"` would not be — confirm this is intended.
		if dbTag == "-" {
			// Field is ignored, skip it.
			continue
		}
		colWithPrefixes := strings.Join(append(prefixes, colName), ".")
		result = append(result, colWithPrefixes)
	}

	return result
}

// -----------------------------------------------------------------------------
// utils/pointers.go
// -----------------------------------------------------------------------------
package utils

import "reflect"

// PtrToOptions controls the behavior of PtrTo.
type PtrToOptions struct {
	OmitZero bool
}

// Return a ptr to the provided value.
//
// Use options to change internal logic :
//   - OmitZero: zero values return a nil ptr
func PtrTo[T any](v T, options *PtrToOptions) *T {
	if options == nil {
		options = &PtrToOptions{
			OmitZero: false,
		}
	}

	if options.OmitZero {
		rv := reflect.ValueOf(v)
		if rv.IsZero() {
			return nil
		}
	}

	return &v
}

// Syntactic sugar for PtrTo with default options
func Ptr[T any](v T) *T {
	return PtrTo(v, nil)
}

// -----------------------------------------------------------------------------
// utils/uuid.go
// -----------------------------------------------------------------------------
package utils

import (
	"fmt"

	"github.com/checkmarble/marble-backend/models"
	"github.com/google/uuid"
)

// ValidateUuid returns nil when uuidParam parses as a UUID, otherwise an
// error wrapping models.BadParameterError.
func ValidateUuid(uuidParam string) error {
	_, err := uuid.Parse(uuidParam)
	if err != nil {
		err = fmt.Errorf("'%s' is not a valid UUID: %w", uuidParam, models.BadParameterError)
	}
	return err
}

// ByteUuid converts a UUID string to its raw 16-byte form; panics on invalid
// input (use only with trusted, constant strings).
func ByteUuid(str string) [16]byte {
	return [16]byte(uuid.MustParse(str))
}

// -----------------------------------------------------------------------------