├── .bumpversion.cfg ├── .coveragerc ├── .github ├── ISSUE_TEMPLATE │ ├── bug-report.yml │ └── feature-request.yml ├── dependabot.yml └── workflows │ ├── README.md │ ├── build-push-container.yml │ ├── changelog.yml │ ├── codeql-analysis.yml │ ├── gh-pages.yml │ ├── issues.yml │ ├── publish-package.yml │ ├── run-codspeed-tests.yml │ ├── run-linting-tests.yml │ ├── run-unit-tests.yml │ └── scheduled-build.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .stignore ├── CHANGELOG.md ├── Dockerfile ├── LICENSE ├── NOTICE ├── README.md ├── codecov.yml ├── docs ├── architecture │ ├── application │ │ ├── domainmodels.md │ │ ├── forms-frontend.md │ │ ├── tasks.md │ │ └── workflow.md │ ├── framework.md │ ├── img │ │ ├── base-orchestrator-setup.png │ │ └── custom-orchestrator-setup.png │ ├── orchestration │ │ ├── img │ │ │ ├── Custom-orchestrator-ui-using-override.png │ │ │ └── Standard-orchestrator-ui.png │ │ ├── orchestrator-ui.md │ │ └── philosophy.md │ ├── product_modelling │ │ ├── backfilling.md │ │ ├── context.md │ │ ├── imports.md │ │ ├── introduction.md │ │ ├── ip_static.md │ │ ├── ip_static.png │ │ ├── l2_point_to_point.md │ │ ├── l2_point_to_point.png │ │ ├── l2_vpn.md │ │ ├── l2_vpn.png │ │ ├── modelling.md │ │ ├── node.md │ │ ├── node.png │ │ ├── port.md │ │ ├── port.png │ │ ├── product_block_graph.md │ │ ├── product_block_graph.png │ │ ├── standards.md │ │ └── terminology.md │ └── tldr.md ├── contributing │ ├── development.md │ ├── guidelines.md │ └── testing.md ├── css │ ├── custom.css │ ├── style.css │ └── termynal.css ├── getting-started │ ├── base.md │ ├── docker.md │ ├── orchestration-ui.md │ ├── prepare-source-folder.md │ └── versions.md ├── img │ ├── WFO-Emblem-White.png │ └── favicon.ico ├── index.md ├── js │ ├── custom.js │ └── termynal.js ├── migration-guide │ ├── 2.0.md │ ├── 3.0.md │ └── 4.0.md ├── reference-docs │ ├── api.md │ ├── app │ │ ├── app.md │ │ └── scaling.md │ ├── auth-backend-and-frontend.md │ ├── cli.md │ ├── database.md │ ├── 
domain_models │ │ ├── generator.md │ │ ├── instantiating.md │ │ ├── model_attributes.md │ │ ├── overview.md │ │ ├── product_blocks.md │ │ ├── product_types.md │ │ ├── properties.md │ │ ├── pydantic_hooks.md │ │ ├── type_casting.md │ │ ├── union_types.md │ │ └── validation.md │ ├── forms.md │ ├── graphql.md │ ├── metrics.md │ ├── python.md │ ├── search.md │ ├── search_overview.png │ ├── serialization.md │ ├── tasks.md │ ├── tests.md │ ├── tldr.md │ ├── websockets.md │ └── workflows │ │ ├── callbacks.md │ │ ├── workflow-lifecycles.md │ │ └── workflow-steps.md └── workshops │ ├── advanced │ ├── bootstrap.md │ ├── create-your-own.md │ ├── docker-installation.md │ ├── domain-models.md │ ├── execute-workflows.md │ ├── generator.md │ ├── l2_point_to_point.png │ ├── node-create.md │ ├── node-modify.md │ ├── node-terminate.md │ ├── node-validate.md │ ├── overview.md │ ├── scenario.md │ ├── workflow-basics.md │ └── workflow-introduction.md │ ├── beginner │ ├── create-user-group.md │ ├── create-user.md │ ├── database-migration.md │ ├── debian.md │ ├── docker.md │ ├── domain-models.md │ ├── explore.md │ ├── input-forms.md │ ├── macos.md │ ├── modify-user-group.md │ ├── modify-user.md │ ├── register-workflows.md │ ├── start-applications.md │ ├── terminate-user-group.md │ ├── terminate-user.md │ ├── workflow-introduction.md │ └── workshop-overview.md │ └── images │ ├── Software-topology.drawio.png │ ├── clab_topology.png │ ├── metadata_products.png │ ├── netbox_devices_active.png │ ├── subscriptions.png │ └── topology.drawio.png ├── includes └── abbreviations.md ├── mkdocs.yml ├── nitpick-style.toml ├── orchestrator ├── __init__.py ├── api │ ├── __init__.py │ ├── api_v1 │ │ ├── __init__.py │ │ ├── api.py │ │ └── endpoints │ │ │ ├── __init__.py │ │ │ ├── health.py │ │ │ ├── processes.py │ │ │ ├── product_blocks.py │ │ │ ├── products.py │ │ │ ├── resource_types.py │ │ │ ├── settings.py │ │ │ ├── subscription_customer_descriptions.py │ │ │ ├── subscriptions.py │ │ │ ├── 
translations.py │ │ │ ├── user.py │ │ │ ├── workflows.py │ │ │ └── ws.py │ ├── error_handling.py │ ├── helpers.py │ └── models.py ├── app.py ├── cli │ ├── __init__.py │ ├── database.py │ ├── domain_gen_helpers │ │ ├── __init__.py │ │ ├── fixed_input_helpers.py │ │ ├── helpers.py │ │ ├── product_block_helpers.py │ │ ├── product_helpers.py │ │ ├── resource_type_helpers.py │ │ └── types.py │ ├── generate.py │ ├── generator │ │ ├── __init__.py │ │ ├── custom_templates │ │ │ ├── README │ │ │ ├── additional_create_imports.j2 │ │ │ ├── additional_create_input_fields.j2 │ │ │ ├── additional_create_steps.j2 │ │ │ ├── additional_modify_imports.j2 │ │ │ ├── additional_modify_input_fields.j2 │ │ │ ├── additional_modify_steps.j2 │ │ │ ├── additional_terminate_imports.j2 │ │ │ ├── additional_terminate_input_fields.j2 │ │ │ └── additional_terminate_steps.j2 │ │ ├── generator │ │ │ ├── __init__.py │ │ │ ├── enums.py │ │ │ ├── helpers.py │ │ │ ├── migration.py │ │ │ ├── product.py │ │ │ ├── product_block.py │ │ │ ├── settings.py │ │ │ ├── translations.py │ │ │ ├── unittest.py │ │ │ ├── validations.py │ │ │ └── workflow.py │ │ ├── products │ │ │ └── workshop │ │ │ │ ├── circuit.yaml │ │ │ │ ├── node.yaml │ │ │ │ ├── user.yaml │ │ │ │ └── user_group.yaml │ │ └── templates │ │ │ ├── additional_create_imports.j2 │ │ │ ├── additional_create_steps.j2 │ │ │ ├── additional_modify_imports.j2 │ │ │ ├── additional_modify_steps.j2 │ │ │ ├── additional_terminate_steps.j2 │ │ │ ├── constrained_int_definitions.j2 │ │ │ ├── create_data_head.j2 │ │ │ ├── create_product.j2 │ │ │ ├── enums.j2 │ │ │ ├── lazy_workflow_instance.j2 │ │ │ ├── list_definitions.j2 │ │ │ ├── macros.j2 │ │ │ ├── modify_product.j2 │ │ │ ├── new_product_migration.j2 │ │ │ ├── product.j2 │ │ │ ├── product_block.j2 │ │ │ ├── shared_forms.j2 │ │ │ ├── shared_workflows.j2 │ │ │ ├── subscription_model_registry.j2 │ │ │ ├── terminate_product.j2 │ │ │ ├── test_create_workflow.j2 │ │ │ ├── test_modify_workflow.j2 │ │ │ ├── 
test_product_type.j2 │ │ │ ├── test_terminate_workflow.j2 │ │ │ ├── test_validate_workflow.j2 │ │ │ └── validate_product.j2 │ ├── helpers │ │ ├── __init__.py │ │ ├── input_helpers.py │ │ └── print_helpers.py │ ├── main.py │ ├── migrate_domain_models.py │ ├── migrate_tasks.py │ ├── migrate_workflows.py │ ├── migration_helpers.py │ └── scheduler.py ├── config │ ├── __init__.py │ └── assignee.py ├── db │ ├── __init__.py │ ├── database.py │ ├── filters │ │ ├── __init__.py │ │ ├── filters.py │ │ ├── process.py │ │ ├── product.py │ │ ├── product_block.py │ │ ├── resource_type.py │ │ ├── search_filters │ │ │ ├── __init__.py │ │ │ └── inferred_filter.py │ │ ├── subscription.py │ │ └── workflow.py │ ├── helpers.py │ ├── listeners.py │ ├── loaders.py │ ├── models.py │ ├── queries │ │ ├── __init__.py │ │ ├── subscription.py │ │ └── subscription_instance.py │ ├── range │ │ ├── __init__.py │ │ └── range.py │ └── sorting │ │ ├── __init__.py │ │ ├── process.py │ │ ├── product.py │ │ ├── product_block.py │ │ ├── resource_type.py │ │ ├── sorting.py │ │ ├── subscription.py │ │ └── workflow.py ├── devtools │ ├── __init__.py │ ├── populator.py │ └── scripts │ │ ├── __init__.py │ │ ├── migrate_20.py │ │ ├── migrate_30.py │ │ └── shared.py ├── distlock │ ├── __init__.py │ ├── distlock_manager.py │ └── managers │ │ ├── __init__.py │ │ ├── memory_distlock_manager.py │ │ └── redis_distlock_manager.py ├── domain │ ├── __init__.py │ ├── base.py │ ├── context_cache.py │ ├── customer_description.py │ ├── helpers.py │ ├── lifecycle.py │ └── subscription_instance_transform.py ├── exception_handlers.py ├── forms │ ├── __init__.py │ └── validators │ │ ├── __init__.py │ │ ├── customer_contact_list.py │ │ ├── customer_id.py │ │ ├── display_subscription.py │ │ ├── network_type_validators.py │ │ └── product_id.py ├── graphql │ ├── __init__.py │ ├── autoregistration.py │ ├── extensions │ │ ├── __init__.py │ │ ├── model_cache.py │ │ └── stats.py │ ├── loaders │ │ ├── __init__.py │ │ └── subscriptions.py 
│ ├── mutations │ │ ├── customer_description.py │ │ └── start_process.py │ ├── pagination.py │ ├── resolvers │ │ ├── __init__.py │ │ ├── customer.py │ │ ├── helpers.py │ │ ├── process.py │ │ ├── product.py │ │ ├── product_block.py │ │ ├── resource_type.py │ │ ├── settings.py │ │ ├── subscription.py │ │ ├── version.py │ │ └── workflow.py │ ├── schema.py │ ├── schemas │ │ ├── __init__.py │ │ ├── customer.py │ │ ├── customer_description.py │ │ ├── errors.py │ │ ├── fixed_input.py │ │ ├── helpers.py │ │ ├── process.py │ │ ├── product.py │ │ ├── product_block.py │ │ ├── resource_type.py │ │ ├── settings.py │ │ ├── strawberry_pydantic_patch.py │ │ ├── subscription.py │ │ ├── version.py │ │ └── workflow.py │ ├── types.py │ └── utils │ │ ├── __init__.py │ │ ├── create_resolver_error_handler.py │ │ ├── get_query_loaders.py │ │ ├── get_selected_fields.py │ │ ├── get_selected_paths.py │ │ ├── get_subscription_product_blocks.py │ │ ├── is_query_detailed.py │ │ ├── override_class.py │ │ └── to_graphql_result_page.py ├── log_config.py ├── metrics │ ├── __init__.py │ ├── engine.py │ ├── init.py │ ├── processes.py │ └── subscriptions.py ├── migrations │ ├── README │ ├── alembic.ini │ ├── env.py │ ├── helpers.py │ ├── script.py.mako │ ├── templates │ │ ├── alembic.ini.j2 │ │ ├── env.py.j2 │ │ └── helpers.py.j2 │ └── versions │ │ └── schema │ │ ├── 2020-10-19_3323bcb934e7_fix_tsv_triggers.py │ │ ├── 2020-10-19_a76b9185b334_add_generic_workflows_to_core.py │ │ ├── 2020-10-19_c112305b07d3_initial_schema_migration.py │ │ ├── 2021-04-06_3c8b9185c221_add_validate_products_task.py │ │ ├── 2021-07-01_6896a54e9483_add_product_block_relations.py │ │ ├── 2021-11-17_19cdd3ab86f6_fix_parse_websearch.py │ │ ├── 2022-02-16_bed6bc0b197a_rename_parent_and_child_block_relations.py │ │ ├── 2023-03-06_e05bb1967eff_add_subscriptions_search_view.py │ │ ├── 2023-05-25_b1970225392d_add_subscription_metadata_workflow.py │ │ ├── 2023-06-28_a09ac125ea73_add_throttling_to_refresh_subscriptions.py │ │ ├── 
2023-06-28_a09ac125ea73_add_throttling_to_refresh_subscriptions.sql │ │ ├── 2023-07-17_165303a20fb1_customer_id_to_varchar.py │ │ ├── 2023-07-17_165303a20fb1_customer_id_to_varchar.sql │ │ ├── 2023-09-25_da5c9f4cce1c_add_subscription_metadata_to_fulltext_.py │ │ ├── 2023-09-25_da5c9f4cce1c_add_subscription_metadata_to_fulltext_.sql │ │ ├── 2023-12-06_048219045729_add_workflow_id_to_processes_table.py │ │ ├── 2024-09-27_460ec6748e37_add_uuid_search_workaround.py │ │ ├── 2024-09-27_460ec6748e37_add_uuid_search_workaround.sql │ │ ├── 2025-01-08_4c5859620539_add_version_column_to_subscription.py │ │ ├── 2025-01-19_4fjdn13f83ga_add_validate_product_type_task.py │ │ ├── 2025-02-12_bac6be6f2b4f_added_input_state_table.py │ │ ├── 2025-02-20_68d14db1b8da_make_workflow_description_mandatory.py │ │ ├── 2025-03-06_42b3d076a85b_subscription_instance_as_json_function.py │ │ ├── 2025-03-06_42b3d076a85b_subscription_instance_as_json_function.sql │ │ ├── 2025-04-09_fc5c993a4b4a_add_cascade_constraint_on_processes_.py │ │ └── 2025-05-08_161918133bec_add_is_task_to_workflow.py ├── py.typed ├── schedules │ ├── __init__.py │ ├── resume_workflows.py │ ├── scheduling.py │ ├── task_vacuum.py │ ├── validate_products.py │ └── validate_subscriptions.py ├── schemas │ ├── __init__.py │ ├── base.py │ ├── engine_settings.py │ ├── fixed_input.py │ ├── problem_detail.py │ ├── process.py │ ├── product.py │ ├── product_block.py │ ├── resource_type.py │ ├── subscription.py │ ├── subscription_descriptions.py │ └── workflow.py ├── security.py ├── services │ ├── __init__.py │ ├── celery.py │ ├── fixed_inputs.py │ ├── input_state.py │ ├── process_broadcast_thread.py │ ├── processes.py │ ├── products.py │ ├── resource_types.py │ ├── settings.py │ ├── subscription_relations.py │ ├── subscriptions.py │ ├── tasks.py │ ├── translations.py │ └── workflows.py ├── settings.py ├── targets.py ├── types.py ├── utils │ ├── __init__.py │ ├── crypt.py │ ├── datetime.py │ ├── deprecation_logger.py │ ├── docs.py │ ├── 
enrich_process.py │ ├── errors.py │ ├── fixed_inputs.py │ ├── functional.py │ ├── get_subscription_dict.py │ ├── get_updated_properties.py │ ├── helpers.py │ ├── json.py │ ├── redis.py │ ├── redis_client.py │ ├── search_query.py │ ├── state.py │ ├── strings.py │ └── validate_data_version.py ├── version.py ├── websocket │ ├── __init__.py │ ├── managers │ │ ├── broadcast_websocket_manager.py │ │ └── memory_websocket_manager.py │ └── websocket_manager.py ├── workflow.py └── workflows │ ├── __init__.py │ ├── modify_note.py │ ├── removed_workflow.py │ ├── steps.py │ ├── tasks │ ├── __init__.py │ ├── cleanup_tasks_log.py │ ├── resume_workflows.py │ ├── validate_product_type.py │ └── validate_products.py │ ├── translations │ └── en-GB.json │ └── utils.py ├── pyproject.toml ├── setup.cfg ├── setup.py └── test ├── __init__.py ├── acceptance_tests ├── __init__.py ├── conftest.py ├── fixtures │ └── test_orchestrator │ │ ├── __init__.py │ │ ├── devtools │ │ └── populator │ │ │ ├── __init__.py │ │ │ └── test_product_populator.py │ │ ├── main.py │ │ ├── product_blocks │ │ ├── __init__.py │ │ └── test_product_blocks.py │ │ ├── products │ │ ├── __init__.py │ │ └── test_product.py │ │ └── workflows │ │ ├── __init__.py │ │ └── create_test_product.py └── test_test_product.py └── unit_tests ├── __init__.py ├── api ├── __init__.py ├── test_health.py ├── test_helpers.py ├── test_models.py ├── test_processes.py ├── test_processes_ws.py ├── test_product_blocks.py ├── test_products.py ├── test_resource_types.py ├── test_settings.py ├── test_subscription_customer_descriptions.py ├── test_subscriptions.py ├── test_workflows.py └── test_ws.py ├── cli ├── __init__.py ├── conftest.py ├── data │ ├── generate.sh │ ├── generate │ │ ├── alembic.ini │ │ ├── main.py │ │ ├── migrations │ │ │ ├── env.py │ │ │ ├── helpers.py │ │ │ ├── script.py.mako │ │ │ └── versions │ │ │ │ └── schema │ │ │ │ ├── 2024-02-20_59e1199aff7f_create_data_head.py │ │ │ │ ├── 2024-02-20_85be1c80731c_add_example2.py │ │ │ │ 
├── 2024-02-20_ea9e6c9de75c_add_example1.py │ │ │ │ └── 2024-06-07_380a5b0c928c_add_example4.py │ │ ├── products │ │ │ ├── __init__.py │ │ │ ├── product_blocks │ │ │ │ ├── __init__.py │ │ │ │ ├── example1.py │ │ │ │ ├── example2.py │ │ │ │ ├── example4.py │ │ │ │ └── example4sub.py │ │ │ └── product_types │ │ │ │ ├── __init__.py │ │ │ │ ├── example1.py │ │ │ │ ├── example2.py │ │ │ │ └── example4.py │ │ ├── test │ │ │ └── unit_tests │ │ │ │ ├── domain │ │ │ │ └── product_types │ │ │ │ │ ├── test_example1.py │ │ │ │ │ ├── test_example2.py │ │ │ │ │ └── test_example4.py │ │ │ │ └── workflows │ │ │ │ ├── example1 │ │ │ │ ├── test_create_example1.py │ │ │ │ ├── test_modify_example1.py │ │ │ │ ├── test_terminate_example1.py │ │ │ │ └── test_validate_example1.py │ │ │ │ ├── example2 │ │ │ │ ├── test_create_example2.py │ │ │ │ ├── test_modify_example2.py │ │ │ │ ├── test_terminate_example2.py │ │ │ │ └── test_validate_example2.py │ │ │ │ └── example4 │ │ │ │ ├── test_create_example4.py │ │ │ │ ├── test_modify_example4.py │ │ │ │ ├── test_terminate_example4.py │ │ │ │ └── test_validate_example4.py │ │ ├── translations │ │ │ └── en-GB.json │ │ └── workflows │ │ │ ├── __init__.py │ │ │ ├── example1 │ │ │ ├── create_example1.py │ │ │ ├── modify_example1.py │ │ │ ├── shared │ │ │ │ └── forms.py │ │ │ ├── terminate_example1.py │ │ │ └── validate_example1.py │ │ │ ├── example2 │ │ │ ├── create_example2.py │ │ │ ├── modify_example2.py │ │ │ ├── shared │ │ │ │ └── forms.py │ │ │ └── terminate_example2.py │ │ │ ├── example4 │ │ │ ├── create_example4.py │ │ │ ├── modify_example4.py │ │ │ ├── shared │ │ │ │ └── forms.py │ │ │ └── terminate_example4.py │ │ │ └── shared.py │ ├── invalid_product_config1.yaml │ ├── invalid_product_config2.yaml │ ├── product_config1.yaml │ ├── product_config2.yaml │ ├── product_config3.yaml │ └── product_config4.yaml ├── generator │ ├── __init__.py │ └── test_enums.py ├── helpers.py ├── test_cli_generate.py ├── test_config_validation.py ├── 
test_generate_code.py ├── test_migrate_domain_models_with_instances.py └── test_migrate_domain_models_without_instances.py ├── config.py ├── conftest.py ├── db ├── __init__.py ├── test_listeners.py └── test_migration_does_column_exist.py ├── domain ├── __init__.py ├── test_base.py ├── test_base_multiple.py ├── test_base_performance.py ├── test_base_serializable_property.py ├── test_base_with_list_union.py ├── test_base_with_union.py └── test_lifecycle.py ├── fixtures ├── __init__.py ├── processes.py ├── products │ ├── __init__.py │ ├── product_blocks │ │ ├── __init__.py │ │ ├── product_block_list_nested.py │ │ ├── product_block_one.py │ │ ├── product_block_one_nested.py │ │ ├── product_block_with_list_union.py │ │ ├── product_block_with_union.py │ │ ├── product_sub_block_one.py │ │ └── product_sub_block_two.py │ ├── product_types │ │ ├── __init__.py │ │ ├── product_type_list_nested.py │ │ ├── product_type_list_union.py │ │ ├── product_type_list_union_overlap.py │ │ ├── product_type_one.py │ │ ├── product_type_one_nested.py │ │ ├── product_type_sub_list_union.py │ │ ├── product_type_sub_one.py │ │ ├── product_type_sub_two.py │ │ ├── product_type_sub_union.py │ │ ├── product_type_union.py │ │ └── subscription_relations.py │ └── resource_types.py └── workflows.py ├── forms ├── __init__.py ├── test_customer_contact_list.py ├── test_customer_id.py ├── test_display_subscription.py └── test_generic_validators.py ├── graphql ├── __init__.py ├── conftest.py ├── extensions │ ├── __init__.py │ └── test_stats.py ├── mutations │ ├── helpers.py │ ├── test_customer_description.py │ └── test_start_process.py ├── test_customer.py ├── test_process.py ├── test_processes.py ├── test_product.py ├── test_product_blocks.py ├── test_resource_types.py ├── test_settings.py ├── test_sort_and_filter_fields.py ├── test_subscription.py ├── test_subscription_relations.py ├── test_subscriptions.py ├── test_version.py ├── test_workflows.py └── utils │ ├── __init__.py │ ├── fixtures.py │ ├── 
test_autoregistration.py │ ├── test_get_query_loaders.py │ ├── test_get_selected_paths.py │ ├── test_is_query_detailed.py │ ├── test_is_querying_page_data.py │ └── test_override_class.py ├── helpers.py ├── metrics ├── __init__.py ├── conftest.py ├── test_engine_metrics.py ├── test_process_metrics.py └── test_subscription_metrics.py ├── migrations ├── __init__.py └── test_cascade_constraint.py ├── schedules ├── __init__.py └── test_scheduling.py ├── services ├── __init__.py ├── test_input_state.py ├── test_processes.py ├── test_products.py ├── test_subscription_relations.py ├── test_subscriptions.py └── test_translations.py ├── test_db.py ├── test_types.py ├── test_workflow.py ├── utils ├── __init__.py ├── get_subscription_dict.py ├── test_datetime.py ├── test_errors.py ├── test_functional.py ├── test_get_updated_properties.py ├── test_json.py ├── test_search_query.py ├── test_state.py └── test_strings.py ├── websocket ├── __init__.py └── test_broadcast.py └── workflows ├── __init__.py ├── conftest.py ├── shared ├── __init__.py └── test_validate_subscriptions.py ├── tasks ├── __init__.py ├── test_clean_up_task_log.py ├── test_resume_workflows.py ├── test_validate_product_type.py └── test_validate_products.py ├── test_async_workflow.py ├── test_config_db_code.py ├── test_generic_workflow_steps.py ├── test_lifecycle_status_manager.py └── test_modify_note.py /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 4.0.4 3 | commit = False 4 | tag = False 5 | parse = (?P\d+)\.(?P\d+)\.(?P\d+)(rc(?P\d+))? 
6 | serialize = 7 | {major}.{minor}.{patch}rc{build} 8 | {major}.{minor}.{patch} 9 | 10 | [bumpversion:file:orchestrator/__init__.py] 11 | search = __version__ = "{current_version}" 12 | replace = __version__ = "{new_version}" 13 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = *migrations*|*test* 3 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature-request.yml: -------------------------------------------------------------------------------- 1 | name: Feature Request 2 | description: File a feature request 3 | title: "[Feature]: " 4 | labels: ["triage", "feature"] 5 | projects: ["workfloworchestrator/orchestrator-core", "workfloworchestrator/orchestrator-ui", "workfloworchestrator/oauth2-lib", "workfloworchestrator/nwa-stdlib"] 6 | body: 7 | - type: markdown 8 | attributes: 9 | value: | 10 | Thanks for taking the time to fill out this feature request! 11 | - type: input 12 | id: contact 13 | attributes: 14 | label: Contact Details 15 | description: How can we get in touch with you if we need more info? 16 | placeholder: ex. email@example.com 17 | validations: 18 | required: false 19 | - type: textarea 20 | id: feature 21 | attributes: 22 | label: What should we build? 23 | description: What feature are we missing? What would an ideal solution look like? 24 | placeholder: Tell us what we should build! 25 | validations: 26 | required: true 27 | - type: textarea 28 | id: logs 29 | attributes: 30 | label: Relevant pseudo code 31 | description: Please write, copy or paste any relevant log output and or code. This will be automatically formatted into code, so no need for backticks. 
32 | render: shell 33 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "pip" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "weekly" 12 | -------------------------------------------------------------------------------- /.github/workflows/changelog.yml: -------------------------------------------------------------------------------- 1 | name: Changelog 2 | on: 3 | release: 4 | types: 5 | - created 6 | jobs: 7 | changelog: 8 | runs-on: ubuntu-20.04 9 | steps: 10 | - name: "✏️ Generate release changelog" 11 | uses: heinrichreimer/github-changelog-generator-action@v2.3 12 | with: 13 | token: ${{ secrets.GITHUB_TOKEN }} 14 | -------------------------------------------------------------------------------- /.github/workflows/gh-pages.yml: -------------------------------------------------------------------------------- 1 | name: gh-pages 2 | on: 3 | push: 4 | branches: 5 | - main 6 | jobs: 7 | deploy: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v3 11 | - uses: actions/setup-python@v2 12 | with: 13 | python-version: 3.x 14 | 15 | - run: pip install flit 16 | - run: flit install --only-deps --deps develop 17 | - run: mkdocs gh-deploy --force 18 | -------------------------------------------------------------------------------- /.github/workflows/issues.yml: -------------------------------------------------------------------------------- 1 | name: Add a 
new GitHub Project card linked to a GitHub issue to the specified project column 2 | on: [issues] 3 | jobs: 4 | github-actions-automate-projects: 5 | runs-on: ubuntu-latest 6 | steps: 7 | - name: add-new-issues-to-repository-based-project-column 8 | uses: docker://takanabe/github-actions-automate-projects:v0.0.1 9 | if: github.event_name == 'issues' && github.event.action == 'opened' 10 | env: 11 | GITHUB_TOKEN: ${{ secrets.CI_TOKEN_GITHUB }} 12 | GITHUB_PROJECT_URL: https://github.com/orgs/workfloworchestrator/projects/3/ 13 | GITHUB_PROJECT_COLUMN_NAME: Todo 14 | -------------------------------------------------------------------------------- /.github/workflows/publish-package.yml: -------------------------------------------------------------------------------- 1 | name: Upload Python Package 2 | 3 | on: 4 | release: 5 | types: [created] 6 | 7 | jobs: 8 | deploy: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v3 12 | - name: Set up Python 13 | uses: actions/setup-python@v2 14 | with: 15 | python-version: '3.x' 16 | - name: Install dependencies 17 | run: | 18 | python -m pip install --upgrade pip 19 | pip install flit 20 | - name: Build and publish 21 | env: 22 | FLIT_USERNAME: ${{ secrets.FLIT_USERNAME }} 23 | FLIT_PASSWORD: ${{ secrets.FLIT_PASSWORD }} 24 | run: | 25 | flit publish 26 | -------------------------------------------------------------------------------- /.github/workflows/run-linting-tests.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Linting tests 5 | 6 | on: 7 | push: 8 | branches: [ main ] 9 | workflow_call: 10 | pull_request: 11 | 12 | jobs: 13 | build: 14 | name: Linting Tests 15 | runs-on: ubuntu-latest 16 | strategy: 17 | matrix: 18 | 
python-version: ['3.11', '3.12', '3.13'] 19 | fail-fast: false 20 | 21 | steps: 22 | - uses: actions/checkout@v3 23 | - name: Set up Python ${{ matrix.python-version }} 24 | uses: actions/setup-python@v2 25 | with: 26 | python-version: ${{ matrix.python-version }} 27 | - name: Install dependencies 28 | run: | 29 | python -m pip install --upgrade pip 30 | pip install flit 31 | flit install --deps develop --symlink 32 | - name: Check with mypy 33 | run: | 34 | mypy . 35 | - name: Run remaining pre-commit hooks 36 | uses: pre-commit/action@v3.0.1 37 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Build stage 2 | FROM python:3.11-slim AS build 3 | WORKDIR /app 4 | RUN apt-get update \ 5 | && apt-get install -y --no-install-recommends git build-essential \ 6 | && rm -rf /var/lib/apt/lists/* 7 | COPY . . 8 | RUN pip install --upgrade pip --no-cache-dir 9 | RUN pip install build --no-cache-dir 10 | RUN python -m build --wheel --outdir dist 11 | 12 | # Final stage 13 | FROM python:3.11-slim 14 | ENV PIP_ROOT_USER_ACTION=ignore 15 | RUN apt-get update \ 16 | && apt-get install -y --no-install-recommends git \ 17 | && rm -rf /var/lib/apt/lists/* 18 | RUN pip install --upgrade pip --no-cache-dir 19 | COPY --from=build /app/dist/*.whl /tmp/ 20 | RUN pip install /tmp/*.whl --no-cache-dir 21 | RUN useradd orchestrator 22 | USER orchestrator 23 | WORKDIR /home/orchestrator 24 | CMD ["uvicorn", "--host", "0.0.0.0", "--port", "8080", "main:app"] 25 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | The Orchestrator-Core 2 | Copyright [2018-2025] SURF, ESnet, GÉANT 3 | 4 | This product includes software developed at 5 | SURF NL (https://surf.nl) 6 | ESnet (https://es.net) 7 | GÉANT Vereniging (https://geant.org) 8 | 
-------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | ignore: 2 | - "docs" 3 | - "test" 4 | - "examples" 5 | - "orchestrator/devtools" 6 | - "orchestrator/migrations" 7 | - "orchestrator/cli/generator/templates" 8 | - "orchestrator/cli/generator/custom_templates" 9 | -------------------------------------------------------------------------------- /docs/architecture/img/base-orchestrator-setup.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/architecture/img/base-orchestrator-setup.png -------------------------------------------------------------------------------- /docs/architecture/img/custom-orchestrator-setup.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/architecture/img/custom-orchestrator-setup.png -------------------------------------------------------------------------------- /docs/architecture/orchestration/img/Custom-orchestrator-ui-using-override.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/architecture/orchestration/img/Custom-orchestrator-ui-using-override.png -------------------------------------------------------------------------------- /docs/architecture/orchestration/img/Standard-orchestrator-ui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/architecture/orchestration/img/Standard-orchestrator-ui.png 
-------------------------------------------------------------------------------- /docs/architecture/orchestration/philosophy.md: -------------------------------------------------------------------------------- 1 | # Philosophy 2 | The Workflow Orchestrator is a framework of tools that help the developer create workflows to modify the lifecycle of 3 | subscribed products. It is un-opinionated in what it can orchestrate, but very opinionated in how. The Orchestrator is 4 | designed to run linear workflows that represent the business process of delivering a product. In comparison to other 5 | workflow engines like [Camunda](https://camunda.com/) or [Airflow](https://airflow.apache.org/index.html) we try to keep 6 | the options of the developer limited. In most cases the Workflow Orchestrator framework is flexible enough to 7 | handle the intelligence needed in the business process. 8 | 9 | ## Lightweight 10 | The core functionality of the framework is relatively simple: 11 | 12 | * There is a simple step engine that executes python functions. 13 | * Every step is designed to be atomic to make execution as safe as possible. 14 | * When using the Workflow Orchestrator with the example-ui, it is possible to create highly dynamic [forms](../../reference-docs/forms.md) in 15 | Python. The developer does not need to implement any code in the frontend to get started straight away. 16 | * Furthermore we are working on an extensive set of [tools](../../reference-docs/cli.md) to help bootstrap the development experience. 17 | -------------------------------------------------------------------------------- /docs/architecture/product_modelling/context.md: -------------------------------------------------------------------------------- 1 | # Context 2 | 3 | The models described further on assume an Ethernet network that consists of 4 | nodes where each node has physical ports. Network services have endpoints that 5 | connect to ports. 
The attributes that are specific to an endpoint are modelled 6 | as a service attach point. Examples of such attributes are the layer two 7 | label(s) used on that port or a point-to-point IP address. An inventory 8 | management system (IMS) is used to keep track of everything that is being 9 | deployed, and a network resource manager (NRM), such as NSO or Ansible, is used 10 | to provision the services on the network. All IP addresses and prefixes are 11 | stored in an IP address management (IPAM) tool. 12 | -------------------------------------------------------------------------------- /docs/architecture/product_modelling/introduction.md: -------------------------------------------------------------------------------- 1 | # Introduction 2 | 3 | Growing numbers of National Research and Education Networks (NRENs) are 4 | interested in automating and orchestrating their network portfolio. However, 5 | individual NRENs may be at different levels of engagement, ranging from 6 | interested but with no concrete plans as yet, to fully automated and 7 | orchestrated. Of the many commercial and open-source tools that can be used, 8 | the NREN community’s interest appears to be focused on Ansible and NSO for the 9 | automation part and on Workflow Orchestrator (WFO) for the orchestration part. 10 | Although the WFO is agnostic to the domain it is used in, this section 11 | describes, as an example that will be recognised by NRENs, a set of network 12 | service products that are common to this community and can be used in 13 | combination with the Workflow Orchestrator. 
14 | -------------------------------------------------------------------------------- /docs/architecture/product_modelling/ip_static.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/architecture/product_modelling/ip_static.png -------------------------------------------------------------------------------- /docs/architecture/product_modelling/l2_point_to_point.md: -------------------------------------------------------------------------------- 1 | # L2 Point-to-Point 2 | 3 | The Layer 2 point-to-point service is modelled using two product blocks. The 4 | l2_point_to_point product block holds the pointers to IMS and the NRM, the speed 5 | of the circuit, and whether the speed policer is enabled or not, as well as 6 | pointers to the two service attach points. The latter are modelled with the 7 | L2_service_attach_point product block and keep track of the port associated with 8 | that endpoint and, in the case where 802.1Q has to be enabled, the VLAN range 9 | used. The service can either be deployed protected or unprotected in the service 10 | provider network. This is administered with the fixed input protection_type. 
11 | 12 | 13 | 14 | * **protection_type**: this service is either unprotected or protected 15 | * **ims_id**: ID of the node in the inventory management system 16 | * **nrm_id**: ID of the node in the network resource manager 17 | * **speed**: the speed of the point-to-point service in Mbit/s 18 | * **speed_policer**: enable the speed policer for this service 19 | * **sap**: a constrained list of exactly two Layer2 service attach points 20 | * **vlan_range**: range of Layer 2 labels to be used on this endpoint of the service 21 | * **port**: link to the Port product block this service endpoint connects to 22 | -------------------------------------------------------------------------------- /docs/architecture/product_modelling/l2_point_to_point.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/architecture/product_modelling/l2_point_to_point.png -------------------------------------------------------------------------------- /docs/architecture/product_modelling/l2_vpn.md: -------------------------------------------------------------------------------- 1 | # L2 VPN 2 | 3 | The Layer 2 VPN service is much like the Layer 2 point-to-point service, which 4 | makes it possible to reuse existing product blocks, with a few differences such 5 | as the absence of fixed inputs. The L2_vpn_virtual_circuit product block 6 | inherits from the L2_ptp_virtual_circuit product block, and adds attributes to 7 | (dis)allow VLAN retagging and control over the BUM filter. And because a VPN 8 | can have one or more endpoints, unlike a point-to-point that has exactly two 9 | endpoints, the list of service attach points is overridden to reflect this. 
10 | 11 | 12 | 13 | * **bum_filter**: enable broadcast, unknown unicast, and multicast (BUM) traffic filter 14 | * **vlan_retagging**: allow VLAN retagging on endpoints 15 | * **sap**: a constrained list of at least one Layer2 service attach point 16 | -------------------------------------------------------------------------------- /docs/architecture/product_modelling/l2_vpn.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/architecture/product_modelling/l2_vpn.png -------------------------------------------------------------------------------- /docs/architecture/product_modelling/node.md: -------------------------------------------------------------------------------- 1 | # Node 2 | 3 | The administration handoff in IMS will be different for every organisation. For 4 | this example, it is assumed that all administration that comes with the physical 5 | installation and first-time configuration of the network node in IMS is done 6 | manually by a NOC engineer. This makes the node product rather simple. The only 7 | product block that is defined holds pointers to all related information that is 8 | stored in the operations support systems (OSS). This includes of course a 9 | pointer to the information in IMS, and after the service has been deployed on 10 | the network, another pointer to the related information in the NRM. To keep 11 | track of all IP addresses and prefixes used across the network service product, 12 | the pointers to the IPv4 and IPv6 loopback addresses on the node are also 13 | stored. 
14 | 15 | 16 | 17 | * **ims_id**: ID of the node in the inventory management system 18 | * **nrm_id**: ID of the node in the network resource manager 19 | * **ipv4_ipam_id**: ID of the node’s IPv4 loopback address in IPAM 20 | * **ipv6_ipam_id**: ID of the node’s IPv6 loopback address in IPAM 21 | -------------------------------------------------------------------------------- /docs/architecture/product_modelling/node.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/architecture/product_modelling/node.png -------------------------------------------------------------------------------- /docs/architecture/product_modelling/port.md: -------------------------------------------------------------------------------- 1 | # Port 2 | 3 | Once a NOC engineer has physically installed a port in a node and added 4 | some basic administration to IMS, the port is marked as available and can be 5 | further configured through the port product. To distinguish between ports with 6 | different speeds (1Gbit/s, 10Gbit/s, etcetera), the fixed input speed is used, 7 | which also allows filtering available ports of the right speed. Besides pointers 8 | to the administration of the port in IMS and the NRM, configuration options 9 | including 802.1Q, Ethernet auto negotiation, and the use of LLDP are registered, 10 | as well as a reference to the Node the port is installed in. 
11 | 12 | 13 | 14 | * **speed**: the speed of the physical interface on the node in Mbit/s 15 | * **ims_id**: ID of the node in the inventory management system 16 | * **nrm_id**: ID of the node in the network resource manager 17 | * **mode**: the port is either untagged, tagged or a link member in an aggregate 18 | * **auto_negotiation**: enable Ethernet auto negotiation 19 | * **lldp**: enable the link layer discovery protocol 20 | * **node**: link to the Node product block the port is residing on 21 | -------------------------------------------------------------------------------- /docs/architecture/product_modelling/port.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/architecture/product_modelling/port.png -------------------------------------------------------------------------------- /docs/architecture/product_modelling/product_block_graph.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/architecture/product_modelling/product_block_graph.png -------------------------------------------------------------------------------- /docs/architecture/product_modelling/standards.md: -------------------------------------------------------------------------------- 1 | # Standards 2 | 3 | There are many standards describing how network service products and their 4 | attributes can be modelled. Most of these are very detailed as they try to 5 | cover as many use cases as possible, which can prove overwhelming. Here we aim 6 | to do the opposite and only model the bare minimum. This makes it easier to see 7 | the relationship between the network service models, and how each model can be 8 | extended with attributes that are specific to the organisation that uses them. 
9 | 10 | A common way of modelling products is to split the models into a 11 | customer-facing part that contains all the attributes that are significant to 12 | the customer, and a resource-facing part that extends that set of attributes 13 | with all the attributes that are needed to actually deploy a service on the 14 | network. We assume here that such a separation is being used, where the 15 | customer-facing part lives in the Workflow Orchestrator and the resource-facing 16 | part lives in a provisioning system such as NSO or Ansible. 17 | -------------------------------------------------------------------------------- /docs/architecture/product_modelling/terminology.md: -------------------------------------------------------------------------------- 1 | # Terminology 2 | 3 | The data and business rules of the products and product blocks are modelled in 4 | Workflow Orchestrator domain models. A product is a collection of one or more 5 | product blocks, and zero or more fixed inputs. Fixed inputs are customer-facing 6 | attributes that cannot be changed at will by a customer because they are 7 | constrained in some way, for example by a physical constraint such as the speed 8 | of a port or a financial constraint such as the maximum capacity of a service. 9 | Product blocks are collections of resource types (customer-facing attributes) 10 | that together describe a set of attributes that can be repeated one or more 11 | times within a product and can optionally point to other product blocks. A 12 | product block is a logical collection of resource types that taken together 13 | make reusable instances. They can be referenced many times from within other 14 | products and make it possible to build a logical topology of the network within 15 | the orchestrator database. A subscription is a product instantiation for a 16 | specific customer. See the rest of the Workflow Orchestrator documentation for 17 | more details. 
18 | -------------------------------------------------------------------------------- /docs/css/custom.css: -------------------------------------------------------------------------------- 1 | 2 | .termynal-comment { 3 | color: #4a968f; 4 | font-style: italic; 5 | display: block; 6 | } 7 | 8 | .termy [data-termynal] { 9 | white-space: pre-wrap; 10 | } 11 | 12 | a.external-link::after { 13 | /* \00A0 is a non-breaking space 14 | to make the mark be on the same line as the link 15 | */ 16 | content: "\00A0[↪]"; 17 | } 18 | 19 | a.internal-link::after { 20 | /* \00A0 is a non-breaking space 21 | to make the mark be on the same line as the link 22 | */ 23 | content: "\00A0↪"; 24 | } 25 | -------------------------------------------------------------------------------- /docs/css/style.css: -------------------------------------------------------------------------------- 1 | :root { 2 | --md-primary-fg-color: #0067AC; 3 | --md-primary-fg-color--light: #94A4B8; 4 | --md-primary-fg-color--dark: #64758B; 5 | --md-primary-bg-color: #F1F5F9; 6 | --md-primary-bg-color--light: #FFFFFF; 7 | --md-accent-fg-color: #262830; 8 | --md-accent-fg-color--transparent: #51576; 9 | --md-accent-bg-color: #F1F5F9; 10 | --md-accent-bg-color--light: #FFFFFF; 11 | } 12 | 13 | .md-header__button.md-logo { 14 | padding: 0; 15 | } 16 | .md-header__button.md-logo img, .md-header__button.md-logo svg { 17 | height: 55px; 18 | } 19 | 20 | .md-header, .md-tabs { 21 | background-color: #05385E; 22 | } 23 | 24 | img[alt='pypi-downloads'], img[alt='npm-downloads'] { 25 | height: 19px; 26 | padding-top: 5px; 27 | } 28 | -------------------------------------------------------------------------------- /docs/getting-started/docker.md: -------------------------------------------------------------------------------- 1 | # Docker development 2 | As well as developing within a regular python environment it is also possible to develop with a docker environment. 
3 | This method clones our [example-orchestrator](https://github.com/workfloworchestrator/example-orchestrator) repo and 4 | kickstarts the development from this mono-repo setup. 5 | 6 | !!! note 7 | This method of developing is meant for beginners who would like to have a very opinionated version of the 8 | orchestrator that already has some pre-built integrations. 9 | 10 | 11 | ## Shipped inside this repo 12 | This repo contains a `docker-compose` that builds the following applications: 13 | 14 | * Orchestrator-core 15 | * Orchestrator-ui 16 | * Postgres 17 | * Redis 18 | * NetBox 19 | * GraphQL Federation 20 | 21 | Furthermore the repository also contains a lot of example code for some of the example products that have been 22 | implemented. If you would like to quickly get to know the application please follow the [README.md](https://github.com/workfloworchestrator/example-orchestrator/blob/master/README.md) 23 | to find out how the docker setup works. 24 | -------------------------------------------------------------------------------- /docs/getting-started/versions.md: -------------------------------------------------------------------------------- 1 | # Prerequisites 2 | The orchestrator backend has the following requirements 3 | 4 | ### Backend 5 | For the backend you need the following packages: 6 | 7 | * Python >= 3.11 8 | * Postgres >= 15 9 | 10 | ### Optional dependencies 11 | * Redis 12 | * Docker 13 | -------------------------------------------------------------------------------- /docs/img/WFO-Emblem-White.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/img/WFO-Emblem-White.png -------------------------------------------------------------------------------- /docs/img/favicon.ico: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/img/favicon.ico -------------------------------------------------------------------------------- /docs/migration-guide/3.0.md: -------------------------------------------------------------------------------- 1 | # 3.0 Migration Guide 2 | 3 | In this document we describe the steps that should be taken to migrate from `orchestrator-core` v2 to v3. 4 | 5 | ## About 3.0 6 | 7 | In this release, deprecated import statements from the `orchestrator.types` module are removed, that now come from 8 | `pydantic-forms.types` instead. These will have to be updated in your implementation of the orchestrator as well. 9 | 10 | ## Steps 11 | 12 | To update the import statements you may have in your implementation of Workflow Orchestrator, we offer a migration 13 | script that can be run as follows: `python -m orchestrator.devtools.scripts.migrate_30 <path>` where `<path>` points to 14 | your orchestrator implementation. 15 | -------------------------------------------------------------------------------- /docs/reference-docs/api.md: -------------------------------------------------------------------------------- 1 | # API Documentation 2 | 3 | The WFO has an API located at `/api/` and has browsable UI docs at `/api/redoc`, with the OpenAPI spec available to download directly from your WFO instance. You can also view the swagger docs here, however, note that this is pulled directly from the example orchestrator demo site and might not exactly line up with this version of the documentation, so pay attention to the version shown below. You can also view these on the functioning demo WFO instance [here!](https://demo.workfloworchestrator.org/api/redoc) 4 | 5 | 6 | !!swagger-http https://demo.workfloworchestrator.org/api/openapi.json!! 
7 | -------------------------------------------------------------------------------- /docs/reference-docs/app/app.md: -------------------------------------------------------------------------------- 1 | # app.py 2 | 3 | The app.py module is used in `orchestrator-core` for actually running the entire WFO FastAPI backend and the CLI. 4 | 5 | ## FastAPI Backend 6 | 7 | The code for the WFO's Fast API backend is very well documented, so look through the functions used in this module here: 8 | 9 | ::: orchestrator.app 10 | options: 11 | heading_level: 3 12 | 13 | A great example of how to use the functions available in app.py with your own `main.py` when you instantiate your own instance of the orchestrator can be seen in the [example orchestrator repository's](https://github.com/workfloworchestrator/example-orchestrator/blob/master/main.py) `main.py` file. 14 | 15 | ```python 16 | {% include 'https://raw.githubusercontent.com/workfloworchestrator/example-orchestrator/master/main.py' %} 17 | ``` 18 | 19 | ## CLI App 20 | 21 | The orchestrator core also has a CLI application that is documented in [detail here](../cli.md). You can bring this into your `main.py` file so that you can run the orchestrator CLI for development like so: 22 | 23 | ```python 24 | if __name__ == "__main__": 25 | core_cli() 26 | ``` 27 | -------------------------------------------------------------------------------- /docs/reference-docs/domain_models/generator.md: -------------------------------------------------------------------------------- 1 | # Generator 2 | 3 | If all of this domain modelling process seems like too much work, then good news, as all clever engineers before us have done, we've fixed that with YAML! Using the WFO CLI, you can generate your product types directly from a YAML. 
For more information on how to do that, check out the [CLI `generate` command documentation.](../cli.md#generate) 4 | -------------------------------------------------------------------------------- /docs/reference-docs/domain_models/instantiating.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/reference-docs/domain_models/instantiating.md -------------------------------------------------------------------------------- /docs/reference-docs/domain_models/overview.md: -------------------------------------------------------------------------------- 1 | # Overview 2 | 3 | Domain models are one of the core concepts to understand in the workflow orchestrator. To understand the high-level concepts of a domain model, start by reading the domain models page in the [Architecture](../../architecture/application/domainmodels.md) section of the documentation. Additionally, for an example design of domain models that could be commonly implemented by NRENs, see [this document](https://resources.geant.org/wp-content/uploads/2023/06/M7.3_Common-NREN-Network-Service-Product-Models.pdf 4 | ) created for GÉANT. 
5 | -------------------------------------------------------------------------------- /docs/reference-docs/domain_models/properties.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/reference-docs/domain_models/properties.md -------------------------------------------------------------------------------- /docs/reference-docs/domain_models/pydantic_hooks.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/reference-docs/domain_models/pydantic_hooks.md -------------------------------------------------------------------------------- /docs/reference-docs/domain_models/type_casting.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/reference-docs/domain_models/type_casting.md -------------------------------------------------------------------------------- /docs/reference-docs/domain_models/union_types.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/reference-docs/domain_models/union_types.md -------------------------------------------------------------------------------- /docs/reference-docs/domain_models/validation.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/reference-docs/domain_models/validation.md -------------------------------------------------------------------------------- 
/docs/reference-docs/search_overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/reference-docs/search_overview.png -------------------------------------------------------------------------------- /docs/reference-docs/serialization.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/reference-docs/serialization.md -------------------------------------------------------------------------------- /docs/reference-docs/tasks.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/reference-docs/tasks.md -------------------------------------------------------------------------------- /docs/reference-docs/tests.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/reference-docs/tests.md -------------------------------------------------------------------------------- /docs/reference-docs/tldr.md: -------------------------------------------------------------------------------- 1 | # TL;DR 2 | 3 | This reference documentation is for developers who want to learn more about the internals of the Workflow Orchestrator. For more conceptual and high-level documentation, head over to the [Architecture](../architecture/tldr.md) section of the documentation. This is a mix of user-guide style docs and documentation of the actual python classes and functions in use in the `orchestrator-core`. 
Many of these function and class docs are also available in the code base directly from your IDE. 4 | -------------------------------------------------------------------------------- /docs/reference-docs/workflows/workflow-lifecycles.md: -------------------------------------------------------------------------------- 1 | # Overview 2 | 3 | Initial 4 | Started 5 | Resumed 6 | Failed 7 | Completed 8 | Aborted 9 | ProcessStat 10 | -------------------------------------------------------------------------------- /docs/workshops/advanced/create-your-own.md: -------------------------------------------------------------------------------- 1 | # Create your own workflow and product 2 | 3 | To cap off this workshop we will create a new product and workflows by using the built in tools that the Workflow 4 | Orchestrator provides the user. In this scenario you will create a product that is very similar to the provided 5 | L2VPN product, but constrained to two interfaces. In other words a L2 Point-to-Point circuit. 6 | 7 | ## L2 Point-to-Point model 8 | {{ external_markdown('https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/main/docs/architecture/product_modelling/l2_point_to_point.md', 9 | '') }} 10 | -------------------------------------------------------------------------------- /docs/workshops/advanced/domain-models.md: -------------------------------------------------------------------------------- 1 | # Domain models 2 | 3 | ## Introduction 4 | 5 | First read the [Architecture; TL;DR](../../architecture/tldr.md) section of the orchestrator core documentation to get an overview of the 6 | concepts that will be covered. 
7 | 8 | ## Products 9 | {{ external_markdown('https://raw.githubusercontent.com/workfloworchestrator/example-orchestrator/master/README.md', 10 | '## Products') }} 11 | -------------------------------------------------------------------------------- /docs/workshops/advanced/l2_point_to_point.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/workshops/advanced/l2_point_to_point.png -------------------------------------------------------------------------------- /docs/workshops/advanced/node-create.md: -------------------------------------------------------------------------------- 1 | {{ external_markdown('https://raw.githubusercontent.com/workfloworchestrator/example-orchestrator/master/README.md', 2 | '### Create workflow') }} 3 | -------------------------------------------------------------------------------- /docs/workshops/advanced/node-modify.md: -------------------------------------------------------------------------------- 1 | {{ external_markdown('https://raw.githubusercontent.com/workfloworchestrator/example-orchestrator/master/README.md', 2 | '### Modify workflow') }} 3 | -------------------------------------------------------------------------------- /docs/workshops/advanced/node-terminate.md: -------------------------------------------------------------------------------- 1 | {{ external_markdown('https://raw.githubusercontent.com/workfloworchestrator/example-orchestrator/master/README.md', 2 | '### Terminate workflow') }} 3 | -------------------------------------------------------------------------------- /docs/workshops/advanced/node-validate.md: -------------------------------------------------------------------------------- 1 | {{ external_markdown('https://raw.githubusercontent.com/workfloworchestrator/example-orchestrator/master/README.md', 2 | '### Validate workflow') }} 3 | 
-------------------------------------------------------------------------------- /docs/workshops/advanced/scenario.md: -------------------------------------------------------------------------------- 1 | # Scenario 2 | 3 | During this workshop a set of products will be used together with the needed workflows to manage enrolling network 4 | nodes into the Workflow Orchestrator and creating circuits between nodes. 5 | The products will be just complex enough to show the basic capabilities of products, product blocks, fixed inputs, 6 | resource types and workflows in the workflow orchestrator. We will cover nesting product blocks and products together. 7 | 8 | ## Product hierarchy example 9 | In the diagram below you can see how all products and product blocks relate to each other. The example orchestrator 10 | has implemented the following example products and corresponding workflows that can be used to build a basic network 11 | topology and customer facing services: 12 | 13 | {{ external_markdown('https://raw.githubusercontent.com/workfloworchestrator/example-orchestrator/master/README.md', '### Implemented products') }} 14 | 15 | Product block graph 16 | 17 | !!! Hint 18 | Take some time to explore the module described above. It shows how the product modelling is done in Python. 19 | Once you are familiar with the code. 
Continue with the workshop 20 | -------------------------------------------------------------------------------- /docs/workshops/advanced/workflow-basics.md: -------------------------------------------------------------------------------- 1 | # Workflow Basics 2 | {{ external_markdown('https://raw.githubusercontent.com/workfloworchestrator/example-orchestrator/master/README.md', 3 | '## Workflows - Basics') }} 4 | -------------------------------------------------------------------------------- /docs/workshops/beginner/start-applications.md: -------------------------------------------------------------------------------- 1 | # Start orchestrator and client 2 | 3 | ## Manual 4 | 5 | ### Start orchestrator 6 | 7 | From the `example-orchestrator` folder, use Uvicorn to start the orchestrator: 8 | 9 | ```shell 10 | uvicorn --host 127.0.0.1 --port 8080 main:app 11 | ``` 12 | 13 | If you are running without authentication set up, you can set the environment variable to false from the command line: 14 | ``` 15 | OAUTH2_ACTIVE=false uvicorn --host localhost --port 8080 main:app 16 | ``` 17 | 18 | Visit [the app](http://127.0.0.1:8080/api/docs) to view the API documentation. 19 | 20 | ### Start client 21 | 22 | From the `example-orchestrator-ui` folder, run the following command to start the front end. 23 | `npm run dev` 24 | 25 | ## Docker compose 26 | 27 | Using Docker compose the only thing needed to start all applications is to 28 | run: 29 | 30 | ```shell 31 | docker compose up 32 | ``` 33 | 34 | And point a browser to `http://localhost:3000/`. 35 | 36 | !!! note 37 | 38 | Once opened in the browser, ignore the message about the CRM not being 39 | responsive, this workshop does not include the setup of an interface to a 40 | CRM, fake customer IDs will be used instead. 
41 | -------------------------------------------------------------------------------- /docs/workshops/images/Software-topology.drawio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/workshops/images/Software-topology.drawio.png -------------------------------------------------------------------------------- /docs/workshops/images/clab_topology.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/workshops/images/clab_topology.png -------------------------------------------------------------------------------- /docs/workshops/images/metadata_products.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/workshops/images/metadata_products.png -------------------------------------------------------------------------------- /docs/workshops/images/netbox_devices_active.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/workshops/images/netbox_devices_active.png -------------------------------------------------------------------------------- /docs/workshops/images/subscriptions.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/workshops/images/subscriptions.png -------------------------------------------------------------------------------- /docs/workshops/images/topology.drawio.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/docs/workshops/images/topology.drawio.png -------------------------------------------------------------------------------- /includes/abbreviations.md: -------------------------------------------------------------------------------- 1 | *[WFO]: Workflow Orchestrator 2 | *[YAML]: YAML Ain't Markup Language™ 3 | *[SURF]: The national research and education network of the Netherlands. 4 | *[ESnet]: The Energy Sciences Network 5 | *[GÉANT]: The pan-European data network for the research and education community. 6 | *[NREN]: National Research and Education Network 7 | -------------------------------------------------------------------------------- /nitpick-style.toml: -------------------------------------------------------------------------------- 1 | [nitpick.files."setup.cfg"] 2 | comma_separated_values = ["mypy.plugins"] 3 | -------------------------------------------------------------------------------- /orchestrator/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2025 SURF, GÉANT. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 
13 | 14 | """This is the orchestrator workflow engine.""" 15 | 16 | __version__ = "4.0.4" 17 | 18 | from orchestrator.app import OrchestratorCore 19 | from orchestrator.settings import app_settings 20 | from orchestrator.workflow import begin, conditional, done, focussteps, inputstep, retrystep, step, steplens, workflow 21 | 22 | __all__ = [ 23 | "OrchestratorCore", 24 | "app_settings", 25 | "step", 26 | "inputstep", 27 | "workflow", 28 | "retrystep", 29 | "begin", 30 | "done", 31 | "conditional", 32 | "focussteps", 33 | "steplens", 34 | ] 35 | -------------------------------------------------------------------------------- /orchestrator/api/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | -------------------------------------------------------------------------------- /orchestrator/api/api_v1/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 
4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | -------------------------------------------------------------------------------- /orchestrator/api/api_v1/endpoints/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | -------------------------------------------------------------------------------- /orchestrator/api/api_v1/endpoints/translations.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 
4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | 14 | import structlog 15 | from fastapi import Path 16 | from fastapi.routing import APIRouter 17 | 18 | from orchestrator.services.translations import generate_translations 19 | 20 | logger = structlog.get_logger(__name__) 21 | 22 | 23 | router = APIRouter() 24 | 25 | 26 | @router.get("/{language}", response_model=dict) 27 | def get_translations(language: str = Path(..., pattern="^[a-z]+-[A-Z]+$")) -> dict: 28 | return generate_translations(language) 29 | -------------------------------------------------------------------------------- /orchestrator/cli/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 
13 | -------------------------------------------------------------------------------- /orchestrator/cli/domain_gen_helpers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/orchestrator/cli/domain_gen_helpers/__init__.py -------------------------------------------------------------------------------- /orchestrator/cli/generator/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/orchestrator/cli/generator/__init__.py -------------------------------------------------------------------------------- /orchestrator/cli/generator/custom_templates/README: -------------------------------------------------------------------------------- 1 | This folder contains example custom templates. Copy this folder to your own 2 | repository, and add the `--custom-templates ` option to the `generate` 3 | CLI command line. 
4 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/custom_templates/additional_create_imports.j2: -------------------------------------------------------------------------------- 1 | from orchestrator.forms.validators import ContactPersonList 2 | from surf.forms.validators import JiraTicketId 3 | from surf.products.services.subscription import subscription_description 4 | from surf.workflows.shared.mail import send_confirmation_email 5 | from surf.workflows.shared.jira import generic_update_jira_step 6 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/custom_templates/additional_create_input_fields.j2: -------------------------------------------------------------------------------- 1 | contact_persons: ContactPersonList = [] # type: ignore 2 | ticket_id: JiraTicketId = JiraTicketId("") 3 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/custom_templates/additional_create_steps.j2: -------------------------------------------------------------------------------- 1 | additional_steps = begin >> generic_update_jira_step >> send_confirmation_email() 2 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/custom_templates/additional_modify_imports.j2: -------------------------------------------------------------------------------- 1 | from orchestrator.forms.validators import ContactPersonList 2 | from surf.forms.validators import JiraTicketId 3 | from surf.products.services.subscription import subscription_description 4 | from surf.workflows.shared.mail import send_confirmation_email 5 | from surf.workflows.shared.jira import generic_update_jira_step 6 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/custom_templates/additional_modify_input_fields.j2: 
-------------------------------------------------------------------------------- 1 | contact_persons: ContactPersonList = [] # type: ignore 2 | ticket_id: JiraTicketId = JiraTicketId("") 3 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/custom_templates/additional_modify_steps.j2: -------------------------------------------------------------------------------- 1 | additional_steps = begin >> generic_update_jira_step >> send_confirmation_email() 2 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/custom_templates/additional_terminate_imports.j2: -------------------------------------------------------------------------------- 1 | from orchestrator.forms.validators import ContactPersonList 2 | from surf.forms.validators import JiraTicketId 3 | from surf.workflows.shared.jira import generic_update_jira_step 4 | from surf.workflows.shared.mail import send_confirmation_email 5 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/custom_templates/additional_terminate_input_fields.j2: -------------------------------------------------------------------------------- 1 | contact_persons: contact_person_list(UUID(customer_id)) = [] # type: ignore 2 | ticket_id: JiraTicketId | None = None 3 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/custom_templates/additional_terminate_steps.j2: -------------------------------------------------------------------------------- 1 | additional_steps = begin >> generic_update_jira_step >> send_confirmation_email() 2 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/generator/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/orchestrator/cli/generator/generator/__init__.py -------------------------------------------------------------------------------- /orchestrator/cli/generator/products/workshop/circuit.yaml: -------------------------------------------------------------------------------- 1 | # 2 | # This file describes the "Circuit" product from the advanced Orchestrator workshop 3 | # 4 | 5 | config: 6 | create_summary_forms: false 7 | name: Circuit 8 | type: Circuit 9 | tag: CIRCUIT 10 | description: "Circuit for workshop" 11 | fixed_inputs: 12 | - name: speed 13 | type: str 14 | product_blocks: 15 | - name: ckt 16 | type: Circuit 17 | tag: CIRCUIT 18 | description: "Circuit Product Block for workshop" 19 | fields: 20 | - name: members 21 | type: list 22 | description: "members" 23 | list_type: Layer3Interface 24 | min_items: 2 25 | max_items: 2 26 | - name: circuit_id 27 | type: int 28 | description: "circuit id" 29 | required: provisioning 30 | - name: under_maintenance 31 | type: bool 32 | description: "under maintenance" 33 | required: provisioning 34 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/products/workshop/node.yaml: -------------------------------------------------------------------------------- 1 | # 2 | # This file describes the "Node" product from the advanced Orchestrator workshop 3 | # 4 | 5 | config: 6 | create_summary_forms: false 7 | name: Node 8 | type: node 9 | product_blocks: 10 | - name: node 11 | type: Node 12 | fields: 13 | - name: node_id 14 | type: int 15 | required: provisioning 16 | - name: node_name 17 | type: str 18 | required: provisioning 19 | - name: ipv4_loopback 20 | type: ipaddress.IPv4Address 21 | required: provisioning 22 | - name: ipv6_loopback 23 | type: ipaddress.IPv6Address 24 | required: provisioning 25 | 
-------------------------------------------------------------------------------- /orchestrator/cli/generator/products/workshop/user.yaml: -------------------------------------------------------------------------------- 1 | # 2 | # This file describes the "User" product from the Orchestrator workshop 3 | # 4 | 5 | config: 6 | create_summary_forms: false 7 | name: User 8 | type: User 9 | fixed_inputs: 10 | - name: affiliation 11 | type: enum 12 | enum_type: str 13 | values: 14 | - "internal" 15 | - "external" 16 | product_blocks: 17 | - name: user 18 | type: User 19 | fields: 20 | - name: group 21 | type: UserGroup 22 | - name: username 23 | type: str 24 | required: provisioning 25 | - name: age 26 | type: int 27 | - name: user_id 28 | type: int 29 | required: active 30 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/products/workshop/user_group.yaml: -------------------------------------------------------------------------------- 1 | # 2 | # This file describes the "UserGroup" product from the Orchestrator workshop 3 | # 4 | 5 | config: 6 | create_summary_forms: false 7 | name: UserGroup 8 | type: UserGroup 9 | product_blocks: 10 | - name: user_group 11 | type: UserGroup 12 | fields: 13 | - name: group_name 14 | type: str 15 | required: provisioning 16 | - name: group_id 17 | type: int 18 | required: active 19 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/templates/additional_create_imports.j2: -------------------------------------------------------------------------------- 1 | from orchestrator.domain import SubscriptionModel 2 | 3 | 4 | def subscription_description(subscription: SubscriptionModel) -> str: 5 | """Generate subscription description. 
6 | 7 | The suggested pattern is to implement a subscription service that generates a subscription specific 8 | description, in case that is not present the description will just be set to the product name. 9 | """ 10 | return f"{subscription.product.name} subscription" 11 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/templates/additional_create_steps.j2: -------------------------------------------------------------------------------- 1 | additional_steps = begin 2 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/templates/additional_modify_imports.j2: -------------------------------------------------------------------------------- 1 | from orchestrator.domain import SubscriptionModel 2 | 3 | 4 | def subscription_description(subscription: SubscriptionModel) -> str: 5 | """The suggested pattern is to implement a subscription service that generates a subscription specific 6 | description, in case that is not present the description will just be set to the product name. 
7 | """ 8 | return f"{subscription.product.name} subscription" 9 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/templates/additional_modify_steps.j2: -------------------------------------------------------------------------------- 1 | additional_steps = begin 2 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/templates/additional_terminate_steps.j2: -------------------------------------------------------------------------------- 1 | additional_steps = begin 2 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/templates/constrained_int_definitions.j2: -------------------------------------------------------------------------------- 1 | {% for field in constrained_ints_to_generate %} 2 | {%- set ge = '{:_}'.format(field.min_value) -%} 3 | {%- set le = '{:_}'.format(field.max_value) -%} 4 | {{ field.type }} = Annotated[int, {% if ge | length %}Ge({{ ge }}), {% endif %}{% if le | length %} Le({{ le }}){% endif %}] 5 | {% endfor %} 6 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/templates/create_data_head.j2: -------------------------------------------------------------------------------- 1 | """Create data head. 2 | 3 | Revision ID: {{ revision }} 4 | Revises: 5 | Create Date: {{ create_date }} 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | 11 | # revision identifiers, used by Alembic. 
12 | revision = "{{ revision }}" 13 | down_revision = None 14 | branch_labels = ("data",) 15 | depends_on = "{{ depends_on }}" 16 | 17 | 18 | def upgrade() -> None: 19 | pass 20 | 21 | 22 | def downgrade() -> None: 23 | pass 24 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/templates/enums.j2: -------------------------------------------------------------------------------- 1 | {% for fi in int_enums -%} 2 | class {{ fi.type }}(IntEnum): 3 | {% for v in fi["values"] -%} 4 | _{{ v }} = {{ v }} 5 | {% endfor %} 6 | {%- endfor %} 7 | 8 | {% for fi in str_enums -%} 9 | class {{ fi.type }}(StrEnum): 10 | {% for v in fi["values"] -%} 11 | {{ v }} = "{{ v }}" 12 | {% endfor %} 13 | {% endfor -%} 14 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/templates/lazy_workflow_instance.j2: -------------------------------------------------------------------------------- 1 | {# Lazy workflows definition #} 2 | 3 | LazyWorkflowInstance("workflows.{{ product.variable }}.create_{{ product.variable }}", "create_{{ product.variable }}") 4 | LazyWorkflowInstance("workflows.{{ product.variable }}.modify_{{ product.variable }}", "modify_{{ product.variable }}") 5 | LazyWorkflowInstance("workflows.{{ product.variable }}.terminate_{{ product.variable }}", "terminate_{{ product.variable }}") 6 | LazyWorkflowInstance("workflows.{{ product.variable }}.validate_{{ product.variable }}", "validate_{{ product.variable }}") 7 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/templates/list_definitions.j2: -------------------------------------------------------------------------------- 1 | {% if lists_to_generate %} 2 | {% for list_type in lists_to_generate %} 3 | ListOf{{ list_type.name | capitalize }} = Annotated[list[SI], Len(min_length={{ list_type.min_items }}{% if list_type.max_items -%}, max_length={{ 
list_type.max_items }}{% endif %})] 4 | {% endfor %} 5 | {% endif -%} 6 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/templates/macros.j2: -------------------------------------------------------------------------------- 1 | {% macro type_and_default(field, type, required_in) -%} 2 | {{ required(field, type, required_in) }}{{ default(field, type, required_in) }} 3 | {%- endmacro %} 4 | 5 | {% macro required(field, type, required_in) -%} 6 | {{ type }}{% if field.required not in required_in %} | None{% endif %} 7 | {%- endmacro %} 8 | 9 | {% macro default(field, type, required_in) -%} 10 | {% if field.required not in required_in -%} 11 | = {% if field.default is defined %}{{ field.default }}{% else %}None{% endif %} 12 | {%- endif %} 13 | {%- endmacro %} 14 | 15 | {% macro list_field(name, list_type) -%} 16 | {% if list_type in ["int", "str"] -%} 17 | {{ name }}: list[{{ list_type }}] 18 | {%- else -%} 19 | {{ name }}: ListOf{{ name | capitalize }}[{{ list_type }}] 20 | {%- endif -%} 21 | {%- endmacro %} 22 | 23 | {% macro lifecycle_type(type, existing_product_blocks, lifecycle) -%} 24 | {% if type in existing_product_blocks -%} 25 | {{ type }}Block{{ lifecycle }} 26 | {%- else -%} 27 | {{ type }} 28 | {%- endif -%} 29 | {%- endmacro %} 30 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/templates/product.j2: -------------------------------------------------------------------------------- 1 | {# Product definition -#} 2 | 3 | from enum import IntEnum, StrEnum 4 | 5 | from orchestrator.domain.base import SubscriptionModel 6 | from orchestrator.types import SubscriptionLifecycle 7 | 8 | from {{ product_blocks_module }}.{{ root_block.variable }} import 9 | {{- " "}}{{ root_block.type }}Block 10 | {{- ", "}}{{- root_block.type }}BlockInactive 11 | {{- ", "}}{{- root_block.type }}BlockProvisioning 12 | 13 | {% include "enums.j2" %} 14 | {% if 
non_standard_fixed_inputs -%} 15 | from {{ product_types_module }}.fixed_input_types import {{ non_standard_fixed_inputs }} 16 | {% endif -%} 17 | 18 | class {{ product }}Inactive(SubscriptionModel, is_base=True): 19 | {% for fi in fixed_inputs -%} 20 | {{ fi.name }}: {{ fi.type }} 21 | {% endfor -%} 22 | {{ root_block.name }}: {{ root_block.type }}BlockInactive 23 | 24 | class {{ product }}Provisioning({{ product }}Inactive, lifecycle=[SubscriptionLifecycle.PROVISIONING]): 25 | {% for fi in fixed_inputs -%} 26 | {{ fi.name }}: {{ fi.type }} 27 | {% endfor -%} 28 | {{ root_block.name }}: {{ root_block.type }}BlockProvisioning 29 | 30 | class {{ product }}({{ product }}Provisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]): 31 | {% for fi in fixed_inputs -%} 32 | {{ fi.name }}: {{ fi.type }} 33 | {% endfor -%} 34 | {{ root_block.name }}: {{ root_block.type }}Block 35 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/templates/shared_forms.j2: -------------------------------------------------------------------------------- 1 | {% if product_block_types -%} 2 | from {{ product_blocks_module }}.{{ product_block.variable }} import {{ product_block_types | map(attribute='type') | join(", ") }} 3 | {%- endif %} 4 | 5 | {% for validation in validations %} 6 | def {{ validation.validation.id }}_validator({{ validation.field.name }}: {{ validation.field.type }}) -> {{ validation.field.type }}: 7 | if False: # TODO: implement validation for {{ validation.field.name }} 8 | raise ValueError("{{ validation.validation.description }}") 9 | 10 | return {{ validation.field.name }} 11 | 12 | {% endfor %} 13 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/templates/subscription_model_registry.j2: -------------------------------------------------------------------------------- 1 | 2 | from {{ product_types_module }}.{{ product.variable }} import {{ product.type 
}} 3 | 4 | SUBSCRIPTION_MODEL_REGISTRY.update( 5 | { 6 | {% for variant_name, variant_fixed_inputs in product_variants -%} 7 | "{{ variant_name }}": {{ product.type }}, 8 | {% endfor -%} 9 | } 10 | ) # fmt:skip 11 | -------------------------------------------------------------------------------- /orchestrator/cli/generator/templates/test_validate_workflow.j2: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from test.unit_tests.workflows import assert_complete, extract_state, run_workflow 4 | 5 | 6 | @pytest.mark.workflow 7 | def test_happy_flow(responses, {{ product.variable }}_subscription): 8 | # when 9 | 10 | result, _, _ = run_workflow("validate_{{ product.variable }}", {"subscription_id": {{ product.variable }}_subscription}) 11 | 12 | # then 13 | 14 | assert_complete(result) 15 | state = extract_state(result) 16 | assert state["check_core_db"] is True 17 | 18 | 19 | {% for validation in validations %} 20 | @pytest.mark.workflow 21 | def test_{{ validation.id }}(responses, {{ product.variable }}_subscription): 22 | # given 23 | 24 | # TODO: set test conditions or fixture so that "{{ validation.description }}" triggers 25 | 26 | # when 27 | 28 | with pytest.raises(AssertionError) as error: 29 | result, _, _ = run_workflow("validate_{{ product.variable }}", [{"subscription_id": {{ product.variable }}_subscription}, {}]) 30 | 31 | # then 32 | 33 | assert error.value.errors[0]["msg"] == "{{ validation.description }}" 34 | 35 | {% endfor %} 36 | -------------------------------------------------------------------------------- /orchestrator/cli/helpers/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 
4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | -------------------------------------------------------------------------------- /orchestrator/cli/helpers/input_helpers.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Iterable 2 | from typing import TypeVar 3 | 4 | import structlog 5 | 6 | from orchestrator.cli.helpers.print_helpers import print_fmt 7 | 8 | logger = structlog.get_logger(__name__) 9 | 10 | T = TypeVar("T") 11 | 12 | 13 | def get_user_input(text: str, default: str = "", optional: bool = False) -> str: 14 | while True: 15 | answer = input(text) 16 | if answer or default: 17 | return answer.strip() if answer else default 18 | if optional: 19 | return default 20 | 21 | 22 | def _enumerate_menu_keys(items: list | set) -> list[str]: 23 | return [str(i + 1) for i in range(len(items))] 24 | 25 | 26 | def _prompt_user_menu(options: Iterable[tuple[str, T]], keys: list[str] | None = None, repeat: bool = True) -> T | None: 27 | options_list = list(options) 28 | keys = keys or _enumerate_menu_keys(options_list) 29 | done = False 30 | while not done: 31 | for k, txt_v in zip(keys, options_list): 32 | print_fmt(f"{k}) {txt_v[0]}") 33 | choice = get_user_input("? 
") 34 | if choice not in keys: 35 | print_fmt("Invalid choice") 36 | done = not repeat 37 | else: 38 | return options_list[keys.index(choice)][1] 39 | return None 40 | -------------------------------------------------------------------------------- /orchestrator/cli/helpers/print_helpers.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Callable, Iterable 2 | from typing import Any 3 | 4 | from pydantic_forms.types import strEnum 5 | 6 | 7 | def _esc_str(i: int) -> str: 8 | return f"\033[{i}m" 9 | 10 | 11 | class COLOR(strEnum): 12 | RESET = _esc_str(0) 13 | BOLD = _esc_str(1) 14 | DIM = _esc_str(2) 15 | ITALIC = _esc_str(3) 16 | UNDERLINE = _esc_str(4) 17 | 18 | BLACK = _esc_str(30) 19 | RED = _esc_str(31) 20 | GREEN = _esc_str(32) 21 | YELLOW = _esc_str(33) 22 | BLUE = _esc_str(34) 23 | MAGENTA = _esc_str(35) 24 | CYAN = _esc_str(36) 25 | 26 | 27 | def str_fmt(text: str, *, flags: Iterable[COLOR] = ()) -> str: 28 | return "".join(f for f in flags) + text + COLOR.RESET 29 | 30 | 31 | def print_fmt(text: str, *, flags: Iterable[COLOR] = (), print_fn: Callable = print, **kwargs: Any) -> None: 32 | print_fn(str_fmt(text, flags=flags), **kwargs) 33 | 34 | 35 | def noqa_print(s: str, **kwargs: Any) -> None: 36 | print(s, **kwargs) # noqa: T201 37 | -------------------------------------------------------------------------------- /orchestrator/cli/main.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 
4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | 14 | import typer 15 | 16 | from orchestrator.cli import database, generate, scheduler 17 | 18 | app = typer.Typer() 19 | app.add_typer(scheduler.app, name="scheduler", help="Access all the scheduler functions") 20 | app.add_typer(database.app, name="db", help="Interact with the application database") 21 | app.add_typer(generate.app, name="generate", help="Generate products, workflows and other artifacts") 22 | 23 | 24 | if __name__ == "__main__": 25 | app() 26 | -------------------------------------------------------------------------------- /orchestrator/config/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | -------------------------------------------------------------------------------- /orchestrator/config/assignee.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2025 SURF, GÉANT. 
2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | 14 | import strawberry 15 | 16 | from pydantic_forms.types import strEnum 17 | 18 | 19 | @strawberry.enum 20 | class Assignee(strEnum): 21 | NOC = "NOC" 22 | SYSTEM = "SYSTEM" 23 | CHANGES = "CHANGES" 24 | KLANTSUPPORT = "KLANTSUPPORT" 25 | -------------------------------------------------------------------------------- /orchestrator/db/filters/__init__.py: -------------------------------------------------------------------------------- 1 | from orchestrator.db.filters.filters import ( 2 | CallableErrorHandler, 3 | Filter, 4 | QueryType, 5 | create_memoized_field_list, 6 | generic_filter_from_clauses, 7 | generic_filters_validate, 8 | ) 9 | 10 | __all__ = [ 11 | "Filter", 12 | "CallableErrorHandler", 13 | "QueryType", 14 | "create_memoized_field_list", 15 | "generic_filter_from_clauses", 16 | "generic_filters_validate", 17 | ] 18 | -------------------------------------------------------------------------------- /orchestrator/db/filters/product.py: -------------------------------------------------------------------------------- 1 | import structlog 2 | from sqlalchemy import BinaryExpression 3 | 4 | from orchestrator.db import ProductBlockTable, ProductTable 5 | from orchestrator.db.filters import create_memoized_field_list, generic_filter_from_clauses 6 | from orchestrator.db.filters.search_filters import default_inferred_column_clauses, filter_exact, node_to_str_val 7 | from 
orchestrator.utils.search_query import Node 8 | 9 | logger = structlog.get_logger(__name__) 10 | 11 | 12 | def product_block_clause(node: Node) -> BinaryExpression: 13 | return ProductTable.product_blocks.any(ProductBlockTable.name.ilike(node_to_str_val(node))) 14 | 15 | 16 | PRODUCT_TABLE_COLUMN_CLAUSES = default_inferred_column_clauses(ProductTable) | { 17 | "product_block": product_block_clause, 18 | "tag": filter_exact(ProductTable.tag), 19 | } 20 | 21 | product_filter_fields = create_memoized_field_list(PRODUCT_TABLE_COLUMN_CLAUSES) 22 | filter_products = generic_filter_from_clauses(PRODUCT_TABLE_COLUMN_CLAUSES) 23 | -------------------------------------------------------------------------------- /orchestrator/db/filters/product_block.py: -------------------------------------------------------------------------------- 1 | import structlog 2 | from sqlalchemy import BinaryExpression 3 | 4 | from orchestrator.db import ProductBlockTable, ProductTable, ResourceTypeTable 5 | from orchestrator.db.filters import create_memoized_field_list, generic_filter_from_clauses 6 | from orchestrator.db.filters.search_filters import default_inferred_column_clauses, node_to_str_val 7 | from orchestrator.utils.search_query import Node 8 | 9 | logger = structlog.get_logger(__name__) 10 | 11 | 12 | def products_clause(node: Node) -> BinaryExpression: 13 | return ProductBlockTable.products.any(ProductTable.name.ilike(node_to_str_val(node))) 14 | 15 | 16 | def resource_types_clause(node: Node) -> BinaryExpression: 17 | return ProductBlockTable.resource_types.any(ResourceTypeTable.resource_type.ilike(node_to_str_val(node))) 18 | 19 | 20 | PRODUCT_BLOCK_TABLE_COLUMN_CLAUSES = default_inferred_column_clauses(ProductBlockTable) | { 21 | "product": products_clause, 22 | "resource_type": resource_types_clause, 23 | } 24 | 25 | product_block_filter_fields = create_memoized_field_list(PRODUCT_BLOCK_TABLE_COLUMN_CLAUSES) 26 | filter_product_blocks = 
def product_blocks_clause(node: Node) -> BinaryExpression:
    """Build a where-clause matching resource types used by a product block whose name matches the search node."""
    return ResourceTypeTable.product_blocks.any(ProductBlockTable.name.ilike(node_to_str_val(node)))


# Inferred clauses for all ResourceTypeTable columns, plus the relation-based "product_block" filter.
RESOURCE_TYPE_TABLE_COLUMN_CLAUSES = default_inferred_column_clauses(ResourceTypeTable) | {
    "product_block": product_blocks_clause,
}

resource_type_filter_fields = create_memoized_field_list(RESOURCE_TYPE_TABLE_COLUMN_CLAUSES)
filter_resource_types = generic_filter_from_clauses(RESOURCE_TYPE_TABLE_COLUMN_CLAUSES)
def make_product_clause(filter_generator: WhereCondGenerator) -> WhereCondGenerator:
    """The passed filter_generator takes a Node and returns a where clause acting on a ProductTable column.

    Returns a new WhereCondGenerator that restricts WorkflowTable rows to workflows
    linked (via WorkflowTable.products) to products matching that clause.
    """

    def product_clause(node: Node) -> BinaryExpression:
        # Subquery: workflow ids joined to products that satisfy the product-column filter.
        subq = select(WorkflowTable.workflow_id).join(WorkflowTable.products).where(filter_generator(node)).subquery()
        return WorkflowTable.workflow_id.in_(subq)

    return product_clause
@functools.cache
def get_postgres_version() -> int:
    """Return the Postgres major version as an int, or 0 when it cannot be determined.

    The result is cached for the process lifetime; a failed lookup caches 0 as well.
    """
    try:
        # server_version_num is pg_major_version * 10000 + pg_minor_version
        pg_version_num = int(db.session.scalar(text("show server_version_num")))
        return pg_version_num // 10000
    except (TypeError, ValueError):
        # TypeError: scalar() returned None (no row); ValueError: non-numeric value.
        # Previously only ValueError was caught, so a None result crashed the caller.
        logger.error("Unable to query Postgres version")
        return 0
def disable_listeners() -> None:
    """Detach every SQLAlchemy listener registered by monitor_sqlalchemy_queries()."""
    while _listener_registry:
        target, identifier, fn = _listener_registry.pop()
        event.remove(target, identifier, fn)
# camelCase column name -> sort function, for every ProductTable column.
# Note: uses plain tuple unpacking (`key, value`) for consistency with the sibling
# sorting modules (product_block, resource_type, workflow); the previous
# `[key, value]` list-unpacking form was an outlier.
PRODUCT_SORT_FUNCTIONS_BY_COLUMN = {
    to_camel(key): generic_column_sort(value, ProductTable) for key, value in inspect(ProductTable).columns.items()
}

product_sort_fields = create_memoized_field_list(PRODUCT_SORT_FUNCTIONS_BY_COLUMN)
sort_products = generic_sort(PRODUCT_SORT_FUNCTIONS_BY_COLUMN)
# camelCase column name -> sort function, for every ProductBlockTable column.
PRODUCT_BLOCK_SORT_FUNCTIONS_BY_COLUMN = {
    to_camel(key): generic_column_sort(value, ProductBlockTable)
    for key, value in inspect(ProductBlockTable).columns.items()
}

product_block_sort_fields = create_memoized_field_list(PRODUCT_BLOCK_SORT_FUNCTIONS_BY_COLUMN)
sort_product_blocks = generic_sort(PRODUCT_BLOCK_SORT_FUNCTIONS_BY_COLUMN)
# camelCase column name -> sort function, for every WorkflowTable column.
WORKFLOW_SORT_FUNCTIONS_BY_COLUMN = {
    to_camel(key): generic_column_sort(value, WorkflowTable) for key, value in inspect(WorkflowTable).columns.items()
}

workflow_sort_fields = create_memoized_field_list(WORKFLOW_SORT_FUNCTIONS_BY_COLUMN)
sort_workflows = generic_sort(WORKFLOW_SORT_FUNCTIONS_BY_COLUMN)
11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | -------------------------------------------------------------------------------- /orchestrator/domain/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | 14 | from orchestrator.domain.base import SubscriptionModel, SubscriptionModelRegistry 15 | from orchestrator.utils.docs import make_product_type_index_doc 16 | 17 | SUBSCRIPTION_MODEL_REGISTRY: SubscriptionModelRegistry = SubscriptionModelRegistry() 18 | 19 | __doc__ = make_product_type_index_doc(SUBSCRIPTION_MODEL_REGISTRY) 20 | 21 | __all__ = ["SubscriptionModel", "SUBSCRIPTION_MODEL_REGISTRY"] 22 | -------------------------------------------------------------------------------- /orchestrator/exception_handlers.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 
# Optional exception attributes copied into the response body when set.
PROBLEM_DETAIL_FIELDS = ("title", "type")


async def problem_detail_handler(request: Request, exc: ProblemDetailException) -> JSONResponse:
    """Render a ProblemDetailException as a JSON response with detail/status plus optional fields."""
    body: dict = {"detail": exc.detail, "status": exc.status_code}

    # Only truthy title/type values are included in the body.
    for attr in PROBLEM_DETAIL_FIELDS:
        if attr_value := getattr(exc, attr, None):
            body[attr] = attr_value

    if extra_headers := getattr(exc, "headers", None):
        return JSONResponse(body, status_code=exc.status_code, headers=extra_headers)
    return JSONResponse(body, status_code=exc.status_code)
class FormPage(PydanticFormsFormPage):
    """Intermediate page of a multi-step form: hasNext signals that another page follows."""

    meta__: ClassVar[JSON] = {"hasNext": True}


class SubmitFormPage(FormPage):
    """Final page of a multi-step form: hasNext is False, so this page submits."""

    meta__: ClassVar[JSON] = {"hasNext": False}
# Customer-identifier form field: a plain str whose JSON-schema carries the
# "customerId" format hint, consumed by the forms frontend — TODO confirm rendering.
CustomerId = Annotated[str, Field(json_schema_extra={"format": "customerId"})]
class ModelCacheExtension(SchemaExtension):
    """Wraps the GraphQL operation in a cache_subscription_models context.

    For more background, please refer to the documentation of the contextmanager.
    """

    def on_operation(self, *args, **kwargs) -> Iterator[None]:  # type: ignore
        # Scope the subscription-model cache to this single GraphQL operation:
        # it is active while the operation executes (the yield) and exits afterwards.
        with cache_subscription_models():
            yield
async def resolve_customer(
    info: OrchestratorInfo,
    filter_by: list[GraphqlFilter] | None = None,
    sort_by: list[GraphqlSort] | None = None,
    first: int = 1,
    after: int = 0,
) -> Connection[CustomerType]:
    """Resolve the customers query with a single default customer built from app settings.

    NOTE(review): filter_by and sort_by are accepted for API symmetry but never applied —
    there is only the one settings-derived customer in the list.
    """
    default_customer_list = [
        CustomerType(
            customer_id=app_settings.DEFAULT_CUSTOMER_IDENTIFIER,
            fullname=app_settings.DEFAULT_CUSTOMER_FULLNAME,
            shortcode=app_settings.DEFAULT_CUSTOMER_SHORTCODE,
        )
    ]
    total = len(default_customer_list)
    return to_graphql_result_page(default_customer_list, first, after, total)
def resolve_version(info: OrchestratorInfo) -> VersionType | None:
    """Resolve the version query; reports a resolver error and returns None on failure."""
    logger.debug("resolve_version() called")
    report_error = create_resolver_error_handler(info)

    try:
        return VersionType(application_versions=VERSIONS)
    except Exception as e:
        logger.error(f"Error getting version: {str(e)}")
        report_error("Failed to retrieve orchestrator_core version", extensions={"code": "PACKAGE_VERSION_ERROR"})
        return None
@strawberry.interface
class BaseError:
    # Common shape for all error payloads returned from GraphQL mutations/queries.
    message: str


@strawberry.type
class Error(BaseError):
    # Plain error payload carrying only a human-readable message.
    message: str


@strawberry.type
class DebugError(BaseError):
    # Error payload that additionally exposes a traceback string (debug use).
    traceback: str
T = TypeVar("T")


def get_original_model(model: Any, klass: T) -> T:
    """Return the original model object stashed on *model* under ``_original_model``.

    Raises:
        ValueError: when no truthy ``_original_model`` attribute is present.
    """
    original = getattr(model, "_original_model", None)
    if not original:
        raise ValueError(f"Cant get original model for type {klass}")
    return cast(T, original)
strawberry.lazy(".product_block")]]: 17 | from orchestrator.graphql.schemas.product_block import ProductBlock 18 | 19 | model = get_original_model(self, ResourceTypeTable) 20 | return [ProductBlock.from_pydantic(product_block) for product_block in model.product_blocks] 21 | -------------------------------------------------------------------------------- /orchestrator/graphql/schemas/settings.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated, Union 2 | 3 | import strawberry 4 | from strawberry.scalars import JSON 5 | 6 | from orchestrator.graphql.schemas.errors import Error 7 | from orchestrator.schemas import WorkerStatus 8 | from orchestrator.schemas.engine_settings import EngineSettingsSchema 9 | 10 | CACHE_FLUSH_OPTIONS: dict[str, str] = {"all": "All caches"} 11 | 12 | 13 | @strawberry.experimental.pydantic.type(model=WorkerStatus, all_fields=True) 14 | class WorkerStatusType: 15 | pass 16 | 17 | 18 | @strawberry.experimental.pydantic.type(model=EngineSettingsSchema, all_fields=True) 19 | class EngineSettingsType: 20 | pass 21 | 22 | 23 | @strawberry.type 24 | class StatusType: 25 | engine_settings: EngineSettingsType | None 26 | worker_status: WorkerStatusType | None 27 | cache_names: JSON | None 28 | 29 | 30 | # Responses 31 | @strawberry.type 32 | class CacheClearSuccess: 33 | deleted: int 34 | 35 | 36 | CacheClearResponse = Annotated[Union[CacheClearSuccess, Error], strawberry.union("CacheClearResponse")] 37 | StatusUpdateResponse = Annotated[Union[EngineSettingsType, Error], strawberry.union("StatusUpdateResponse")] 38 | -------------------------------------------------------------------------------- /orchestrator/graphql/schemas/version.py: -------------------------------------------------------------------------------- 1 | import strawberry 2 | 3 | 4 | @strawberry.type 5 | class VersionType: 6 | application_versions: list[str] 7 | 
-------------------------------------------------------------------------------- /orchestrator/graphql/utils/__init__.py: -------------------------------------------------------------------------------- 1 | from orchestrator.graphql.utils.create_resolver_error_handler import create_resolver_error_handler 2 | from orchestrator.graphql.utils.get_selected_fields import get_selected_fields 3 | from orchestrator.graphql.utils.is_query_detailed import is_query_detailed, is_querying_page_data 4 | from orchestrator.graphql.utils.to_graphql_result_page import to_graphql_result_page 5 | 6 | __all__ = [ 7 | "get_selected_fields", 8 | "create_resolver_error_handler", 9 | "is_query_detailed", 10 | "is_querying_page_data", 11 | "to_graphql_result_page", 12 | ] 13 | -------------------------------------------------------------------------------- /orchestrator/graphql/utils/create_resolver_error_handler.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 
13 | 14 | 15 | from nwastdlib.graphql.extensions.error_handler_extension import ErrorType, register_error 16 | from orchestrator.db.filters import CallableErrorHandler 17 | from orchestrator.graphql.types import OrchestratorInfo 18 | 19 | 20 | def _format_context(context: dict) -> str: 21 | if not context: 22 | return "" 23 | return "({})".format(" ".join(f"{k}={v}" for k, v in context.items())) 24 | 25 | 26 | def create_resolver_error_handler(info: OrchestratorInfo) -> CallableErrorHandler: 27 | def handle_error(message: str, **context) -> None: # type: ignore 28 | return register_error(" ".join([message, _format_context(context)]), info, error_type=ErrorType.BAD_REQUEST) 29 | 30 | return handle_error 31 | -------------------------------------------------------------------------------- /orchestrator/graphql/utils/get_query_loaders.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.orm import Load 2 | 3 | from orchestrator.db.database import BaseModel as DbBaseModel 4 | from orchestrator.db.loaders import ( 5 | get_query_loaders_for_model_paths, 6 | ) 7 | from orchestrator.graphql.types import OrchestratorInfo 8 | from orchestrator.graphql.utils.get_selected_paths import get_selected_paths 9 | 10 | 11 | def get_query_loaders_for_gql_fields(root_model: type[DbBaseModel], info: OrchestratorInfo) -> list[Load]: 12 | """Get sqlalchemy query loaders for the given GraphQL query. 13 | 14 | Based on the GraphQL query's selected fields, returns the required DB loaders to use 15 | in SQLALchemy's `.options()` for efficiently quering (nested) relationships. 
16 | """ 17 | model_paths = [path.removeprefix("page.") for path in get_selected_paths(info)] 18 | 19 | return get_query_loaders_for_model_paths(root_model, model_paths) 20 | -------------------------------------------------------------------------------- /orchestrator/graphql/utils/get_selected_fields.py: -------------------------------------------------------------------------------- 1 | from more_itertools import first 2 | from strawberry.types.nodes import SelectedField, Selection 3 | 4 | from orchestrator.graphql.types import OrchestratorInfo 5 | 6 | 7 | def get_selected_fields(info: OrchestratorInfo) -> list[str]: 8 | """Get SelectedField names from the requested query (info). 9 | 10 | Can be used to get the selected fields of the schema, to only fetch those from the database. 11 | 12 | Args: 13 | info: The info class with request information. 14 | 15 | returns the names of SelectedFields as a list of strings. 16 | """ 17 | root_selected = info.selected_fields[0] 18 | 19 | def has_field_name(selection: Selection, field_name: str) -> bool: 20 | return isinstance(selection, SelectedField) and selection.name == field_name 21 | 22 | page_items = first((selection for selection in root_selected.selections if has_field_name(selection, "page")), None) 23 | if not page_items: 24 | page_items = root_selected 25 | 26 | return [selection.name for selection in page_items.selections if isinstance(selection, SelectedField)] 27 | -------------------------------------------------------------------------------- /orchestrator/graphql/utils/to_graphql_result_page.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from orchestrator.graphql.pagination import Connection, PageInfo 4 | 5 | 6 | def to_graphql_result_page( 7 | items: list[Any], 8 | first: int, 9 | after: int, 10 | total: int | None, 11 | sort_fields: list[str] | None = None, 12 | filter_fields: list[str] | None = None, 13 | ) -> Connection: 14 | 
has_next_page = len(items) > first 15 | 16 | page_items = items[:first] 17 | page_items_length = len(page_items) 18 | start_cursor = after if page_items_length else None 19 | end_cursor = after + page_items_length - 1 20 | 21 | return Connection( 22 | page=page_items, 23 | page_info=PageInfo( 24 | has_previous_page=bool(after), 25 | has_next_page=has_next_page, 26 | start_cursor=start_cursor, 27 | end_cursor=end_cursor, 28 | total_items=total if total else 0, 29 | sort_fields=sort_fields or [], 30 | filter_fields=filter_fields or [], 31 | ), 32 | ) 33 | -------------------------------------------------------------------------------- /orchestrator/metrics/__init__.py: -------------------------------------------------------------------------------- 1 | from orchestrator.metrics.init import ORCHESTRATOR_METRICS_REGISTRY, initialize_default_metrics 2 | 3 | __all__ = ["initialize_default_metrics", "ORCHESTRATOR_METRICS_REGISTRY"] 4 | -------------------------------------------------------------------------------- /orchestrator/metrics/init.py: -------------------------------------------------------------------------------- 1 | from prometheus_client import CollectorRegistry 2 | 3 | from orchestrator.metrics.engine import WorkflowEngineCollector 4 | from orchestrator.metrics.processes import ProcessCollector 5 | from orchestrator.metrics.subscriptions import SubscriptionCollector 6 | 7 | ORCHESTRATOR_METRICS_REGISTRY = CollectorRegistry(auto_describe=True) 8 | 9 | 10 | def initialize_default_metrics() -> None: 11 | """Register default Prometheus collectors.""" 12 | ORCHESTRATOR_METRICS_REGISTRY.register(SubscriptionCollector()) 13 | ORCHESTRATOR_METRICS_REGISTRY.register(ProcessCollector()) 14 | ORCHESTRATOR_METRICS_REGISTRY.register(WorkflowEngineCollector()) 15 | -------------------------------------------------------------------------------- /orchestrator/migrations/README: -------------------------------------------------------------------------------- 1 | Generic 
single-database configuration. 2 | -------------------------------------------------------------------------------- /orchestrator/migrations/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # template used to generate migration files 5 | file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(rev)s_%%(slug)s 6 | 7 | # set to 'true' to run the environment during 8 | # the 'revision' command, regardless of autogenerate 9 | # revision_environment = false 10 | script_location = %(here)s 11 | version_locations = %(here)s/versions/schema 12 | # Logging configuration 13 | [loggers] 14 | keys = root,sqlalchemy,alembic 15 | 16 | [handlers] 17 | keys = console 18 | 19 | [formatters] 20 | keys = generic 21 | 22 | [logger_root] 23 | level = WARN 24 | handlers = console 25 | qualname = 26 | 27 | [logger_sqlalchemy] 28 | level = WARN 29 | handlers = 30 | qualname = sqlalchemy.engine 31 | 32 | [logger_alembic] 33 | level = INFO 34 | handlers = 35 | qualname = alembic 36 | 37 | [handler_console] 38 | class = StreamHandler 39 | args = (sys.stderr,) 40 | level = NOTSET 41 | formatter = generic 42 | 43 | [formatter_generic] 44 | format = %(levelname)-5.5s [%(name)s] %(message)s 45 | datefmt = %H:%M:%S 46 | -------------------------------------------------------------------------------- /orchestrator/migrations/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message}. 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | ${imports if imports else ""} 11 | # revision identifiers, used by Alembic. 
12 | revision = ${repr(up_revision)} 13 | down_revision = ${repr(down_revision)} 14 | branch_labels = ${repr(branch_labels)} 15 | depends_on = ${repr(depends_on)} 16 | 17 | 18 | def upgrade() -> None: 19 | ${upgrades if upgrades else "pass"} 20 | 21 | 22 | def downgrade() -> None: 23 | ${downgrades if downgrades else "pass"} 24 | -------------------------------------------------------------------------------- /orchestrator/migrations/templates/alembic.ini.j2: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # template used to generate migration files 5 | file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(rev)s_%%(slug)s 6 | 7 | # set to 'true' to run the environment during 8 | # the 'revision' command, regardless of autogenerate 9 | script_location = %(here)s/{{ migrations_dir }} 10 | version_locations = %(here)s/{{ migrations_dir }}/versions/schema 11 | # Logging configuration 12 | [loggers] 13 | keys = root,sqlalchemy,alembic 14 | 15 | [handlers] 16 | keys = console 17 | 18 | [formatters] 19 | keys = generic 20 | 21 | [logger_root] 22 | level = WARN 23 | handlers = console 24 | qualname = 25 | 26 | [logger_sqlalchemy] 27 | level = WARN 28 | handlers = 29 | qualname = sqlalchemy.engine 30 | 31 | [logger_alembic] 32 | level = INFO 33 | handlers = 34 | qualname = alembic 35 | 36 | [handler_console] 37 | class = StreamHandler 38 | args = (sys.stderr,) 39 | level = NOTSET 40 | formatter = generic 41 | 42 | [formatter_generic] 43 | format = %(levelname)-5.5s [%(name)s] %(message)s 44 | datefmt = %H:%M:%S 45 | -------------------------------------------------------------------------------- /orchestrator/migrations/templates/helpers.py.j2: -------------------------------------------------------------------------------- 1 | from orchestrator.migrations.helpers import * 2 | 3 | # Write your own helper functions below this line. 
4 | -------------------------------------------------------------------------------- /orchestrator/migrations/versions/schema/2021-04-06_3c8b9185c221_add_validate_products_task.py: -------------------------------------------------------------------------------- 1 | """Add task_validate_products. 2 | 3 | Revision ID: 3c8b9185c221 4 | Revises: 3323bcb934e7 5 | Create Date: 2020-04-06 09:17:49.395612 6 | 7 | """ 8 | 9 | from uuid import uuid4 10 | 11 | import sqlalchemy as sa 12 | from alembic import op 13 | 14 | # revision identifiers, used by Alembic. 15 | revision = "3c8b9185c221" 16 | down_revision = "3323bcb934e7" 17 | branch_labels = None 18 | depends_on = None 19 | 20 | workflows = [ 21 | {"name": "task_validate_products", "description": "Validate products", "workflow_id": uuid4(), "target": "SYSTEM"}, 22 | ] 23 | 24 | 25 | def upgrade() -> None: 26 | conn = op.get_bind() 27 | for workflow in workflows: 28 | conn.execute( 29 | sa.text( 30 | "INSERT INTO workflows VALUES (:workflow_id, :name, :target, :description, now()) ON CONFLICT DO NOTHING" 31 | ), 32 | workflow, 33 | ) 34 | 35 | 36 | def downgrade() -> None: 37 | conn = op.get_bind() 38 | for workflow in workflows: 39 | conn.execute(sa.text("DELETE FROM workflows WHERE name = :name"), {"name": workflow["name"]}) 40 | -------------------------------------------------------------------------------- /orchestrator/migrations/versions/schema/2023-05-25_b1970225392d_add_subscription_metadata_workflow.py: -------------------------------------------------------------------------------- 1 | """Add subscription metadata workflow. 2 | 3 | Revision ID: b1970225392d 4 | Revises: e05bb1967eff 5 | Create Date: 2023-05-25 09:22:46.491454 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | from sqlalchemy.dialects import postgresql 12 | from sqlalchemy_utils.types.uuid import UUIDType 13 | 14 | # revision identifiers, used by Alembic. 
15 | revision = "b1970225392d" 16 | down_revision = "e05bb1967eff" 17 | branch_labels = None 18 | depends_on = None 19 | 20 | METADATA_TABLE_NAME = "subscription_metadata" 21 | 22 | 23 | def upgrade() -> None: 24 | op.create_table( 25 | METADATA_TABLE_NAME, 26 | sa.Column( 27 | "subscription_id", 28 | UUIDType(), 29 | nullable=False, 30 | index=True, 31 | ), 32 | sa.Column("metadata", postgresql.JSONB(astext_type=sa.Text()), nullable=False), 33 | sa.ForeignKeyConstraint(["subscription_id"], ["subscriptions.subscription_id"], ondelete="CASCADE"), 34 | ) 35 | 36 | 37 | def downgrade() -> None: 38 | conn = op.get_bind() 39 | conn.execute(sa.text(f"DROP TABLE IF EXISTS {METADATA_TABLE_NAME}")) 40 | -------------------------------------------------------------------------------- /orchestrator/migrations/versions/schema/2023-06-28_a09ac125ea73_add_throttling_to_refresh_subscriptions.py: -------------------------------------------------------------------------------- 1 | """Add throttling to refresh_subscriptions_view trigger. 2 | 3 | Revision ID: a09ac125ea73 4 | Revises: b1970225392d 5 | Create Date: 2023-06-28 15:33:36.248121 6 | 7 | """ 8 | 9 | from pathlib import Path 10 | 11 | from alembic import op 12 | from sqlalchemy import text 13 | 14 | # revision identifiers, used by Alembic. 
15 | revision = "a09ac125ea73" 16 | down_revision = "b1970225392d" 17 | branch_labels = None 18 | depends_on = None 19 | 20 | 21 | def upgrade() -> None: 22 | conn = op.get_bind() 23 | 24 | revision_file_path = Path(__file__) 25 | with open(revision_file_path.with_suffix(".sql")) as f: 26 | conn.execute(text(f.read())) 27 | 28 | 29 | def downgrade() -> None: 30 | pass 31 | -------------------------------------------------------------------------------- /orchestrator/migrations/versions/schema/2023-06-28_a09ac125ea73_add_throttling_to_refresh_subscriptions.sql: -------------------------------------------------------------------------------- 1 | CREATE OR REPLACE FUNCTION refresh_subscriptions_search_view() 2 | RETURNS TRIGGER 3 | LANGUAGE plpgsql 4 | AS 5 | $$ 6 | DECLARE 7 | should_refresh bool; 8 | current_epoch int; 9 | last_refresh_epoch int; 10 | comment_sql text; 11 | BEGIN 12 | SELECT extract(epoch from now())::int INTO current_epoch; 13 | SELECT coalesce(pg_catalog.obj_description('subscriptions_search'::regclass)::int, 0) INTO last_refresh_epoch; 14 | 15 | SELECT (current_epoch - last_refresh_epoch) > 120 INTO should_refresh; 16 | 17 | IF should_refresh THEN 18 | REFRESH MATERIALIZED VIEW subscriptions_search; 19 | 20 | comment_sql := 'COMMENT ON MATERIALIZED VIEW subscriptions_search IS ' || quote_literal(current_epoch); 21 | EXECUTE comment_sql; 22 | END IF; 23 | RETURN NULL; 24 | END; 25 | $$; 26 | -------------------------------------------------------------------------------- /orchestrator/migrations/versions/schema/2023-07-17_165303a20fb1_customer_id_to_varchar.py: -------------------------------------------------------------------------------- 1 | """customer_id to VARCHAR. 
2 | 3 | Revision ID: 165303a20fb1 4 | Revises: a09ac125ea73 5 | Create Date: 2023-07-17 13:53:23.932681 6 | 7 | """ 8 | 9 | from pathlib import Path 10 | 11 | import sqlalchemy as sa 12 | from alembic import op 13 | 14 | revision = "165303a20fb1" 15 | down_revision = "a09ac125ea73" 16 | branch_labels = None 17 | depends_on = None 18 | 19 | 20 | def upgrade() -> None: 21 | conn = op.get_bind() 22 | 23 | revision_file_path = Path(__file__) 24 | with open(revision_file_path.with_suffix(".sql")) as f: 25 | conn.execute(sa.text(f.read())) 26 | 27 | 28 | def downgrade() -> None: 29 | """This migration is irreversible! 30 | 31 | Once the type of `subscriptions.customer_id` has been changed 32 | from UUID to VARCHAR, it is not a failsafe operation to convert whatever value `customer_id` might now hold 33 | into a valid UUID type. 34 | 35 | In future, it will be necessary for downstream users to implement their own schema & data migrations 36 | if they want to (or even feasibly can) change the type of the `customer_id` column. 37 | """ 38 | pass 39 | -------------------------------------------------------------------------------- /orchestrator/migrations/versions/schema/2023-09-25_da5c9f4cce1c_add_subscription_metadata_to_fulltext_.py: -------------------------------------------------------------------------------- 1 | """Add subscription metadata to fulltext search index. 2 | 3 | Revision ID: da5c9f4cce1c 4 | Revises: 165303a20fb1 5 | Create Date: 2023-09-25 10:23:13.520977 6 | 7 | """ 8 | 9 | from pathlib import Path 10 | 11 | import sqlalchemy as sa 12 | from alembic import op 13 | 14 | # revision identifiers, used by Alembic. 
15 | revision = "da5c9f4cce1c" 16 | down_revision = "165303a20fb1" 17 | branch_labels = None 18 | depends_on = None 19 | 20 | 21 | def upgrade() -> None: 22 | conn = op.get_bind() 23 | 24 | revision_file_path = Path(__file__) 25 | with open(revision_file_path.with_suffix(".sql")) as f: 26 | conn.execute(sa.text(f.read())) 27 | 28 | 29 | def downgrade() -> None: 30 | """This migration is irreversible! 31 | 32 | Once the type of `subscriptions.customer_id` has been changed 33 | from UUID to VARCHAR, it is not a failsafe operation to convert whatever value `customer_id` might now hold 34 | into a valid UUID type. 35 | 36 | In future, it will be necessary for downstream users to implement their own schema & data migrations 37 | if they want to (or even feasibly can) change the type of the `customer_id` column. 38 | """ 39 | pass 40 | -------------------------------------------------------------------------------- /orchestrator/migrations/versions/schema/2024-09-27_460ec6748e37_add_uuid_search_workaround.py: -------------------------------------------------------------------------------- 1 | """Add uuid search workaround. 2 | 3 | Note: this workaround was added to existing migration da5c9f4cce1c in orchestrator-core commit 3e93263. 4 | Because of that, it was never deployed to existing environments where the original migration was already executed. 5 | 6 | This migration (460ec6748e37) will ensure the workaround is deployed onto existing environments. 7 | The old migration (da5c9f4cce1c) is restored to its original state before commit 3e93263. 8 | 9 | Revision ID: 460ec6748e37 10 | Revises: 048219045729 11 | Create Date: 2024-09-27 18:01:14.054599 12 | 13 | """ 14 | 15 | from pathlib import Path 16 | 17 | import sqlalchemy as sa 18 | from alembic import op 19 | 20 | # revision identifiers, used by Alembic. 
21 | revision = "460ec6748e37" 22 | down_revision = "048219045729" 23 | branch_labels = None 24 | depends_on = None 25 | 26 | 27 | def upgrade() -> None: 28 | conn = op.get_bind() 29 | 30 | revision_file_path = Path(__file__) 31 | with open(revision_file_path.with_suffix(".sql")) as f: 32 | conn.execute(sa.text(f.read())) 33 | 34 | 35 | def downgrade() -> None: 36 | pass 37 | -------------------------------------------------------------------------------- /orchestrator/migrations/versions/schema/2025-01-19_4fjdn13f83ga_add_validate_product_type_task.py: -------------------------------------------------------------------------------- 1 | """Validate Product Type. 2 | 3 | Revision ID: 4fjdn13f83ga 4 | Revises: 2c7e8a43d4f9 5 | Create Date: 2025-10-13 16:21:43.956814 6 | 7 | """ 8 | 9 | from uuid import uuid4 10 | 11 | import sqlalchemy as sa 12 | from alembic import op 13 | 14 | # revision identifiers, used by Alembic. 15 | revision = "4fjdn13f83ga" 16 | down_revision = "4c5859620539" 17 | branch_labels = None 18 | depends_on = None 19 | 20 | 21 | workflow = { 22 | "name": "task_validate_product_type", 23 | "target": "SYSTEM", 24 | "description": "Validate all subscriptions of Product Type", 25 | "workflow_id": uuid4(), 26 | } 27 | 28 | 29 | def upgrade() -> None: 30 | conn = op.get_bind() 31 | conn.execute( 32 | sa.text( 33 | "INSERT INTO workflows VALUES (:workflow_id, :name, :target, :description, now()) ON CONFLICT DO NOTHING" 34 | ), 35 | workflow, 36 | ) 37 | 38 | 39 | def downgrade() -> None: 40 | conn = op.get_bind() 41 | conn.execute(sa.text("DELETE FROM workflows WHERE name = :name"), {"name": workflow["name"]}) 42 | -------------------------------------------------------------------------------- /orchestrator/migrations/versions/schema/2025-02-20_68d14db1b8da_make_workflow_description_mandatory.py: -------------------------------------------------------------------------------- 1 | """Make workflow description mandatory. 
2 | 3 | Revision ID: 68d14db1b8da 4 | Revises: bac6be6f2b4f 5 | Create Date: 2025-02-20 16:39:34.889953 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | from structlog import get_logger 12 | 13 | logger = get_logger(__name__) 14 | 15 | # revision identifiers, used by Alembic. 16 | revision = "68d14db1b8da" 17 | down_revision = "fc5c993a4b4a" 18 | branch_labels = None 19 | depends_on = None 20 | 21 | 22 | def upgrade() -> None: 23 | try: 24 | op.alter_column("workflows", "description", existing_type=sa.TEXT(), nullable=False) 25 | except sa.exc.IntegrityError: 26 | logger.error( 27 | "Unable to execute migrations due to missing descriptions in workflow table, please create a migration to backfill this column." 28 | ) 29 | raise 30 | 31 | 32 | def downgrade() -> None: 33 | op.alter_column("workflows", "description", existing_type=sa.TEXT(), nullable=True) 34 | -------------------------------------------------------------------------------- /orchestrator/migrations/versions/schema/2025-03-06_42b3d076a85b_subscription_instance_as_json_function.py: -------------------------------------------------------------------------------- 1 | """Add postgres function subscription_instance_as_json. 2 | 3 | Revision ID: 42b3d076a85b 4 | Revises: bac6be6f2b4f 5 | Create Date: 2025-03-06 15:03:09.477225 6 | 7 | """ 8 | 9 | from pathlib import Path 10 | 11 | from alembic import op 12 | from sqlalchemy import text 13 | 14 | # revision identifiers, used by Alembic. 
15 | revision = "42b3d076a85b" 16 | down_revision = "bac6be6f2b4f" 17 | branch_labels = None 18 | depends_on = None 19 | 20 | 21 | def upgrade() -> None: 22 | conn = op.get_bind() 23 | 24 | revision_file_path = Path(__file__) 25 | with open(revision_file_path.with_suffix(".sql")) as f: 26 | conn.execute(text(f.read())) 27 | 28 | 29 | def downgrade() -> None: 30 | conn = op.get_bind() 31 | 32 | conn.execute(text("DROP FUNCTION IF EXISTS subscription_instance_as_json;")) 33 | conn.execute(text("DROP FUNCTION IF EXISTS subscription_instance_fields_as_json;")) 34 | -------------------------------------------------------------------------------- /orchestrator/migrations/versions/schema/2025-04-09_fc5c993a4b4a_add_cascade_constraint_on_processes_.py: -------------------------------------------------------------------------------- 1 | """add cascade constraint on processes input state. 2 | 3 | Revision ID: fc5c993a4b4a 4 | Revises: 42b3d076a85b 5 | Create Date: 2025-04-09 18:27:31.922214 6 | 7 | """ 8 | 9 | from alembic import op 10 | 11 | # revision identifiers, used by Alembic. 
12 | revision = "fc5c993a4b4a" 13 | down_revision = "42b3d076a85b" 14 | branch_labels = None 15 | depends_on = None 16 | 17 | 18 | def upgrade() -> None: 19 | # Drop the existing foreign key constraint 20 | op.drop_constraint("input_states_pid_fkey", "input_states", type_="foreignkey") 21 | 22 | # Add a new foreign key constraint with cascade delete 23 | op.create_foreign_key( 24 | "input_states_pid_fkey", 25 | "input_states", 26 | "processes", 27 | ["pid"], 28 | ["pid"], 29 | ondelete="CASCADE", 30 | ) 31 | 32 | 33 | def downgrade() -> None: 34 | # Drop the cascade foreign key constraint 35 | op.drop_constraint("input_states_pid_fkey", "input_states", type_="foreignkey") 36 | 37 | # Recreate the original foreign key constraint without cascade 38 | op.create_foreign_key( 39 | "input_states_pid_fkey", 40 | "input_states", 41 | "processes", 42 | ["pid"], 43 | ["pid"], 44 | ) 45 | -------------------------------------------------------------------------------- /orchestrator/migrations/versions/schema/2025-05-08_161918133bec_add_is_task_to_workflow.py: -------------------------------------------------------------------------------- 1 | """Add is_task to workflow. 2 | 3 | Revision ID: 161918133bec 4 | Revises: 68d14db1b8da 5 | Create Date: 2025-05-08 11:25:51.966410 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "161918133bec" 14 | down_revision = "68d14db1b8da" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.add_column("workflows", sa.Column("is_task", sa.Boolean(), server_default=sa.text("false"), nullable=False)) 22 | # ### end Alembic commands ### 23 | 24 | 25 | def downgrade() -> None: 26 | # ### commands auto generated by Alembic - please adjust! 
### 27 | op.drop_column("workflows", "is_task") 28 | # ### end Alembic commands ### 29 | -------------------------------------------------------------------------------- /orchestrator/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/orchestrator/py.typed -------------------------------------------------------------------------------- /orchestrator/schedules/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 
13 | 14 | 15 | from orchestrator.schedules.resume_workflows import run_resume_workflows 16 | from orchestrator.schedules.scheduling import SchedulingFunction 17 | from orchestrator.schedules.task_vacuum import vacuum_tasks 18 | from orchestrator.schedules.validate_products import validate_products 19 | from orchestrator.schedules.validate_subscriptions import validate_subscriptions 20 | 21 | ALL_SCHEDULERS: list[SchedulingFunction] = [ 22 | run_resume_workflows, 23 | vacuum_tasks, 24 | validate_subscriptions, 25 | validate_products, 26 | ] 27 | -------------------------------------------------------------------------------- /orchestrator/schedules/resume_workflows.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | 14 | 15 | from orchestrator.schedules.scheduling import scheduler 16 | from orchestrator.services.processes import start_process 17 | 18 | 19 | @scheduler(name="Resume workflows", time_unit="hour", period=1) 20 | def run_resume_workflows() -> None: 21 | start_process("task_resume_workflows") 22 | -------------------------------------------------------------------------------- /orchestrator/schedules/task_vacuum.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 
2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | 14 | 15 | from orchestrator.schedules.scheduling import scheduler 16 | from orchestrator.services.processes import start_process 17 | 18 | 19 | @scheduler(name="Clean up tasks", time_unit="hours", period=6) 20 | def vacuum_tasks() -> None: 21 | start_process("task_clean_up_tasks") 22 | -------------------------------------------------------------------------------- /orchestrator/schedules/validate_products.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 
13 | from sqlalchemy import func, select 14 | 15 | from orchestrator.db import db 16 | from orchestrator.db.models import ProcessTable 17 | from orchestrator.schedules.scheduling import scheduler 18 | from orchestrator.services.processes import start_process 19 | 20 | 21 | @scheduler(name="Validate Products and inactive subscriptions", time_unit="day", at="02:30") 22 | def validate_products() -> None: 23 | uncompleted_products = db.session.scalar( 24 | select(func.count()) 25 | .select_from(ProcessTable) 26 | .filter(ProcessTable.workflow.name == "validate_products", ProcessTable.last_status != "completed") 27 | ) 28 | if not uncompleted_products: 29 | start_process("task_validate_products") 30 | -------------------------------------------------------------------------------- /orchestrator/schemas/base.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | 14 | from datetime import datetime 15 | 16 | from pydantic import BaseModel, ConfigDict 17 | 18 | 19 | class OrchestratorBaseModel(BaseModel): 20 | # Unable to refactor in a neat way without using an ugly recursive function. 
Leaving Pydantic v1 functionality intact 21 | model_config = ConfigDict( 22 | json_encoders={ 23 | datetime: lambda dt: dt.timestamp(), 24 | } 25 | ) 26 | -------------------------------------------------------------------------------- /orchestrator/schemas/fixed_input.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | 14 | from datetime import datetime 15 | from uuid import UUID 16 | 17 | from pydantic import ConfigDict 18 | 19 | from orchestrator.schemas.base import OrchestratorBaseModel 20 | 21 | TagConfig = dict[str, list[dict[str, bool]]] 22 | 23 | 24 | class FixedInputBaseSchema(OrchestratorBaseModel): 25 | fixed_input_id: UUID | None = None 26 | name: str 27 | value: str 28 | product_id: UUID | None = None 29 | 30 | 31 | class FixedInputSchema(FixedInputBaseSchema): 32 | fixed_input_id: UUID 33 | created_at: datetime 34 | product_id: UUID 35 | model_config = ConfigDict(from_attributes=True) 36 | 37 | 38 | class FixedInputConfigurationItemSchema(OrchestratorBaseModel): 39 | name: str 40 | description: str 41 | values: list[str] 42 | -------------------------------------------------------------------------------- /orchestrator/schemas/problem_detail.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 
2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | 14 | 15 | from orchestrator.schemas.base import OrchestratorBaseModel 16 | 17 | 18 | class ProblemDetailSchema(OrchestratorBaseModel): 19 | detail: str | None = None 20 | status: int | None = None 21 | title: str | None = None 22 | type: str | None = None 23 | -------------------------------------------------------------------------------- /orchestrator/schemas/resource_type.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 
from uuid import UUID

from pydantic import ConfigDict

from orchestrator.schemas.base import OrchestratorBaseModel


class ResourceTypeBaseSchema(OrchestratorBaseModel):
    resource_type: str
    description: str | None = None
    resource_type_id: UUID | None = None


class ResourceTypeSchema(ResourceTypeBaseSchema):
    # Read schema: id is mandatory and the model is built from ORM attributes.
    resource_type_id: UUID
    model_config = ConfigDict(from_attributes=True)


class ResourceTypePatchSchema(OrchestratorBaseModel):
    # Patch schema: only the description may be updated.
    description: str | None = None
--------------------------------------------------------------------------------
/orchestrator/services/__init__.py:
--------------------------------------------------------------------------------
# Copyright 2019-2020 SURF.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
--------------------------------------------------------------------------------
/orchestrator/services/fixed_inputs.py:
--------------------------------------------------------------------------------
# Copyright 2019-2024 SURF.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from sqlalchemy import select

from orchestrator.db import FixedInputTable, db


def get_fixed_inputs(*, filters: list | None = None) -> list[FixedInputTable]:
    """Fetch fixed inputs from the database.

    Args:
        filters: optional list of SQLAlchemy filter clauses; each is AND-ed
            onto the query via ``where``.

    Returns:
        All matching FixedInputTable rows (all rows when filters is None/empty).
    """
    stmt = select(FixedInputTable)
    for clause in filters or []:
        stmt = stmt.where(clause)
    return list(db.session.scalars(stmt))
--------------------------------------------------------------------------------
/orchestrator/services/resource_types.py:
--------------------------------------------------------------------------------
# Copyright 2019-2024 SURF.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from sqlalchemy import select

from orchestrator.db import ResourceTypeTable, db


def get_resource_types(*, filters: list | None = None) -> list[ResourceTypeTable]:
    """Fetch resource types from the database.

    Args:
        filters: optional list of SQLAlchemy filter clauses; each is AND-ed
            onto the query via ``where``.

    Returns:
        All matching ResourceTypeTable rows (all rows when filters is None/empty).
    """
    stmt = select(ResourceTypeTable)
    for clause in filters or []:
        stmt = stmt.where(clause)
    return list(db.session.scalars(stmt))
--------------------------------------------------------------------------------
/orchestrator/targets.py:
--------------------------------------------------------------------------------
# Copyright 2019-2020 SURF, GÉANT.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import strawberry

from pydantic_forms.types import strEnum


# The lifecycle action a workflow implements; exposed in the GraphQL schema.
@strawberry.enum
class Target(strEnum):
    CREATE = "CREATE"
    MODIFY = "MODIFY"
    TERMINATE = "TERMINATE"
    SYSTEM = "SYSTEM"
    VALIDATE = "VALIDATE"
--------------------------------------------------------------------------------
/orchestrator/utils/__init__.py:
--------------------------------------------------------------------------------
# Copyright 2019-2020 SURF.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | -------------------------------------------------------------------------------- /orchestrator/utils/deprecation_logger.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2024 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | 14 | from starlette.requests import Request 15 | from structlog import get_logger 16 | 17 | logger = get_logger(__name__) 18 | 19 | 20 | def deprecated_endpoint(request: Request) -> None: 21 | logger.warning( 22 | "This function is deprecated. 
Please use the GraphQL query instead", method=request.method, url=str(request.url) 23 | ) 24 | -------------------------------------------------------------------------------- /orchestrator/utils/get_subscription_dict.py: -------------------------------------------------------------------------------- 1 | from uuid import UUID 2 | 3 | from orchestrator.domain.base import SubscriptionModel 4 | from orchestrator.services.subscriptions import _generate_etag, build_domain_model, build_extended_domain_model 5 | 6 | 7 | async def get_subscription_dict(subscription_id: UUID, inject_inuseby: bool = True) -> tuple[dict, str]: 8 | """Helper function to get subscription dict by uuid from db or cache.""" 9 | 10 | subscription_model = SubscriptionModel.from_subscription(subscription_id) 11 | 12 | if not inject_inuseby: 13 | subscription = build_domain_model(subscription_model) 14 | else: 15 | subscription = build_extended_domain_model(subscription_model) 16 | etag = _generate_etag(subscription) 17 | return subscription, etag 18 | -------------------------------------------------------------------------------- /orchestrator/utils/strings.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | 14 | 15 | def remove_redundant_ws(line: str) -> str: 16 | """Remove redundant white space from a line. 
17 | 18 | Redundant being multiple spaces where only one is needed. 19 | 20 | >>> remove_redundant_ws(" a b c ") 21 | 'a b c' 22 | 23 | >>> remove_redundant_ws("a b c") 24 | 'a b c' 25 | 26 | >>> remove_redundant_ws(" ") 27 | '' 28 | 29 | Args: 30 | line: the string to remove redundant white space from 31 | 32 | Returns: 33 | Cleaned up string with only one space between textual elements. 34 | 35 | """ 36 | return " ".join(line.split()) 37 | -------------------------------------------------------------------------------- /orchestrator/utils/validate_data_version.py: -------------------------------------------------------------------------------- 1 | def validate_data_version(current_version: int, new_version: int | None = None) -> bool: 2 | return (new_version is not None and new_version == current_version) or new_version is None 3 | -------------------------------------------------------------------------------- /orchestrator/workflows/removed_workflow.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | 14 | 15 | from orchestrator.workflow import StepList, workflow 16 | 17 | 18 | # This workflow has been made to create the initial import process for a SN7 subscription 19 | # it does not do anything but is needed for the correct showing in the GUI. 
20 | @workflow("Dummy workflow to replace removed workflows") 21 | def removed_workflow() -> StepList: 22 | return StepList() 23 | -------------------------------------------------------------------------------- /orchestrator/workflows/tasks/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | -------------------------------------------------------------------------------- /orchestrator/workflows/translations/en-GB.json: -------------------------------------------------------------------------------- 1 | { 2 | "forms": { 3 | "fields": { 4 | "note": "Notes", 5 | "note_info": "Notes, reminders and feedback about this description.", 6 | "subscription_id": "Subscription", 7 | "version": "Version", 8 | "subscription_id_info": "The subscription for this action", 9 | "product_type": "Product Type" 10 | } 11 | }, 12 | "workflow": { 13 | "modify_note": "Modify subscription note", 14 | "task_clean_up_tasks": "Clean up old tasks", 15 | "task_resume_workflows": "Resume all workflows that are stuck on tasks with the status 'waiting'", 16 | "task_validate_products": "Validate Products and Subscriptions", 17 | "task_validate_product_type": "Validate all subscriptions of Product Type", 18 | "reset_subscription_description": "Reset description of a subscription to default" 19 | } 20 | } 21 | 
-------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from importlib.machinery import SourceFileLoader 2 | 3 | import toml 4 | from setuptools import setup 5 | 6 | version = SourceFileLoader("__version__", "orchestrator/__init__.py").load_module() 7 | 8 | setup_variables = toml.load("pyproject.toml")["tool"]["flit"]["metadata"] 9 | 10 | setup( 11 | name=setup_variables["dist-name"], 12 | version=str(version.__version__), 13 | classifiers=setup_variables["classifiers"], 14 | author=setup_variables["author"], 15 | author_email=setup_variables["author-email"], 16 | packages=[setup_variables["module"]], 17 | install_requires=setup_variables["requires"], 18 | description="The Orchestrator core", 19 | long_description=setup_variables["description-file"], 20 | ) 21 | -------------------------------------------------------------------------------- /test/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/__init__.py -------------------------------------------------------------------------------- /test/acceptance_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/acceptance_tests/__init__.py -------------------------------------------------------------------------------- /test/acceptance_tests/fixtures/test_orchestrator/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/acceptance_tests/fixtures/test_orchestrator/__init__.py 
-------------------------------------------------------------------------------- /test/acceptance_tests/fixtures/test_orchestrator/devtools/populator/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/acceptance_tests/fixtures/test_orchestrator/devtools/populator/__init__.py -------------------------------------------------------------------------------- /test/acceptance_tests/fixtures/test_orchestrator/product_blocks/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/acceptance_tests/fixtures/test_orchestrator/product_blocks/__init__.py -------------------------------------------------------------------------------- /test/acceptance_tests/fixtures/test_orchestrator/products/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/acceptance_tests/fixtures/test_orchestrator/products/__init__.py -------------------------------------------------------------------------------- /test/acceptance_tests/fixtures/test_orchestrator/products/test_product.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 
4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | 14 | 15 | from orchestrator.domain.base import SubscriptionModel 16 | from orchestrator.types import SubscriptionLifecycle 17 | from test_orchestrator.product_blocks.test_product_blocks import ( 18 | TestProductBlock, 19 | TestProductBlockInactive, 20 | TestProductBlockProvisioning, 21 | ) 22 | 23 | 24 | class TestProductInactive(SubscriptionModel, is_base=True): 25 | testproduct: TestProductBlockInactive 26 | 27 | 28 | class TestProductProvisioning(TestProductInactive, lifecycle=[SubscriptionLifecycle.PROVISIONING]): 29 | testproduct: TestProductBlockProvisioning 30 | 31 | 32 | class TestProduct(TestProductProvisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]): 33 | testproduct: TestProductBlock 34 | -------------------------------------------------------------------------------- /test/acceptance_tests/fixtures/test_orchestrator/workflows/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/acceptance_tests/fixtures/test_orchestrator/workflows/__init__.py -------------------------------------------------------------------------------- /test/acceptance_tests/test_test_product.py: -------------------------------------------------------------------------------- 1 | from ipaddress import IPv4Address, IPv6Address 2 | from uuid import uuid4 3 | 4 | import pytest 5 | 6 | from test.acceptance_tests.fixtures.test_orchestrator.devtools.populator.test_product_populator 
import ( 7 | TestProductPopulator, 8 | ) 9 | 10 | 11 | @pytest.mark.acceptance 12 | def test_test_product(new_test_product): 13 | populator = TestProductPopulator( 14 | an_int=1, 15 | a_str="string", 16 | a_bool=False, 17 | an_uuid=str(uuid4()), 18 | an_ipv4=IPv4Address("10.0.0.1"), 19 | an_ipv6=IPv6Address("::cafe:babe:feed:face:dead:beef"), 20 | ) 21 | 22 | populator.start_create_workflow() 23 | populator.run() 24 | -------------------------------------------------------------------------------- /test/unit_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/unit_tests/__init__.py -------------------------------------------------------------------------------- /test/unit_tests/api/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/unit_tests/api/__init__.py -------------------------------------------------------------------------------- /test/unit_tests/api/test_health.py: -------------------------------------------------------------------------------- 1 | # Copyright 2019-2020 SURF. 2 | # Licensed under the Apache License, Version 2.0 (the "License"); 3 | # you may not use this file except in compliance with the License. 4 | # You may obtain a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 
13 | 14 | from http import HTTPStatus 15 | from unittest import mock 16 | 17 | from sqlalchemy.exc import OperationalError 18 | 19 | 20 | def test_get_health(test_client): 21 | response = test_client.get("/api/health/") 22 | assert HTTPStatus.OK == response.status_code 23 | assert response.json() == "OK" 24 | 25 | 26 | @mock.patch("orchestrator.db.db.session") 27 | def test_get_health_no_connection(mock_session, test_client): 28 | mock_session.execute.side_effect = OperationalError("THIS", "IS", "KABOOM") 29 | response = test_client.get("/api/health/") 30 | assert response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR 31 | -------------------------------------------------------------------------------- /test/unit_tests/cli/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/unit_tests/cli/__init__.py -------------------------------------------------------------------------------- /test/unit_tests/cli/conftest.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from functools import partial 3 | 4 | import pytest 5 | from typer.testing import CliRunner 6 | 7 | from orchestrator.cli.database import app as db_app 8 | from test.unit_tests.cli.helpers import create_main 9 | 10 | 11 | @pytest.fixture(scope="module") 12 | def monkey_module(): 13 | with pytest.MonkeyPatch.context() as mp: 14 | yield mp 15 | 16 | 17 | @pytest.fixture(scope="module") 18 | def tmp_generate_path(tmp_path_factory): 19 | yield tmp_path_factory.mktemp("generate") 20 | 21 | 22 | @pytest.fixture(scope="module") 23 | def cli_invoke(tmp_generate_path, monkey_module): 24 | monkey_module.chdir(tmp_generate_path) 25 | sys.path.append(str(tmp_generate_path)) 26 | create_main() 27 | 28 | runner = CliRunner() 29 | # Don't catch exceptions because this will cost you grey hair. 
30 | invoke = partial(runner.invoke, catch_exceptions=False) 31 | invoke(db_app, ["init"]) 32 | 33 | yield invoke 34 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # create new set of generated code 4 | # 5 | 6 | # exit on first failing command 7 | set -e 8 | 9 | # data folder should only contain a minimal main.py 10 | cd generate 11 | 12 | export PYTHONPATH=../../../../.. 13 | 14 | # generate alembic configuration and folders 15 | python main.py db init 16 | 17 | # generate code for the two sample products 18 | for YAML in ../product_config2.yaml ../product_config1.yaml ../product_config4.yaml 19 | do 20 | python main.py generate product-blocks --config-file $YAML --no-dryrun --force 21 | python main.py generate product --config-file $YAML --no-dryrun --force 22 | python main.py generate workflows --config-file $YAML --no-dryrun --force 23 | python main.py generate migration --config-file $YAML 24 | python main.py generate unit-tests --config-file $YAML --no-dryrun --force 25 | done 26 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 
2 | 3 | [alembic] 4 | # template used to generate migration files 5 | file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(rev)s_%%(slug)s 6 | 7 | # set to 'true' to run the environment during 8 | # the 'revision' command, regardless of autogenerate 9 | # revision_environment = false 10 | script_location = migrations 11 | version_locations = %(here)s/migrations/versions/schema 12 | # Logging configuration 13 | [loggers] 14 | keys = root,sqlalchemy,alembic 15 | 16 | [handlers] 17 | keys = console 18 | 19 | [formatters] 20 | keys = generic 21 | 22 | [logger_root] 23 | level = WARN 24 | handlers = console 25 | qualname = 26 | 27 | [logger_sqlalchemy] 28 | level = WARN 29 | handlers = 30 | qualname = sqlalchemy.engine 31 | 32 | [logger_alembic] 33 | level = INFO 34 | handlers = 35 | qualname = alembic 36 | 37 | [handler_console] 38 | class = StreamHandler 39 | args = (sys.stderr,) 40 | level = NOTSET 41 | formatter = generic 42 | 43 | [formatter_generic] 44 | format = %(levelname)-5.5s [%(name)s] %(message)s 45 | datefmt = %H:%M:%S 46 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/main.py: -------------------------------------------------------------------------------- 1 | from orchestrator import OrchestratorCore 2 | from orchestrator.cli.main import app as core_cli 3 | from orchestrator.settings import AppSettings 4 | 5 | app = OrchestratorCore(base_settings=AppSettings()) 6 | if __name__ == "__main__": 7 | core_cli() 8 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/migrations/helpers.py: -------------------------------------------------------------------------------- 1 | from orchestrator.migrations.helpers import * 2 | 3 | # Write your own helper functions below this line. 
4 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/migrations/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message}. 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | import sqlalchemy as sa 9 | from alembic import op 10 | ${imports if imports else ""} 11 | # revision identifiers, used by Alembic. 12 | revision = ${repr(up_revision)} 13 | down_revision = ${repr(down_revision)} 14 | branch_labels = ${repr(branch_labels)} 15 | depends_on = ${repr(depends_on)} 16 | 17 | 18 | def upgrade() -> None: 19 | ${upgrades if upgrades else "pass"} 20 | 21 | 22 | def downgrade() -> None: 23 | ${downgrades if downgrades else "pass"} 24 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/migrations/versions/schema/2024-02-20_59e1199aff7f_create_data_head.py: -------------------------------------------------------------------------------- 1 | """Create data head. 2 | 3 | Revision ID: 59e1199aff7f 4 | Revises: 5 | Create Date: 2024-02-20T21:01:44.918799 6 | 7 | """ 8 | 9 | # revision identifiers, used by Alembic. 
10 | revision = "59e1199aff7f" 11 | down_revision = None 12 | branch_labels = ("data",) 13 | depends_on = "048219045729" 14 | 15 | 16 | def upgrade() -> None: 17 | pass 18 | 19 | 20 | def downgrade() -> None: 21 | pass 22 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/products/__init__.py: -------------------------------------------------------------------------------- 1 | from orchestrator.domain import SUBSCRIPTION_MODEL_REGISTRY 2 | 3 | from products.product_types.example2 import Example2 4 | 5 | SUBSCRIPTION_MODEL_REGISTRY.update( 6 | { 7 | "example2": Example2, 8 | }, 9 | ) # fmt:skip 10 | from products.product_types.example1 import Example1 11 | 12 | SUBSCRIPTION_MODEL_REGISTRY.update( 13 | { 14 | "example1 1": Example1, 15 | "example1 10": Example1, 16 | "example1 100": Example1, 17 | "example1 1000": Example1, 18 | }, 19 | ) # fmt:skip 20 | from products.product_types.example4 import Example4 21 | 22 | SUBSCRIPTION_MODEL_REGISTRY.update( 23 | { 24 | "example4": Example4, 25 | }, 26 | ) # fmt:skip 27 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/products/product_blocks/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/unit_tests/cli/data/generate/products/product_blocks/__init__.py -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/products/product_blocks/example2.py: -------------------------------------------------------------------------------- 1 | from enum import IntEnum 2 | 3 | from orchestrator.domain.base import ProductBlockModel 4 | from orchestrator.types import SubscriptionLifecycle 5 | from pydantic import computed_field 6 | 7 | 8 | class ExampleIntEnum2(IntEnum): 9 | _1 = 1 10 | _2 
= 2 11 | _3 = 3 12 | _4 = 4 13 | 14 | 15 | class Example2BlockInactive(ProductBlockModel, product_block_name="Example2"): 16 | example_int_enum_2: ExampleIntEnum2 | None = None 17 | 18 | 19 | class Example2BlockProvisioning(Example2BlockInactive, lifecycle=[SubscriptionLifecycle.PROVISIONING]): 20 | example_int_enum_2: ExampleIntEnum2 | None = None 21 | 22 | @computed_field 23 | @property 24 | def title(self) -> str: 25 | # TODO: format correct title string 26 | return f"{self.name}" 27 | 28 | 29 | class Example2Block(Example2BlockProvisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]): 30 | example_int_enum_2: ExampleIntEnum2 31 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/products/product_blocks/example4.py: -------------------------------------------------------------------------------- 1 | from orchestrator.domain.base import ProductBlockModel 2 | from orchestrator.types import SubscriptionLifecycle 3 | from pydantic import computed_field 4 | 5 | from products.product_blocks.example4sub import Example4SubBlock, Example4SubBlockInactive, Example4SubBlockProvisioning 6 | 7 | 8 | class Example4BlockInactive(ProductBlockModel, product_block_name="Example4"): 9 | num_val: int | None = None 10 | sub_block: Example4SubBlockInactive | None = None 11 | 12 | 13 | class Example4BlockProvisioning(Example4BlockInactive, lifecycle=[SubscriptionLifecycle.PROVISIONING]): 14 | num_val: int | None = None 15 | sub_block: Example4SubBlockProvisioning 16 | 17 | @computed_field 18 | @property 19 | def title(self) -> str: 20 | # TODO: format correct title string 21 | return f"{self.name}" 22 | 23 | 24 | class Example4Block(Example4BlockProvisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]): 25 | num_val: int | None = None 26 | sub_block: Example4SubBlock 27 | -------------------------------------------------------------------------------- 
/test/unit_tests/cli/data/generate/products/product_blocks/example4sub.py: -------------------------------------------------------------------------------- 1 | from orchestrator.domain.base import ProductBlockModel 2 | from orchestrator.types import SubscriptionLifecycle 3 | from pydantic import computed_field 4 | 5 | 6 | class Example4SubBlockInactive(ProductBlockModel, product_block_name="Example4Sub"): 7 | str_val: str | None = None 8 | 9 | 10 | class Example4SubBlockProvisioning(Example4SubBlockInactive, lifecycle=[SubscriptionLifecycle.PROVISIONING]): 11 | str_val: str | None = None 12 | 13 | @computed_field 14 | @property 15 | def title(self) -> str: 16 | # TODO: format correct title string 17 | return f"{self.name}" 18 | 19 | 20 | class Example4SubBlock(Example4SubBlockProvisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]): 21 | str_val: str | None = None 22 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/products/product_types/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/unit_tests/cli/data/generate/products/product_types/__init__.py -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/products/product_types/example1.py: -------------------------------------------------------------------------------- 1 | from enum import IntEnum 2 | 3 | from orchestrator.domain.base import SubscriptionModel 4 | from orchestrator.types import SubscriptionLifecycle 5 | 6 | from products.product_blocks.example1 import Example1Block, Example1BlockInactive, Example1BlockProvisioning 7 | 8 | 9 | class FixedInput1(IntEnum): 10 | _1 = 1 11 | _10 = 10 12 | _100 = 100 13 | _1000 = 1000 14 | 15 | 16 | class Example1Inactive(SubscriptionModel, is_base=True): 17 | fixed_input_1: FixedInput1 
18 | example1: Example1BlockInactive 19 | 20 | 21 | class Example1Provisioning(Example1Inactive, lifecycle=[SubscriptionLifecycle.PROVISIONING]): 22 | fixed_input_1: FixedInput1 23 | example1: Example1BlockProvisioning 24 | 25 | 26 | class Example1(Example1Provisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]): 27 | fixed_input_1: FixedInput1 28 | example1: Example1Block 29 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/products/product_types/example2.py: -------------------------------------------------------------------------------- 1 | from orchestrator.domain.base import SubscriptionModel 2 | from orchestrator.types import SubscriptionLifecycle 3 | 4 | from products.product_blocks.example2 import Example2Block, Example2BlockInactive, Example2BlockProvisioning 5 | 6 | 7 | class Example2Inactive(SubscriptionModel, is_base=True): 8 | example2: Example2BlockInactive 9 | 10 | 11 | class Example2Provisioning(Example2Inactive, lifecycle=[SubscriptionLifecycle.PROVISIONING]): 12 | example2: Example2BlockProvisioning 13 | 14 | 15 | class Example2(Example2Provisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]): 16 | example2: Example2Block 17 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/products/product_types/example4.py: -------------------------------------------------------------------------------- 1 | from orchestrator.domain.base import SubscriptionModel 2 | from orchestrator.types import SubscriptionLifecycle 3 | 4 | from products.product_blocks.example4 import Example4Block, Example4BlockInactive, Example4BlockProvisioning 5 | 6 | 7 | class Example4Inactive(SubscriptionModel, is_base=True): 8 | example4: Example4BlockInactive 9 | 10 | 11 | class Example4Provisioning(Example4Inactive, lifecycle=[SubscriptionLifecycle.PROVISIONING]): 12 | example4: Example4BlockProvisioning 13 | 14 | 15 | class 
Example4(Example4Provisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]): 16 | example4: Example4Block 17 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/test/unit_tests/workflows/example1/test_validate_example1.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from test.unit_tests.workflows import assert_complete, extract_state, run_workflow 4 | 5 | 6 | @pytest.mark.workflow 7 | def test_happy_flow(responses, example1_subscription): 8 | # when 9 | 10 | result, _, _ = run_workflow("validate_example1", {"subscription_id": example1_subscription}) 11 | 12 | # then 13 | 14 | assert_complete(result) 15 | state = extract_state(result) 16 | assert state["check_core_db"] is True 17 | 18 | 19 | @pytest.mark.workflow 20 | def test_validate_example_in_some_oss(responses, example1_subscription): 21 | # given 22 | 23 | # TODO: set test conditions or fixture so that "Validate that the example1 subscription is correctly administered in some external system" triggers 24 | 25 | # when 26 | 27 | with pytest.raises(AssertionError) as error: 28 | result, _, _ = run_workflow("validate_example1", [{"subscription_id": example1_subscription}, {}]) 29 | 30 | # then 31 | 32 | assert ( 33 | error.value.errors[0]["msg"] 34 | == "Validate that the example1 subscription is correctly administered in some external system" 35 | ) 36 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/test/unit_tests/workflows/example2/test_create_example2.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from orchestrator.db import ProductTable 3 | 4 | from products.product_types.example2 import Example2 5 | from test.unit_tests.workflows import assert_complete, extract_state, run_workflow 6 | 7 | 8 | @pytest.mark.workflow 9 | def test_happy_flow(responses): 10 | # 
given 11 | 12 | # TODO insert additional mocks, if needed (ImsMocks) 13 | 14 | product = db.session.scalars(select(ProductTable).where(ProductTable.name == "example2")).one() 15 | 16 | # when 17 | 18 | init_state = { 19 | "customer_id": customer_id, 20 | # TODO add initial state 21 | } 22 | 23 | result, process, step_log = run_workflow("create_example2", [{"product": product.product_id}, init_state]) 24 | 25 | # then 26 | 27 | assert_complete(result) 28 | state = extract_state(result) 29 | 30 | subscription = Example2.from_subscription(state["subscription_id"]) 31 | assert subscription.status == "active" 32 | assert subscription.description == "TODO add correct description" 33 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/test/unit_tests/workflows/example2/test_modify_example2.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from orchestrator.types import SubscriptionLifecycle 3 | 4 | from products.product_types.example2 import Example2 5 | from test.unit_tests.workflows import assert_complete, extract_state, run_workflow 6 | 7 | 8 | @pytest.mark.workflow 9 | def test_happy_flow(responses, example2_subscription): 10 | # given 11 | 12 | customer_id = "3f4fc287-0911-e511-80d0-005056956c1a" 13 | crm = CrmMocks(responses) 14 | crm.get_customer_by_uuid(customer_id) 15 | 16 | # TODO insert additional mocks, if needed (ImsMocks) 17 | 18 | # when 19 | 20 | init_state = {} 21 | 22 | result, process, step_log = run_workflow( 23 | "modify_example2", 24 | [{"subscription_id": example2_subscription}, init_state, {}], 25 | ) 26 | 27 | # then 28 | 29 | assert_complete(result) 30 | state = extract_state(result) 31 | 32 | example2 = Example2.from_subscription(state["subscription_id"]) 33 | assert example2.status == SubscriptionLifecycle.ACTIVE 34 | -------------------------------------------------------------------------------- 
/test/unit_tests/cli/data/generate/test/unit_tests/workflows/example2/test_terminate_example2.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from orchestrator.types import SubscriptionLifecycle 3 | 4 | from products.product_types.example2 import Example2 5 | from test.unit_tests.workflows import assert_complete, extract_state, run_workflow 6 | 7 | 8 | @pytest.mark.workflow 9 | def test_happy_flow(responses, example2_subscription): 10 | # when 11 | 12 | # TODO: insert mocks here if needed 13 | 14 | result, _, _ = run_workflow("terminate_example2", [{"subscription_id": example2_subscription}, {}]) 15 | 16 | # then 17 | 18 | assert_complete(result) 19 | state = extract_state(result) 20 | assert "subscription" in state 21 | 22 | # Check subscription in DB 23 | 24 | example2 = Example2.from_subscription(example2_subscription) 25 | assert example2.end_date is not None 26 | assert example2.status == SubscriptionLifecycle.TERMINATED 27 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/test/unit_tests/workflows/example2/test_validate_example2.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from test.unit_tests.workflows import assert_complete, extract_state, run_workflow 4 | 5 | 6 | @pytest.mark.workflow 7 | def test_happy_flow(responses, example2_subscription): 8 | # when 9 | 10 | result, _, _ = run_workflow("validate_example2", {"subscription_id": example2_subscription}) 11 | 12 | # then 13 | 14 | assert_complete(result) 15 | state = extract_state(result) 16 | assert state["check_core_db"] is True 17 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/test/unit_tests/workflows/example4/test_create_example4.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from 
orchestrator.db import ProductTable 3 | 4 | from products.product_types.example4 import Example4 5 | from test.unit_tests.workflows import assert_complete, extract_state, run_workflow 6 | 7 | 8 | @pytest.mark.workflow 9 | def test_happy_flow(responses): 10 | # given 11 | 12 | # TODO insert additional mocks, if needed (ImsMocks) 13 | 14 | product = db.session.scalars(select(ProductTable).where(ProductTable.name == "example4")).one() 15 | 16 | # when 17 | 18 | init_state = { 19 | "customer_id": customer_id, 20 | # TODO add initial state 21 | } 22 | 23 | result, process, step_log = run_workflow("create_example4", [{"product": product.product_id}, init_state]) 24 | 25 | # then 26 | 27 | assert_complete(result) 28 | state = extract_state(result) 29 | 30 | subscription = Example4.from_subscription(state["subscription_id"]) 31 | assert subscription.status == "active" 32 | assert subscription.description == "TODO add correct description" 33 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/test/unit_tests/workflows/example4/test_modify_example4.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from orchestrator.types import SubscriptionLifecycle 3 | 4 | from products.product_types.example4 import Example4 5 | from test.unit_tests.workflows import assert_complete, extract_state, run_workflow 6 | 7 | 8 | @pytest.mark.workflow 9 | def test_happy_flow(responses, example4_subscription): 10 | # given 11 | 12 | customer_id = "3f4fc287-0911-e511-80d0-005056956c1a" 13 | crm = CrmMocks(responses) 14 | crm.get_customer_by_uuid(customer_id) 15 | 16 | # TODO insert additional mocks, if needed (ImsMocks) 17 | 18 | # when 19 | 20 | init_state = {} 21 | 22 | result, process, step_log = run_workflow( 23 | "modify_example4", 24 | [{"subscription_id": example4_subscription}, init_state, {}], 25 | ) 26 | 27 | # then 28 | 29 | assert_complete(result) 30 | state = 
extract_state(result) 31 | 32 | example4 = Example4.from_subscription(state["subscription_id"]) 33 | assert example4.status == SubscriptionLifecycle.ACTIVE 34 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/test/unit_tests/workflows/example4/test_terminate_example4.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from orchestrator.types import SubscriptionLifecycle 3 | 4 | from products.product_types.example4 import Example4 5 | from test.unit_tests.workflows import assert_complete, extract_state, run_workflow 6 | 7 | 8 | @pytest.mark.workflow 9 | def test_happy_flow(responses, example4_subscription): 10 | # when 11 | 12 | # TODO: insert mocks here if needed 13 | 14 | result, _, _ = run_workflow("terminate_example4", [{"subscription_id": example4_subscription}, {}]) 15 | 16 | # then 17 | 18 | assert_complete(result) 19 | state = extract_state(result) 20 | assert "subscription" in state 21 | 22 | # Check subscription in DB 23 | 24 | example4 = Example4.from_subscription(example4_subscription) 25 | assert example4.end_date is not None 26 | assert example4.status == SubscriptionLifecycle.TERMINATED 27 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/test/unit_tests/workflows/example4/test_validate_example4.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from test.unit_tests.workflows import assert_complete, extract_state, run_workflow 4 | 5 | 6 | @pytest.mark.workflow 7 | def test_happy_flow(responses, example4_subscription): 8 | # when 9 | 10 | result, _, _ = run_workflow("validate_example4", {"subscription_id": example4_subscription}) 11 | 12 | # then 13 | 14 | assert_complete(result) 15 | state = extract_state(result) 16 | assert state["check_core_db"] is True 17 | 
-------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/translations/en-GB.json: -------------------------------------------------------------------------------- 1 | { 2 | "workflow": { 3 | "create_example1": "Create example1", 4 | "create_example2": "Create example2", 5 | "create_example4": "Create example4", 6 | "modify_example1": "Modify example1", 7 | "modify_example2": "Modify example2", 8 | "modify_example4": "Modify example4", 9 | "terminate_example1": "Terminate example1", 10 | "terminate_example2": "Terminate example2", 11 | "terminate_example4": "Terminate example4", 12 | "validate_example1": "Validate example1", 13 | "validate_example2": "Validate example2", 14 | "validate_example4": "Validate example4" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/workflows/__init__.py: -------------------------------------------------------------------------------- 1 | from orchestrator.workflows import LazyWorkflowInstance 2 | 3 | LazyWorkflowInstance("workflows.example2.create_example2", "create_example2") 4 | LazyWorkflowInstance("workflows.example2.modify_example2", "modify_example2") 5 | LazyWorkflowInstance("workflows.example2.terminate_example2", "terminate_example2") 6 | LazyWorkflowInstance("workflows.example2.validate_example2", "validate_example2") 7 | LazyWorkflowInstance("workflows.example1.create_example1", "create_example1") 8 | LazyWorkflowInstance("workflows.example1.modify_example1", "modify_example1") 9 | LazyWorkflowInstance("workflows.example1.terminate_example1", "terminate_example1") 10 | LazyWorkflowInstance("workflows.example1.validate_example1", "validate_example1") 11 | LazyWorkflowInstance("workflows.example4.create_example4", "create_example4") 12 | LazyWorkflowInstance("workflows.example4.modify_example4", "modify_example4") 13 | LazyWorkflowInstance("workflows.example4.terminate_example4", 
"terminate_example4") 14 | LazyWorkflowInstance("workflows.example4.validate_example4", "validate_example4") 15 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/workflows/example1/shared/forms.py: -------------------------------------------------------------------------------- 1 | from products.product_blocks.example1 import AnnotatedInt, ExampleStrEnum1 2 | 3 | 4 | def must_be_unused_to_change_mode_validator(example_str_enum_1: ExampleStrEnum1) -> ExampleStrEnum1: 5 | if False: # TODO: implement validation for example_str_enum_1 6 | raise ValueError("Mode can only be changed when there are no services attached to it") 7 | 8 | return example_str_enum_1 9 | 10 | 11 | def annotated_int_must_be_unique_validator(annotated_int: AnnotatedInt) -> AnnotatedInt: 12 | if False: # TODO: implement validation for annotated_int 13 | raise ValueError("annotated_int must be unique for example1") 14 | 15 | return annotated_int 16 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/workflows/example1/validate_example1.py: -------------------------------------------------------------------------------- 1 | import structlog 2 | from orchestrator.workflow import StepList, begin, step 3 | from orchestrator.workflows.utils import validate_workflow 4 | from pydantic_forms.types import State 5 | 6 | from products.product_types.example1 import Example1 7 | 8 | logger = structlog.get_logger(__name__) 9 | 10 | 11 | @step("Load initial state") 12 | def load_initial_state_example1(subscription: Example1) -> State: 13 | return { 14 | "subscription": subscription, 15 | } 16 | 17 | 18 | @step("Validate that the example1 subscription is correctly administered in some external system") 19 | def check_validate_example_in_some_oss(subscription: Example1) -> State: 20 | # TODO: add validation for "Validate that the example1 subscription is correctly administered in some 
external system" 21 | if True: 22 | raise ValueError("Validate that the example1 subscription is correctly administered in some external system") 23 | 24 | return {} 25 | 26 | 27 | @validate_workflow("Validate example1") 28 | def validate_example1() -> StepList: 29 | return begin >> load_initial_state_example1 >> check_validate_example_in_some_oss 30 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/workflows/example2/shared/forms.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/generate/workflows/example4/shared/forms.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/invalid_product_config1.yaml: -------------------------------------------------------------------------------- 1 | # Testcase for invalid config: multiple root product blocks 2 | config: 3 | summary_forms: False 4 | name: invalidexample1 5 | type: InvalidExample4 6 | tag: INVALIDEXAMPLE1 7 | description: "Invalid Product example 1" 8 | product_blocks: 9 | - name: block1 10 | type: Block1 11 | tag: BLOCK1 12 | fields: 13 | - name: num_val 14 | type: int 15 | - name: block2 16 | type: Block2 17 | tag: BLOCK2 18 | fields: 19 | - name: str_val 20 | type: str 21 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/invalid_product_config2.yaml: -------------------------------------------------------------------------------- 1 | # Testcase for invalid config: cyclic product blocks 2 | config: 3 | summary_forms: False 4 | name: invalidexample1 5 | type: InvalidExample4 6 | tag: INVALIDEXAMPLE1 7 | description: "Invalid Product example 1" 8 | product_blocks: 9 | - name: block1 10 | type: 
Block1 11 | tag: BLOCK1 12 | fields: 13 | - name: num_val 14 | type: int 15 | - name: sub_block 16 | type: Block2 17 | - name: block2 18 | type: Block2 19 | tag: BLOCK2 20 | fields: 21 | - name: str_val 22 | type: str 23 | - name: sub_block 24 | type: Block1 25 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/product_config2.yaml: -------------------------------------------------------------------------------- 1 | # Testcase for a simple product with 1 (root) product block 2 | config: 3 | summary_forms: False 4 | name: example2 5 | type: Example2 6 | tag: EXAMPLE2 7 | description: "Product example 2" 8 | product_blocks: 9 | - name: example2 10 | type: Example2 11 | tag: EXAMPLE2 12 | description: "Example 2 root product block" 13 | fields: 14 | - name: example_int_enum_2 15 | type: enum 16 | enum_type: int 17 | values: 18 | - 1 19 | - 2 20 | - 3 21 | - 4 22 | description: "Example 2 int enum" 23 | required: active 24 | modifiable: True 25 | 26 | workflows: 27 | - name: terminate 28 | - name: validate 29 | enabled: false 30 | -------------------------------------------------------------------------------- /test/unit_tests/cli/data/product_config3.yaml: -------------------------------------------------------------------------------- 1 | config: 2 | create_summary_forms: false 3 | send_email: false 4 | name: My Product 9000+ 5 | type: MyProduct 6 | tag: MP 7 | description: "This is the description of My Product" 8 | fixed_inputs: 9 | - name: fixed_input_key 10 | type: FixedInputKey 11 | product_blocks: 12 | - name: My Product Block 9200+ 13 | type: MyIntermediateProduct 14 | fields: 15 | - name: num_val 16 | type: int 17 | - name: str_val 18 | type: str 19 | - name: list_val 20 | type: list 21 | list_type: str 22 | workflows: 23 | - name: modify 24 | flows: 25 | - id: my_product_num_val 26 | description: "Modify the MyProduct numerical value" 27 | 
-------------------------------------------------------------------------------- /test/unit_tests/cli/data/product_config4.yaml: -------------------------------------------------------------------------------- 1 | # Testcase for multiple product blocks defined in the same file 2 | config: 3 | summary_forms: False 4 | name: example4 5 | type: Example4 6 | tag: EXAMPLE4 7 | description: "Product example 4" 8 | product_blocks: 9 | - name: example4 10 | type: Example4 11 | tag: EXAMPLE4 12 | description: "Example 4 root product block" 13 | fields: 14 | - name: num_val 15 | type: int 16 | - name: sub_block 17 | type: Example4Sub 18 | description: "example 4 sub product block" 19 | required: provisioning 20 | - name: example4sub 21 | type: Example4Sub 22 | tag: EXAMPLE4SUB 23 | description: "example 4 sub product block" 24 | fields: 25 | - name: str_val 26 | type: str 27 | 28 | 29 | workflows: 30 | - name: terminate 31 | - name: validate 32 | enabled: false 33 | -------------------------------------------------------------------------------- /test/unit_tests/cli/generator/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/unit_tests/cli/generator/__init__.py -------------------------------------------------------------------------------- /test/unit_tests/cli/generator/test_enums.py: -------------------------------------------------------------------------------- 1 | from orchestrator.cli.generator.generator.enums import convert_int_enum, convert_str_enum 2 | 3 | 4 | def test_convert_str_enum(): 5 | field = { 6 | "default": "tagged", 7 | "enum_type": "str", 8 | "name": "port_mode", 9 | "type": "enum", 10 | "values": ["untagged", "tagged", "link_member"], 11 | } 12 | converted_field = convert_str_enum(field) 13 | assert converted_field["type"] == "PortMode" 14 | assert converted_field["default"] == 
"PortMode.tagged" 15 | 16 | 17 | def test_convert_int_enum(): 18 | field = { 19 | "default": 40000, 20 | "enum_type": "int", 21 | "name": "speed", 22 | "type": "enum", 23 | "values": [1000, 10000, 40000, 100000], 24 | } 25 | converted_field = convert_int_enum(field) 26 | assert converted_field["type"] == "Speed" 27 | assert converted_field["default"] == "Speed._40000" 28 | -------------------------------------------------------------------------------- /test/unit_tests/cli/helpers.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | 4 | def absolute_path(path: str) -> str: 5 | file = Path(__file__).resolve().parent / "data" / path 6 | return str(file) 7 | 8 | 9 | def create_main(): 10 | with open("main.py", "w") as fp: 11 | fp.write( 12 | "from orchestrator import OrchestratorCore\n" 13 | "from orchestrator.cli.main import app as core_cli\n" 14 | "from orchestrator.settings import AppSettings\n" 15 | "\n" 16 | "app = OrchestratorCore(base_settings=AppSettings())\n" 17 | 'if __name__ == "__main__":\n' 18 | " core_cli()\n" 19 | ) 20 | -------------------------------------------------------------------------------- /test/unit_tests/cli/test_config_validation.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import structlog 3 | 4 | from orchestrator.cli.generate import app as generate_app 5 | from test.unit_tests.cli.helpers import absolute_path 6 | 7 | logger = structlog.get_logger() 8 | 9 | 10 | @pytest.mark.parametrize( 11 | "config_file,expected_exception,expected_message", 12 | [ 13 | ("invalid_product_config1.yaml", ValueError, "found multiple"), 14 | ("invalid_product_config2.yaml", ValueError, "Cycle detected"), 15 | ], 16 | ) 17 | def test_product_block_validation(config_file, expected_exception, expected_message, cli_invoke): 18 | config_file = absolute_path(config_file) 19 | with pytest.raises(expected_exception, 
match=expected_message): 20 | cli_invoke(generate_app, ["product-blocks", "--config-file", config_file]) 21 | -------------------------------------------------------------------------------- /test/unit_tests/config.py: -------------------------------------------------------------------------------- 1 | CITY_TYPE = "city_type" 2 | DOMAIN = "domain" 3 | PORT_SPEED = "port_speed" 4 | SERVICE_SPEED = "service_speed" 5 | PORT_SUBSCRIPTION_ID = "port_subscription_id" 6 | IMS_CIRCUIT_ID = "ims_circuit_id" 7 | IPAM_PREFIX_ID = "ipam_prefix_id" 8 | PARENT_IP_PREFIX_SUBSCRIPTION_ID = "parent_ip_prefix_subscription_id" 9 | INTERNETPINNEN_PREFIX_SUBSCRIPTION_ID = "internetpinnen_prefix_subscription_id" 10 | PEER_GROUP_SUBSCRIPTION_ID = "peer_group_subscription_id" 11 | GRAPHQL_ENDPOINT = "/api/graphql" 12 | GRAPHQL_HEADERS = {"Content-Type": "application/json"} 13 | -------------------------------------------------------------------------------- /test/unit_tests/db/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/unit_tests/db/__init__.py -------------------------------------------------------------------------------- /test/unit_tests/db/test_listeners.py: -------------------------------------------------------------------------------- 1 | from dirty_equals import IsFloat 2 | from sqlalchemy import text 3 | 4 | from orchestrator.db import db 5 | from orchestrator.db.listeners import disable_listeners, monitor_sqlalchemy_queries 6 | 7 | 8 | def test_monitor_sqlalchemy_queries(): 9 | monitor_sqlalchemy_queries() 10 | 11 | try: 12 | 13 | db.session.execute(text("select 1")) 14 | 15 | stats = db.session.connection().info.copy() 16 | assert stats == { 17 | "queries_completed": 1, 18 | "queries_started": 1, 19 | "query_start_time": [], 20 | "query_time_spent": IsFloat, 21 | } 22 | finally: 23 | disable_listeners() 24 | 
-------------------------------------------------------------------------------- /test/unit_tests/db/test_migration_does_column_exist.py: -------------------------------------------------------------------------------- 1 | from orchestrator.db import db 2 | from orchestrator.migrations.helpers import has_table_column 3 | 4 | 5 | def test_select_from_table(): 6 | # Testing if Table Workflows exist with column is_task 7 | # it should because the db.session depends on the session where all migrations are already run 8 | session = db.session 9 | result = has_table_column(table_name="workflows", column_name="is_task", conn=session) 10 | assert result is True, "Column 'is_task' does not exist in 'workflows' table" 11 | -------------------------------------------------------------------------------- /test/unit_tests/domain/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/unit_tests/domain/__init__.py -------------------------------------------------------------------------------- /test/unit_tests/fixtures/products/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/unit_tests/fixtures/products/__init__.py -------------------------------------------------------------------------------- /test/unit_tests/fixtures/products/product_blocks/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/workfloworchestrator/orchestrator-core/fc4c57e1466f7dca330c70fdc3bcc5c52c0380bd/test/unit_tests/fixtures/products/product_blocks/__init__.py -------------------------------------------------------------------------------- /test/unit_tests/fixtures/products/product_blocks/product_sub_block_one.py: 
@pytest.fixture
def test_product_sub_block_one():
    """Return the SubBlockOneForTest model classes for each lifecycle state.

    Returns a tuple (inactive, provisioning, active). Field requirements
    tighten per lifecycle: inactive allows both fields to be None,
    provisioning requires int_field, active requires both fields.
    """

    class SubBlockOneForTestInactive(ProductBlockModel, product_block_name="SubBlockOneForTest"):
        int_field: int | None = None
        str_field: str | None = None

    class SubBlockOneForTestProvisioning(SubBlockOneForTestInactive, lifecycle=[SubscriptionLifecycle.PROVISIONING]):
        int_field: int
        str_field: str | None = None

    class SubBlockOneForTest(SubBlockOneForTestProvisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]):
        int_field: int
        str_field: str

    return SubBlockOneForTestInactive, SubBlockOneForTestProvisioning, SubBlockOneForTest


@pytest.fixture
def test_product_sub_block_one_db(resource_type_int, resource_type_str):
    """Persist a 'SubBlockOneForTest' ProductBlockTable row and return it.

    The row is linked to the int and str resource types and committed, so it
    is visible to the rest of the test's session.
    """
    sub_block = ProductBlockTable(
        name="SubBlockOneForTest", description="Test Sub Block One", tag="TEST", status="active"
    )

    sub_block.resource_types = [resource_type_int, resource_type_str]

    db.session.add(sub_block)
    db.session.commit()
    return sub_block
@pytest.fixture
def test_product_sub_block_two():
    """Return the SubBlockTwoForTest model classes for each lifecycle state.

    Returns a tuple (inactive, provisioning, active); all three require
    int_field_2 (see the TODO below for the inactive case).
    """

    class SubBlockTwoForTestInactive(ProductBlockModel, product_block_name="SubBlockTwoForTest"):
        int_field_2: int  # TODO #430 inactive productblocks should not have required fields

    class SubBlockTwoForTestProvisioning(SubBlockTwoForTestInactive, lifecycle=[SubscriptionLifecycle.PROVISIONING]):
        int_field_2: int

    class SubBlockTwoForTest(SubBlockTwoForTestProvisioning, lifecycle=[SubscriptionLifecycle.ACTIVE]):
        int_field_2: int

    return SubBlockTwoForTestInactive, SubBlockTwoForTestProvisioning, SubBlockTwoForTest


@pytest.fixture
def test_product_sub_block_two_db(resource_type_int_2):
    """Persist a 'SubBlockTwoForTest' ProductBlockTable row and return it."""
    sub_block = ProductBlockTable(
        name="SubBlockTwoForTest", description="Test Sub Block Two", tag="TEST", status="active"
    )

    sub_block.resource_types = [resource_type_int_2]

    db.session.add(sub_block)
    db.session.commit()
    return sub_block
@pytest.fixture
def add_soft_deleted_workflows():
    """Return a factory that inserts ``n`` soft-deleted workflow rows.

    Each row is named ``deleted_workflow_<i>`` and stamped with a
    ``deleted_at`` timestamp, i.e. soft-deleted from the moment of creation.
    """

    def _add_soft_deleted_workflow(n: int):
        rows = [
            WorkflowTable(
                name=f"deleted_workflow_{index}",
                description="deleted workflow",
                target=Target.SYSTEM,
                deleted_at=nowtz(),
            )
            for index in range(n)
        ]
        db.session.add_all(rows)
        db.session.commit()

    return _add_soft_deleted_workflow
def test_stats_extension(fastapi_app_graphql, test_client):
    """With ENABLE_GRAPHQL_STATS_EXTENSION on, responses carry a stats block."""
    # given
    query = """query MyQuery {
        workflows(first: 10) {
            page {
                name
            }
        }
    }"""
    try:
        monitor_sqlalchemy_queries()
        with patch.object(app_settings, "ENABLE_GRAPHQL_STATS_EXTENSION", True):
            fastapi_app_graphql.register_graphql()

            # when
            response = test_client.post("/api/graphql", json={"query": query})
    finally:
        # Detach the query listeners so other tests are unaffected.
        disable_listeners()

    # then
    stats = response.json()["extensions"]["stats"]
    assert stats == {"db_queries": IsInt, "db_time": IsFloat, "operation_time": IsFloat}
@contextmanager
def mutation_authorization():
    """Temporarily disable OAuth2 and enable GraphQL mutations.

    Saves the current ``OAUTH2_ACTIVE`` and ``MUTATIONS_ENABLED`` settings,
    overrides them for the duration of the ``with`` block, and restores them
    afterwards.

    Fix: restoration now happens in a ``finally`` clause. Previously an
    exception raised inside the ``with`` body skipped the restore statements,
    leaking the mutated authorization settings into subsequent tests.
    """
    old_oauth2_active = oauth2lib_settings.OAUTH2_ACTIVE
    old_mutations_enabled = oauth2lib_settings.MUTATIONS_ENABLED

    oauth2lib_settings.OAUTH2_ACTIVE = False
    oauth2lib_settings.MUTATIONS_ENABLED = True
    try:
        yield
    finally:
        oauth2lib_settings.OAUTH2_ACTIVE = old_oauth2_active
        oauth2lib_settings.MUTATIONS_ENABLED = old_mutations_enabled
def safe_delete_product_block_id(product_block_class):
    """Safely delete product_block_id from product block class if its defined.

    When a product block is not defined within a fixture function, the
    product_block_id is stored inside the class and is kept through multiple
    tests, which results in a foreign key error product block does not exist.
    """
    # `del` on a class only removes attributes stored in the class's own
    # __dict__, so check there first instead of catching AttributeError.
    if "product_block_id" in product_block_class.__dict__:
        del product_block_class.product_block_id
# Pydantic v2 error dicts include a documentation "url" key whose value embeds
# the Pydantic version; matching it with mock.ANY keeps test expectations
# stable across Pydantic upgrades.
URL_MISSING = {"url": mock.ANY}
URL_STR_TYPE = {"url": mock.ANY}
URL_PARSING = {"url": mock.ANY}
URL_VALUE = {"url": mock.ANY}
def test_engine_metrics_success(test_client) -> None:
    """Pausing the engine flips the wfo_engine_status gauge to PAUSED.

    Resuming it restores the metrics output to the empty baseline.

    Fix: the first metrics request used the relative path "api/metrics";
    changed to "/api/metrics" for consistency with every other request in
    this module (a relative path only resolves correctly by accident of the
    test client's base URL).
    """
    # Pause the workflow engine via the settings endpoint.
    test_client.put("/api/settings/status", json={"global_lock": True})
    response = test_client.get("/api/metrics")
    expected_metric_lines = [
        "# HELP wfo_engine_status Current workflow engine status.",
        "# TYPE wfo_engine_status gauge",
        'wfo_engine_status{wfo_engine_status="PAUSED"} 1.0',
        'wfo_engine_status{wfo_engine_status="PAUSING"} 0.0',
        'wfo_engine_status{wfo_engine_status="RUNNING"} 0.0',
    ]
    assert all(line in response.text for line in expected_metric_lines)

    # Resume the engine; the metrics should return to the empty baseline.
    test_client.put("/api/settings/status", json={"global_lock": False})
    response = test_client.get("/api/metrics")
    assert response.text == EMPTY_METRICS
def test_scheduling_with_period(capsys, monkeypatch):
    """A scheduler registered with a one-second period executes its job once."""
    call_tracker = {"called": False}

    @scheduler(name="test", time_unit="second", period=1)
    def test_scheduler():
        call_tracker["called"] = True
        print("I've run")  # noqa: T001, T201
        return schedule.CancelJob

    ALL_SCHEDULERS.clear()
    ALL_SCHEDULERS.append(test_scheduler)

    # run() eventually hits un-mocked internals (next_run()/idle_seconds())
    # and raises TypeError; we only care that the job executed before that.
    with pytest.raises(TypeError):
        run()

    captured = capsys.readouterr()
    assert captured.out == "I've run\n"
    assert call_tracker["called"]
def test_get_product_by_name_err(generic_product_1):
    """Looking up a non-existent product name raises NoResultFound.

    Fix: dropped the ``assert ... is None`` that wrapped the call. Inside a
    ``pytest.raises`` block the call raises before the assertion is ever
    evaluated, so the assert was dead code that misleadingly suggested a
    None return was possible.
    """
    with pytest.raises(NoResultFound):
        get_product_by_name("")
def test_remove_redundant_ws():
    """remove_redundant_ws collapses inner whitespace runs and strips the ends."""
    assert remove_redundant_ws(" ") == ""
    assert remove_redundant_ws(" a b c ") == "a b c"
@pytest.mark.workflow
def test_check_subscriptions(generic_subscription_1, validation_workflow_instance):
    """task_validate_product_type validates the workflows of a product type."""
    # Attach the validation workflow to the subscription's product so the
    # task has something to validate.
    product = db.session.get(SubscriptionTable, generic_subscription_1).product
    product.workflows.append(validation_workflow_instance)
    db.session.add(product)
    db.session.commit()

    init_data = {
        "product_type": "Generic",
    }

    result, process, step_log = run_workflow("task_validate_product_type", init_data)
    assert_complete(result)

    state = extract_state(result)
    assert state["product_type"] == "Generic"
    assert state["workflow_name"] == "task_validate_product_type"
    assert state["workflow_target"] == Target.SYSTEM

    validation_results = state["result"]
    assert len(validation_results) == 1

    first_result = validation_results[0]
    assert first_result["total_workflows_validated"] == 1
    assert len(first_result["workflows"]) == 1
    assert first_result["workflows"][0]["workflow_name"] == "validation_workflow"
    assert first_result["workflows"][0]["product_type"] == "Generic"
state["note"] == TEST 20 | assert state["__old_subscriptions__"].get(generic_subscription_1) 21 | assert state["__old_subscriptions__"][generic_subscription_1]["note"] is None 22 | assert state["__old_subscriptions__"][generic_subscription_1]["description"] == "Generic Subscription One" 23 | 24 | # assert subscription for correctness 25 | subscription = get_subscription(generic_subscription_1) 26 | assert subscription.note == TEST 27 | --------------------------------------------------------------------------------