├── .azuredevops └── pipelines │ └── sdl-checks.yml ├── .conventional-commits ├── GitVersion.yml └── commitlint.precommit.config.js ├── .devcontainer ├── devcontainer.json └── setup.sh ├── .github ├── dependabot.yml └── workflows │ ├── cd.yml │ ├── ci-cd.yml │ ├── ci.yml │ ├── conventional-commits.yml │ └── e2e.yml ├── .gitignore ├── .markdownlint.yaml ├── .markdownlintignore ├── .pre-commit-config.yaml ├── .vscode └── settings.json ├── .yamllint.yaml ├── CODE_OF_CONDUCT.md ├── DEVELOPMENT.md ├── DataFactoryTestingFramework.sln ├── LICENSE ├── MANIFEST.in ├── README.md ├── SECURITY.md ├── SUPPORT.md ├── docs ├── advanced │ ├── debugging.md │ ├── development_workflow.md │ ├── framework_internals.md │ └── overriding_expression_functions.md ├── basic │ ├── activity_testing.md │ ├── getting_started.md │ ├── installing_and_initializing_framework.md │ ├── pipeline_testing.md │ ├── repository_setup.md │ └── state.md └── images │ └── download_support_files.png ├── examples ├── .devcontainer │ ├── devcontainer.json │ ├── postCreateCommand.sh │ └── requirements.txt ├── README.md ├── data_factory │ ├── batch_job │ │ ├── README.md │ │ ├── batch_job.png │ │ ├── pipeline │ │ │ └── batch_job.json │ │ ├── test_data_factory_batchjob_functional.py │ │ └── test_data_factory_batchjob_unit.py │ └── copy_blobs │ │ ├── README.md │ │ ├── copy_blobs.png │ │ ├── pipeline │ │ └── copy_blobs.json │ │ ├── test_data_factory_copy_blobs_functional.py │ │ └── test_data_factory_copy_blobs_unit.py ├── fabric │ ├── batch_job │ │ ├── README.md │ │ ├── batch_job.png │ │ ├── pipeline │ │ │ ├── .platform │ │ │ └── pipeline-content.json │ │ ├── test_fabric_batchjob_functional.py │ │ └── test_fabric_batchjob_unit.py │ ├── nested_ingestion_pipeline │ │ ├── README.md │ │ ├── fabric │ │ │ ├── pl_ingestion.DataPipeline │ │ │ │ ├── .platform │ │ │ │ └── pipeline-content.json │ │ │ ├── pl_main.DataPipeline │ │ │ │ ├── .platform │ │ │ │ └── pipeline-content.json │ │ │ └── tests │ │ │ │ ├── test_pl_ingestion_activity.py │ │ │ │ ├── test_pl_main_activity.py │ │ │ │ ├── test_pl_main_pipeline_child_flag_false.py │ │ │ │ └── test_pl_main_pipeline_child_flag_true.py │ │ ├── pl_ingestion_pipeline.png │ │ └── pl_main_pipeline.png │ └── simple_web_hook │ │ ├── Readme.md │ │ ├── fabric │ │ └── ExamplePipeline.DataPipeline │ │ │ ├── .platform │ │ │ └── pipeline-content.json │ │ └── test_fabric_simple_webhook.py └── synapse │ └── copy_blobs │ ├── README.md │ ├── copy_blobs.png │ ├── pipeline │ └── copy_blobs.json │ ├── test_synapse_copy_blobs_functional.py │ └── test_synapse_copy_blobs_unit.py ├── poetry.lock ├── poetry.toml ├── pyproject.toml ├── setup.py ├── src └── data_factory_testing_framework │ ├── __init__.py │ ├── _deserializers │ ├── __init__.py │ ├── _deserializer_base.py │ ├── _deserializer_data_factory.py │ ├── _deserializer_fabric.py │ └── shared │ │ ├── __init__.py │ │ ├── _activity_deserializer.py │ │ └── _data_factory_element_replacer.py │ ├── _enum_meta.py │ ├── _expression_runtime │ ├── data_factory_expression │ │ ├── __init__.py │ │ ├── data_factory_to_expression_transformer.py │ │ ├── exceptions.py │ │ └── expression_transformer.py │ ├── expression_runtime.py │ └── functions_repository.py │ ├── _pythonnet │ ├── Evaluator.csproj │ ├── __init__.py │ ├── data_factory_testing_framework_expressions_evaluator.py │ └── nuget.config │ ├── _repositories │ ├── __init__.py │ ├── _factories │ │ ├── base_repository_factory.py │ │ ├── data_factory_repository_factory.py │ │ └── fabric_repository_factory.py │ └── data_factory_repository.py │ ├── 
_test_framework.py │ ├── exceptions │ ├── __init__.py │ ├── _activity_not_found_error.py │ ├── _activity_output_field_not_found_error.py │ ├── _control_activity_expression_evaluated_not_to_expected_type.py │ ├── _data_factory_element_evaluation_error.py │ ├── _function_call_invalid_arguments_count_error.py │ ├── _parameter_not_found_error.py │ ├── _pipeline_activities_circular_dependency_error.py │ ├── _pipeline_not_found_error.py │ ├── _state_iteration_item_not_set_error.py │ ├── _unsupported_function_error.py │ ├── _user_error.py │ ├── _variable_being_evaluated_does_not_exist_error.py │ └── _variable_not_found_error.py │ ├── models │ ├── __init__.py │ ├── _data_factory_element.py │ ├── _data_factory_object_type.py │ ├── _pipeline.py │ └── activities │ │ ├── __init__.py │ │ ├── _activity.py │ │ ├── _activity_dependency.py │ │ ├── _append_variable_activity.py │ │ ├── _control_activity.py │ │ ├── _execute_pipeline_activity.py │ │ ├── _fail_activity.py │ │ ├── _filter_activity.py │ │ ├── _for_each_activity.py │ │ ├── _if_condition_activity.py │ │ ├── _set_variable_activity.py │ │ ├── _switch_activity.py │ │ └── _until_activity.py │ ├── py.typed │ └── state │ ├── __init__.py │ ├── _activity_result.py │ ├── _dependency_condition.py │ ├── _pipeline_run_state.py │ ├── _pipeline_run_variable.py │ ├── _run_parameter.py │ ├── _run_parameter_type.py │ └── _run_state.py └── tests ├── __init__.py ├── functional ├── __init__.py ├── activity_outputs │ ├── .platform │ ├── pipeline-content.json │ └── test_set_activity_output.py ├── api │ ├── classes │ │ ├── state │ │ │ └── test_run_state_api.py │ │ └── test_test_framework_api.py │ └── test_package_api.py ├── append_variable_pipeline │ ├── .platform │ ├── pipeline-content.json │ └── test_append_variable_activity_pipeline.py ├── datafactory_element │ └── test_evaluate_datafactory_element.py ├── execute_child_pipeline │ ├── pipeline │ │ ├── child.json │ │ └── main.json │ └── test_execute_child_pipeline_activity.py ├── filter_activity_pipeline │ ├── .platform │ ├── pipeline-content.json │ └── test_filter_activity_pipeline.py ├── switch_activity_pipeline │ ├── pipeline │ │ └── switchtest.json │ └── test_switch_activity_pipeline.py ├── test_framework │ ├── data │ │ ├── data_factory │ │ │ └── pipeline │ │ │ │ └── default_variables.json │ │ ├── fabric │ │ │ ├── .platform │ │ │ └── pipeline-content.json │ │ └── synapse │ │ │ └── pipeline │ │ │ └── set_date.json │ └── test_test_framework.py ├── utf_16_encoding │ ├── .platform │ ├── pipeline-content.json │ └── test_encoding_support.py ├── utils.py ├── variables_default_value │ ├── pipeline │ │ └── default_variables.json │ └── test_variables_default.py └── xml │ ├── pipeline │ └── xpath_example_08_a.json │ └── test_xml.py └── unit ├── functions └── test_data_factory_testing_framework_expression_evaluator.py ├── models ├── activities │ ├── base │ │ └── test_activity.py │ ├── control_activities │ │ ├── test_filter_activity.py │ │ ├── test_for_each_activity.py │ │ ├── test_if_condition_activity.py │ │ ├── test_switch_activity.py │ │ └── test_until_activity.py │ ├── test_append_variable_activity.py │ ├── test_execute_pipeline_activity_parameters.py │ ├── test_fail_activity.py │ └── test_set_variable_activity.py └── pipelines │ └── test_pipeline_resource.py └── test__test_framework.py /.azuredevops/pipelines/sdl-checks.yml: -------------------------------------------------------------------------------- 1 | trigger: 2 | batch: true 3 | branches: 4 | include: 5 | - main 6 | 7 | schedules: 8 | # Ensure we build nightly to 
catch any new CVEs and report SDL often. 9 | - cron: "0 0 * * *" 10 | displayName: Nightly Build 11 | branches: 12 | include: 13 | - main 14 | always: true 15 | 16 | pr: 17 | branches: 18 | include: 19 | - main 20 | 21 | resources: 22 | repositories: 23 | - repository: 1es 24 | type: git 25 | name: 1ESPipelineTemplates/1ESPipelineTemplates 26 | ref: refs/tags/release 27 | 28 | extends: 29 | template: v1/1ES.Unofficial.PipelineTemplate.yml@1es # Public Build (not used to publish) 30 | parameters: 31 | pool: 32 | name: dftf-1eshosted-pool 33 | image: dftf-1es-image-vanilla-ubuntu-22.04-ne 34 | os: linux 35 | sdl: 36 | codeql: 37 | compiled: 38 | enabled: true 39 | runSourceLanguagesInSourceAnalysis: true 40 | sourceAnalysisPool: 41 | name: dftf-1eshosted-pool 42 | image: dftf-1es-image-vanilla-windows-2022-ne 43 | os: windows 44 | stages: 45 | - stage: build 46 | displayName: build 47 | jobs: 48 | - job: job 49 | displayName: Job 50 | steps: 51 | - checkout: self 52 | -------------------------------------------------------------------------------- /.conventional-commits/GitVersion.yml: -------------------------------------------------------------------------------- 1 | --- 2 | assembly-versioning-scheme: MajorMinorPatch 3 | mode: Mainline 4 | merge-message-formats: {} 5 | 6 | # Conventional Commit Reference 7 | # https://www.conventionalcommits.org/en/v1.0.0/ 8 | # https://gitversion.net/docs/reference/version-increments#manually-incrementing-the-version 9 | major-version-bump-message: "^(build|chore|ci|docs|feat|fix|perf|refactor|revert|style|test)(\\([\\w\\s-]*\\))?(!:|:.*\\n\\n((.+\\n)+\\n)?BREAKING CHANGE:\\s.+)" 10 | minor-version-bump-message: "^(feat)(\\([\\w\\s-]*\\))?:" 11 | patch-version-bump-message: "^(build|chore|ci|docs|fix|perf|refactor|revert|style|test)(\\([\\w\\s-]*\\))?:" 12 | -------------------------------------------------------------------------------- /.conventional-commits/commitlint.precommit.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | extends: ['@commitlint/config-conventional'], 3 | defaultIgnores: false, 4 | ignores: [ 5 | // allow fixup and squash commits for precommit checks 6 | (c) => new RegExp('(fixup|squash)!').test(c), 7 | ], 8 | }; 9 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the 2 | // README at: https://github.com/devcontainers/templates/tree/main/src/alpine 3 | { 4 | "name": "Debian", 5 | // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile 6 | "image": "mcr.microsoft.com/devcontainers/base:debian-11", 7 | 8 | // Features to add to the dev container. More info: https://containers.dev/features. 9 | "features": { 10 | "ghcr.io/devcontainers/features/dotnet:2": { 11 | "version": "8.0" 12 | }, 13 | "ghcr.io/devcontainers/features/python:1": { 14 | "installTools": false, 15 | "version": "3.9" 16 | }, 17 | "ghcr.io/devcontainers-contrib/features/poetry:2": { 18 | "version": "1.8.2" 19 | }, 20 | "ghcr.io/devcontainers-contrib/features/pre-commit:2": {}, 21 | "ghcr.io/devcontainers/features/github-cli:1": {} 22 | }, 23 | 24 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 
25 | // "forwardPorts": [], 26 | 27 | // Use 'postCreateCommand' to run commands after the container is created. 28 | "postCreateCommand": ".devcontainer/setup.sh", 29 | 30 | // Configure tool-specific properties. 31 | "customizations": { 32 | "vscode": { 33 | "extensions": [ 34 | "GitHub.copilot", 35 | "ms-python.python", 36 | "ms-dotnettools.csharp", 37 | "ryanluker.vscode-coverage-gutters" 38 | ] 39 | } 40 | } 41 | 42 | // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. 43 | // "remoteUser": "root" 44 | } 45 | -------------------------------------------------------------------------------- /.devcontainer/setup.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | dotnet build 4 | poetry install --no-interaction --no-root 5 | poetry run pip install -e . 6 | poetry run pre-commit install-hooks 7 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for more information: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | # https://containers.dev/guide/dependabot 6 | 7 | version: 2 8 | updates: 9 | - package-ecosystem: "devcontainers" 10 | directory: "/" 11 | schedule: 12 | interval: weekly 13 | -------------------------------------------------------------------------------- /.github/workflows/cd.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: CD 3 | 4 | on: 5 | workflow_call: # yamllint disable-line rule:truthy 6 | inputs: 7 | version: 8 | description: 'The version of the package' 9 | required: true 10 | type: string 11 | 12 | jobs: 13 | publish: 14 | runs-on: ubuntu-latest 15 | permissions: 16 | id-token: write 17 | contents: write 18 | environment: 19 | name: pypi 20 | url: https://pypi.org/p/data-factory-testing-framework 21 | steps: 22 | - name: Check out repository 23 | uses: actions/checkout@v4 24 | - name: Download sdist 25 | uses: actions/download-artifact@v4 26 | with: 27 | name: sdist 28 | path: dist 29 | - name: Download whl 30 | uses: actions/download-artifact@v4 31 | with: 32 | name: whl 33 | path: dist 34 | - name: Create release 35 | run: gh release create "v${{ inputs.version }}" --prerelease --target ${{ github.ref }} 36 | env: 37 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 38 | - name: Publish package distributions to PyPI 39 | uses: pypa/gh-action-pypi-publish@release/v1 40 | with: 41 | packages-dir: ./dist 42 | attestations: false 43 | -------------------------------------------------------------------------------- /.github/workflows/ci-cd.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: CI/CD 3 | 4 | on: 5 | push: 6 | branches: 7 | - main 8 | pull_request: 9 | branches: 10 | - main 11 | 12 | jobs: 13 | ci: 14 | uses: ./.github/workflows/ci.yml 15 | cd: 16 | needs: [ci] 17 | if: github.ref == 'refs/heads/main' 18 | permissions: 19 | id-token: write 20 | contents: write 21 | uses: ./.github/workflows/cd.yml 22 | with: 23 | version: ${{ needs.ci.outputs.version }} 24 | -------------------------------------------------------------------------------- 
/.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: CI 3 | 4 | on: 5 | workflow_call: # yamllint disable-line rule:truthy 6 | outputs: 7 | version: 8 | description: 'The version of the package' 9 | value: ${{ jobs.version.outputs.version }} 10 | jobs: 11 | version: 12 | runs-on: ubuntu-latest 13 | outputs: 14 | version: ${{ steps.set-version.outputs.GITVERSION_SEMVER }} 15 | steps: 16 | - name: Check out repository 17 | uses: actions/checkout@v4 18 | with: 19 | # to calculate the version we need the tags and the commit history 20 | fetch-tags: true 21 | fetch-depth: 0 22 | - name: Install GitVersion 23 | uses: gittools/actions/gitversion/setup@v1.1.1 24 | with: 25 | versionSpec: '5.x' 26 | - name: Determine Version 27 | id: gitversion 28 | uses: gittools/actions/gitversion/execute@v0 29 | with: 30 | useConfigFile: true 31 | configFilePath: './.conventional-commits/GitVersion.yml' 32 | - name: Set version output 33 | id: set-version 34 | run: echo "GITVERSION_SEMVER=$GitVersion_MajorMinorPatch$SUFFIX" >> $GITHUB_OUTPUT 35 | env: 36 | SUFFIX: ${{ github.ref != 'refs/heads/main' && format('.dev{0}+{1}', github.run_number, env.GitVersion_ShortSha) || '' }} 37 | build: 38 | runs-on: ubuntu-latest 39 | needs: version 40 | permissions: 41 | contents: read 42 | steps: 43 | - name: Check out repository 44 | uses: actions/checkout@v4 45 | - name: Set up python 46 | id: setup-python 47 | uses: actions/setup-python@v4 48 | with: 49 | python-version: '3.9' 50 | - uses: actions/setup-dotnet@v4 51 | with: 52 | dotnet-version: '8.0.x' 53 | - name: Install Poetry 54 | uses: snok/install-poetry@v1 55 | with: 56 | version: 1.8.2 57 | virtualenvs-create: true 58 | virtualenvs-in-project: true 59 | installer-parallel: true 60 | - name: Install dependencies 61 | if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' 62 | run: poetry install --no-interaction --no-root 63 | - name: Build dotnet dependencies 64 | run: dotnet build src/data_factory_testing_framework/_pythonnet/Evaluator.csproj -o src/data_factory_testing_framework/_pythonnet/bin 65 | - name: Install project 66 | run: poetry run pip install -e . 
67 | - name: Run linting and tests 68 | run: poetry run pre-commit run --all-files --show-diff-on-failure 69 | - name: Set build version and build package 70 | run: | 71 | # create a version file with the build version 72 | echo $BUILD_VERSION > VERSION 73 | poetry run python -m build 74 | env: 75 | BUILD_VERSION: ${{ needs.version.outputs.version }} 76 | - name: Upload dist 77 | uses: actions/upload-artifact@v4 78 | with: 79 | name: whl 80 | path: dist/*.whl 81 | - name: Upload sdist 82 | uses: actions/upload-artifact@v4 83 | with: 84 | name: sdist 85 | path: dist/*.tar.gz 86 | e2e: 87 | needs: build 88 | strategy: 89 | matrix: 90 | os: ['ubuntu-latest', 'windows-latest'] 91 | python-version: ['3.9', '3.10', '3.11', '3.12', '3.x'] 92 | dotnet-version: ['8.0.x'] 93 | dist: [whl, sdist] 94 | uses: ./.github/workflows/e2e.yml 95 | with: 96 | os: ${{ matrix.os }} 97 | python-version: ${{ matrix.python-version }} 98 | dotnet-version: ${{ matrix.dotnet-version }} 99 | dist: ${{ matrix.dist }} 100 | -------------------------------------------------------------------------------- /.github/workflows/conventional-commits.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Conventional Commits 3 | on: 4 | pull_request_target: 5 | types: 6 | - opened 7 | - reopened 8 | - edited 9 | - synchronize 10 | permissions: 11 | pull-requests: read 12 | jobs: 13 | lint-pull-request: 14 | name: Lint Pull Request 15 | runs-on: ubuntu-latest 16 | steps: 17 | - uses: amannn/action-semantic-pull-request@v5 18 | env: 19 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 20 | -------------------------------------------------------------------------------- /.github/workflows/e2e.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: E2E 3 | 4 | on: 5 | workflow_call: # yamllint disable-line rule:truthy 6 | inputs: 7 | os: 8 | description: 'Operating system' 9 | required: true 10 | type: string 11 | python-version: 12 | description: 'Python version' 13 | required: true 14 | type: string 15 | dotnet-version: 16 | description: '.NET version' 17 | required: true 18 | type: string 19 | dist: 20 | description: 'Distribution artifact' 21 | required: true 22 | type: string 23 | 24 | jobs: 25 | e2e: 26 | runs-on: ${{ inputs.os }} 27 | permissions: 28 | contents: read 29 | steps: 30 | - name: Check out repository 31 | uses: actions/checkout@v3 32 | - name: Set up python 33 | id: setup-python 34 | uses: actions/setup-python@v4 35 | with: 36 | python-version: ${{ inputs.python-version }} 37 | - uses: actions/setup-dotnet@v4 38 | with: 39 | dotnet-version: ${{ inputs.dotnet-version }} 40 | - name: Download dist 41 | uses: actions/download-artifact@v4 42 | with: 43 | name: ${{ inputs.dist }} 44 | path: dist 45 | - name: Install pytest 46 | run: python -m pip install pytest 47 | - name: Install project 48 | run: pip install $(ls) 49 | working-directory: dist 50 | - name: Run end-to-end tests 51 | run: python -m pytest examples 52 | -------------------------------------------------------------------------------- /.markdownlint.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | MD013: 3 | line_length: 400 4 | -------------------------------------------------------------------------------- /.markdownlintignore: -------------------------------------------------------------------------------- 1 | # ignore Microsoft-generated files 2 | SECURITY.md 3 | SUPPORT.md 4 | 
-------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | default_install_hook_types: [pre-commit, pre-push, commit-msg] 3 | repos: 4 | - repo: https://github.com/alessandrojcm/commitlint-pre-commit-hook 5 | rev: v9.5.0 6 | hooks: 7 | - id: commitlint 8 | stages: [commit-msg] 9 | args: [--config, ./.conventional-commits/commitlint.precommit.config.js] 10 | additional_dependencies: ['@commitlint/config-conventional@18.5'] 11 | - repo: https://github.com/python-poetry/poetry 12 | rev: 1.8.2 13 | hooks: 14 | - id: poetry-check 15 | args: [] 16 | - id: poetry-lock 17 | args: ["--no-update"] 18 | - repo: https://github.com/pre-commit/pre-commit-hooks 19 | rev: v4.5.0 20 | hooks: 21 | - id: end-of-file-fixer 22 | exclude: | 23 | (?x)^( 24 | tests/functional/utf_16_encoding/pipeline-content.json| 25 | examples/fabric/simple_web_hook/fabric/ExamplePipeline.DataPipeline/pipeline-content.json| 26 | tests/functional/test_framework/data/fabric/pipeline-content.json 27 | )$ 28 | - id: trailing-whitespace 29 | - id: mixed-line-ending 30 | args: [--fix=auto] 31 | - repo: https://github.com/adrienverge/yamllint 32 | rev: v1.33.0 33 | hooks: 34 | - id: yamllint 35 | args: [--strict] 36 | - repo: https://github.com/igorshubovych/markdownlint-cli 37 | rev: v0.38.0 38 | hooks: 39 | - id: markdownlint-fix 40 | exclude: | 41 | (?x)^( 42 | examples/fabric/simple_web_hook/fabric/Readme.md 43 | )$ 44 | - repo: https://github.com/astral-sh/ruff-pre-commit 45 | rev: v0.1.5 46 | hooks: 47 | - id: ruff 48 | args: [--fix] 49 | - id: ruff-format 50 | - repo: local 51 | hooks: 52 | - id: pytest 53 | name: pytest 54 | entry: poetry run pytest . 55 | language: system 56 | types: [python] 57 | require_serial: true 58 | pass_filenames: false 59 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | // VSCode settings for Python 2 | // We track these settings in the repository so that they are consistent across 3 | // for contributors. 4 | { 5 | "python.testing.pytestArgs": [ 6 | "tests", 7 | "examples", 8 | "-vv" 9 | ], 10 | "python.testing.cwd": "${workspaceFolder}", 11 | "python.testing.pytestEnabled": true, 12 | "python.testing.unittestEnabled": false, 13 | "python.defaultInterpreterPath": ".venv/bin/python", 14 | } 15 | -------------------------------------------------------------------------------- /.yamllint.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | rules: 3 | new-lines: 4 | type: platform 5 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Microsoft Open Source Code of Conduct 2 | 3 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). 
4 | 5 | Resources: 6 | 7 | - [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/) 8 | - [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) 9 | - Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns 10 | -------------------------------------------------------------------------------- /DEVELOPMENT.md: -------------------------------------------------------------------------------- 1 | # Azure Data Factory v2 - Unit Testing Framework 2 | 3 | ## Development 4 | 5 | ### Prerequisites 6 | 7 | * poetry == 1.6.1 8 | 9 | ### Installing the project 10 | 11 | Make sure to create a virtual environment and install the requirements by running: 12 | `poetry install --with dev` 13 | 14 | Build the .NET project: 15 | `dotnet build` 16 | 17 | ### Pre-Commit Hooks 18 | 19 | We use pre-commit hooks to ensure that the code is formatted correctly and linted before committing. 20 | 21 | To install the pre-commit hooks, run the following command: 22 | 23 | ```bash 24 | poetry run pre-commit install 25 | ``` 26 | 27 | To run the pre-commit hooks, use the following command (append `--all-files` to run on all files): 28 | 29 | ```bash 30 | poetry run pre-commit run 31 | ``` 32 | 33 | ### Run Linting 34 | 35 | We use Ruff to lint our code, as it combines the rule sets of several well-established linters 36 | (e.g., black, flake8, isort, and pydocstyle) into a single tool. 37 | 38 | To run linting, run the following command: 39 | 40 | ```bash 41 | poetry run ruff . 42 | ``` 43 | 44 | ### Run tests 45 | 46 | We use pytest to test our code, coverage.py to generate coverage reports, and the [coverage gutters](https://marketplace.visualstudio.com/items?itemName=semasquare.vscode-coverage-gutters) VSCode extension to visualize code coverage in the editor. 47 | 48 | To run all tests and generate coverage data, run the following command: 49 | 50 | ```bash 51 | poetry run coverage run --source=src -m pytest . 52 | ``` 53 | 54 | This will generate a .coverage file. To visualize code coverage, generate a coverage.xml file from the .coverage file by running: 55 | 56 | ```bash 57 | poetry run coverage xml 58 | ``` 59 | 60 | Once the xml file is generated, you can view the coverage results in the editor by running the `Coverage Gutters: Watch` command. 61 | 62 | To run tests and generate coverage.xml in a single command, run: 63 | 64 | ```bash 65 | poetry run coverage run --source=src -m pytest . && poetry run coverage xml 66 | ``` 67 | 68 | ### Updating lock file 69 | 70 | When updating Poetry's lock file, your local cache can become outdated.
71 | You can clear your cache with `poetry cache clear PyPI --all` 72 | -------------------------------------------------------------------------------- /DataFactoryTestingFramework.sln: -------------------------------------------------------------------------------- 1 |  2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio Version 17 4 | VisualStudioVersion = 17.9.34616.47 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{35B3C0BC-05AE-4FA2-8250-DC710420F84A}" 7 | EndProject 8 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Evaluator", "src/data_factory_testing_framework/_pythonnet/Evaluator.csproj", "{636E6B65-E552-41D1-94D5-63B8E6D09F73}" 9 | EndProject 10 | Global 11 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 12 | Release|Any CPU = Release|Any CPU 13 | Release|x64 = Release|x64 14 | Release|x86 = Release|x86 15 | EndGlobalSection 16 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 17 | {636E6B65-E552-41D1-94D5-63B8E6D09F73}.Release|Any CPU.ActiveCfg = DataFactoryTestingFrameworkEvaluator|Any CPU 18 | {636E6B65-E552-41D1-94D5-63B8E6D09F73}.Release|Any CPU.Build.0 = DataFactoryTestingFrameworkEvaluator|Any CPU 19 | {636E6B65-E552-41D1-94D5-63B8E6D09F73}.Release|x64.ActiveCfg = DataFactoryTestingFrameworkEvaluator|Any CPU 20 | {636E6B65-E552-41D1-94D5-63B8E6D09F73}.Release|x64.Build.0 = DataFactoryTestingFrameworkEvaluator|Any CPU 21 | {636E6B65-E552-41D1-94D5-63B8E6D09F73}.Release|x86.ActiveCfg = DataFactoryTestingFrameworkEvaluator|Any CPU 22 | {636E6B65-E552-41D1-94D5-63B8E6D09F73}.Release|x86.Build.0 = DataFactoryTestingFrameworkEvaluator|Any CPU 23 | EndGlobalSection 24 | GlobalSection(SolutionProperties) = preSolution 25 | HideSolutionNode = FALSE 26 | EndGlobalSection 27 | GlobalSection(NestedProjects) = preSolution 28 | {636E6B65-E552-41D1-94D5-63B8E6D09F73} = {35B3C0BC-05AE-4FA2-8250-DC710420F84A} 29 | EndGlobalSection 30 | GlobalSection(ExtensibilityGlobals) = postSolution 31 | SolutionGuid = {63AF779A-5A49-4A39-B72B-22512AB23EFE} 32 | EndGlobalSection 33 | EndGlobal 34 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Microsoft Corporation. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE 22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include VERSION 2 | include DataFactoryTestingFramework.sln 3 | recursive-include src *.cs *.cs 4 | recursive-include src *.csproj *.csproj 5 | prune src/data_factory_testing_framework/_pythonnet/obj 6 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## Security 4 | 5 | Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/). 6 | 7 | If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below. 8 | 9 | ## Reporting Security Issues 10 | 11 | **Please do not report security vulnerabilities through public GitHub issues.** 12 | 13 | Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report). 14 | 15 | If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey). 16 | 17 | You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc). 18 | 19 | Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: 20 | 21 | * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) 22 | * Full paths of source file(s) related to the manifestation of the issue 23 | * The location of the affected source code (tag/branch/commit or direct URL) 24 | * Any special configuration required to reproduce the issue 25 | * Step-by-step instructions to reproduce the issue 26 | * Proof-of-concept or exploit code (if possible) 27 | * Impact of the issue, including how an attacker might exploit the issue 28 | 29 | This information will help us triage your report more quickly. 30 | 31 | If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs. 32 | 33 | ## Preferred Languages 34 | 35 | We prefer all communications to be in English. 
36 | 37 | ## Policy 38 | 39 | Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd). 40 | 41 | 42 | -------------------------------------------------------------------------------- /SUPPORT.md: -------------------------------------------------------------------------------- 1 | # Support 2 | 3 | ## How to file issues and get help 4 | 5 | This project uses GitHub Issues to track bugs and feature requests. Please search the existing 6 | issues before filing new issues to avoid duplicates. For new issues, file your bug or 7 | feature request as a new Issue. 8 | 9 | For help and questions about using this project, feel free to ask your question via GitHub issues. Other support channels may be considered in the future. 10 | 11 | ## Microsoft Support Policy 12 | 13 | Support for this project is limited to the resources listed above. 14 | -------------------------------------------------------------------------------- /docs/advanced/debugging.md: -------------------------------------------------------------------------------- 1 | # Debugging 2 | 3 | Because the framework dynamically parses and interprets data factory resource files, it can be challenging to identify which objects you are working with. It is recommended to use the debugger during development of your tests to get a better idea of what activities are being returned and to understand the structure of the activity and its properties. 4 | -------------------------------------------------------------------------------- /docs/advanced/development_workflow.md: -------------------------------------------------------------------------------- 1 | # Recommended development workflow for Azure Data Factory (ADF) v2 and Azure Synapse Analytics 2 | 3 | * Use ADF / Azure Synapse Analytics Git integration 4 | * Use UI to create a feature branch, build the initial pipeline, and save it to the feature branch 5 | * Pull feature branch locally 6 | * Start writing unit and functional tests, run them locally for immediate feedback, and fix bugs 7 | * Push changes to the feature branch 8 | * Test the new features manually through the UI in a sandbox environment 9 | * Create PR, which will run the tests in the CI pipeline 10 | * Approve PR 11 | * Merge to main and start deploying to dev/test/prod environments 12 | * Run e2e tests after each deployment to validate all happy flows work on that specific environment 13 | -------------------------------------------------------------------------------- /docs/advanced/framework_internals.md: -------------------------------------------------------------------------------- 1 | # Framework internals 2 | 3 | This page will describe the internals of the testing framework: its architecture, design decisions, and implementation details. 4 | -------------------------------------------------------------------------------- /docs/advanced/overriding_expression_functions.md: -------------------------------------------------------------------------------- 1 | # Overriding expression functions 2 | 3 | The framework interprets expressions by running its own implementations of the expression functions, and these implementations might contain bugs.
4 | You can override their implementation as illustrated below: 5 | 6 | ```python 7 | FunctionsRepository.register("concat", lambda arguments: "".join(arguments)) 8 | FunctionsRepository.register("trim", lambda text, trim_argument: text.strip(trim_argument[0])) 9 | ``` 10 | -------------------------------------------------------------------------------- /docs/basic/getting_started.md: -------------------------------------------------------------------------------- 1 | # Getting started 2 | 3 | This _getting started_ guide focuses on novice users who are not familiar with Python or package management. It guides the user through the process of downloading the data factory pipeline files, setting up a Python project, and installing the framework so that they can start writing tests. 4 | 5 | >For experienced Python and package management users, you can skip this page and go directly to the [repository setup](repository_setup.md) page. 6 | 7 | ## Install dotnet runtime 8 | 9 | Install the dotnet runtime (not SDK) from [here](https://dotnet.microsoft.com/en-us/download/dotnet/8.0). This is required to run some expression functions on dotnet, just like in Data Factory. 10 | 11 | ## Data Factory pipeline files 12 | 13 | The framework is designed to work with the `json` files that define the data factory pipelines and activities. These files can be downloaded from your data factory environment as described in the [repository setup](repository_setup.md) page. 14 | 15 | ## Setting up the Python project 16 | 17 | For Visual Studio Code, the guidance is: 18 | 19 | 1. Create a new folder called `tests` for the test project, alongside the data factory pipeline files. 20 | 2. Open the new folder in Visual Studio Code. 21 | 3. Install the framework by installing the library from the terminal with pip: 22 | 23 | ```bash 24 | pip install data-factory-testing-framework 25 | ``` 26 | 27 | 4. Install pytest as the testing library. All examples in this documentation use pytest. 28 | 29 | ```bash 30 | pip install pytest 31 | ``` 32 | 33 | 5. Download the pipeline files from the data factory environment and place them in the project folder as described in the [repository setup](repository_setup.md) page. 34 | 35 | Additional resources: 36 | 37 | * [Get Started Tutorial for Python in Visual Studio Code](https://code.visualstudio.com/docs/python/python-tutorial) 38 | * [Integrated Terminal in Visual Studio Code](https://code.visualstudio.com/docs/terminal/basics) 39 | * [pytest: helps you write better programs — pytest documentation](https://docs.pytest.org/en/7.4.x/) 40 | 41 | Once the setup is finished, reading the following pages is recommended to learn how to write tests for the data factory: 42 | 43 | 1. [Initializing the framework](installing_and_initializing_framework.md) (make sure to initialize the framework's root folder with the path to the folder containing the pipeline definitions). 44 | 2. [Activity testing](activity_testing.md) 45 | 3. [Pipeline testing](pipeline_testing.md) 46 | -------------------------------------------------------------------------------- /docs/basic/installing_and_initializing_framework.md: -------------------------------------------------------------------------------- 1 | # Installing and initializing the framework 2 | 3 | ## Install dotnet runtime 4 | 5 | Install the dotnet runtime (not SDK) from [here](https://dotnet.microsoft.com/en-us/download/dotnet/8.0). 6 | Version 8.0 is recommended.
7 | The dotnet runtime is required to evaluate the Data Factory expression functions on dotnet, just like in Data Factory. 8 | 9 | ## Installing the framework 10 | 11 | The framework is available as a Python package on PyPI. You can install it using your preferred package manager: [data-factory-testing-framework](https://pypi.org/project/data-factory-testing-framework/). 12 | 13 | ## Initializing the framework 14 | 15 | To initialize the framework, you need to create a `TestFramework` instance. This instance is the entry point to the framework and provides access to the pipeline and activity definitions. Specify the type of data factory (e.g., Fabric, or DataFactory for Azure Data Factory) and pass the path to the folder containing the pipeline definitions to the `TestFramework` constructor. 16 | 17 | ```python 18 | from data_factory_testing_framework import TestFramework, TestFrameworkType 19 | 20 | test_framework = TestFramework( 21 | framework_type=TestFrameworkType.DataFactory, 22 | root_folder_path='/factory', 23 | ) 24 | ``` 25 | 26 | The TestFramework will automatically load all the pipeline and activity definitions from the specified folder and make them available through the `repository` property. Pipelines can easily be retrieved by name: 27 | 28 | ```python 29 | pipeline = test_framework.get_pipeline_by_name("batch_job") 30 | ``` 31 | 32 | Activities can be retrieved from the pipeline by name: 33 | 34 | ```python 35 | activity = pipeline.get_activity_by_name("webactivity_name") 36 | ``` 37 | 38 | See the following pages for more information on how to write tests for activities and pipelines: 39 | 40 | 1. [Activity testing](activity_testing.md) 41 | 2. [Pipeline testing](pipeline_testing.md) 42 | -------------------------------------------------------------------------------- /docs/basic/repository_setup.md: -------------------------------------------------------------------------------- 1 | # Repository setup 2 | 3 | ## Git integration 4 | 5 | To be able to write tests for data factory, the pipeline and activity definitions need to be available. The recommended way to do this is to sync the Data Factory instance to a git repository, so that a `tests` folder can be created in the same repository and tests can be written for your data factory. The git integration process can be found here: 6 | 7 | 1. [Fabric - Git integration process](https://learn.microsoft.com/fabric/cicd/git-integration/git-integration-process) 8 | 2. [Azure Data Factory - Git integration process](https://learn.microsoft.com/azure/data-factory/source-control) 9 | 3. [Azure Synapse Analytics - Git integration process](https://learn.microsoft.com/en-us/azure/synapse-analytics/cicd/source-control) 10 | 11 | ### Alternative for Azure Data Factory and Azure Synapse Analytics 12 | 13 | To download a single JSON file for testing purposes, follow these steps: 14 | 15 | 1. Open the Data Factory or Synapse Analytics instance, and open the pipeline to be tested. 16 | 2. Click on the action ellipsis. 17 | 3. Click "Download support files". 18 | 4. Extract the zip file containing the pipeline definition into a folder of your choice. 19 | 20 | > Remember the location of this folder, as it will be needed to initialize the framework. 21 | 22 | ![Download support files](../images/download_support_files.png) 23 | 24 | Once the repository is set up, the framework can be installed and initialized as described in the [installing and initializing the framework](installing_and_initializing_framework.md) page.
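To make the last step concrete, here is a minimal sketch of how the extracted folder is later handed to the framework. It assumes the support files were extracted to a folder named `pipeline` next to your tests; adjust `root_folder_path` to wherever you extracted them (see the [installing and initializing the framework](installing_and_initializing_framework.md) page for details):

```python
from data_factory_testing_framework import TestFramework, TestFrameworkType

# "pipeline" is a placeholder folder name: point root_folder_path at the folder
# where the downloaded support files (the pipeline JSON definitions) were extracted.
test_framework = TestFramework(
    framework_type=TestFrameworkType.DataFactory,
    root_folder_path="pipeline",
)
```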
25 | -------------------------------------------------------------------------------- /docs/basic/state.md: -------------------------------------------------------------------------------- 1 | # State 2 | 3 | The framework provides a `PipelineRunState` class that represents the state of a pipeline run. It's used to evaluate how an activity behaves for a specific input. 4 | 5 | ## Interface 6 | 7 | The constructor of the `PipelineRunState` class accepts the following arguments: 8 | 9 | * `parameters`: A list of `RunParameter` instances representing the input parameters of the pipeline run. 10 | * `variables`: A list of `PipelineRunVariable` instances representing the variables of the pipeline run. 11 | * `activity_results`: A list of activity results that can be used to simulate the output of activities in the pipeline run. 12 | * `iteration_item`: An optional iteration item that can be used to simulate the iteration item of a ForEach activity. 13 | 14 | The `PipelineRunState` class has the following methods: 15 | 16 | * `add_activity_result`: Adds a result for an activity to the state. This is useful if an expression in an activity references another activity's output, like: `@activity('another_activity_name').output.some_field`. 17 | * `set_iteration_item`: Sets the iteration item of the state. This is useful if an expression in an activity references the iteration item of a ForEach activity, like: `@item().JobId`. 18 | 19 | ## Usage 20 | 21 | ### Parameters and variables 22 | 23 | In the scenario where an activity has an expression that references a global parameter called `BaseUrl` and a variable called `JobName`, the `PipelineRunState` can be used to simulate the evaluation of the expression. 24 | 25 | ```python 26 | state = PipelineRunState( 27 | parameters=[ 28 | RunParameter(RunParameterType.Global, "BaseUrl", "https://example.com"), 29 | ], 30 | variables=[ 31 | PipelineRunVariable("JobName", "Job-123"), 32 | ]) 33 | ``` 34 | 35 | The following `RunParameterType` values are available: 36 | 37 | * `RunParameterType.Parameter` to set a pipeline parameter: `@pipeline().parameters.<parameter name>` 38 | * `RunParameterType.Global` to set a global parameter: `@pipeline().globalParameters.<parameter name>` 39 | * `RunParameterType.System` to set a system parameter: `@pipeline().<system parameter name>` 40 | * `RunParameterType.Dataset` to set a dataset parameter: `@dataset().<parameter name>` 41 | * `RunParameterType.LinkedService` to set a linked service parameter: `@linkedService().<parameter name>` 42 | * `RunParameterType.LibraryVariables` to set a library variable: `@pipeline().libraryVariables.<variable name>` 43 | 44 | ### Activity results 45 | 46 | In the scenario where an activity has an expression that references the output of another activity, like `@activity('another_activity_name').output.some_field`, the `PipelineRunState` can be used to configure the output of the `another_activity_name` activity. 47 | 48 | Via the constructor: 49 | 50 | ```python 51 | state = PipelineRunState( 52 | activity_results=[ 53 | ActivityResult("another_activity_name", DependencyCondition.SUCCEEDED, {"some_field": "some_value!"}), 54 | ]) 55 | ``` 56 | 57 | Via a method on an existing state: 58 | 59 | ```python 60 | state.add_activity_result("another_activity_name", DependencyCondition.SUCCEEDED, {"some_field": "some_value!"}) 61 | ``` 62 | 63 | ### Iteration item 64 | 65 | In the scenario where an activity has an expression that references the iteration item of a ForEach activity, like `@item().JobId`, the `PipelineRunState` can be used to configure the iteration item.
66 | 67 | ```python 68 | state.set_iteration_item({"JobId": "123"}) 69 | ``` 70 | -------------------------------------------------------------------------------- /docs/images/download_support_files.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/docs/images/download_support_files.png -------------------------------------------------------------------------------- /examples/.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the 2 | // README at: https://github.com/devcontainers/templates/tree/main/src/python 3 | { 4 | "name": "Python 3", 5 | // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile 6 | "image": "mcr.microsoft.com/vscode/devcontainers/python:0-3.11", 7 | "features": { 8 | "ghcr.io/devcontainers-contrib/features/bandit:2": {}, 9 | "ghcr.io/devcontainers-contrib/features/black:2": {}, 10 | "ghcr.io/devcontainers-contrib/features/curl-apt-get:1": {}, 11 | "ghcr.io/devcontainers-contrib/features/flake8:2": {}, 12 | "ghcr.io/devcontainers-contrib/features/pylint:2": {} 13 | }, 14 | "customizations": { 15 | "vscode": { 16 | "extensions": [ 17 | "ms-python.python", 18 | "ms-python.black-formatter", 19 | "ms-python.flake8", 20 | "eamodio.gitlens", 21 | "VisualStudioExptTeam.vscodeintellicode", 22 | "VisualStudioExptTeam.intellicode-api-usage-examples", 23 | "DavidAnson.vscode-markdownlint", 24 | "yzhang.markdown-all-in-one" 25 | ] 26 | } 27 | }, 28 | // Features to add to the dev container. More info: https://containers.dev/features. 29 | // "features": {}, 30 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 31 | // "forwardPorts": [], 32 | // Use 'postCreateCommand' to run commands after the container is created. 33 | "postCreateCommand": "bash ./.devcontainer/postCreateCommand.sh" 34 | // Configure tool-specific properties. 35 | // "customizations": {}, 36 | // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. 37 | // "remoteUser": "root", 38 | //"runArgs": ["--env-file","src/.env"] 39 | } 40 | -------------------------------------------------------------------------------- /examples/.devcontainer/postCreateCommand.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | python -m pip install --upgrade pip 3 | pip install -r ./.devcontainer/requirements.txt 4 | -------------------------------------------------------------------------------- /examples/.devcontainer/requirements.txt: -------------------------------------------------------------------------------- 1 | python-dateutil 2 | python-dotenv 3 | pytest 4 | data-factory-testing-framework 5 | -------------------------------------------------------------------------------- /examples/data_factory/batch_job/README.md: -------------------------------------------------------------------------------- 1 | # Batch Job 2 | 3 | This pipeline is an example of how a batch job can be triggered from an Azure Data Factory pipeline. 4 | It configures a set of variables, creates a storage container to be used by the batch job, triggers the job, and monitors it; 5 | once the job is complete, it moves the output files to another storage account and finally deletes the storage container.
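A functional test for this example follows the same pattern as the other examples in this repository. The sketch below only illustrates the shape of such a test: the pipeline is assumed to be named `batch_job` (matching `pipeline/batch_job.json`), and the `JobId` parameter is a made-up placeholder; the real parameters and assertions live in `test_data_factory_batchjob_functional.py` and `test_data_factory_batchjob_unit.py` next to this README.

```python
import pytest
from data_factory_testing_framework import TestFramework, TestFrameworkType
from data_factory_testing_framework.state import RunParameter, RunParameterType


def test_batch_job_pipeline(request: pytest.FixtureRequest) -> None:
    # Point the framework at the folder containing this example's pipeline definition.
    test_framework = TestFramework(
        framework_type=TestFrameworkType.DataFactory, root_folder_path=request.fspath.dirname
    )
    pipeline = test_framework.get_pipeline_by_name("batch_job")

    # "JobId" is a placeholder; supply the pipeline's actual parameters here.
    activities = test_framework.evaluate_pipeline(
        pipeline=pipeline,
        parameters=[RunParameter(RunParameterType.Pipeline, "JobId", "123")],
    )

    # Walk the evaluated activities one by one and assert on their evaluated properties.
    first_activity = next(activities)
    assert first_activity.name is not None
```

Because `evaluate_pipeline` yields the activities lazily, the output of an earlier activity can be stubbed with `set_result` before the next one is evaluated, as the `copy_blobs` example demonstrates.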
6 | 7 | ![batch_job.png](batch_job.png) 8 | -------------------------------------------------------------------------------- /examples/data_factory/batch_job/batch_job.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/examples/data_factory/batch_job/batch_job.png -------------------------------------------------------------------------------- /examples/data_factory/copy_blobs/README.md: -------------------------------------------------------------------------------- 1 | # Copy Blobs 2 | 3 | This is an example pipeline which intends to list all the blobs in a given container and copies these blobs to another container 4 | 5 | ![image](copy_blobs.png) 6 | 7 | The pipeline has two activities: 8 | 9 | 1. **List folders**: Web activity to list all blobs in a container that has a given prefix 10 | 2. **For each activity**: Iterates over each item in the list returned above and executes the sub-activity on each item. 11 | 12 | 2.1. **Copy files to destination**: Copy activity which copies the blobs to a given destination. 13 | -------------------------------------------------------------------------------- /examples/data_factory/copy_blobs/copy_blobs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/examples/data_factory/copy_blobs/copy_blobs.png -------------------------------------------------------------------------------- /examples/data_factory/copy_blobs/pipeline/copy_blobs.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "copy_blobs", 3 | "properties": { 4 | "activities": [ 5 | { 6 | "name": "List Folders", 7 | "type": "WebActivity", 8 | "dependsOn": [], 9 | "policy": { 10 | "timeout": "0.12:00:00", 11 | "retry": 0, 12 | "retryIntervalInSeconds": 30, 13 | "secureOutput": false, 14 | "secureInput": false 15 | }, 16 | "userProperties": [], 17 | "typeProperties": { 18 | "url": { 19 | "value": "@concat('https://',pipeline().globalParameters.SourceStorageAccountName,'.blob.core.windows.net/',pipeline().parameters.SourceContainerName,'?restype=container&comp=list&prefix=',pipeline().parameters.SourceFolderPrefix,'&delimiter=$SourceBlobDelimiter')", 20 | "type": "Expression" 21 | }, 22 | "method": "GET", 23 | "headers": { 24 | "x-ms-version": "2023-01-03" 25 | }, 26 | "authentication": { 27 | "type": "MSI", 28 | "resource": "https://storage.azure.com" 29 | } 30 | } 31 | }, 32 | { 33 | "name": "For Each SourceFolder", 34 | "type": "ForEach", 35 | "dependsOn": [ 36 | { 37 | "activity": "List Folders", 38 | "dependencyConditions": [ 39 | "Succeeded" 40 | ] 41 | } 42 | ], 43 | "userProperties": [], 44 | "typeProperties": { 45 | "items": { 46 | "value": "@xpath(xml(activity('List Folders').output.Response),'/EnumerationResults/Blobs/BlobPrefix/Name/text()')", 47 | "type": "Expression" 48 | }, 49 | "activities": [ 50 | { 51 | "name": "Copy files to Destination", 52 | "type": "Copy", 53 | "dependsOn": [], 54 | "policy": { 55 | "timeout": "0.12:00:00", 56 | "retry": 0, 57 | "retryIntervalInSeconds": 30, 58 | "secureOutput": false, 59 | "secureInput": false 60 | }, 61 | "userProperties": [], 62 | "typeProperties": { 63 | "source": { 64 | "type": "BinarySource", 65 | "storeSettings": { 66 | "type": "AzureBlobStorageReadSettings", 67 | "recursive": true, 
68 | "wildcardFolderPath": { 69 | "value": "@item()", 70 | "type": "Expression" 71 | }, 72 | "deleteFilesAfterCompletion": false 73 | }, 74 | "formatSettings": { 75 | "type": "BinaryReadSettings" 76 | } 77 | }, 78 | "sink": { 79 | "type": "BinarySink", 80 | "storeSettings": { 81 | "type": "AzureBlobStorageWriteSettings" 82 | } 83 | }, 84 | "enableStaging": false 85 | }, 86 | "inputs": [ 87 | { 88 | "referenceName": "DynamicBlobStorage", 89 | "type": "DatasetReference", 90 | "parameters": { 91 | "ServiceURI": { 92 | "value": "@concat('https://',pipeline().globalParameters.SourceStorageAccountName,'.blob.core.windows.net')", 93 | "type": "Expression" 94 | }, 95 | "ContainerName": { 96 | "value": "@pipeline().parameters.SourceContainerName", 97 | "type": "Expression" 98 | }, 99 | "FolderName": { 100 | "value": "@coalesce(null)", 101 | "type": "Expression" 102 | } 103 | } 104 | } 105 | ], 106 | "outputs": [ 107 | { 108 | "referenceName": "DynamicBlobStorage", 109 | "type": "DatasetReference", 110 | "parameters": { 111 | "ServiceURI": { 112 | "value": "@concat('https://',pipeline().parameters.SinkStorageAccountName,'.blob.core.windows.net')", 113 | "type": "Expression" 114 | }, 115 | "ContainerName": { 116 | "value": "@pipeline().parameters.SinkContainerName", 117 | "type": "Expression" 118 | }, 119 | "FolderName": { 120 | "value": "@pipeline().parameters.SinkFolderName", 121 | "type": "Expression" 122 | } 123 | } 124 | } 125 | ] 126 | } 127 | ] 128 | } 129 | } 130 | ], 131 | "parameters": { 132 | "SourceContainerName": { 133 | "type": "string" 134 | }, 135 | "SourceFolderPrefix": { 136 | "type": "string" 137 | }, 138 | "SinkStorageAccountName": { 139 | "type": "string" 140 | }, 141 | "SinkContainerName": { 142 | "type": "string" 143 | }, 144 | "SinkFolderName": { 145 | "type": "string" 146 | } 147 | }, 148 | "folder": { 149 | "name": "batch" 150 | }, 151 | "annotations": [] 152 | } 153 | } 154 | -------------------------------------------------------------------------------- /examples/data_factory/copy_blobs/test_data_factory_copy_blobs_functional.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from data_factory_testing_framework import TestFramework, TestFrameworkType 3 | from data_factory_testing_framework.state import ( 4 | DependencyCondition, 5 | RunParameter, 6 | RunParameterType, 7 | ) 8 | 9 | 10 | def test_copy_blobs_pipeline(request: pytest.FixtureRequest) -> None: 11 | # Arrange 12 | test_framework = TestFramework( 13 | framework_type=TestFrameworkType.DataFactory, root_folder_path=request.fspath.dirname 14 | ) 15 | pipeline = test_framework.get_pipeline_by_name("copy_blobs") 16 | 17 | # Act 18 | activities = test_framework.evaluate_pipeline( 19 | pipeline=pipeline, 20 | parameters=[ 21 | RunParameter(RunParameterType.Global, "SourceStorageAccountName", "sourcestorageaccount"), 22 | RunParameter(RunParameterType.Pipeline, "SourceContainerName", "sourcecontainer"), 23 | RunParameter(RunParameterType.Pipeline, "SourceFolderPrefix", "sourcefolder"), 24 | RunParameter(RunParameterType.Pipeline, "SinkStorageAccountName", "sinkstorageaccount"), 25 | RunParameter(RunParameterType.Pipeline, "SinkContainerName", "sinkcontainer"), 26 | RunParameter(RunParameterType.Pipeline, "SinkFolderName", "sinkfolder"), 27 | ], 28 | ) 29 | 30 | # Assert 31 | list_folder_activity = next(activities) 32 | assert list_folder_activity.name == "List Folders" 33 | assert ( 34 | list_folder_activity.type_properties["url"].result 35 | == 
"https://sourcestorageaccount.blob.core.windows.net/sourcecontainer?restype=container&comp=list&prefix=sourcefolder&delimiter=$SourceBlobDelimiter" 36 | ) 37 | assert list_folder_activity.type_properties["method"] == "GET" 38 | list_folder_activity.set_result( 39 | result=DependencyCondition.SUCCEEDED, 40 | output={ 41 | "Response": """ 42 | 43 | testfolder 44 | $SourceBlobDelimiter 45 | 46 | 47 | testfolder_1/$SourceBlobDelimiter 48 | 49 | 50 | testfolder_2/$SourceBlobDelimiter 51 | 52 | 53 | 54 | """ 55 | }, 56 | ) 57 | 58 | copy_activity = next(activities) 59 | 60 | assert copy_activity.name == "Copy files to Destination" 61 | assert copy_activity.type == "Copy" 62 | assert ( 63 | copy_activity.type_properties["source"]["storeSettings"]["wildcardFolderPath"].result 64 | == "testfolder_1/$SourceBlobDelimiter" 65 | ) 66 | 67 | copy_activity = next(activities) 68 | assert copy_activity.name == "Copy files to Destination" 69 | assert copy_activity.type == "Copy" 70 | assert ( 71 | copy_activity.type_properties["source"]["storeSettings"]["wildcardFolderPath"].result 72 | == "testfolder_2/$SourceBlobDelimiter" 73 | ) 74 | 75 | pytest.raises(StopIteration, lambda: next(activities)) 76 | -------------------------------------------------------------------------------- /examples/fabric/batch_job/README.md: -------------------------------------------------------------------------------- 1 | # Batch Job 2 | 3 | This pipeline is an example on how a batch job can be triggered by a Fabric pipeline. 4 | It configures a set of variables, create a storage container to be used by the batch job, trigger the job, monitors it, 5 | once complete it moves the output files to another storage account and finally deletes the storage container. 6 | 7 | ![Batch Job.png](batch_job.png) 8 | -------------------------------------------------------------------------------- /examples/fabric/batch_job/batch_job.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/examples/fabric/batch_job/batch_job.png -------------------------------------------------------------------------------- /examples/fabric/batch_job/pipeline/.platform: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://developer.microsoft.com/json-schemas/fabric/gitIntegration/platformProperties/2.0.0/schema.json", 3 | "metadata": { 4 | "type": "Pipeline", 5 | "displayName": "batch_job" 6 | }, 7 | "config": { 8 | "version": "2.0", 9 | "logicalId": "9c079e1a-3d08-43b7-aa02-eb0fb7154b11" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /examples/fabric/nested_ingestion_pipeline/fabric/pl_ingestion.DataPipeline/.platform: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://developer.microsoft.com/json-schemas/fabric/gitIntegration/platformProperties/2.0.0/schema.json", 3 | "metadata": { 4 | "type": "DataPipeline", 5 | "displayName": "pl_ingestion" 6 | }, 7 | "config": { 8 | "version": "2.0", 9 | "logicalId": "f2a51fa4-bd34-4b81-86a5-9c88a446415f" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /examples/fabric/nested_ingestion_pipeline/fabric/pl_main.DataPipeline/.platform: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": 
"https://developer.microsoft.com/json-schemas/fabric/gitIntegration/platformProperties/2.0.0/schema.json", 3 | "metadata": { 4 | "type": "DataPipeline", 5 | "displayName": "pl_main" 6 | }, 7 | "config": { 8 | "version": "2.0", 9 | "logicalId": "67388440-2c26-46b6-a493-9e4e48c037d9" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /examples/fabric/nested_ingestion_pipeline/fabric/pl_main.DataPipeline/pipeline-content.json: -------------------------------------------------------------------------------- 1 | { 2 | "properties": { 3 | "activities": [ 4 | { 5 | "type": "Lookup", 6 | "typeProperties": { 7 | "source": { 8 | "type": "JsonSource", 9 | "storeSettings": { 10 | "type": "LakehouseReadSettings", 11 | "recursive": true, 12 | "enablePartitionDiscovery": false 13 | }, 14 | "formatSettings": { 15 | "type": "JsonReadSettings" 16 | } 17 | }, 18 | "datasetSettings": { 19 | "type": "Json", 20 | "typeProperties": { 21 | "location": { 22 | "type": "LakehouseLocation", 23 | "folderPath": "config", 24 | "fileName": "lh_config.json" 25 | } 26 | }, 27 | "schema": {}, 28 | "linkedService": { 29 | "properties": { 30 | "type": "Lakehouse", 31 | "typeProperties": { 32 | "artifactId": "dd0b83a5-5f0f-4ea9-bb05-368b4c37d15e", 33 | "workspaceId": "00000000-0000-0000-0000-000000000000", 34 | "rootFolder": "Files" 35 | }, 36 | "annotations": [] 37 | }, 38 | "name": "testing_framework" 39 | }, 40 | "annotations": [] 41 | } 42 | }, 43 | "policy": { 44 | "timeout": "0.12:00:00", 45 | "retry": 0, 46 | "retryIntervalInSeconds": 30, 47 | "secureInput": false, 48 | "secureOutput": false 49 | }, 50 | "name": "Read Configuration File", 51 | "dependsOn": [] 52 | }, 53 | { 54 | "type": "ForEach", 55 | "typeProperties": { 56 | "items": { 57 | "value": "@activity('Read Configuration File').output.firstRow.lakeHouseProperties", 58 | "type": "Expression" 59 | }, 60 | "activities": [ 61 | { 62 | "type": "IfCondition", 63 | "typeProperties": { 64 | "expression": { 65 | "value": "@or(equals(item().lastUpdatedDatalake,''),greaterOrEquals(item().lastUpdatedSourceSystem,addSeconds(item().lastUpdatedDatalake,-1)))", 66 | "type": "Expression" 67 | }, 68 | "ifTrueActivities": [ 69 | { 70 | "type": "ExecutePipeline", 71 | "typeProperties": { 72 | "pipeline": { 73 | "referenceName": "f2a51fa4-bd34-4b81-86a5-9c88a446415f", 74 | "type": "PipelineReference" 75 | }, 76 | "parameters": { 77 | "dynamicmonth": { 78 | "value": "@item().month", 79 | "type": "Expression" 80 | }, 81 | "dynamicyear": { 82 | "value": "@item().year", 83 | "type": "Expression" 84 | } 85 | }, 86 | "waitOnCompletion": true 87 | }, 88 | "name": "Invoke Ingestion Pipeline", 89 | "dependsOn": [] 90 | } 91 | ], 92 | "ifFalseActivities": [] 93 | }, 94 | "name": "If New Or Updated", 95 | "dependsOn": [] 96 | } 97 | ] 98 | }, 99 | "name": "ForEachYearMonthPair", 100 | "dependsOn": [ 101 | { 102 | "activity": "Read Configuration File", 103 | "dependencyConditions": [ 104 | "Succeeded" 105 | ] 106 | } 107 | ] 108 | } 109 | ], 110 | "annotations": [] 111 | } 112 | } 113 | -------------------------------------------------------------------------------- /examples/fabric/nested_ingestion_pipeline/fabric/tests/test_pl_ingestion_activity.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | 4 | import pytest 5 | from data_factory_testing_framework import TestFramework, TestFrameworkType 6 | from data_factory_testing_framework.models import Pipeline 7 | from 
data_factory_testing_framework.state import ( 8 | PipelineRunState, 9 | RunParameter, 10 | RunParameterType, 11 | ) 12 | 13 | 14 | @pytest.fixture 15 | def test_framework(request: pytest.FixtureRequest) -> TestFramework: 16 | return TestFramework( 17 | framework_type=TestFrameworkType.Fabric, 18 | root_folder_path=os.path.join(Path(request.fspath.dirname).parent), 19 | ) 20 | 21 | 22 | @pytest.fixture 23 | def pipeline(test_framework: TestFramework) -> Pipeline: 24 | return test_framework.get_pipeline_by_name("pl_ingestion") 25 | 26 | 27 | def test_copy_nyc_data_from_web_to_adls2(pipeline: Pipeline) -> None: 28 | # Arrange 29 | activity = pipeline.get_activity_by_name("Copy NYCData from Web to ADLS") 30 | state = PipelineRunState( 31 | parameters=[ 32 | RunParameter(RunParameterType.Pipeline, name="dynamicmonth", value="01"), 33 | RunParameter(RunParameterType.Pipeline, name="dynamicyear", value="2023"), 34 | ], 35 | ) 36 | 37 | # Act 38 | activity.evaluate(state) 39 | 40 | # Assert 41 | assert ( 42 | activity.type_properties["source"]["datasetSettings"]["typeProperties"]["location"]["relativeUrl"].result 43 | == "yellow_tripdata_2023-01.parquet" 44 | ) 45 | assert ( 46 | activity.type_properties["sink"]["datasetSettings"]["typeProperties"]["location"]["fileName"].result 47 | == "yellow_tripdata_2023-01.parquet" 48 | ) 49 | assert ( 50 | activity.type_properties["sink"]["datasetSettings"]["typeProperties"]["location"]["folderPath"].result 51 | == "nyc_taxi_data/2023/01" 52 | ) 53 | 54 | 55 | def test_copy_nyc_data_from_adls2_to_lakehouse(pipeline: Pipeline) -> None: 56 | # Arrange 57 | activity = pipeline.get_activity_by_name("Copy NYCData from ADLS to Lakehouse") 58 | state = PipelineRunState( 59 | parameters=[ 60 | RunParameter(RunParameterType.Pipeline, name="dynamicmonth", value="01"), 61 | RunParameter(RunParameterType.Pipeline, name="dynamicyear", value="2023"), 62 | ], 63 | ) 64 | 65 | # Act 66 | activity.evaluate(state) 67 | 68 | # Assert 69 | assert ( 70 | activity.type_properties["source"]["datasetSettings"]["typeProperties"]["location"]["fileSystem"] 71 | == "nyctaxidata" 72 | ) 73 | assert ( 74 | activity.type_properties["source"]["datasetSettings"]["typeProperties"]["location"]["folderPath"].result 75 | == "2023/01" 76 | ) 77 | assert ( 78 | activity.type_properties["source"]["datasetSettings"]["typeProperties"]["location"]["fileName"].result 79 | == "yellow_tripdata_2023-01.parquet" 80 | ) 81 | assert ( 82 | activity.type_properties["sink"]["datasetSettings"]["typeProperties"]["location"]["fileName"].result 83 | == "yellow_tripdata_2023-01.parquet" 84 | ) 85 | assert ( 86 | activity.type_properties["sink"]["datasetSettings"]["typeProperties"]["location"]["folderPath"].result 87 | == "nyc_taxi_data/2023/01" 88 | ) 89 | -------------------------------------------------------------------------------- /examples/fabric/nested_ingestion_pipeline/pl_ingestion_pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/examples/fabric/nested_ingestion_pipeline/pl_ingestion_pipeline.png -------------------------------------------------------------------------------- /examples/fabric/nested_ingestion_pipeline/pl_main_pipeline.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/examples/fabric/nested_ingestion_pipeline/pl_main_pipeline.png -------------------------------------------------------------------------------- /examples/fabric/simple_web_hook/Readme.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/examples/fabric/simple_web_hook/Readme.md -------------------------------------------------------------------------------- /examples/fabric/simple_web_hook/fabric/ExamplePipeline.DataPipeline/.platform: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://developer.microsoft.com/json-schemas/fabric/gitIntegration/platformProperties/2.0.0/schema.json", 3 | "metadata": { 4 | "type": "DataPipeline", 5 | "displayName": "ExamplePipeline" 6 | }, 7 | "config": { 8 | "version": "2.0", 9 | "logicalId": "c7986cc7-a6df-45fc-b4b6-dfc3cbddf2a6" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /examples/fabric/simple_web_hook/fabric/ExamplePipeline.DataPipeline/pipeline-content.json: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/examples/fabric/simple_web_hook/fabric/ExamplePipeline.DataPipeline/pipeline-content.json -------------------------------------------------------------------------------- /examples/fabric/simple_web_hook/test_fabric_simple_webhook.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | from data_factory_testing_framework import TestFramework, TestFrameworkType 5 | 6 | 7 | def test_simple_web_hook(request: pytest.FixtureRequest) -> None: 8 | # Arrange 9 | fabric_folder = Path(request.fspath.dirname, "fabric") 10 | test_framework = TestFramework(framework_type=TestFrameworkType.Fabric, root_folder_path=fabric_folder) 11 | pipeline = test_framework.get_pipeline_by_name("ExamplePipeline") 12 | 13 | # Act 14 | activities = test_framework.evaluate_pipeline(pipeline, []) 15 | 16 | # Assert 17 | activity = next(activities) 18 | assert activity.name == "Set Input Data" 19 | 20 | activity = next(activities) 21 | assert activity.name == "Call Webhook" 22 | 23 | activity = next(activities) 24 | assert activity.name == "Call Webhook" 25 | 26 | # Assert that there are no more activities 27 | with pytest.raises(StopIteration): 28 | next(activities) 29 | -------------------------------------------------------------------------------- /examples/synapse/copy_blobs/README.md: -------------------------------------------------------------------------------- 1 | # Copy Blobs 2 | 3 | This is an example pipeline which intends to list all the blobs in a given container and copies these blobs to another container 4 | 5 | ![image](copy_blobs.png) 6 | 7 | The pipeline has two activities: 8 | 9 | 1. **List folders**: Web activity to list all blobs in a container that has a given prefix 10 | 2. **For each activity**: Iterates over each item in the list returned above and executes the sub-activity on each item. 11 | 12 | 2.1. **Copy files to destination**: Copy activity which copies the blobs to a given destination. 
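Below is a minimal sketch of how a single activity of this pipeline can be evaluated in isolation with the testing framework. It mirrors the setup of `test_synapse_copy_blobs_functional.py` in this folder; the activity name, parameter names, and expected URL are taken from that test (not from the pipeline JSON directly), and the framework type follows the same `TestFrameworkType.DataFactory` choice used there.

```python
import pytest
from data_factory_testing_framework import TestFramework, TestFrameworkType
from data_factory_testing_framework.state import PipelineRunState, RunParameter, RunParameterType


def test_list_folders_url(request: pytest.FixtureRequest) -> None:
    # Arrange: load the pipeline JSON from this folder and pick a single activity
    test_framework = TestFramework(
        framework_type=TestFrameworkType.DataFactory, root_folder_path=request.fspath.dirname
    )
    pipeline = test_framework.get_pipeline_by_name("copy_blobs")
    activity = pipeline.get_activity_by_name("List Folders")

    # Only the parameters referenced by this activity's expressions are needed here
    state = PipelineRunState(
        parameters=[
            RunParameter(RunParameterType.Pipeline, "SourceStorageAccountName", "sourcestorageaccount"),
            RunParameter(RunParameterType.Pipeline, "SourceContainerName", "sourcecontainer"),
            RunParameter(RunParameterType.Pipeline, "SourceFolderPrefix", "sourcefolder"),
        ],
    )

    # Act: evaluate the activity's expressions against the state
    activity.evaluate(state)

    # Assert: the Web activity URL is built from the source parameters
    assert activity.type_properties["url"].result == (
        "https://sourcestorageaccount.blob.core.windows.net/sourcecontainer"
        "?restype=container&comp=list&prefix=sourcefolder&delimiter=$SourceBlobDelimiter"
    )
```

The functional test in this folder goes further: it runs the whole pipeline, feeds a fake `List Folders` response back via `set_result`, and then asserts the `wildcardFolderPath` of each `Copy files to Destination` iteration.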
13 | -------------------------------------------------------------------------------- /examples/synapse/copy_blobs/copy_blobs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/examples/synapse/copy_blobs/copy_blobs.png -------------------------------------------------------------------------------- /examples/synapse/copy_blobs/test_synapse_copy_blobs_functional.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from data_factory_testing_framework import TestFramework, TestFrameworkType 3 | from data_factory_testing_framework.state import ( 4 | DependencyCondition, 5 | RunParameter, 6 | RunParameterType, 7 | ) 8 | 9 | 10 | def test_copy_blobs_pipeline(request: pytest.FixtureRequest) -> None: 11 | # Arrange 12 | test_framework = TestFramework( 13 | framework_type=TestFrameworkType.DataFactory, root_folder_path=request.fspath.dirname 14 | ) 15 | pipeline = test_framework.get_pipeline_by_name("copy_blobs") 16 | 17 | # Act 18 | activities = test_framework.evaluate_pipeline( 19 | pipeline=pipeline, 20 | parameters=[ 21 | RunParameter(RunParameterType.Pipeline, "SourceStorageAccountName", "sourcestorageaccount"), 22 | RunParameter(RunParameterType.Pipeline, "SourceContainerName", "sourcecontainer"), 23 | RunParameter(RunParameterType.Pipeline, "SourceFolderPrefix", "sourcefolder"), 24 | RunParameter(RunParameterType.Pipeline, "SinkStorageAccountName", "sinkstorageaccount"), 25 | RunParameter(RunParameterType.Pipeline, "SinkContainerName", "sinkcontainer"), 26 | RunParameter(RunParameterType.Pipeline, "SinkFolderName", "sinkfolder"), 27 | ], 28 | ) 29 | 30 | # Assert 31 | list_folder_activity = next(activities) 32 | assert list_folder_activity.name == "List Folders" 33 | assert ( 34 | list_folder_activity.type_properties["url"].result 35 | == "https://sourcestorageaccount.blob.core.windows.net/sourcecontainer?restype=container&comp=list&prefix=sourcefolder&delimiter=$SourceBlobDelimiter" 36 | ) 37 | assert list_folder_activity.type_properties["method"] == "GET" 38 | list_folder_activity.set_result( 39 | result=DependencyCondition.SUCCEEDED, 40 | output={ 41 | "Response": """ 42 | 43 | testfolder 44 | $SourceBlobDelimiter 45 | 46 | 47 | testfolder_1/$SourceBlobDelimiter 48 | 49 | 50 | testfolder_2/$SourceBlobDelimiter 51 | 52 | 53 | 54 | """ 55 | }, 56 | ) 57 | 58 | copy_activity = next(activities) 59 | 60 | assert copy_activity.name == "Copy files to Destination" 61 | assert copy_activity.type == "Copy" 62 | assert ( 63 | copy_activity.type_properties["source"]["storeSettings"]["wildcardFolderPath"].result 64 | == "testfolder_1/$SourceBlobDelimiter" 65 | ) 66 | 67 | copy_activity = next(activities) 68 | assert copy_activity.name == "Copy files to Destination" 69 | assert copy_activity.type == "Copy" 70 | assert ( 71 | copy_activity.type_properties["source"]["storeSettings"]["wildcardFolderPath"].result 72 | == "testfolder_2/$SourceBlobDelimiter" 73 | ) 74 | 75 | pytest.raises(StopIteration, lambda: next(activities)) 76 | -------------------------------------------------------------------------------- /poetry.toml: -------------------------------------------------------------------------------- 1 | virtualenvs.in-project = true 2 | -------------------------------------------------------------------------------- /pyproject.toml: 
-------------------------------------------------------------------------------- 1 | [project] 2 | name = "data-factory-testing-framework" 3 | dynamic = ["version"] 4 | authors = [ 5 | {name = "Data Factory Testing Framework", email = "dftf@microsoft.com"}, 6 | ] 7 | requires-python = ">=3.9,<3.14" 8 | classifiers = [ 9 | "Programming Language :: Python :: 3.9", 10 | "Programming Language :: Python :: 3.10", 11 | "Programming Language :: Python :: 3.11", 12 | "Programming Language :: Python :: 3.12", 13 | "Operating System :: OS Independent", 14 | ] 15 | description = "A stand-alone test framework that allows to write unit tests for Data Factory pipelines on Microsoft Fabric, Azure Data Factory and Azure Synapse Analytics." 16 | readme = "README.md" 17 | license = "MIT" 18 | license-files = ["LICENSE"] 19 | keywords = [ 20 | "fabric", 21 | "datafactory", 22 | "synapse analytics", 23 | "unit-testing", 24 | "functional-testing", 25 | "azure" 26 | ] 27 | 28 | dependencies = [ 29 | "lark>=1.1.8,<2.0.0", 30 | "pythonnet>=3.0.3,<4.0.0" 31 | ] 32 | 33 | [project.urls] 34 | Homepage = "https://github.com/microsoft/data-factory-testing-framework" 35 | Documentation = "https://github.com/microsoft/data-factory-testing-framework/blob/main/README.md" 36 | Issues = "https://github.com/microsoft/data-factory-testing-framework/issues" 37 | Source = "https://github.com/microsoft/data-factory-testing-framework.git" 38 | 39 | [tool.poetry] 40 | package-mode = false 41 | 42 | [tool.poetry.dependencies] 43 | python = ">=3.9,<3.14" 44 | lark = "^1.1.8" 45 | pythonnet = "^3.0.3" 46 | 47 | [tool.poetry.group.dev.dependencies] 48 | mutatest = "^3.1.0" 49 | pytest = "^7.4.3" 50 | ruff = "^0.1.5" 51 | pre-commit = "^3.5.0" 52 | astor = "^0.8.1" 53 | docstring-parser = "^0.15" 54 | build = "^1.1.1" 55 | wheel = "^0.43.0" 56 | setuptools = "^70.0" 57 | 58 | [tool.ruff] 59 | select = [ 60 | "A", # flake8 builtins 61 | "ANN", # annotations 62 | "B", # bugbear 63 | "COM", # flake8 commas 64 | "D", # Pydoc style docstrings 65 | "E", # PEP8 conventions 66 | "F", # pyflakes 67 | "N", # PEP8 naming conventions 68 | "I" 69 | ] 70 | ignore = [ 71 | "ANN101", # Ignore missing type annotation for self in method 72 | "D100", # Ignore missing docstring in public module 73 | "D101", 74 | "D102", 75 | "D103", 76 | "D104", # Ignore missing docstring in public package 77 | # ruff format conflicts certain rules (see https://docs.astral.sh/ruff/formatter/#format-suppression) 78 | "E111", 79 | "E114", 80 | "E117", 81 | "COM812", 82 | "COM819", 83 | "D206", 84 | "D300", 85 | "ISC001", 86 | "ISC002", 87 | "Q000", 88 | "Q001", 89 | "Q002", 90 | "Q003", 91 | "W191", 92 | ] 93 | extend-exclude = ["azure_data_factory_testing_framework/data_factory/generated/**"] 94 | line-length = 120 95 | 96 | [tool.ruff.lint.pycodestyle] 97 | max-line-length = 160 # relax line length limit to 140 characters (wrapping happens at 120) 98 | 99 | [tool.ruff.lint.pydocstyle] 100 | convention = "google" 101 | 102 | [tool.pytest.ini_options] 103 | minversion = "7.0" 104 | pythonpath = ["."] 105 | 106 | [tool.setuptools.package-data] 107 | data_factory_testing_framework = [] 108 | 109 | [build-system] 110 | requires = ["setuptools>=61.0", "wheel"] 111 | build-backend = "setuptools.build_meta" 112 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """Custom build script for building the C# project with setuptools. 
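It builds Evaluator.csproj with dotnet and places the resulting assemblies inside the wheel under data_factory_testing_framework/_pythonnet/bin, from where they are loaded via pythonnet (clr.AddReference) when the package is imported.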
2 | 3 | Based on the following discussion: https://github.com/pypa/setuptools/discussions/3762 4 | """ 5 | import shutil 6 | import subprocess 7 | from contextlib import suppress 8 | from pathlib import Path 9 | 10 | from setuptools import Command, setup 11 | from setuptools.command.build import build 12 | 13 | 14 | class CustomCommand(Command): 15 | def initialize_options(self) -> None: 16 | self.pkg_name = self.distribution.get_name().replace("-", "_") 17 | self.bdist_dir = None 18 | 19 | def finalize_options(self) -> None: 20 | with suppress(Exception): 21 | self.bdist_dir = Path(self.get_finalized_command("bdist_wheel").bdist_dir) 22 | 23 | def run(self) -> None: 24 | if self.bdist_dir: 25 | dotnet_path = shutil.which("dotnet") 26 | output_dir = self.bdist_dir / self.pkg_name / "_pythonnet" / "bin" 27 | output_dir.mkdir(parents=True, exist_ok=True) 28 | 29 | if dotnet_path is None: 30 | raise Exception("dotnet not found") 31 | 32 | subprocess.check_call( 33 | [ 34 | dotnet_path, 35 | "build", 36 | "-c", 37 | "Release", 38 | "-o", 39 | str(output_dir), 40 | Path("src", self.pkg_name, "_pythonnet", "Evaluator.csproj"), 41 | ] 42 | ) 43 | 44 | 45 | class CustomBuild(build): 46 | sub_commands = [("build_custom", None)] + build.sub_commands 47 | 48 | 49 | try: 50 | root = Path(__file__).parent 51 | with open(root / "VERSION") as version_file: 52 | version = version_file.read().strip() 53 | except FileNotFoundError: 54 | # Set a default version (for local builds) 55 | version = "0.0.0.dev0" 56 | 57 | setup(cmdclass={"build": CustomBuild, "build_custom": CustomCommand}, version=version) 58 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/__init__.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path as _Path 2 | 3 | from data_factory_testing_framework._test_framework import TestFramework, TestFrameworkType 4 | 5 | try: 6 | root = _Path(__file__).parent 7 | with open(root / "VERSION") as version_file: 8 | version = version_file.read().strip() 9 | except FileNotFoundError: 10 | # Set a default version (for local builds) 11 | version = "0.0.0.dev0" 12 | 13 | __all__ = ["TestFramework", "TestFrameworkType"] 14 | __version__ = version 15 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_deserializers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/src/data_factory_testing_framework/_deserializers/__init__.py -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_deserializers/_deserializer_base.py: -------------------------------------------------------------------------------- 1 | from data_factory_testing_framework._deserializers.shared._activity_deserializer import ( 2 | _get_activity_from_activity_data, 3 | ) 4 | from data_factory_testing_framework._deserializers.shared._data_factory_element_replacer import ( 5 | _find_and_replace_expressions_in_dict, 6 | ) 7 | from data_factory_testing_framework.models import Pipeline 8 | 9 | 10 | def _parse_pipeline_from_json(pipeline_id: str, name: str, json_data: dict) -> Pipeline: 11 | properties = json_data.get("properties", {}) 12 | activities = properties.get("activities", []) 13 | 14 | for activity_data in activities: 15 | 
activities[activities.index(activity_data)] = _get_activity_from_activity_data(activity_data) 16 | 17 | pipeline = Pipeline(pipeline_id, name, **properties) 18 | 19 | _find_and_replace_expressions_in_dict(pipeline) 20 | 21 | return pipeline 22 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_deserializers/_deserializer_data_factory.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from data_factory_testing_framework._deserializers._deserializer_base import _parse_pipeline_from_json 4 | from data_factory_testing_framework.models import Pipeline 5 | 6 | 7 | def parse_data_factory_pipeline_from_pipeline_json(pipeline_json: str) -> Pipeline: 8 | json_data = json.loads(pipeline_json) 9 | name = json_data["name"] 10 | 11 | # The name is used as the id, because this is how Azure Data Factory uniquely identifies pipelines 12 | return _parse_pipeline_from_json(name, name, json_data) 13 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_deserializers/_deserializer_fabric.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from data_factory_testing_framework._deserializers._deserializer_base import _parse_pipeline_from_json 4 | from data_factory_testing_framework.models import Pipeline 5 | 6 | 7 | def parse_fabric_pipeline_from_pipeline_json_files( 8 | pipeline_json: str, config_json: str, metadata_json: str 9 | ) -> Pipeline: 10 | pipeline_logical_id = config_json["logicalId"] 11 | pipeline_name = metadata_json["displayName"] 12 | pipeline_json = json.loads(pipeline_json) 13 | return _parse_pipeline_from_json(pipeline_logical_id, pipeline_name, pipeline_json) 14 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_deserializers/shared/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/src/data_factory_testing_framework/_deserializers/shared/__init__.py -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_deserializers/shared/_activity_deserializer.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from data_factory_testing_framework.models.activities import ( 4 | Activity, 5 | AppendVariableActivity, 6 | ExecutePipelineActivity, 7 | FailActivity, 8 | FilterActivity, 9 | ForEachActivity, 10 | IfConditionActivity, 11 | SetVariableActivity, 12 | SwitchActivity, 13 | UntilActivity, 14 | ) 15 | 16 | 17 | def _get_activity_from_activity_data(activity_data: dict) -> Activity: 18 | type_properties = activity_data["typeProperties"] 19 | if activity_data["type"] == "SetVariable": 20 | return SetVariableActivity(**activity_data) 21 | if activity_data["type"] == "AppendVariable": 22 | return AppendVariableActivity(**activity_data) 23 | elif activity_data["type"] == "Until": 24 | activities = _get_activity_from_activities_data(type_properties["activities"]) 25 | return UntilActivity(activities=activities, **activity_data) 26 | elif activity_data["type"] == "ExecutePipeline": 27 | return ExecutePipelineActivity(**activity_data) 28 | elif activity_data["type"] == "IfCondition": 29 | if_true_activities = ( 30 
| _get_activity_from_activities_data(type_properties["ifTrueActivities"]) 31 | if "ifTrueActivities" in type_properties 32 | else [] 33 | ) 34 | if_false_activities = ( 35 | _get_activity_from_activities_data(type_properties["ifFalseActivities"]) 36 | if "ifFalseActivities" in type_properties 37 | else [] 38 | ) 39 | return IfConditionActivity( 40 | if_true_activities=if_true_activities, if_false_activities=if_false_activities, **activity_data 41 | ) 42 | elif activity_data["type"] == "ForEach": 43 | child_activities = _get_activity_from_activities_data(type_properties["activities"]) 44 | return ForEachActivity(activities=child_activities, **activity_data) 45 | elif activity_data["type"] == "Switch": 46 | default_activities = ( 47 | _get_activity_from_activities_data(type_properties["defaultActivities"]) 48 | if "defaultActivities" in type_properties 49 | else [] 50 | ) 51 | cases_activities = {} 52 | for case in type_properties["cases"]: 53 | case_value = case["value"] 54 | activities = case["activities"] 55 | cases_activities[case_value] = _get_activity_from_activities_data(activities) 56 | return SwitchActivity(default_activities=default_activities, cases_activities=cases_activities, **activity_data) 57 | elif activity_data["type"] == "Filter": 58 | return FilterActivity(**activity_data) 59 | elif activity_data["type"] == "Fail": 60 | return FailActivity(**activity_data) 61 | else: 62 | return Activity(**activity_data) 63 | 64 | 65 | def _get_activity_from_activities_data(activities_data: dict) -> List[Activity]: 66 | activities = [] 67 | for activity_data in activities_data: 68 | activities.append(_get_activity_from_activity_data(activity_data)) 69 | 70 | return activities 71 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_deserializers/shared/_data_factory_element_replacer.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from data_factory_testing_framework.models import DataFactoryElement 4 | 5 | 6 | def _find_and_replace_expressions_in_dict(obj: any, visited: list = None) -> None: 7 | if visited is None: 8 | visited = [] 9 | 10 | if obj in visited: 11 | return 12 | 13 | visited.append(obj) 14 | 15 | # Attributes 16 | attribute_names = [ 17 | attribute for attribute in dir(obj) if not attribute.startswith("_") and not callable(getattr(obj, attribute)) 18 | ] 19 | for attribute_name in attribute_names: 20 | attribute = getattr(obj, attribute_name) 21 | if attribute is None: 22 | continue 23 | 24 | if _is_obj_expression_dict(attribute): 25 | value = _get_obj_expression_value(attribute) 26 | setattr(obj, attribute_name, DataFactoryElement(value)) 27 | else: 28 | _find_and_replace_expressions_in_dict(attribute, visited) 29 | 30 | # Dictionary 31 | if isinstance(obj, dict): 32 | for key in obj.keys(): 33 | if _is_obj_expression_dict(obj[key]): 34 | value = _get_obj_expression_value(obj[key]) 35 | obj[key] = DataFactoryElement(value) 36 | continue 37 | 38 | _find_and_replace_expressions_in_dict(obj[key], visited) 39 | 40 | # List 41 | if isinstance(obj, list): 42 | for item in obj: 43 | if _is_obj_expression_dict(item): 44 | value = _get_obj_expression_value(item) 45 | obj[obj.index(item)] = DataFactoryElement(value) 46 | continue 47 | 48 | _find_and_replace_expressions_in_dict(item, visited) 49 | 50 | 51 | def _is_obj_expression_dict(obj: Any) -> bool: # noqa: ANN401 52 | return ( 53 | isinstance(obj, dict) 54 | and ("type" in obj.keys()) 55 | and 
(obj["type"] == "Expression") 56 | and (("value" in obj.keys()) or ("content" in obj.keys())) 57 | and len(obj.keys()) == 2 58 | ) 59 | 60 | 61 | def _get_obj_expression_value(obj: Any) -> Any: # noqa: ANN401 62 | if "value" in obj.keys(): 63 | return obj["value"] 64 | 65 | if "content" in obj.keys(): 66 | return obj["content"] 67 | 68 | raise ValueError("Expression object does not contain a value or content key") 69 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_enum_meta.py: -------------------------------------------------------------------------------- 1 | # -------------------------------------------------------------------------- 2 | # 3 | # Copyright (c) Microsoft Corporation. All rights reserved. 4 | # 5 | # The MIT License (MIT) 6 | # 7 | # Permission is hereby granted, free of charge, to any person obtaining a copy 8 | # of this software and associated documentation files (the ""Software""), to 9 | # deal in the Software without restriction, including without limitation the 10 | # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or 11 | # sell copies of the Software, and to permit persons to whom the Software is 12 | # furnished to do so, subject to the following conditions: 13 | # 14 | # The above copyright notice and this permission notice shall be included in 15 | # all copies or substantial portions of the Software. 16 | # 17 | # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 22 | # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 23 | # IN THE SOFTWARE. 24 | # 25 | # -------------------------------------------------------------------------- 26 | from enum import Enum, EnumMeta 27 | from typing import Any 28 | 29 | 30 | class CaseInsensitiveEnumMeta(EnumMeta): 31 | """Enum metaclass to allow for interoperability with case-insensitive strings. 32 | 33 | Consuming this metaclass in an SDK should be done in the following manner: 34 | 35 | .. code-block:: python 36 | 37 | from enum import Enum 38 | from azure.core import CaseInsensitiveEnumMeta 39 | 40 | class MyCustomEnum(str, Enum, metaclass=CaseInsensitiveEnumMeta): 41 | FOO = 'foo' 42 | BAR = 'bar' 43 | 44 | """ 45 | 46 | def __getitem__(cls, name: str) -> Any: # noqa: N805, ANN401 47 | return super(CaseInsensitiveEnumMeta, cls).__getitem__(name.upper()) 48 | 49 | def __getattr__(cls, name: str) -> Enum: # noqa: N805 50 | """Return the enum member matching `name`. 51 | 52 | We use __getattr__ instead of descriptors or inserting into the enum 53 | class' __dict__ in order to support `name` and `value` being both 54 | properties for enum members (which live in the class' __dict__) and 55 | enum members themselves. 56 | 57 | :param str name: The name of the enum member to retrieve. 58 | :rtype: ~azure.core.CaseInsensitiveEnumMeta 59 | :return: The enum member matching `name`. 60 | :raises AttributeError: If `name` is not a valid enum member. 
61 | """ 62 | try: 63 | return cls._member_map_[name.upper()] 64 | except KeyError as err: 65 | raise AttributeError(name) from err 66 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_expression_runtime/data_factory_expression/__init__.py: -------------------------------------------------------------------------------- 1 | from .expression_transformer import ExpressionTransformer 2 | 3 | __all__ = [ 4 | "ExpressionTransformer", 5 | ] 6 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_expression_runtime/data_factory_expression/data_factory_to_expression_transformer.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | 3 | from lark import Token, Transformer, Tree, v_args 4 | 5 | 6 | @v_args(tree=True) 7 | class DataFactoryExpressionTransformer(Transformer): 8 | def expression_datafactory_parameters_reference(self, tree: Tree) -> Union[Token, Tree]: 9 | pipeline_property = tree.children[0] 10 | variable_name = Token("EXPRESSION_PIPELINE_PROPERTY", f"{pipeline_property}") 11 | tree = Tree(Token("RULE", "expression_pipeline_reference"), [variable_name]) 12 | return tree 13 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_expression_runtime/data_factory_expression/exceptions.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | 3 | 4 | class ExpressionEvaluationError(Exception): 5 | pass 6 | 7 | 8 | class ExpressionEvaluationInvalidNumberOfChildrenError(ExpressionEvaluationError): 9 | """Expression evaluation invalid number of children error.""" 10 | 11 | def __init__(self, required: int, actual: int) -> None: 12 | """Initialize expression evaluation invalid number of children error.""" 13 | super().__init__(f"Invalid number of children. Required: {required}, Actual: {actual}") 14 | 15 | 16 | class ExpressionEvaluationInvalidChildTypeError(ExpressionEvaluationError): 17 | """Expression evaluation invalid child type error.""" 18 | 19 | def __init__(self, child_index: int, expected_types: Union[tuple[type], type], actual_type: type) -> None: 20 | """Initialize expression evaluation invalid child type error.""" 21 | super().__init__( 22 | f"Invalid child type at index {child_index}. 
Expected: {expected_types}, Actual: {actual_type}" 23 | ) 24 | 25 | 26 | class ExpressionParsingError(Exception): 27 | """Exception raised when an expression cannot be parsed.""" 28 | 29 | pass 30 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_expression_runtime/functions_repository.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | 4 | class FunctionsRepository: 5 | _functions: List[str] = { 6 | "add", 7 | "addDays", 8 | "addHours", 9 | "addMinutes", 10 | "addSeconds", 11 | "addToTime", 12 | "array", 13 | "base64", 14 | "base64ToBinary", 15 | "base64ToString", 16 | "binary", 17 | "bool", 18 | "coalesce", 19 | "concat", 20 | "contains", 21 | "convertFromUtc", 22 | "convertTimeZone", 23 | "convertToUtc", 24 | "createArray", 25 | "dataUri", 26 | "dataUriToBinary", 27 | "dataUriToString", 28 | "dayOfMonth", 29 | "dayOfWeek", 30 | "dayOfYear", 31 | "decodeBase64", 32 | "decodeDataUri", 33 | "decodeUriComponent", 34 | "div", 35 | "encodeUriComponent", 36 | "empty", 37 | "endsWith", 38 | "equals", 39 | "first", 40 | "float", 41 | "formatDateTime", 42 | "getFutureTime", 43 | "getPastTime", 44 | "greater", 45 | "greaterOrEquals", 46 | "guid", 47 | "indexOf", 48 | "int", 49 | "json", 50 | "intersection", 51 | "join", 52 | "last", 53 | "lastIndexOf", 54 | "length", 55 | "less", 56 | "lessOrEquals", 57 | "max", 58 | "min", 59 | "mod", 60 | "mul", 61 | "not", 62 | "rand", 63 | "range", 64 | "replace", 65 | "skip", 66 | "split", 67 | "startOfDay", 68 | "startOfHour", 69 | "startOfMonth", 70 | "startsWith", 71 | "string", 72 | "sub", 73 | "substring", 74 | "subtractFromTime", 75 | "take", 76 | "ticks", 77 | "toLower", 78 | "toUpper", 79 | "trim", 80 | "union", 81 | "uriComponent", 82 | "uriComponentToBinary", 83 | "uriComponentToString", 84 | "utcNow", 85 | "utcnow", 86 | "xml", 87 | "xpath", 88 | "if", 89 | "or", 90 | "and", 91 | } 92 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_pythonnet/Evaluator.csproj: -------------------------------------------------------------------------------- 1 |  2 | 3 | net5.0 4 | false 5 | disable 6 | true 7 | false 8 | false 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_pythonnet/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | 4 | import pythonnet 5 | 6 | pythonnet.load("coreclr") 7 | import clr # noqa: E402 8 | 9 | 10 | def load_dotnet_assemblies() -> None: 11 | # Load the .NET assemblies 12 | for dll in (Path(__file__).parent / "bin").glob("**/*.dll"): 13 | dll = os.path.abspath(dll) 14 | try: 15 | clr.AddReference(dll) 16 | except Exception: 17 | pass 18 | 19 | 20 | load_dotnet_assemblies() 21 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_pythonnet/data_factory_testing_framework_expressions_evaluator.py: -------------------------------------------------------------------------------- 1 | import json 2 | from typing import Union 3 | 4 | # from System import Activator # this is the .NET class that we want to use in Python 5 | from Microsoft.Azure.DataFactoryTestingFramework.Expressions import ( # type: ignore 6 | Evaluator, # this is the .NET class that we want to use in Python 7 | ) 8 | 9 | 
from data_factory_testing_framework.state import PipelineRunState, RunParameterType 10 | 11 | 12 | class DataFactoryTestingFrameworkExpressionsEvaluator: 13 | @staticmethod 14 | def evaluate(expression: str, state: PipelineRunState) -> Union[str, int, float, bool, dict, list]: 15 | evaluator = Evaluator() 16 | parameters = { 17 | "globalParameters": {}, 18 | "parameters": {}, 19 | "libraryVariables": {}, 20 | "dataset": {}, 21 | "linkedService": {}, 22 | } 23 | 24 | for parameter in state.parameters: 25 | if parameter.type == RunParameterType.System: 26 | parameters[parameter.name] = parameter.value 27 | elif parameter.type == RunParameterType.Global: 28 | parameters["globalParameters"][parameter.name] = parameter.value 29 | elif parameter.type == RunParameterType.Pipeline: 30 | parameters["parameters"][parameter.name] = parameter.value 31 | elif parameter.type == RunParameterType.LibraryVariables: 32 | parameters["libraryVariables"][parameter.name] = parameter.value 33 | elif parameter.type == RunParameterType.Dataset: 34 | parameters["dataset"][parameter.name] = parameter.value 35 | elif parameter.type == RunParameterType.LinkedService: 36 | parameters["linkedService"][parameter.name] = parameter.value 37 | 38 | activity_results = {} 39 | for activity in state.activity_results: 40 | activity_result_dir = { 41 | "outputs": { 42 | "body": { 43 | "output": activity.output, 44 | "status": activity.status, 45 | "error": activity.error, 46 | } 47 | } 48 | } 49 | activity_results[activity.activity_name] = activity_result_dir 50 | 51 | variables = {variable.name: variable.value for variable in state.variables} 52 | 53 | state_iter_item_json = json.dumps(state.iteration_item) if state.iteration_item else None 54 | 55 | result = evaluator.EvaluateExpression( 56 | expression, 57 | json.dumps(parameters), 58 | json.dumps(variables), 59 | state_iter_item_json, 60 | json.dumps(activity_results), 61 | ) 62 | return json.loads(result)["result"] 63 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_pythonnet/nuget.config: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_repositories/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/src/data_factory_testing_framework/_repositories/__init__.py -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_repositories/_factories/base_repository_factory.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | from data_factory_testing_framework._repositories.data_factory_repository import DataFactoryRepository 4 | from data_factory_testing_framework.models import Pipeline 5 | 6 | 7 | class BaseRepositoryFactory(ABC): 8 | def parse_from_folder(self, folder_path: str) -> DataFactoryRepository: 9 | pipelines = self._get_data_factory_pipelines_by_folder_path(folder_path) 10 | return DataFactoryRepository(pipelines) 11 | 12 | @abstractmethod 13 | def _get_data_factory_pipelines_by_folder_path(self, folder_path: str) -> list[Pipeline]: 14 | pass 15 | -------------------------------------------------------------------------------- 
/src/data_factory_testing_framework/_repositories/_factories/data_factory_repository_factory.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from data_factory_testing_framework._deserializers._deserializer_data_factory import ( 4 | parse_data_factory_pipeline_from_pipeline_json, 5 | ) 6 | from data_factory_testing_framework._repositories._factories.base_repository_factory import BaseRepositoryFactory 7 | from data_factory_testing_framework.models import Pipeline 8 | 9 | 10 | class DataFactoryRepositoryFactory(BaseRepositoryFactory): 11 | def _get_data_factory_pipelines_by_folder_path(self, folder_path: str) -> list[Pipeline]: 12 | pipeline_path = os.path.join(folder_path, "pipeline") 13 | pipelines = [] 14 | files = os.listdir(pipeline_path) 15 | for file in files: 16 | file_path = os.path.join(pipeline_path, file) 17 | if file.endswith(".json"): 18 | with open(file_path, "r") as f: 19 | pipelines.append(parse_data_factory_pipeline_from_pipeline_json(f.read())) 20 | 21 | return pipelines 22 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_repositories/_factories/fabric_repository_factory.py: -------------------------------------------------------------------------------- 1 | import codecs 2 | import json 3 | import os 4 | from typing import List 5 | 6 | from data_factory_testing_framework._deserializers._deserializer_fabric import ( 7 | parse_fabric_pipeline_from_pipeline_json_files, 8 | ) 9 | from data_factory_testing_framework._repositories._factories.base_repository_factory import BaseRepositoryFactory 10 | from data_factory_testing_framework.models import Pipeline 11 | 12 | REQUIRED_FILES = ["pipeline-content.json", ".platform"] 13 | 14 | 15 | class FabricRepositoryFactory(BaseRepositoryFactory): 16 | def _get_data_factory_pipelines_by_folder_path(self, folder_path: str) -> list[Pipeline]: 17 | pipeline_folders = FabricRepositoryFactory._find_folders_containing_pipeline(folder_path) 18 | pipelines = [] 19 | for pipeline_folder in pipeline_folders: 20 | pipeline_file = os.path.join(pipeline_folder, "pipeline-content.json") 21 | pipeline_content_encoding = FabricRepositoryFactory._detect_encoding(pipeline_file) 22 | 23 | platform_file = os.path.join(pipeline_folder, ".platform") 24 | with open(pipeline_file, "r", encoding=pipeline_content_encoding) as pipeline_file, open( 25 | platform_file, "r" 26 | ) as platform_file: 27 | pipeline_contents = pipeline_file.read() 28 | platform_config: dict = json.load(platform_file) 29 | 30 | if not all(key in platform_config.keys() for key in ["metadata", "config"]): 31 | raise ValueError( 32 | f"Platform file {platform_file} does not contain the required keys metadata and config" 33 | ) 34 | 35 | pipelines.append( 36 | parse_fabric_pipeline_from_pipeline_json_files( 37 | pipeline_contents, platform_config["config"], platform_config["metadata"] 38 | ) 39 | ) 40 | 41 | return pipelines 42 | 43 | @staticmethod 44 | def _find_folders_containing_pipeline(search_path: str) -> List[str]: 45 | pipeline_folders = [] 46 | 47 | # Walk through the directory tree and find pipeline folders 48 | for root, _, files in os.walk(search_path): 49 | if "pipeline-content.json" in files: 50 | pipeline_folders.append(root) 51 | 52 | # Check if each folder contains the required files 53 | for pipeline_folder in pipeline_folders: 54 | list_dir = os.listdir(pipeline_folder) 55 | if not all(file in list_dir for file in REQUIRED_FILES): 56 | raise 
FileNotFoundError( 57 | f"Pipeline folder {pipeline_folder} does not contain the required files {REQUIRED_FILES}" 58 | ) 59 | 60 | return pipeline_folders 61 | 62 | @staticmethod 63 | def _detect_encoding(file_path: str) -> str: 64 | """Detects the encoding of the file and returns it as a string. 65 | 66 | Args: 67 | file_path (str): The path to the file. 68 | 69 | Returns: 70 | str: The encoding of the file. Possible values are "utf-8" and "utf-16" (little or big endian). 71 | """ 72 | with open(file_path, "rb") as file: 73 | bom = file.read(2) 74 | if bom == codecs.BOM_UTF16_LE or bom == codecs.BOM_UTF16_BE: 75 | return "utf-16" 76 | else: 77 | return "utf-8" 78 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/_repositories/data_factory_repository.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from data_factory_testing_framework.exceptions import PipelineNotFoundError 4 | from data_factory_testing_framework.models import Pipeline 5 | 6 | 7 | class DataFactoryRepository: 8 | def __init__(self, pipelines: List[Pipeline]) -> None: 9 | """Initializes the repository with pipelines, linkedServices, datasets and triggers. 10 | 11 | Args: 12 | pipelines: List of pipelines. 13 | """ 14 | self.pipelines = pipelines 15 | 16 | def get_pipeline_by_id(self, pipeline_id: str) -> Pipeline: 17 | """Get a pipeline by id. Throws an exception if the pipeline is not found. 18 | 19 | Args: 20 | pipeline_id: The identifier of the pipeline to get. 21 | 22 | Returns: 23 | The pipeline with the given id. 24 | """ 25 | for pipeline in self.pipelines: 26 | if pipeline.pipeline_id == pipeline_id: 27 | return pipeline 28 | 29 | raise PipelineNotFoundError(f"Pipeline with pipeline_id: '{pipeline_id}' not found") 30 | 31 | def get_pipeline_by_name(self, name: str) -> Pipeline: 32 | """Get a pipeline by name. Throws an exception if the pipeline is not found. 33 | 34 | Args: 35 | name: Name of the pipeline. 
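Returns:
    The pipeline with the given name.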
36 | """ 37 | for pipeline in self.pipelines: 38 | if pipeline.name == name: 39 | return pipeline 40 | 41 | raise PipelineNotFoundError(f"Pipeline with name: '{name}' not found") 42 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/exceptions/__init__.py: -------------------------------------------------------------------------------- 1 | from data_factory_testing_framework.exceptions._activity_not_found_error import ActivityNotFoundError 2 | from data_factory_testing_framework.exceptions._activity_output_field_not_found_error import ( 3 | ActivityOutputFieldNotFoundError, 4 | ) 5 | from data_factory_testing_framework.exceptions._control_activity_expression_evaluated_not_to_expected_type import ( 6 | ControlActivityExpressionEvaluatedNotToExpectedTypeError, 7 | ) 8 | from data_factory_testing_framework.exceptions._data_factory_element_evaluation_error import ( 9 | DataFactoryElementEvaluationError, 10 | ) 11 | from data_factory_testing_framework.exceptions._function_call_invalid_arguments_count_error import ( 12 | FunctionCallInvalidArgumentsCountError, 13 | ) 14 | from data_factory_testing_framework.exceptions._parameter_not_found_error import ParameterNotFoundError 15 | from data_factory_testing_framework.exceptions._pipeline_activities_circular_dependency_error import ( 16 | NoRemainingPipelineActivitiesMeetDependencyConditionsError, 17 | ) 18 | from data_factory_testing_framework.exceptions._pipeline_not_found_error import PipelineNotFoundError 19 | from data_factory_testing_framework.exceptions._state_iteration_item_not_set_error import StateIterationItemNotSetError 20 | from data_factory_testing_framework.exceptions._unsupported_function_error import UnsupportedFunctionError 21 | from data_factory_testing_framework.exceptions._variable_being_evaluated_does_not_exist_error import ( 22 | VariableBeingEvaluatedDoesNotExistError, 23 | ) 24 | from data_factory_testing_framework.exceptions._variable_not_found_error import VariableNotFoundError 25 | 26 | __all__ = [ 27 | "ActivityNotFoundError", 28 | "ActivityOutputFieldNotFoundError", 29 | "DataFactoryElementEvaluationError", 30 | "FunctionCallInvalidArgumentsCountError", 31 | "ParameterNotFoundError", 32 | "NoRemainingPipelineActivitiesMeetDependencyConditionsError", 33 | "PipelineNotFoundError", 34 | "StateIterationItemNotSetError", 35 | "UnsupportedFunctionError", 36 | "VariableBeingEvaluatedDoesNotExistError", 37 | "VariableNotFoundError", 38 | "ControlActivityExpressionEvaluatedNotToExpectedTypeError", 39 | ] 40 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/exceptions/_activity_not_found_error.py: -------------------------------------------------------------------------------- 1 | from data_factory_testing_framework.exceptions._user_error import UserError 2 | 3 | 4 | class ActivityNotFoundError(UserError): 5 | def __init__(self, activity_name: str) -> None: 6 | """Error raised when an activity is not found.""" 7 | super().__init__(f"Activity with name '{activity_name}' not found") 8 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/exceptions/_activity_output_field_not_found_error.py: -------------------------------------------------------------------------------- 1 | from data_factory_testing_framework.exceptions._user_error import UserError 2 | 3 | 4 | class ActivityOutputFieldNotFoundError(UserError): 5 | def __init__(self, 
activity_name: str, output_field_name: str) -> None: 6 | """Exception raised when an activity does not have an expected output field.""" 7 | super().__init__( 8 | f"Activity '{activity_name}' does not have output field '{output_field_name}'. Consider setting it through activity.setResult()." 9 | ) 10 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/exceptions/_control_activity_expression_evaluated_not_to_expected_type.py: -------------------------------------------------------------------------------- 1 | from typing import Type 2 | 3 | from data_factory_testing_framework.exceptions._user_error import UserError 4 | 5 | 6 | class ControlActivityExpressionEvaluatedNotToExpectedTypeError(UserError): 7 | """ControlActivityExpressionEvaluatedNotToExpectedType. 8 | 9 | This exception is raised when a ControlActivities iteration expression is not evaluating to the expected type. 10 | This might be due incorrect expression or incorrectly registering activity results (e.g. registering a dictionary instead of expected list) 11 | """ 12 | 13 | def __init__(self, activity_name: str, expected_type: Type) -> None: 14 | super().__init__( 15 | f"Iteration expression of Activity: '{activity_name}' does not evaluate to the expected type: '{expected_type.__name__}'." 16 | ) 17 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/exceptions/_data_factory_element_evaluation_error.py: -------------------------------------------------------------------------------- 1 | class DataFactoryElementEvaluationError(Exception): 2 | """DataFactoryElementEvaluationError. 3 | 4 | This exception is raised when an error occurs while evaluating a DataFactoryElement. 5 | It is a technical error and should not occur in normal operation of the framework. 6 | This assumes that the expression is valid and the state is correct. 7 | """ 8 | 9 | pass 10 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/exceptions/_function_call_invalid_arguments_count_error.py: -------------------------------------------------------------------------------- 1 | from data_factory_testing_framework.exceptions._user_error import UserError 2 | 3 | 4 | class FunctionCallInvalidArgumentsCountError(UserError): 5 | def __init__(self, name: str, evaluated_arguments: list, expected_argument_names: list) -> None: 6 | """Error raised when a function call has an invalid arguments count.""" 7 | message = ( 8 | f"FunctionCall {name} has invalid arguments count. " 9 | f"Evaluated arguments: \"{', '.join(map(str, evaluated_arguments))}\". 
" 10 | f"Expected argument types: {', '.join(expected_argument_names)}" 11 | ) 12 | super().__init__(message) 13 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/exceptions/_parameter_not_found_error.py: -------------------------------------------------------------------------------- 1 | from data_factory_testing_framework.exceptions._user_error import UserError 2 | 3 | 4 | class ParameterNotFoundError(UserError): 5 | def __init__(self, pipeline_type: str, name: str) -> None: 6 | """Error raised when a parameter is not found.""" 7 | super().__init__(f"Parameter: '{name}' of type '{pipeline_type}' not found") 8 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/exceptions/_pipeline_activities_circular_dependency_error.py: -------------------------------------------------------------------------------- 1 | from data_factory_testing_framework.exceptions._user_error import UserError 2 | 3 | 4 | class NoRemainingPipelineActivitiesMeetDependencyConditionsError(UserError): 5 | """NoRemainingPipelineActivitiesMeetDependencyConditionsError. 6 | 7 | This error is raised when there are still pending pipeline activities, but no conditions are met to execute these activities. 8 | Scenarios where this error can be raised: 9 | * Circular dependencies in pipeline activities 10 | * Misconfigured pipeline activities 11 | """ 12 | 13 | def __init__(self) -> None: 14 | """Initialize NoRemainingPipelineActivitiesMeetDependencyConditionsError.""" 15 | super().__init__("No remaining pipeline activities meet dependency conditions.") 16 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/exceptions/_pipeline_not_found_error.py: -------------------------------------------------------------------------------- 1 | from data_factory_testing_framework.exceptions._user_error import UserError 2 | 3 | 4 | class PipelineNotFoundError(UserError): 5 | def __init__(self, error_message: str) -> None: 6 | """Error raised when a pipeline is not found.""" 7 | super().__init__(f"{error_message}") 8 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/exceptions/_state_iteration_item_not_set_error.py: -------------------------------------------------------------------------------- 1 | from data_factory_testing_framework.exceptions._user_error import UserError 2 | 3 | 4 | class StateIterationItemNotSetError(UserError): 5 | def __init__(self) -> None: 6 | """Error raised when an iteration item is not set.""" 7 | super().__init__("Iteration item not set.") 8 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/exceptions/_unsupported_function_error.py: -------------------------------------------------------------------------------- 1 | from data_factory_testing_framework.exceptions._user_error import UserError 2 | 3 | 4 | class UnsupportedFunctionError(UserError): 5 | def __init__(self, function_name: str) -> None: 6 | """Error raised when a function is not supported.""" 7 | super().__init__(f"Unsupported function: {function_name}") 8 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/exceptions/_user_error.py: -------------------------------------------------------------------------------- 1 | import abc 2 | 3 | 4 | class 
UserError(abc.ABC, Exception): 5 | """UserError. 6 | 7 | Base class for all user errors. 8 | """ 9 | 10 | 11 | pass 12 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/exceptions/_variable_being_evaluated_does_not_exist_error.py: -------------------------------------------------------------------------------- 1 | from data_factory_testing_framework.exceptions._user_error import UserError 2 | 3 | 4 | class VariableBeingEvaluatedDoesNotExistError(UserError): 5 | def __init__(self, variable_name: str) -> None: 6 | """Error raised when a variable being evaluated does not exist.""" 7 | super().__init__(f"Variable being evaluated does not exist: {variable_name}") 8 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/exceptions/_variable_not_found_error.py: -------------------------------------------------------------------------------- 1 | from data_factory_testing_framework.exceptions._user_error import UserError 2 | 3 | 4 | class VariableNotFoundError(UserError): 5 | def __init__(self, variable_name: str) -> None: 6 | """Error raised when a variable is not found.""" 7 | super().__init__(f"Variable '{variable_name}' not found") 8 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/models/__init__.py: -------------------------------------------------------------------------------- 1 | from data_factory_testing_framework.models._data_factory_element import DataFactoryElement 2 | from data_factory_testing_framework.models._data_factory_object_type import DataFactoryObjectType 3 | from data_factory_testing_framework.models._pipeline import Pipeline 4 | 5 | __all__ = ["DataFactoryObjectType", "DataFactoryElement", "Pipeline"] 6 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/models/_data_factory_element.py: -------------------------------------------------------------------------------- 1 | import json 2 | from typing import Any 3 | 4 | from data_factory_testing_framework._expression_runtime.expression_runtime import ExpressionRuntime 5 | from data_factory_testing_framework.exceptions import ( 6 | DataFactoryElementEvaluationError, 7 | ) 8 | from data_factory_testing_framework.exceptions._user_error import UserError 9 | from data_factory_testing_framework.models._data_factory_object_type import DataFactoryObjectType 10 | from data_factory_testing_framework.state import RunState 11 | 12 | 13 | class DataFactoryElement: 14 | expression: str 15 | result: DataFactoryObjectType 16 | 17 | def __init__(self, expression: str) -> None: 18 | """DataFactoryElement. 19 | 20 | Args: 21 | expression: Expression to evaluate. (e.g. 
@concat(pipeline().parameters.pipelineName, '-pipeline')) 22 | """ 23 | self.expression = expression 24 | self.result: DataFactoryObjectType = None 25 | 26 | def evaluate(self, state: RunState) -> DataFactoryObjectType: 27 | """Evaluate the expression.""" 28 | try: 29 | expression_runtime = ExpressionRuntime() 30 | self.result = expression_runtime.evaluate(self.expression, state) 31 | return self.result 32 | except UserError: 33 | raise 34 | except Exception as e: 35 | raise DataFactoryElementEvaluationError(f"Error evaluating expression: {self.expression}") from e 36 | 37 | def get_json_value(self) -> Any: # noqa: ANN401 38 | """Loads the value as a json object.""" 39 | if self.result: 40 | return json.loads(self.result) 41 | 42 | return None 43 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/models/_data_factory_object_type.py: -------------------------------------------------------------------------------- 1 | from typing import TypeVar 2 | 3 | DataFactoryObjectType = TypeVar("DataFactoryObjectType", str, int, bool, float, list, dict, None) 4 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/models/activities/__init__.py: -------------------------------------------------------------------------------- 1 | from ._activity import Activity 2 | from ._activity_dependency import ActivityDependency 3 | from ._append_variable_activity import AppendVariableActivity 4 | from ._control_activity import ControlActivity 5 | from ._execute_pipeline_activity import ExecutePipelineActivity 6 | from ._fail_activity import FailActivity 7 | from ._filter_activity import FilterActivity 8 | from ._for_each_activity import ForEachActivity 9 | from ._if_condition_activity import IfConditionActivity 10 | from ._set_variable_activity import SetVariableActivity 11 | from ._switch_activity import SwitchActivity 12 | from ._until_activity import UntilActivity 13 | 14 | __all__ = [ 15 | "Activity", 16 | "ActivityDependency", 17 | "AppendVariableActivity", 18 | "ControlActivity", 19 | "ExecutePipelineActivity", 20 | "FailActivity", 21 | "FilterActivity", 22 | "ForEachActivity", 23 | "IfConditionActivity", 24 | "SetVariableActivity", 25 | "SwitchActivity", 26 | "UntilActivity", 27 | ] 28 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/models/activities/_activity.py: -------------------------------------------------------------------------------- 1 | from typing import Any, List, Optional 2 | 3 | from data_factory_testing_framework.models import DataFactoryElement 4 | from data_factory_testing_framework.models.activities._activity_dependency import ( 5 | ActivityDependency, 6 | ) 7 | from data_factory_testing_framework.state import DependencyCondition, PipelineRunState 8 | 9 | 10 | class Activity: 11 | def __init__(self, name: str, type: str, policy: Optional[dict] = None, **kwargs: Any) -> None: # noqa: ANN401, A002 12 | """Activity with dynamic dicts. 13 | 14 | Args: 15 | name: Name of the activity. 16 | type: Type of the activity. 17 | policy: Policy of the activity. 18 | **kwargs: Activity properties coming directly from the json representation of the activity.
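            Example (editor's illustrative sketch; the literal values are hypothetical and mirror the
            WebActivity JSON used in the functional tests further down in this repository):

                state = PipelineRunState()
                activity = Activity(
                    name="GetVersion",
                    type="WebActivity",
                    typeProperties={"relativeUrl": "version", "method": "GET"},
                    dependsOn=[{"activity": "Setup", "dependencyConditions": ["Succeeded"]}],
                )
                activity.evaluate(state)  # resolves any DataFactoryElement fields against the state
                assert activity.status == DependencyCondition.SUCCEEDED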
19 | """ 20 | if policy is None: 21 | policy = {} 22 | 23 | self.name = name 24 | self.type = type 25 | self.policy = policy 26 | self.type_properties = kwargs["typeProperties"] if "typeProperties" in kwargs else {} 27 | 28 | self.depends_on: List[ActivityDependency] = [] 29 | if "dependsOn" in kwargs: 30 | for dependency in kwargs["dependsOn"]: 31 | self.depends_on.append(ActivityDependency(**dependency)) 32 | 33 | self.all_properties = kwargs 34 | 35 | self.status: DependencyCondition = None 36 | self.output = {} 37 | 38 | def evaluate(self, state: PipelineRunState) -> "Activity": 39 | self._evaluate_expressions(self, state, types_to_ignore=[Activity]) 40 | self.status = DependencyCondition.Succeeded 41 | self.output = {} 42 | return self 43 | 44 | def are_dependency_condition_met(self, state: PipelineRunState) -> bool: 45 | if not self.depends_on: 46 | return True 47 | 48 | for dependency in self.depends_on: 49 | dependency_activity = state.try_get_activity_result_by_name(dependency.activity) 50 | 51 | if dependency_activity is None: 52 | return False 53 | 54 | for dependency_condition in dependency.dependency_conditions: 55 | if ( 56 | dependency_activity["status"] != dependency_condition 57 | and dependency_condition != DependencyCondition.COMPLETED 58 | ): 59 | return False 60 | 61 | return True 62 | 63 | def _evaluate_expressions( 64 | self, 65 | obj: Any, # noqa: ANN401 66 | state: PipelineRunState, 67 | visited: Optional[List[Any]] = None, # noqa: ANN401 68 | types_to_ignore: Optional[List[Any]] = None, # noqa: ANN401 69 | ) -> None: 70 | if visited is None: 71 | visited = [] 72 | 73 | if obj in visited: 74 | return 75 | 76 | visited.append(obj) 77 | 78 | if data_factory_element := isinstance(obj, DataFactoryElement) and obj: 79 | data_factory_element.evaluate(state) 80 | return 81 | 82 | # Attributes 83 | attribute_names = [ 84 | attribute 85 | for attribute in dir(obj) 86 | if not attribute.startswith("_") and not callable(getattr(obj, attribute)) 87 | ] 88 | for attribute_name in attribute_names: 89 | if "activities" in attribute_name: 90 | continue 91 | 92 | attribute = getattr(obj, attribute_name) 93 | if attribute is None: 94 | continue 95 | 96 | self._evaluate_expressions(attribute, state, visited, types_to_ignore) 97 | 98 | # Dictionary 99 | if isinstance(obj, dict): 100 | for key in obj.keys(): 101 | if "activities" in key: 102 | continue 103 | 104 | self._evaluate_expressions(obj[key], state, visited, types_to_ignore) 105 | 106 | # List 107 | if isinstance(obj, list): 108 | for item in obj: 109 | ignore_item = False 110 | for type_to_ignore in types_to_ignore: 111 | if isinstance(item, type_to_ignore): 112 | ignore_item = True 113 | 114 | if ignore_item: 115 | continue 116 | 117 | self._evaluate_expressions(item, state, visited, types_to_ignore) 118 | 119 | def set_result(self, result: DependencyCondition, output: Optional[Any] = None) -> None: # noqa: ANN401 120 | self.status = result 121 | self.output = output 122 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/models/activities/_activity_dependency.py: -------------------------------------------------------------------------------- 1 | from typing import List, Union 2 | 3 | from data_factory_testing_framework.state import DependencyCondition 4 | 5 | 6 | class ActivityDependency: 7 | def __init__(self, activity: str, dependencyConditions: List[Union[str, DependencyCondition]] = None) -> None: # noqa: ANN401, N803 8 | """ActivityDependency. 
9 | 10 | Args: 11 | activity: Name of the activity. 12 | dependencyConditions: List of dependency conditions. 13 | """ 14 | if dependencyConditions is None: 15 | dependencyConditions = [] # noqa: N806 16 | 17 | self.activity: str = activity 18 | self.dependency_conditions: List[DependencyCondition] = dependencyConditions 19 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/models/activities/_append_variable_activity.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from data_factory_testing_framework.models._data_factory_element import DataFactoryElement 4 | from data_factory_testing_framework.models.activities._control_activity import ControlActivity 5 | from data_factory_testing_framework.state import PipelineRunState 6 | 7 | 8 | class AppendVariableActivity(ControlActivity): 9 | def __init__(self, **kwargs: Any) -> None: # noqa: ANN401 10 | """This is the class that represents the Append Variable activity in the pipeline. 11 | 12 | Args: 13 | **kwargs: AppendVariableActivity properties coming directly from the json representation of the activity. 14 | """ 15 | kwargs["type"] = "AppendVariable" 16 | 17 | super(ControlActivity, self).__init__(**kwargs) 18 | 19 | self.variable_name: str = self.type_properties["variableName"] 20 | self.value: DataFactoryElement = self.type_properties["value"] 21 | 22 | def evaluate(self, state: PipelineRunState) -> "AppendVariableActivity": 23 | super(ControlActivity, self).evaluate(state) 24 | 25 | if isinstance(self.value, DataFactoryElement): 26 | evaluated_value = self.value.evaluate(state) 27 | else: 28 | evaluated_value = self.value 29 | 30 | state.append_variable(self.type_properties["variableName"], evaluated_value) 31 | 32 | return self 33 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/models/activities/_control_activity.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Callable, Iterator, List 2 | 3 | from data_factory_testing_framework.models.activities import Activity 4 | from data_factory_testing_framework.state import PipelineRunState 5 | 6 | 7 | class ControlActivity(Activity): 8 | def __init__(self, **kwargs: Any) -> None: # noqa: ANN401 9 | """This is the base class for all control activities in the pipeline. 10 | 11 | Args: 12 | **kwargs: ControlActivity properties coming directly from the json representation of the activity. 
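            Note (editor's addition for clarity): the base implementation of evaluate_control_activities
            below intentionally yields nothing; control activities that wrap other activities
            (ForEachActivity, IfConditionActivity, SwitchActivity, UntilActivity) override it to yield
            their child activities.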
13 | """ 14 | super(Activity, self).__init__(**kwargs) 15 | 16 | def evaluate_control_activities( 17 | self, 18 | state: PipelineRunState, 19 | evaluate_activities: Callable[[List[Activity], PipelineRunState], Iterator[Activity]], 20 | ) -> Iterator[Activity]: 21 | yield from list() 22 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/models/activities/_execute_pipeline_activity.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Callable, Iterator, List 2 | 3 | from data_factory_testing_framework.models._data_factory_element import DataFactoryElement 4 | from data_factory_testing_framework.models._pipeline import Pipeline 5 | from data_factory_testing_framework.models.activities._activity import Activity 6 | from data_factory_testing_framework.models.activities._control_activity import ControlActivity 7 | from data_factory_testing_framework.state import PipelineRunState, RunParameter, RunParameterType 8 | 9 | 10 | class ExecutePipelineActivity(ControlActivity): 11 | def __init__(self, **kwargs: Any) -> None: # noqa: ANN401 12 | """This is the class that represents the Execute Pipeline activity in the pipeline. 13 | 14 | Args: 15 | **kwargs: ExecutePipelineActivity properties coming directly from the json representation of the activity. 16 | """ 17 | kwargs["type"] = "ExecutePipeline" 18 | 19 | super(ControlActivity, self).__init__(**kwargs) 20 | 21 | self.parameters: dict = {} 22 | if "parameters" in self.type_properties: 23 | self.parameters = self.type_properties["parameters"] 24 | 25 | def get_child_run_parameters(self, state: PipelineRunState) -> List[RunParameter]: 26 | child_parameters = [] 27 | for parameter in state.parameters: 28 | if parameter.type == RunParameterType.Global or parameter.type == RunParameterType.System: 29 | child_parameters.append(RunParameter(parameter.type, parameter.name, parameter.value)) 30 | 31 | for parameter_name, parameter_value in self.parameters.items(): 32 | parameter_value = ( 33 | parameter_value.result if isinstance(parameter_value, DataFactoryElement) else parameter_value 34 | ) 35 | child_parameters.append(RunParameter(RunParameterType.Pipeline, parameter_name, parameter_value)) 36 | 37 | return child_parameters 38 | 39 | def evaluate_pipeline( 40 | self, 41 | pipeline: Pipeline, 42 | parameters: List[RunParameter], 43 | evaluate_activities: Callable[[List[Activity], PipelineRunState], Iterator[Activity]], 44 | ) -> Iterator[Activity]: 45 | parameters = pipeline.validate_and_append_default_parameters(parameters) 46 | scoped_state = PipelineRunState(parameters, pipeline.get_run_variables()) 47 | for activity in evaluate_activities(pipeline.activities, scoped_state): 48 | yield activity 49 | 50 | # Set the pipelineReturnValues as evaluated by SetVariable activities to the ExecutePipelineActivity output 51 | self.output["pipelineReturnValue"] = {} 52 | for key in scoped_state.return_values: 53 | self.output["pipelineReturnValue"][key] = scoped_state.return_values[key] 54 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/models/activities/_fail_activity.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from data_factory_testing_framework.models.activities import ControlActivity 4 | from data_factory_testing_framework.state import DependencyCondition, PipelineRunState 5 | 6 | 7 | class 
FailActivity(ControlActivity): 8 | def __init__(self, **kwargs: Any) -> None: # noqa: ANN401 9 | """This is the class that represents the Fail activity in the pipeline. 10 | 11 | Args: 12 | **kwargs: FailActivity properties coming directly from the json representation of the activity. 13 | """ 14 | kwargs["type"] = "Fail" 15 | 16 | super(ControlActivity, self).__init__(**kwargs) 17 | 18 | def evaluate(self, state: PipelineRunState) -> "FailActivity": 19 | super(ControlActivity, self).evaluate(state) 20 | 21 | self.set_result(DependencyCondition.FAILED) 22 | 23 | return self 24 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/models/activities/_filter_activity.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from data_factory_testing_framework.exceptions._control_activity_expression_evaluated_not_to_expected_type import ( 4 | ControlActivityExpressionEvaluatedNotToExpectedTypeError, 5 | ) 6 | from data_factory_testing_framework.models._data_factory_element import DataFactoryElement 7 | from data_factory_testing_framework.models.activities import ControlActivity 8 | from data_factory_testing_framework.state import DependencyCondition, PipelineRunState 9 | 10 | 11 | class FilterActivity(ControlActivity): 12 | def __init__( 13 | self, 14 | **kwargs: Any, # noqa: ANN401 15 | ) -> None: 16 | """This is the class that represents the Filter activity in the pipeline. 17 | 18 | Args: 19 | **kwargs: FilterActivity properties coming directly from the json representation of the activity. 20 | """ 21 | kwargs["type"] = "Filter" 22 | 23 | super(ControlActivity, self).__init__(**kwargs) 24 | 25 | self.items: DataFactoryElement = self.type_properties["items"] 26 | self.condition: DataFactoryElement = self.type_properties["condition"] 27 | 28 | def evaluate(self, state: PipelineRunState) -> "FilterActivity": 29 | items = self.items.evaluate(state) 30 | if not isinstance(items, list): 31 | raise ControlActivityExpressionEvaluatedNotToExpectedTypeError(self.name, list) 32 | 33 | value = [] 34 | for item in items: 35 | scoped_state = state.create_iteration_scope(item) 36 | if self.condition.evaluate(scoped_state): 37 | value.append(item) 38 | 39 | self.set_result(DependencyCondition.SUCCEEDED, {"value": value}) 40 | 41 | return self 42 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/models/activities/_for_each_activity.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Callable, Iterator, List 2 | 3 | from data_factory_testing_framework.exceptions._control_activity_expression_evaluated_not_to_expected_type import ( 4 | ControlActivityExpressionEvaluatedNotToExpectedTypeError, 5 | ) 6 | from data_factory_testing_framework.models._data_factory_element import DataFactoryElement 7 | from data_factory_testing_framework.models.activities import Activity, ControlActivity 8 | from data_factory_testing_framework.state import PipelineRunState 9 | 10 | 11 | class ForEachActivity(ControlActivity): 12 | def __init__( 13 | self, 14 | activities: List[Activity], 15 | **kwargs: Any, # noqa: ANN401 16 | ) -> None: 17 | """This is the class that represents the For Each activity in the pipeline. 18 | 19 | Args: 20 | activities: The deserialized activities that will be executed for each item in the items array. 
21 | **kwargs: ForEachActivity properties coming directly from the json representation of the activity. 22 | """ 23 | kwargs["type"] = "ForEach" 24 | 25 | super(ControlActivity, self).__init__(**kwargs) 26 | 27 | self.activities = activities 28 | self.items: DataFactoryElement = self.type_properties["items"] 29 | 30 | def evaluate(self, state: PipelineRunState) -> "ForEachActivity": 31 | items = self.items.evaluate(state) 32 | if not isinstance(items, list): 33 | raise ControlActivityExpressionEvaluatedNotToExpectedTypeError(self.name, list) 34 | 35 | super(ControlActivity, self).evaluate(state) 36 | 37 | return self 38 | 39 | def evaluate_control_activities( 40 | self, 41 | state: PipelineRunState, 42 | evaluate_activities: Callable[[List[Activity], PipelineRunState], Iterator[Activity]], 43 | ) -> Iterator[Activity]: 44 | for item in self.items.result: 45 | scoped_state = state.create_iteration_scope(item) 46 | for activity in evaluate_activities(self.activities, scoped_state): 47 | yield activity 48 | 49 | state.add_scoped_activity_results_from_scoped_state(scoped_state) 50 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/models/activities/_if_condition_activity.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Callable, Iterator, List 2 | 3 | from data_factory_testing_framework.exceptions._control_activity_expression_evaluated_not_to_expected_type import ( 4 | ControlActivityExpressionEvaluatedNotToExpectedTypeError, 5 | ) 6 | from data_factory_testing_framework.models._data_factory_element import DataFactoryElement 7 | from data_factory_testing_framework.models.activities import Activity, ControlActivity 8 | from data_factory_testing_framework.state import PipelineRunState 9 | 10 | 11 | class IfConditionActivity(ControlActivity): 12 | def __init__( 13 | self, 14 | if_true_activities: List[Activity], 15 | if_false_activities: List[Activity], 16 | **kwargs: Any, # noqa: ANN401 17 | ) -> None: 18 | """This is the class that represents the If Condition activity in the pipeline. 19 | 20 | Args: 21 | if_true_activities: The deserialized activities that will be executed if the condition is true. 22 | if_false_activities: The deserialized activities that will be executed if the condition is false. 23 | **kwargs: IfConditionActivity properties coming directly from the json representation of the activity. 
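            Example (editor's illustrative sketch; the 'env' parameter name is hypothetical): an
            expression such as

                "@equals(pipeline().parameters.env, 'prod')"

            must evaluate to a bool; evaluate() raises ControlActivityExpressionEvaluatedNotToExpectedTypeError
            for any other result type, and evaluate_control_activities() then yields if_true_activities
            or if_false_activities depending on expression.result.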
24 | """ 25 | kwargs["type"] = "IfCondition" 26 | 27 | super(ControlActivity, self).__init__(**kwargs) 28 | 29 | self.if_true_activities = if_true_activities 30 | self.if_false_activities = if_false_activities 31 | self.expression: DataFactoryElement = self.type_properties["expression"] 32 | 33 | def evaluate(self, state: PipelineRunState) -> "IfConditionActivity": 34 | evaluated_expression = self.expression.evaluate(state) 35 | if not isinstance(evaluated_expression, bool): 36 | raise ControlActivityExpressionEvaluatedNotToExpectedTypeError(self.name, bool) 37 | 38 | super(ControlActivity, self).evaluate(state) 39 | 40 | return self 41 | 42 | def evaluate_control_activities( 43 | self, 44 | state: PipelineRunState, 45 | evaluate_activities: Callable[[List[Activity], PipelineRunState], Iterator[Activity]], 46 | ) -> Iterator[Activity]: 47 | scoped_state = state.create_iteration_scope() 48 | activities = self.if_true_activities if self.expression.result else self.if_false_activities 49 | for activity in evaluate_activities(activities, scoped_state): 50 | yield activity 51 | 52 | state.add_scoped_activity_results_from_scoped_state(scoped_state) 53 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/models/activities/_set_variable_activity.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from data_factory_testing_framework.models._data_factory_element import DataFactoryElement 4 | from data_factory_testing_framework.models.activities import ControlActivity 5 | from data_factory_testing_framework.state import PipelineRunState 6 | 7 | 8 | class SetVariableActivity(ControlActivity): 9 | def __init__(self, **kwargs: Any) -> None: # noqa: ANN401 10 | """This is the class that represents the Set Variable activity in the pipeline. 11 | 12 | Args: 13 | **kwargs: SetVariableActivity properties coming directly from the json representation of the activity. 
14 | """ 15 | kwargs["type"] = "SetVariable" 16 | 17 | super(ControlActivity, self).__init__(**kwargs) 18 | 19 | self.variable_name: str = self.type_properties["variableName"] 20 | self.value: DataFactoryElement = self.type_properties["value"] 21 | 22 | def evaluate(self, state: PipelineRunState) -> "SetVariableActivity": 23 | super(ControlActivity, self).evaluate(state) 24 | 25 | if self.type_properties["variableName"] == "pipelineReturnValue": 26 | for return_value in self.type_properties["value"]: 27 | value = return_value["value"] 28 | if isinstance(value, DataFactoryElement): 29 | evaluated_value = value.evaluate(state) 30 | else: 31 | evaluated_value = value 32 | 33 | state.set_return_value(return_value["key"], evaluated_value) 34 | 35 | return self 36 | 37 | if isinstance(self.value, DataFactoryElement): 38 | evaluated_value = self.value.evaluate(state) 39 | else: 40 | evaluated_value = self.value 41 | 42 | state.set_variable(self.type_properties["variableName"], evaluated_value) 43 | 44 | return self 45 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/models/activities/_switch_activity.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Callable, Dict, Generator, Iterator, List 2 | 3 | from data_factory_testing_framework.exceptions._control_activity_expression_evaluated_not_to_expected_type import ( 4 | ControlActivityExpressionEvaluatedNotToExpectedTypeError, 5 | ) 6 | from data_factory_testing_framework.models._data_factory_element import DataFactoryElement 7 | from data_factory_testing_framework.models.activities import Activity, ControlActivity 8 | from data_factory_testing_framework.state import PipelineRunState 9 | 10 | 11 | class SwitchActivity(ControlActivity): 12 | def __init__( 13 | self, 14 | default_activities: List[Activity], 15 | cases_activities: Dict[str, List[Activity]], 16 | **kwargs: Any, # noqa: ANN401 17 | ) -> None: 18 | """This is the class that represents the Switch activity in the pipeline. 19 | 20 | Args: 21 | default_activities: The deserialized activities that will be executed if none of the cases matches. 22 | cases_activities: The deserialized activities that will be executed if the case matches. 23 | **kwargs: SwitchActivity properties coming directly from the json representation of the activity. 
24 | """ 25 | kwargs["type"] = "Switch" 26 | 27 | super(ControlActivity, self).__init__(**kwargs) 28 | 29 | self.default_activities = default_activities 30 | self.cases_activities = cases_activities 31 | self.on: DataFactoryElement = self.type_properties["on"] 32 | 33 | def evaluate(self, state: PipelineRunState) -> "SwitchActivity": 34 | evaluated_on = self.on.evaluate(state) 35 | if not isinstance(evaluated_on, str): 36 | raise ControlActivityExpressionEvaluatedNotToExpectedTypeError(self.name, str) 37 | 38 | super(ControlActivity, self).evaluate(state) 39 | 40 | return self 41 | 42 | def evaluate_control_activities( 43 | self, 44 | state: PipelineRunState, 45 | evaluate_activities: Callable[[List[Activity], PipelineRunState], Iterator[Activity]], 46 | ) -> Iterator[Activity]: 47 | for case, activities in self.cases_activities.items(): 48 | if case == self.on.result: 49 | return self._run_activities_in_scope(state, activities, evaluate_activities) 50 | 51 | return self._run_activities_in_scope(state, self.default_activities, evaluate_activities) 52 | 53 | @staticmethod 54 | def _run_activities_in_scope( 55 | state: PipelineRunState, 56 | activities: List[Activity], 57 | evaluate_activities: Callable[[List[Activity], PipelineRunState], Iterator[Activity]], 58 | ) -> Generator[Activity, None, None]: 59 | scoped_state = state.create_iteration_scope() 60 | for activity in evaluate_activities(activities, scoped_state): 61 | yield activity 62 | state.add_scoped_activity_results_from_scoped_state(scoped_state) 63 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/models/activities/_until_activity.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Callable, Iterator, List 2 | 3 | from data_factory_testing_framework.exceptions._control_activity_expression_evaluated_not_to_expected_type import ( 4 | ControlActivityExpressionEvaluatedNotToExpectedTypeError, 5 | ) 6 | from data_factory_testing_framework.models._data_factory_element import DataFactoryElement 7 | from data_factory_testing_framework.models.activities import Activity, ControlActivity 8 | from data_factory_testing_framework.state import DependencyCondition, PipelineRunState 9 | 10 | 11 | class UntilActivity(ControlActivity): 12 | def __init__( 13 | self, 14 | activities: List[Activity], 15 | **kwargs: Any, # noqa: ANN401 16 | ) -> None: 17 | """This is the class that represents the Until activity in the pipeline. 18 | 19 | Args: 20 | activities: The deserialized activities that will be executed until the expression evaluates to true. 21 | **kwargs: UntilActivity properties coming directly from the json representation of the activity. 
22 | """ 23 | kwargs["type"] = "Until" 24 | 25 | super(ControlActivity, self).__init__(**kwargs) 26 | 27 | self.expression: DataFactoryElement = self.type_properties["expression"] 28 | self.activities = activities 29 | 30 | def evaluate(self, state: PipelineRunState) -> "UntilActivity": 31 | # Explicitly not evaluate here, but in the evaluate_control_activities method after the first iteration 32 | return self 33 | 34 | def evaluate_control_activities( 35 | self, 36 | state: PipelineRunState, 37 | evaluate_activities: Callable[[List[Activity], PipelineRunState], Iterator[Activity]], 38 | ) -> Iterator[Activity]: 39 | while True: 40 | scoped_state = state.create_iteration_scope() 41 | for activity in evaluate_activities(self.activities, scoped_state): 42 | yield activity 43 | 44 | state.add_scoped_activity_results_from_scoped_state(scoped_state) 45 | 46 | evaluated_expression = self.expression.evaluate(state) 47 | if not isinstance(evaluated_expression, bool): 48 | raise ControlActivityExpressionEvaluatedNotToExpectedTypeError(self.name, bool) 49 | 50 | if evaluated_expression: 51 | state.add_activity_result(self.name, DependencyCondition.Succeeded) 52 | self.set_result(DependencyCondition.Succeeded) 53 | break 54 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/src/data_factory_testing_framework/py.typed -------------------------------------------------------------------------------- /src/data_factory_testing_framework/state/__init__.py: -------------------------------------------------------------------------------- 1 | from ._activity_result import ActivityResult 2 | from ._dependency_condition import DependencyCondition 3 | from ._pipeline_run_state import PipelineRunState 4 | from ._pipeline_run_variable import PipelineRunVariable 5 | from ._run_parameter import RunParameter 6 | from ._run_parameter_type import RunParameterType 7 | from ._run_state import RunState 8 | 9 | __all__ = [ 10 | "DependencyCondition", 11 | "PipelineRunState", 12 | "PipelineRunVariable", 13 | "RunParameter", 14 | "RunParameterType", 15 | "RunState", 16 | "ActivityResult", 17 | ] 18 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/state/_activity_result.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Optional 2 | 3 | from data_factory_testing_framework.state._dependency_condition import DependencyCondition 4 | 5 | 6 | class ActivityResult: 7 | def __init__( 8 | self, 9 | activity_name: str, 10 | status: Optional[DependencyCondition] = None, 11 | output: Optional[Any] = None, # noqa: ANN401 12 | error: Optional[Any] = None, # noqa: ANN401 13 | ) -> None: 14 | """Represents the result of an activity. 15 | 16 | Args: 17 | activity_name: Name of the activity. 18 | status: Status of the activity. 19 | output: Output of the activity. (e.g. { "count": 1 } for activity('activityName').output.count) 20 | error: Error of the activity. (e.g. 
{ "code": "ErrorCode", "message": "ErrorMessage" } for activity('activityName').Error) 21 | """ 22 | self.activity_name = activity_name 23 | self.status = status if status is not None else DependencyCondition.SUCCEEDED 24 | self.output = output 25 | self.error = error 26 | 27 | def __getitem__(self, item: str) -> Any: # noqa: ANN401 28 | return getattr(self, item) 29 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/state/_dependency_condition.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | from data_factory_testing_framework._enum_meta import CaseInsensitiveEnumMeta 4 | 5 | 6 | class DependencyCondition(str, Enum, metaclass=CaseInsensitiveEnumMeta): 7 | """DependencyCondition.""" 8 | 9 | SUCCEEDED = "Succeeded" 10 | FAILED = "Failed" 11 | SKIPPED = "Skipped" 12 | COMPLETED = "Completed" 13 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/state/_pipeline_run_variable.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from data_factory_testing_framework.models._data_factory_object_type import DataFactoryObjectType 4 | 5 | 6 | class PipelineRunVariable: 7 | def __init__(self, name: str, default_value: Optional[DataFactoryObjectType] = None) -> None: 8 | """Represents a pipeline variable that is being tracked during a pipeline run. 9 | 10 | Args: 11 | name: Name of the variable. 12 | default_value: Default value of the variable. Defaults to None. 13 | """ 14 | self.name = name 15 | self.value = default_value 16 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/state/_run_parameter.py: -------------------------------------------------------------------------------- 1 | from data_factory_testing_framework.models._data_factory_object_type import DataFactoryObjectType 2 | from data_factory_testing_framework.state._run_parameter_type import RunParameterType 3 | 4 | 5 | class RunParameter: 6 | def __init__(self, parameter_type: RunParameterType, name: str, value: DataFactoryObjectType) -> None: 7 | """Run parameter. Represents a parameter that is being tracked during a pipeline run. 8 | 9 | Args: 10 | parameter_type: Type of the parameter. 11 | name: Name of the parameter. 12 | value: Value of the parameter. 13 | """ 14 | self.type = parameter_type 15 | self.name = name 16 | self.value = value 17 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/state/_run_parameter_type.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | from data_factory_testing_framework._enum_meta import CaseInsensitiveEnumMeta 4 | 5 | 6 | class RunParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): 7 | Pipeline = "Pipeline" 8 | Global = "Global" 9 | Dataset = "Dataset" 10 | LinkedService = "LinkedService" 11 | System = "System" 12 | LibraryVariables = "LibraryVariables" 13 | 14 | def __str__(self) -> str: 15 | """Get the string representation of the enum. 16 | 17 | We override this method to make sure that the string representation 18 | is the same across all Python versions. 19 | 20 | Returns: 21 | The string representation of the enum. 
22 | """ 23 | super().__str__() 24 | return f"{RunParameterType.__name__}.{self.name}" 25 | -------------------------------------------------------------------------------- /src/data_factory_testing_framework/state/_run_state.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional 2 | 3 | from data_factory_testing_framework.state._run_parameter import RunParameter 4 | 5 | 6 | class RunState: 7 | def __init__(self, parameters: Optional[List[RunParameter]] = None) -> None: 8 | """Represents the RunState for non-pipeline runs, like LinkedServices, Datasets and Triggers. 9 | 10 | Args: 11 | parameters: The global and regular parameters to be used for evaluating expressions. 12 | """ 13 | if parameters is None: 14 | parameters = [] 15 | 16 | self.parameters = parameters 17 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/tests/__init__.py -------------------------------------------------------------------------------- /tests/functional/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/tests/functional/__init__.py -------------------------------------------------------------------------------- /tests/functional/activity_outputs/.platform: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://developer.microsoft.com/json-schemas/fabric/gitIntegration/platformProperties/2.0.0/schema.json", 3 | "metadata": { 4 | "type": "Pipeline", 5 | "displayName": "set_version" 6 | }, 7 | "config": { 8 | "version": "2.0", 9 | "logicalId": "9c079e1a-3d08-43b7-aa02-eb0fb7154b11" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /tests/functional/activity_outputs/pipeline-content.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "set_version", 3 | "properties": { 4 | "activities": [ 5 | { 6 | "name": "SetVersion", 7 | "description": "", 8 | "type": "SetVariable", 9 | "state": "Active", 10 | "onInactiveMarkAs": "Succeeded", 11 | "dependsOn": [ 12 | { 13 | "activity": "GetVersion", 14 | "dependencyConditions": [ 15 | "Succeeded" 16 | ] 17 | } 18 | ], 19 | "policy": { 20 | "secureOutput": false, 21 | "secureInput": false 22 | }, 23 | "typeProperties": { 24 | "variableName": "version", 25 | "value": { 26 | "value": "@activity('GetVersion').output.version", 27 | "type": "Expression" 28 | } 29 | } 30 | }, 31 | { 32 | "name": "GetVersion", 33 | "type": "WebActivity", 34 | "dependsOn": [], 35 | "policy": { 36 | "timeout": "0.12:00:00", 37 | "retry": 0, 38 | "retryIntervalInSeconds": 30, 39 | "secureOutput": false, 40 | "secureInput": false 41 | }, 42 | "typeProperties": { 43 | "relativeUrl": "version", 44 | "method": "GET" 45 | }, 46 | "externalReferences": { 47 | "connection": "6d70b649-d684-439b-a9c2-d2bb5241cd39" 48 | } 49 | } 50 | ], 51 | "variables": { 52 | "version": { 53 | "type": "String" 54 | } 55 | }, 56 | "annotations": [] 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /tests/functional/activity_outputs/test_set_activity_output.py: 
-------------------------------------------------------------------------------- 1 | import pytest 2 | from data_factory_testing_framework import TestFramework, TestFrameworkType 3 | from data_factory_testing_framework.state import DependencyCondition 4 | 5 | 6 | def test_execute_pipeline_activity_child_activities_executed(request: pytest.FixtureRequest) -> None: 7 | # Arrange 8 | test_framework = TestFramework( 9 | framework_type=TestFrameworkType.Fabric, 10 | root_folder_path=request.fspath.dirname, 11 | should_evaluate_child_pipelines=True, 12 | ) 13 | pipeline = test_framework.get_pipeline_by_name("set_version") 14 | 15 | # Act 16 | activities = test_framework.evaluate_pipeline( 17 | pipeline, 18 | [], 19 | ) 20 | activity = next(activities) 21 | 22 | # Assert 23 | assert activity is not None 24 | assert activity.name == "GetVersion" 25 | assert activity.type == "WebActivity" 26 | assert activity.type_properties["relativeUrl"] == "version" 27 | assert activity.type_properties["method"] == "GET" 28 | assert activity.all_properties["externalReferences"]["connection"] == "6d70b649-d684-439b-a9c2-d2bb5241cd39" 29 | activity.set_result(DependencyCondition.Succeeded, {"version": "1.2.3"}) 30 | 31 | activity = next(activities) 32 | assert activity is not None 33 | assert activity.name == "SetVersion" 34 | assert activity.type == "SetVariable" 35 | assert activity.type_properties["variableName"] == "version" 36 | assert activity.type_properties["value"].result == "1.2.3" 37 | 38 | with pytest.raises(StopIteration): 39 | next(activities) 40 | -------------------------------------------------------------------------------- /tests/functional/api/classes/state/test_run_state_api.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | from typing import List, Optional 3 | 4 | from data_factory_testing_framework.state import RunParameter 5 | 6 | from tests.functional import utils 7 | 8 | 9 | def test_run_state_api() -> None: 10 | # Arrange 11 | from data_factory_testing_framework.state import RunState 12 | 13 | # Act 14 | public_attributes = [attribute for attribute in dir(RunState) if not attribute.startswith("_")] 15 | 16 | # Assert 17 | assert public_attributes == [] 18 | 19 | 20 | def test_run_state_method_signatures() -> None: 21 | # Arrange 22 | from data_factory_testing_framework.state import RunState 23 | 24 | methods = [method[0] for method in inspect.getmembers(RunState, predicate=utils.is_public_method)] 25 | 26 | # Act 27 | method_signatures = {name: inspect.signature(getattr(RunState, name)) for name in methods} 28 | 29 | # Assert 30 | assert method_signatures == { 31 | "__init__": inspect.Signature( 32 | parameters=[ 33 | inspect.Parameter(name="self", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD), 34 | inspect.Parameter( 35 | name="parameters", 36 | kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, 37 | default=None, 38 | annotation=Optional[List[RunParameter]], 39 | ), 40 | ], 41 | return_annotation=None, 42 | ), 43 | } 44 | -------------------------------------------------------------------------------- /tests/functional/append_variable_pipeline/.platform: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://developer.microsoft.com/json-schemas/fabric/gitIntegration/platformProperties/2.0.0/schema.json", 3 | "metadata": { 4 | "type": "Pipeline", 5 | "displayName": "append-variable-test" 6 | }, 7 | "config": { 8 | "version": "2.0", 9 | "logicalId": 
"9c079e1a-3d08-43b7-aa02-eb0fb7154b11" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /tests/functional/append_variable_pipeline/pipeline-content.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "append-variable-test", 3 | "properties": { 4 | "activities": [ 5 | { 6 | "name": "Append variable1", 7 | "type": "AppendVariable", 8 | "dependsOn": [ 9 | { 10 | "activity": "Set variable1", 11 | "dependencyConditions": [ 12 | "Succeeded" 13 | ] 14 | } 15 | ], 16 | "typeProperties": { 17 | "variableName": "values", 18 | "value": { 19 | "value": "@pipeline().parameters.appended_value", 20 | "type": "Expression" 21 | } 22 | } 23 | }, 24 | { 25 | "name": "Set variable1", 26 | "type": "SetVariable", 27 | "dependsOn": [], 28 | "policy": { 29 | "secureOutput": false, 30 | "secureInput": false 31 | }, 32 | "typeProperties": { 33 | "variableName": "values", 34 | "value": { 35 | "value": "@pipeline().parameters.initial_value", 36 | "type": "Expression" 37 | } 38 | } 39 | } 40 | ], 41 | "parameters": { 42 | "initial_value": { 43 | "type": "array", 44 | "defaultValue": [] 45 | }, 46 | "appended_value": { 47 | "type": "int" 48 | } 49 | }, 50 | "variables": { 51 | "values": { 52 | "type": "Array", 53 | "defaultValue": [] 54 | } 55 | }, 56 | "annotations": [] 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /tests/functional/append_variable_pipeline/test_append_variable_activity_pipeline.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | import pytest 4 | from data_factory_testing_framework import TestFramework, TestFrameworkType 5 | from data_factory_testing_framework.models.activities import AppendVariableActivity, SetVariableActivity 6 | from data_factory_testing_framework.state import RunParameter, RunParameterType 7 | 8 | 9 | @pytest.mark.parametrize( 10 | "initial_value,appended_value,expected_value", 11 | [ 12 | ([1, 2], 3, [1, 2, 3]), 13 | ([], 1, [1]), 14 | ([4], 5, [4, 5]), 15 | ], 16 | ) 17 | def test_append_variable_activity( 18 | initial_value: List[int], appended_value: int, expected_value: List[int], request: pytest.FixtureRequest 19 | ) -> None: 20 | # Arrange 21 | test_framework = TestFramework( 22 | framework_type=TestFrameworkType.Fabric, 23 | root_folder_path=request.fspath.dirname, 24 | should_evaluate_child_pipelines=True, 25 | ) 26 | pipeline = test_framework.get_pipeline_by_name("append-variable-test") 27 | 28 | # Act 29 | activities = test_framework.evaluate_pipeline( 30 | pipeline, 31 | [ 32 | RunParameter(RunParameterType.Pipeline, "initial_value", initial_value), 33 | RunParameter(RunParameterType.Pipeline, "appended_value", appended_value), 34 | ], 35 | ) 36 | 37 | # Assert 38 | activity: SetVariableActivity = next(activities) 39 | assert activity.type == "SetVariable" 40 | assert activity.value.result == initial_value 41 | 42 | activity: AppendVariableActivity = next(activities) 43 | assert activity.type == "AppendVariable" 44 | assert activity.value.result == appended_value 45 | -------------------------------------------------------------------------------- /tests/functional/datafactory_element/test_evaluate_datafactory_element.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from data_factory_testing_framework.exceptions import ( 3 | DataFactoryElementEvaluationError, 4 | ParameterNotFoundError, 5 
| ) 6 | from data_factory_testing_framework.exceptions._user_error import UserError 7 | from data_factory_testing_framework.models import DataFactoryElement 8 | from data_factory_testing_framework.state import PipelineRunState 9 | 10 | 11 | def test_evaluate_datafactory_element() -> None: 12 | # Arrange 13 | expression = "@mul(2, 3)" 14 | state = PipelineRunState() 15 | data_factory_element = DataFactoryElement(expression) 16 | 17 | # Act 18 | result = data_factory_element.evaluate(state) 19 | 20 | # Assert 21 | assert result == 6 22 | 23 | 24 | def test_evaluate_datafactory_element_passes_user_error_through() -> None: 25 | # Arrange 26 | expression = "@pipeline().parameters.pipelineName" 27 | state = PipelineRunState() 28 | data_factory_element = DataFactoryElement(expression) 29 | 30 | # Act 31 | with pytest.raises(UserError) as e: 32 | data_factory_element.evaluate(state) 33 | 34 | # Assert 35 | assert isinstance(e.value, ParameterNotFoundError) 36 | 37 | 38 | def test_evaluate_datafactory_element_raises_technical_errors() -> None: 39 | # Arrange 40 | expression = "@FAULTY_EXPRESSION()" 41 | state = PipelineRunState() 42 | data_factory_element = DataFactoryElement(expression) 43 | 44 | # Act 45 | with pytest.raises(DataFactoryElementEvaluationError): 46 | data_factory_element.evaluate(state) 47 | -------------------------------------------------------------------------------- /tests/functional/execute_child_pipeline/pipeline/child.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "child", 3 | "properties": { 4 | "activities": [ 5 | { 6 | "name": "API Call", 7 | "type": "WebActivity", 8 | "dependsOn": [], 9 | "policy": { 10 | "timeout": "0.12:00:00", 11 | "retry": 0, 12 | "retryIntervalInSeconds": 30, 13 | "secureOutput": false, 14 | "secureInput": false 15 | }, 16 | "userProperties": [], 17 | "typeProperties": { 18 | "url": { 19 | "value": "@pipeline().parameters.Url", 20 | "type": "Expression" 21 | }, 22 | "method": "POST", 23 | "body": { 24 | "value": "@pipeline().parameters.Body", 25 | "type": "Expression" 26 | } 27 | } 28 | } 29 | ], 30 | "parameters": { 31 | "Url": { 32 | "type": "string" 33 | }, 34 | "Body": { 35 | "type": "string" 36 | } 37 | }, 38 | "folder": { 39 | "name": "tests" 40 | }, 41 | "annotations": [] 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /tests/functional/execute_child_pipeline/pipeline/main.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "main", 3 | "properties": { 4 | "activities": [ 5 | { 6 | "name": "Execute Child pipeline", 7 | "type": "ExecutePipeline", 8 | "dependsOn": [], 9 | "userProperties": [], 10 | "typeProperties": { 11 | "pipeline": { 12 | "referenceName": "child", 13 | "type": "PipelineReference" 14 | }, 15 | "waitOnCompletion": true, 16 | "parameters": { 17 | "Url": { 18 | "value": "@pipeline().parameters.Url", 19 | "type": "Expression" 20 | }, 21 | "Body": { 22 | "value": "@pipeline().parameters.Body", 23 | "type": "Expression" 24 | } 25 | } 26 | } 27 | } 28 | ], 29 | "parameters": { 30 | "Url": { 31 | "type": "string" 32 | }, 33 | "Body": { 34 | "type": "string" 35 | } 36 | }, 37 | "folder": { 38 | "name": "tests" 39 | }, 40 | "annotations": [] 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /tests/functional/execute_child_pipeline/test_execute_child_pipeline_activity.py: 
-------------------------------------------------------------------------------- 1 | import pytest 2 | from data_factory_testing_framework import TestFramework, TestFrameworkType 3 | from data_factory_testing_framework.exceptions import PipelineNotFoundError 4 | from data_factory_testing_framework.state import RunParameter, RunParameterType 5 | 6 | 7 | def test_execute_pipeline_activity_child_activities_executed(request: pytest.FixtureRequest) -> None: 8 | # Arrange 9 | test_framework = TestFramework( 10 | framework_type=TestFrameworkType.DataFactory, 11 | root_folder_path=request.fspath.dirname, 12 | should_evaluate_child_pipelines=True, 13 | ) 14 | pipeline = test_framework.get_pipeline_by_name("main") 15 | 16 | # Act 17 | activities = test_framework.evaluate_pipeline( 18 | pipeline, 19 | [ 20 | RunParameter(RunParameterType.Pipeline, "Url", "https://example.com"), 21 | RunParameter(RunParameterType.Pipeline, "Body", '{ "key": "value" }'), 22 | ], 23 | ) 24 | child_web_activity = next(activities) 25 | 26 | # Assert 27 | assert child_web_activity is not None 28 | assert child_web_activity.name == "API Call" 29 | assert child_web_activity.type_properties["url"].result == "https://example.com" 30 | assert child_web_activity.type_properties["body"].result == '{ "key": "value" }' 31 | 32 | with pytest.raises(StopIteration): 33 | next(activities) 34 | 35 | 36 | def test_execute_pipeline_activity_evaluate_child_pipelines_child_pipeline_not_known_exception_thrown( 37 | request: pytest.FixtureRequest, 38 | ) -> None: 39 | # Arrange 40 | test_framework = TestFramework( 41 | framework_type=TestFrameworkType.DataFactory, 42 | root_folder_path=request.fspath.dirname, 43 | should_evaluate_child_pipelines=True, 44 | ) 45 | test_framework._repository.pipelines.remove(test_framework._repository.get_pipeline_by_name("child")) 46 | pipeline = test_framework.get_pipeline_by_name("main") 47 | 48 | # Act & Assert 49 | with pytest.raises(PipelineNotFoundError) as exception_info: 50 | next( 51 | test_framework.evaluate_pipeline( 52 | pipeline, 53 | [ 54 | RunParameter(RunParameterType.Pipeline, "Url", "https://example.com"), 55 | RunParameter(RunParameterType.Pipeline, "Body", '{ "key": "value" }'), 56 | ], 57 | ), 58 | ) 59 | 60 | assert exception_info.value.args[0] == "Pipeline with name: 'child' not found" 61 | -------------------------------------------------------------------------------- /tests/functional/filter_activity_pipeline/.platform: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://developer.microsoft.com/json-schemas/fabric/gitIntegration/platformProperties/2.0.0/schema.json", 3 | "metadata": { 4 | "type": "Pipeline", 5 | "displayName": "filter-test" 6 | }, 7 | "config": { 8 | "version": "2.0", 9 | "logicalId": "9c079e1a-3d08-43b7-aa02-eb0fb7154b11" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /tests/functional/filter_activity_pipeline/pipeline-content.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "filter-test", 3 | "properties": { 4 | "activities": [ 5 | { 6 | "name": "Filter1", 7 | "type": "Filter", 8 | "dependsOn": [], 9 | "typeProperties": { 10 | "items": { 11 | "value": "@pipeline().parameters.input_values", 12 | "type": "Expression" 13 | }, 14 | "condition": { 15 | "value": "@lessOrEquals(item(), 3)", 16 | "type": "Expression" 17 | } 18 | } 19 | }, 20 | { 21 | "name": "Set variable1", 22 | "type": "SetVariable", 23 | 
"dependsOn": [ 24 | { 25 | "activity": "Filter1", 26 | "dependencyConditions": [ 27 | "Succeeded" 28 | ] 29 | } 30 | ], 31 | "policy": { 32 | "secureOutput": false, 33 | "secureInput": false 34 | }, 35 | "typeProperties": { 36 | "variableName": "filtered_values", 37 | "value": { 38 | "value": "@activity('Filter1').output.value", 39 | "type": "Expression" 40 | } 41 | } 42 | } 43 | ], 44 | "parameters": { 45 | "input_values": { 46 | "type": "array", 47 | "defaultValue": [ 48 | 1, 49 | 2, 50 | 3, 51 | 4, 52 | 5 53 | ] 54 | } 55 | }, 56 | "variables": { 57 | "filtered_values": { 58 | "type": "Array" 59 | } 60 | }, 61 | "annotations": [] 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /tests/functional/filter_activity_pipeline/test_filter_activity_pipeline.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from data_factory_testing_framework import TestFramework, TestFrameworkType 3 | from data_factory_testing_framework.models.activities import Activity, FilterActivity 4 | from data_factory_testing_framework.state import RunParameter, RunParameterType 5 | 6 | 7 | @pytest.mark.parametrize( 8 | "input_values,expected_filtered_values", 9 | [ 10 | ([1, 2, 3, 4, 5], [1, 2, 3]), 11 | ([], []), 12 | ([4], []), 13 | ([3, 4, 5, 6], [3]), 14 | ([4, 5, 6], []), 15 | ([-1, 3, 4], [-1, 3]), 16 | ], 17 | ) 18 | def test_filter_activity(input_values: [], expected_filtered_values: [], request: pytest.FixtureRequest) -> None: 19 | # Arrange 20 | test_framework = TestFramework( 21 | framework_type=TestFrameworkType.Fabric, 22 | root_folder_path=request.fspath.dirname, 23 | should_evaluate_child_pipelines=True, 24 | ) 25 | pipeline = test_framework.get_pipeline_by_name("filter-test") 26 | 27 | # Act 28 | activities = test_framework.evaluate_pipeline( 29 | pipeline, 30 | [ 31 | RunParameter(RunParameterType.Pipeline, "input_values", input_values), 32 | ], 33 | ) 34 | 35 | # Assert 36 | activity: FilterActivity = next(activities) 37 | assert activity.type == "Filter" 38 | assert activity.items.result == input_values 39 | assert activity.output["value"] == expected_filtered_values 40 | 41 | activity: Activity = next(activities) 42 | assert activity.type == "SetVariable" 43 | assert activity.type_properties["value"].result == expected_filtered_values 44 | -------------------------------------------------------------------------------- /tests/functional/switch_activity_pipeline/pipeline/switchtest.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "switchtest", 3 | "properties": { 4 | "activities": [ 5 | { 6 | "name": "Switch1", 7 | "type": "Switch", 8 | "dependsOn": [], 9 | "userProperties": [], 10 | "typeProperties": { 11 | "on": { 12 | "value": "@pipeline().parameters.current_value", 13 | "type": "Expression" 14 | }, 15 | "cases": [ 16 | { 17 | "value": "case_1", 18 | "activities": [ 19 | { 20 | "name": "Set variable2", 21 | "type": "SetVariable", 22 | "dependsOn": [], 23 | "policy": { 24 | "secureOutput": false, 25 | "secureInput": false 26 | }, 27 | "userProperties": [], 28 | "typeProperties": { 29 | "variableName": "case_hit", 30 | "value": "case_1_hit" 31 | } 32 | } 33 | ] 34 | }, 35 | { 36 | "value": "case_2", 37 | "activities": [ 38 | { 39 | "name": "Set variable3", 40 | "type": "SetVariable", 41 | "dependsOn": [], 42 | "policy": { 43 | "secureOutput": false, 44 | "secureInput": false 45 | }, 46 | "userProperties": [], 47 | "typeProperties": { 48 | 
"variableName": "case_hit", 49 | "value": "case_2_hit" 50 | } 51 | } 52 | ] 53 | } 54 | ], 55 | "defaultActivities": [ 56 | { 57 | "name": "Set variable1", 58 | "type": "SetVariable", 59 | "dependsOn": [], 60 | "policy": { 61 | "secureOutput": false, 62 | "secureInput": false 63 | }, 64 | "userProperties": [], 65 | "typeProperties": { 66 | "variableName": "case_hit", 67 | "value": "default_hit" 68 | } 69 | } 70 | ] 71 | } 72 | } 73 | ], 74 | "parameters": { 75 | "current_value": { 76 | "type": "string", 77 | "defaultValue": "1" 78 | } 79 | }, 80 | "variables": { 81 | "case_hit": { 82 | "type": "String" 83 | } 84 | }, 85 | "annotations": [], 86 | "lastModifiedByObjectId": "80311eb5-b33b-4d7f-bfa3-879f8c8261c1", 87 | "lastPublishTime": "2023-11-23T08:44:44Z" 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /tests/functional/switch_activity_pipeline/test_switch_activity_pipeline.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from data_factory_testing_framework import TestFramework, TestFrameworkType 3 | from data_factory_testing_framework.state import RunParameter, RunParameterType 4 | 5 | 6 | @pytest.mark.parametrize( 7 | "on_value,expected_outcome", 8 | [ 9 | ("case_1", "case_1_hit"), 10 | ("case_2", "case_2_hit"), 11 | ("case_3", "default_hit"), 12 | ("case_4", "default_hit"), 13 | ("case_anything", "default_hit"), 14 | ], 15 | ) 16 | def test_switch_activity(on_value: str, expected_outcome: str, request: pytest.FixtureRequest) -> None: 17 | # Arrange 18 | test_framework = TestFramework( 19 | framework_type=TestFrameworkType.DataFactory, 20 | root_folder_path=request.fspath.dirname, 21 | should_evaluate_child_pipelines=True, 22 | ) 23 | pipeline = test_framework.get_pipeline_by_name("switchtest") 24 | 25 | # Act 26 | activities = test_framework.evaluate_pipeline( 27 | pipeline, 28 | [ 29 | RunParameter(RunParameterType.Pipeline, "current_value", on_value), 30 | ], 31 | ) 32 | 33 | # Assert 34 | activity = next(activities) 35 | assert activity.type == "SetVariable" 36 | assert activity.type_properties["value"] == expected_outcome 37 | -------------------------------------------------------------------------------- /tests/functional/test_framework/data/data_factory/pipeline/default_variables.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "default_variables", 3 | "properties": { 4 | "activities": [ 5 | { 6 | "name": "Set outputStringVar", 7 | "type": "SetVariable", 8 | "dependsOn": [], 9 | "policy": { 10 | "secureOutput": false, 11 | "secureInput": false 12 | }, 13 | "userProperties": [], 14 | "typeProperties": { 15 | "variableName": "pipelineReturnValue", 16 | "value": [ 17 | { 18 | "key": "outputStringVar", 19 | "value": { 20 | "type": "Expression", 21 | "content": "@if(equals(variables('stringVar'), null), 'is null', concat('is not null: ', variables('stringVar')))\n" 22 | } 23 | } 24 | ], 25 | "setSystemVariable": true 26 | } 27 | }, 28 | { 29 | "name": "Set outputIntVar", 30 | "type": "SetVariable", 31 | "dependsOn": [], 32 | "policy": { 33 | "secureOutput": false, 34 | "secureInput": false 35 | }, 36 | "userProperties": [], 37 | "typeProperties": { 38 | "variableName": "pipelineReturnValue", 39 | "value": [ 40 | { 41 | "key": "outputIntVar", 42 | "value": { 43 | "type": "Expression", 44 | "content": "@if(equals(variables('intVar'), null), 'is null', concat('is not null: ', variables('intVar')))\n" 45 | } 46 | } 47 | ], 
48 | "setSystemVariable": true 49 | } 50 | }, 51 | { 52 | "name": "Set outputBoolVar", 53 | "type": "SetVariable", 54 | "dependsOn": [], 55 | "policy": { 56 | "secureOutput": false, 57 | "secureInput": false 58 | }, 59 | "userProperties": [], 60 | "typeProperties": { 61 | "variableName": "pipelineReturnValue", 62 | "value": [ 63 | { 64 | "key": "outputBoolVar", 65 | "value": { 66 | "type": "Expression", 67 | "content": "@if(equals(variables('boolVar'), null), 'is null', concat('is not null: ', variables('boolVar')))\n" 68 | } 69 | } 70 | ], 71 | "setSystemVariable": true 72 | } 73 | }, 74 | { 75 | "name": "Set outputArrayVar", 76 | "type": "SetVariable", 77 | "dependsOn": [], 78 | "policy": { 79 | "secureOutput": false, 80 | "secureInput": false 81 | }, 82 | "userProperties": [], 83 | "typeProperties": { 84 | "variableName": "pipelineReturnValue", 85 | "value": [ 86 | { 87 | "key": "outputArrayVar", 88 | "value": { 89 | "type": "Expression", 90 | "content": "@if(equals(variables('arrayVar'), null), 'is null', concat('is not null: ', variables('arrayVar')))\n" 91 | } 92 | } 93 | ], 94 | "setSystemVariable": true 95 | } 96 | } 97 | ], 98 | "variables": { 99 | "stringVar": { 100 | "type": "String" 101 | }, 102 | "intVar": { 103 | "type": "Integer" 104 | }, 105 | "boolVar": { 106 | "type": "Boolean" 107 | }, 108 | "arrayVar": { 109 | "type": "Array" 110 | } 111 | }, 112 | "annotations": [] 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /tests/functional/test_framework/data/fabric/.platform: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://developer.microsoft.com/json-schemas/fabric/gitIntegration/platformProperties/2.0.0/schema.json", 3 | "metadata": { 4 | "type": "DataPipeline", 5 | "displayName": "ExamplePipeline" 6 | }, 7 | "config": { 8 | "version": "2.0", 9 | "logicalId": "c7986cc7-a6df-45fc-b4b6-dfc3cbddf2a6" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /tests/functional/test_framework/data/fabric/pipeline-content.json: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/tests/functional/test_framework/data/fabric/pipeline-content.json -------------------------------------------------------------------------------- /tests/functional/test_framework/data/synapse/pipeline/set_date.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "set_date", 3 | "properties": { 4 | "activities": [ 5 | { 6 | "name": "Set datetime", 7 | "type": "SetVariable", 8 | "dependsOn": [], 9 | "policy": { 10 | "secureOutput": false, 11 | "secureInput": false 12 | }, 13 | "userProperties": [], 14 | "typeProperties": { 15 | "variableName": "Datetime", 16 | "value": { 17 | "value": "@utcnow()", 18 | "type": "Expression" 19 | } 20 | } 21 | } 22 | ], 23 | "variables": { 24 | "Datetime": { 25 | "type": "String" 26 | } 27 | }, 28 | "annotations": [] 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /tests/functional/test_framework/test_test_framework.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | from data_factory_testing_framework import TestFramework, TestFrameworkType 5 | 6 | 7 | @pytest.mark.parametrize( 8 | "framework_type, 
example_folder, pipeline_id", 9 | [ 10 | (TestFrameworkType.DataFactory, "data_factory", "default_variables"), 11 | (TestFrameworkType.Synapse, "synapse", "set_date"), 12 | (TestFrameworkType.Fabric, "fabric", "c7986cc7-a6df-45fc-b4b6-dfc3cbddf2a6"), 13 | ], 14 | ) 15 | def test_initializing_test_framework_should_set_framework_type_and_repository( 16 | framework_type: TestFrameworkType, example_folder: str, pipeline_id: str, request: pytest.FixtureRequest 17 | ) -> None: 18 | # Arrange & Act 19 | root_folder_path = os.path.join(request.fspath.dirname, "data", example_folder) 20 | test_framework = TestFramework(framework_type, root_folder_path=root_folder_path) 21 | 22 | # Assert 23 | assert test_framework._framework_type == framework_type 24 | assert test_framework._repository is not None 25 | assert len(test_framework._repository.pipelines) == 1 26 | assert test_framework._repository.pipelines[0].pipeline_id == pipeline_id 27 | -------------------------------------------------------------------------------- /tests/functional/utf_16_encoding/.platform: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://developer.microsoft.com/json-schemas/fabric/gitIntegration/platformProperties/2.0.0/schema.json", 3 | "metadata": { 4 | "type": "Pipeline", 5 | "displayName": "utf_16_encoding" 6 | }, 7 | "config": { 8 | "version": "2.0", 9 | "logicalId": "9c079e1a-3d08-43b7-aa02-eb0fb7154b11" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /tests/functional/utf_16_encoding/pipeline-content.json: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/data-factory-testing-framework/d72a73cd52147360bce62e6d59d6f7609d9db065/tests/functional/utf_16_encoding/pipeline-content.json -------------------------------------------------------------------------------- /tests/functional/utf_16_encoding/test_encoding_support.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from data_factory_testing_framework import TestFramework, TestFrameworkType 3 | 4 | 5 | def test_fabric_supports_utf_16_le_encoded_pipeline_content(request: pytest.FixtureRequest) -> None: 6 | # Arrange 7 | test_framework = TestFramework( 8 | framework_type=TestFrameworkType.Fabric, 9 | root_folder_path=request.fspath.dirname, 10 | should_evaluate_child_pipelines=True, 11 | ) 12 | 13 | # Act 14 | pipeline = test_framework.get_pipeline_by_name("utf_16_encoding") 15 | 16 | # Assert 17 | assert pipeline is not None 18 | assert pipeline.name == "utf_16_encoding" 19 | -------------------------------------------------------------------------------- /tests/functional/utils.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | from typing import Callable 3 | 4 | 5 | def is_public_module(module: object) -> bool: 6 | if not inspect.ismodule(module): 7 | return False 8 | # only get the last part of the module name 9 | module_name = module.__name__.split(".")[-1] 10 | return not module_name.startswith("_") 11 | 12 | 13 | def is_public_method(method: object) -> bool: 14 | if not inspect.isfunction(method): 15 | return False 16 | return method.__name__ == "__init__" or not method.__name__.startswith("_") 17 | 18 | 19 | def is_public_class(class_: object) -> bool: 20 | if not inspect.isclass(class_): 21 | return False 22 | return not class_.__name__.startswith("_") 23 | 24 | 25 | 
def get_public_members(inspectable: object, predicate: Callable) -> list[str]: 26 | member_names = [module[0] for module in inspect.getmembers(inspectable, predicate=predicate)] 27 | # getmembers filters with predicate on the actual class - for modules we need to filter names again 28 | return list(filter(lambda x: not x.startswith("_"), member_names)) 29 | 30 | 31 | def is_property(method: object) -> bool: 32 | if isinstance(method, property): 33 | return True 34 | return False 35 | -------------------------------------------------------------------------------- /tests/functional/variables_default_value/pipeline/default_variables.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "default_variables", 3 | "properties": { 4 | "activities": [ 5 | { 6 | "name": "Set outputStringVar", 7 | "type": "SetVariable", 8 | "dependsOn": [], 9 | "policy": { 10 | "secureOutput": false, 11 | "secureInput": false 12 | }, 13 | "userProperties": [], 14 | "typeProperties": { 15 | "variableName": "pipelineReturnValue", 16 | "value": [ 17 | { 18 | "key": "outputStringVar", 19 | "value": { 20 | "type": "Expression", 21 | "content": "@if(equals(variables('stringVar'), null), 'is null', concat('is not null: ', variables('stringVar')))\n" 22 | } 23 | } 24 | ], 25 | "setSystemVariable": true 26 | } 27 | }, 28 | { 29 | "name": "Set outputIntVar", 30 | "type": "SetVariable", 31 | "dependsOn": [], 32 | "policy": { 33 | "secureOutput": false, 34 | "secureInput": false 35 | }, 36 | "userProperties": [], 37 | "typeProperties": { 38 | "variableName": "pipelineReturnValue", 39 | "value": [ 40 | { 41 | "key": "outputIntVar", 42 | "value": { 43 | "type": "Expression", 44 | "content": "@if(equals(variables('intVar'), null), 'is null', concat('is not null: ', variables('intVar')))\n" 45 | } 46 | } 47 | ], 48 | "setSystemVariable": true 49 | } 50 | }, 51 | { 52 | "name": "Set outputBoolVar", 53 | "type": "SetVariable", 54 | "dependsOn": [], 55 | "policy": { 56 | "secureOutput": false, 57 | "secureInput": false 58 | }, 59 | "userProperties": [], 60 | "typeProperties": { 61 | "variableName": "pipelineReturnValue", 62 | "value": [ 63 | { 64 | "key": "outputBoolVar", 65 | "value": { 66 | "type": "Expression", 67 | "content": "@if(equals(variables('boolVar'), null), 'is null', concat('is not null: ', variables('boolVar')))\n" 68 | } 69 | } 70 | ], 71 | "setSystemVariable": true 72 | } 73 | }, 74 | { 75 | "name": "Set outputArrayVar", 76 | "type": "SetVariable", 77 | "dependsOn": [], 78 | "policy": { 79 | "secureOutput": false, 80 | "secureInput": false 81 | }, 82 | "userProperties": [], 83 | "typeProperties": { 84 | "variableName": "pipelineReturnValue", 85 | "value": [ 86 | { 87 | "key": "outputArrayVar", 88 | "value": { 89 | "type": "Expression", 90 | "content": "@if(equals(variables('arrayVar'), null), 'is null', concat('is not null: ', variables('arrayVar')))\n" 91 | } 92 | } 93 | ], 94 | "setSystemVariable": true 95 | } 96 | } 97 | ], 98 | "variables": { 99 | "stringVar": { 100 | "type": "String" 101 | }, 102 | "intVar": { 103 | "type": "Integer" 104 | }, 105 | "boolVar": { 106 | "type": "Boolean" 107 | }, 108 | "arrayVar": { 109 | "type": "Array" 110 | } 111 | }, 112 | "annotations": [] 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /tests/functional/variables_default_value/test_variables_default.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from 
data_factory_testing_framework import TestFramework, TestFrameworkType 3 | 4 | 5 | def test_string_default_variables(request: pytest.FixtureRequest) -> None: 6 | # Arrange 7 | test_framework = TestFramework( 8 | framework_type=TestFrameworkType.DataFactory, root_folder_path=request.fspath.dirname 9 | ) 10 | pipeline = test_framework.get_pipeline_by_name("default_variables") 11 | 12 | # Act 13 | activities = test_framework.evaluate_pipeline( 14 | pipeline, 15 | [], 16 | ) 17 | 18 | # Assert 19 | activity = next(activities) 20 | assert activity.name == "Set outputStringVar" 21 | assert activity.type_properties["value"][0]["key"] == "outputStringVar" 22 | assert activity.type_properties["value"][0]["value"].result == "is not null: " 23 | 24 | activity = next(activities) 25 | assert activity.name == "Set outputIntVar" 26 | assert activity.type_properties["value"][0]["key"] == "outputIntVar" 27 | assert activity.type_properties["value"][0]["value"].result == "is not null: 0" 28 | 29 | activity = next(activities) 30 | assert activity.name == "Set outputBoolVar" 31 | assert activity.type_properties["value"][0]["key"] == "outputBoolVar" 32 | assert activity.type_properties["value"][0]["value"].result == "is not null: False" 33 | 34 | activity = next(activities) 35 | assert activity.name == "Set outputArrayVar" 36 | assert activity.type_properties["value"][0]["key"] == "outputArrayVar" 37 | assert activity.type_properties["value"][0]["value"].result == "is not null: []" 38 | 39 | with pytest.raises(StopIteration): 40 | next(activities) 41 | -------------------------------------------------------------------------------- /tests/functional/xml/pipeline/xpath_example_08_a.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "xpath_example_08_a", 3 | "properties": { 4 | "activities": [ 5 | { 6 | "name": "Set XML String", 7 | "type": "SetVariable", 8 | "dependsOn": [], 9 | "policy": { 10 | "secureOutput": false, 11 | "secureInput": false 12 | }, 13 | "userProperties": [], 14 | "typeProperties": { 15 | "variableName": "xml", 16 | "value": "\nParis" 17 | } 18 | }, 19 | { 20 | "name": "Example 1 as string", 21 | "description": "[\n\t\t{\n\t\t\t\"$content-type\": \"application/xml;charset=utf-8\",\n\t\t\t\"$content\": \"PG5hbWU+R2FsYTwvbmFtZT4=\"\n\t\t},\n\t\t{\n\t\t\t\"$content-type\": \"application/xml;charset=utf-8\",\n\t\t\t\"$content\": \"PG5hbWU+SG9uZXljcmlzcDwvbmFtZT4=\"\n\t\t}\n]", 22 | "type": "SetVariable", 23 | "dependsOn": [ 24 | { 25 | "activity": "Set XML String", 26 | "dependencyConditions": [ 27 | "Succeeded" 28 | ] 29 | }, 30 | { 31 | "activity": "Set xpath", 32 | "dependencyConditions": [ 33 | "Succeeded" 34 | ] 35 | } 36 | ], 37 | "policy": { 38 | "secureOutput": false, 39 | "secureInput": false 40 | }, 41 | "userProperties": [], 42 | "typeProperties": { 43 | "variableName": "result_str", 44 | "value": { 45 | "value": "@concat(\n xpath(xml(variables('xml')), variables('xpath'))[0],\n string(length(xpath(xml(variables('xml')), variables('xpath'))))\n)", 46 | "type": "Expression" 47 | } 48 | } 49 | }, 50 | { 51 | "name": "Example 1 as array", 52 | "description": "[\n\t\t{\n\t\t\t\"$content-type\": \"application/xml;charset=utf-8\",\n\t\t\t\"$content\": \"PG5hbWU+R2FsYTwvbmFtZT4=\"\n\t\t},\n\t\t{\n\t\t\t\"$content-type\": \"application/xml;charset=utf-8\",\n\t\t\t\"$content\": \"PG5hbWU+SG9uZXljcmlzcDwvbmFtZT4=\"\n\t\t}\n]", 53 | "type": "SetVariable", 54 | "dependsOn": [ 55 | { 56 | "activity": "Set XML String", 57 | 
"dependencyConditions": [ 58 | "Succeeded" 59 | ] 60 | }, 61 | { 62 | "activity": "Set xpath", 63 | "dependencyConditions": [ 64 | "Succeeded" 65 | ] 66 | } 67 | ], 68 | "policy": { 69 | "secureOutput": false, 70 | "secureInput": false 71 | }, 72 | "userProperties": [], 73 | "typeProperties": { 74 | "variableName": "result_arr", 75 | "value": { 76 | "value": "@xpath(xml(variables('xml')), variables('xpath'))", 77 | "type": "Expression" 78 | } 79 | } 80 | }, 81 | { 82 | "name": "Set xpath", 83 | "type": "SetVariable", 84 | "dependsOn": [], 85 | "policy": { 86 | "secureOutput": false, 87 | "secureInput": false 88 | }, 89 | "userProperties": [], 90 | "typeProperties": { 91 | "variableName": "xpath", 92 | "value": "/*[name()=\"file\"]/*[name()=\"location\"]" 93 | } 94 | } 95 | ], 96 | "variables": { 97 | "xml": { 98 | "type": "String" 99 | }, 100 | "result": { 101 | "type": "Array" 102 | }, 103 | "decoded": { 104 | "type": "String" 105 | }, 106 | "expected_01": { 107 | "type": "String" 108 | }, 109 | "expected_00": { 110 | "type": "String" 111 | }, 112 | "result_str": { 113 | "type": "String" 114 | }, 115 | "result_arr": { 116 | "type": "Array" 117 | }, 118 | "xpath": { 119 | "type": "String" 120 | } 121 | }, 122 | "folder": { 123 | "name": "XML Tests" 124 | }, 125 | "annotations": [] 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /tests/functional/xml/test_xml.py: -------------------------------------------------------------------------------- 1 | import base64 2 | 3 | import pytest 4 | from data_factory_testing_framework import TestFramework, TestFrameworkType 5 | 6 | 7 | def test_batch_job_pipeline(request: pytest.FixtureRequest) -> None: 8 | # Arrange 9 | test_framework = TestFramework( 10 | framework_type=TestFrameworkType.DataFactory, root_folder_path=request.fspath.dirname 11 | ) 12 | pipeline = test_framework.get_pipeline_by_name("xpath_example_08_a") 13 | 14 | # Act 15 | activities = test_framework.evaluate_pipeline( 16 | pipeline, 17 | [], 18 | ) 19 | 20 | # Assert 21 | activity = next(activities) 22 | assert activity.name == "Set XML String" 23 | 24 | activity = next(activities) 25 | assert activity.name == "Set xpath" 26 | 27 | activity = next(activities) 28 | assert activity.name == "Example 1 as string" 29 | 30 | activity = next(activities) 31 | 32 | xml_array = activity.value.result 33 | 34 | assert isinstance(xml_array, list) 35 | assert len(xml_array) == 1 36 | assert ( 37 | base64.b64decode(xml_array[0]["$content"]).decode("utf-8") 38 | == 'Paris' 39 | ) 40 | assert xml_array[0]["$content-type"] == "application/xml;charset=utf-8" 41 | -------------------------------------------------------------------------------- /tests/unit/models/activities/base/test_activity.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from data_factory_testing_framework import TestFramework, TestFrameworkType 3 | from data_factory_testing_framework.models import DataFactoryElement 4 | from data_factory_testing_framework.models.activities import Activity, ExecutePipelineActivity 5 | from data_factory_testing_framework.state import DependencyCondition, PipelineRunState, RunParameter, RunParameterType 6 | 7 | TestFramework(framework_type=TestFrameworkType.Fabric) 8 | 9 | 10 | @pytest.mark.parametrize( 11 | "required_condition, actual_condition, expected", 12 | [ 13 | ("Succeeded", "Succeeded", True), 14 | ("Failed", "Succeeded", False), 15 | ("Skipped", "Succeeded", False), 16 | 
("Completed", "Succeeded", True), 17 | ("Failed", "Failed", True), 18 | ("Skipped", "Failed", False), 19 | ("Completed", "Failed", True), 20 | ("Skipped", "Skipped", True), 21 | ("Completed", "Skipped", True), 22 | ("Completed", "Completed", True), 23 | ], 24 | ) 25 | def test_dependency_conditions_when_called_returns_expected( 26 | required_condition: str, 27 | actual_condition: str, 28 | expected: bool, 29 | ) -> None: 30 | # Arrange 31 | pipeline_activity = Activity( 32 | name="activity", 33 | type="WebActivity", 34 | dependsOn=[ 35 | { 36 | "activity": "otherActivity", 37 | "dependencyConditions": [required_condition], 38 | } 39 | ], 40 | ) 41 | 42 | state = PipelineRunState() 43 | state.add_activity_result("otherActivity", actual_condition) 44 | 45 | # Act 46 | result = pipeline_activity.are_dependency_condition_met(state) 47 | 48 | # Assert 49 | assert result == expected 50 | 51 | 52 | def test_dependency_condition_completed_is_false_when_no_activity_result_is_set() -> None: 53 | # Arrange 54 | pipeline_activity = Activity( 55 | name="activity", 56 | type="WebActivity", 57 | dependsOn=[ 58 | { 59 | "activity": "otherActivity", 60 | "dependencyConditions": ["Completed"], 61 | } 62 | ], 63 | ) 64 | state = PipelineRunState() 65 | 66 | # Act 67 | result = pipeline_activity.are_dependency_condition_met(state) 68 | 69 | # Assert 70 | assert result is False 71 | 72 | 73 | def test_evaluate_when_no_status_is_set_should_set_status_to_succeeded() -> None: 74 | # Arrange 75 | pipeline_activity = Activity(name="activity", type="WebActivity", dependsOn=[]) 76 | state = PipelineRunState() 77 | 78 | # Act 79 | pipeline_activity.evaluate(state) 80 | 81 | # Assert 82 | assert pipeline_activity.status == DependencyCondition.Succeeded 83 | 84 | 85 | def test_evaluate_is_evaluating_expressions_inside_dict() -> None: 86 | # Arrange 87 | pipeline_activity = ExecutePipelineActivity( 88 | name="activity", 89 | typeProperties={ 90 | "pipeline": {"referenceName": "dummy"}, 91 | "parameters": { 92 | "url": DataFactoryElement("@pipeline().parameters.url"), 93 | }, 94 | }, 95 | depends_on=[], 96 | ) 97 | state = PipelineRunState( 98 | parameters=[ 99 | RunParameter(RunParameterType.Pipeline, "url", "example.com"), 100 | ], 101 | ) 102 | 103 | # Act 104 | pipeline_activity.evaluate(state) 105 | 106 | # Assert 107 | assert pipeline_activity.type_properties["parameters"]["url"].result == "example.com" 108 | -------------------------------------------------------------------------------- /tests/unit/models/activities/control_activities/test_filter_activity.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from data_factory_testing_framework import TestFramework, TestFrameworkType 3 | from data_factory_testing_framework.exceptions._control_activity_expression_evaluated_not_to_expected_type import ( 4 | ControlActivityExpressionEvaluatedNotToExpectedTypeError, 5 | ) 6 | from data_factory_testing_framework.models import DataFactoryElement, DataFactoryObjectType, Pipeline 7 | from data_factory_testing_framework.models.activities import FilterActivity 8 | from data_factory_testing_framework.state import PipelineRunState, RunParameter, RunParameterType 9 | 10 | 11 | @pytest.mark.parametrize( 12 | "input_values,expected_filtered_values", 13 | [ 14 | ([1, 2, 3, 4, 5], [1, 2, 3]), 15 | ([], []), 16 | ([4], []), 17 | ([3, 4, 5, 6], [3]), 18 | ([4, 5, 6], []), 19 | ([-1, 3, 4], [-1, 3]), 20 | ], 21 | ) 22 | def test_filter_activity_on_range_of_values(input_values: [], 
expected_filtered_values: []) -> None: 23 | # Arrange 24 | test_framework = TestFramework(framework_type=TestFrameworkType.Fabric) 25 | pipeline = Pipeline( 26 | pipeline_id="some-id", 27 | name="pipeline", 28 | parameters={ 29 | "input_values": { 30 | "type": "Array", 31 | "defaultValue": [], 32 | }, 33 | }, 34 | variables={}, 35 | activities=[ 36 | FilterActivity( 37 | name="FilterActivity", 38 | typeProperties={ 39 | "items": DataFactoryElement("@pipeline().parameters.input_values"), 40 | "condition": DataFactoryElement("@lessOrEquals(item(), 3)"), 41 | }, 42 | ), 43 | ], 44 | ) 45 | 46 | # Act 47 | activities = test_framework.evaluate_pipeline( 48 | pipeline, 49 | [ 50 | RunParameter(RunParameterType.Pipeline, "input_values", input_values), 51 | ], 52 | ) 53 | 54 | # Assert 55 | activity = next(activities) 56 | assert activity.type == "Filter" 57 | assert activity.type_properties["items"].result == input_values 58 | assert activity.output["value"] == expected_filtered_values 59 | 60 | 61 | @pytest.mark.parametrize(("evaluated_value"), [1, 1.1, "string-value", {}, True, None]) 62 | def test_filter_activity_evaluated_raises_error_when_evaluated_value_is_not_a_list( 63 | evaluated_value: DataFactoryObjectType 64 | ) -> None: 65 | # Arrange 66 | state = PipelineRunState(parameters=[RunParameter(RunParameterType.Pipeline, "input_values", evaluated_value)]) 67 | filter_activity = FilterActivity( 68 | name="FilterActivity", 69 | typeProperties={ 70 | "items": DataFactoryElement("@pipeline().parameters.input_values"), 71 | "condition": DataFactoryElement("@lessOrEquals(item(), 3)"), 72 | }, 73 | ) 74 | 75 | # Act 76 | with pytest.raises(ControlActivityExpressionEvaluatedNotToExpectedTypeError) as ex_info: 77 | filter_activity.evaluate(state) 78 | 79 | assert ( 80 | ex_info.value.args[0] 81 | == "Iteration expression of Activity: 'FilterActivity' does not evaluate to the expected type: 'list'." 
82 | ) 83 | -------------------------------------------------------------------------------- /tests/unit/models/activities/control_activities/test_for_each_activity.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from data_factory_testing_framework import TestFramework, TestFrameworkType 3 | from data_factory_testing_framework.exceptions._control_activity_expression_evaluated_not_to_expected_type import ( 4 | ControlActivityExpressionEvaluatedNotToExpectedTypeError, 5 | ) 6 | from data_factory_testing_framework.models import DataFactoryElement, DataFactoryObjectType 7 | from data_factory_testing_framework.models.activities import ForEachActivity, SetVariableActivity 8 | from data_factory_testing_framework.state import PipelineRunState, PipelineRunVariable, RunParameter, RunParameterType 9 | 10 | 11 | def test_when_evaluate_child_activities_then_should_return_the_activity_with_item_expression_evaluated() -> None: 12 | # Arrange 13 | test_framework = TestFramework(TestFrameworkType.Fabric) 14 | for_each_activity = ForEachActivity( 15 | name="ForEachActivity", 16 | typeProperties={ 17 | "items": DataFactoryElement("@split('a,b,c', ',')"), 18 | }, 19 | activities=[ 20 | SetVariableActivity( 21 | name="setVariable", 22 | typeProperties={ 23 | "variableName": "variable", 24 | "value": DataFactoryElement("@item()"), 25 | }, 26 | depends_on=[], 27 | ), 28 | ], 29 | depends_on=[], 30 | ) 31 | state = PipelineRunState( 32 | variables=[ 33 | PipelineRunVariable(name="variable", default_value=""), 34 | ], 35 | ) 36 | 37 | # Act 38 | activities = test_framework.evaluate_activity(for_each_activity, state) 39 | 40 | # Assert 41 | set_variable_activity: SetVariableActivity = next(activities) 42 | assert set_variable_activity is not None 43 | assert set_variable_activity.name == "setVariable" 44 | assert set_variable_activity.type_properties["value"].result == "a" 45 | 46 | set_variable_activity = next(activities) 47 | assert set_variable_activity is not None 48 | assert set_variable_activity.name == "setVariable" 49 | assert set_variable_activity.type_properties["value"].result == "b" 50 | 51 | set_variable_activity = next(activities) 52 | assert set_variable_activity is not None 53 | assert set_variable_activity.name == "setVariable" 54 | assert set_variable_activity.type_properties["value"].result == "c" 55 | 56 | # Assert that there are no more activities 57 | with pytest.raises(StopIteration): 58 | next(activities) 59 | 60 | 61 | @pytest.mark.parametrize(("evaluated_value"), [1, 1.1, "string-value", {}, True, None]) 62 | def test_evaluated_raises_error_when_evaluated_value_is_not_a_list(evaluated_value: DataFactoryObjectType) -> None: 63 | # Arrange 64 | state = PipelineRunState(parameters=[RunParameter(RunParameterType.Pipeline, "input_values", evaluated_value)]) 65 | foreach_activity = ForEachActivity( 66 | name="ForEachActivity", 67 | typeProperties={ 68 | "items": DataFactoryElement("@pipeline().parameters.input_values"), 69 | }, 70 | activities=[], 71 | depends_on=[], 72 | ) 73 | 74 | # Act 75 | with pytest.raises(ControlActivityExpressionEvaluatedNotToExpectedTypeError) as ex_info: 76 | foreach_activity.evaluate(state) 77 | 78 | assert ( 79 | ex_info.value.args[0] 80 | == "Iteration expression of Activity: 'ForEachActivity' does not evaluate to the expected type: 'list'." 
81 | ) 82 | -------------------------------------------------------------------------------- /tests/unit/models/activities/control_activities/test_until_activity.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import Mock 2 | 3 | import pytest 4 | from data_factory_testing_framework import TestFramework, TestFrameworkType 5 | from data_factory_testing_framework.exceptions._control_activity_expression_evaluated_not_to_expected_type import ( 6 | ControlActivityExpressionEvaluatedNotToExpectedTypeError, 7 | ) 8 | from data_factory_testing_framework.models import DataFactoryElement, DataFactoryObjectType 9 | from data_factory_testing_framework.models.activities import SetVariableActivity, UntilActivity 10 | from data_factory_testing_framework.state import PipelineRunState, PipelineRunVariable, RunParameter, RunParameterType 11 | 12 | 13 | def test_when_evaluate_until_activity_should_repeat_until_expression_is_true(monkeypatch: pytest.MonkeyPatch) -> None: 14 | # Arrange 15 | test_framework = TestFramework(framework_type=TestFrameworkType.Fabric) 16 | until_activity = UntilActivity( 17 | name="UntilActivity", 18 | typeProperties={ 19 | "expression": DataFactoryElement("@equals(1, 1)"), 20 | }, 21 | activities=[ 22 | SetVariableActivity( 23 | name="setVariable", 24 | typeProperties={ 25 | "variableName": "variable", 26 | "value": DataFactoryElement("'1'"), 27 | }, 28 | depends_on=[], 29 | ), 30 | ], 31 | depends_on=[], 32 | ) 33 | 34 | state = PipelineRunState( 35 | variables=[ 36 | PipelineRunVariable(name="variable", default_value=""), 37 | ], 38 | ) 39 | 40 | # Act 41 | monkeypatch.setattr(until_activity.expression, "evaluate", lambda state: False) 42 | activities = test_framework.evaluate_activity(until_activity, state) 43 | 44 | # Assert 45 | set_variable_activity = next(activities) 46 | assert set_variable_activity is not None 47 | assert set_variable_activity.name == "setVariable" 48 | 49 | set_variable_activity = next(activities) 50 | assert set_variable_activity is not None 51 | assert set_variable_activity.name == "setVariable" 52 | 53 | monkeypatch.setattr(until_activity.expression, "evaluate", lambda state: True) 54 | 55 | # Assert that there are no more activities 56 | with pytest.raises(StopIteration): 57 | next(activities) 58 | 59 | 60 | def test_evaluate_pipeline_should_pass_iteration_item_to_child_activities() -> None: 61 | # Arrange 62 | state = PipelineRunState(variables=[PipelineRunVariable("variable", None)], iteration_item="some-item") 63 | until_activity = UntilActivity( 64 | name="UntilActivity", 65 | typeProperties={ 66 | "expression": DataFactoryElement("@equals(1,1)"), 67 | }, 68 | activities=[], 69 | ) 70 | evaluator = Mock(return_value=[]) 71 | 72 | # Act 73 | list(until_activity.evaluate_control_activities(state, evaluator)) 74 | 75 | # Assert 76 | assert evaluator.call_args[0][1].iteration_item == "some-item" 77 | 78 | 79 | @pytest.mark.parametrize(("evaluated_value"), [1, 1.1, "string-value", {}, [], None]) 80 | def test_evaluated_raises_error_when_evaluated_value_is_not_a_bool(evaluated_value: DataFactoryObjectType) -> None: 81 | # Arrange 82 | state = PipelineRunState(parameters=[RunParameter(RunParameterType.Pipeline, "input_values", evaluated_value)]) 83 | activity = UntilActivity( 84 | name="UntilActivity", 85 | typeProperties={ 86 | "expression": DataFactoryElement("@pipeline().parameters.input_values"), 87 | }, 88 | activities=[], 89 | ) 90 | evaluator = Mock(return_value=[]) 91 | 92 | # Act 93 | 
with pytest.raises(ControlActivityExpressionEvaluatedNotToExpectedTypeError) as ex_info: 94 | list(activity.evaluate_control_activities(state, evaluator)) 95 | 96 | assert ( 97 | ex_info.value.args[0] 98 | == "Iteration expression of Activity: 'UntilActivity' does not evaluate to the expected type: 'bool'." 99 | ) 100 | -------------------------------------------------------------------------------- /tests/unit/models/activities/test_append_variable_activity.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | import pytest 4 | from data_factory_testing_framework import TestFramework, TestFrameworkType 5 | from data_factory_testing_framework.exceptions import ( 6 | VariableBeingEvaluatedDoesNotExistError, 7 | ) 8 | from data_factory_testing_framework.models import DataFactoryElement 9 | from data_factory_testing_framework.models.activities import AppendVariableActivity 10 | from data_factory_testing_framework.state import PipelineRunState, PipelineRunVariable 11 | 12 | 13 | @pytest.mark.parametrize( 14 | "initial_value,appended_value,expected_value", 15 | [ 16 | ([1, 2], 3, [1, 2, "3"]), 17 | ([], 1, ["1"]), 18 | ([4], 5, [4, "5"]), 19 | ], 20 | ) 21 | def test_when_int_variable_appended_then_state_variable_should_be_set( 22 | initial_value: List[int], appended_value: int, expected_value: List[int] 23 | ) -> None: 24 | # Arrange 25 | TestFramework(framework_type=TestFrameworkType.Fabric) 26 | variable_name = "TestVariable" 27 | set_variable_activity = AppendVariableActivity( 28 | name="AppendVariableActivity", 29 | typeProperties={ 30 | "variableName": variable_name, 31 | "value": DataFactoryElement(str(appended_value)), 32 | }, 33 | ) 34 | state = PipelineRunState( 35 | variables=[ 36 | PipelineRunVariable(name=variable_name, default_value=initial_value), 37 | ], 38 | ) 39 | 40 | # Act 41 | set_variable_activity.evaluate(state) 42 | 43 | # Assert 44 | variable = state.get_variable_by_name(variable_name) 45 | assert variable.value == expected_value 46 | 47 | 48 | def test_when_unknown_variable_evaluated_then_should_raise_exception() -> None: 49 | # Arrange 50 | TestFramework(framework_type=TestFrameworkType.Fabric) 51 | variable_name = "TestVariable" 52 | set_variable_activity = AppendVariableActivity( 53 | name="AppendVariableActivity", 54 | typeProperties={ 55 | "variableName": variable_name, 56 | "value": DataFactoryElement("TestValue"), 57 | }, 58 | ) 59 | state = PipelineRunState() 60 | 61 | # Act 62 | with pytest.raises(VariableBeingEvaluatedDoesNotExistError) as exception_info: 63 | set_variable_activity.evaluate(state) 64 | 65 | # Assert 66 | assert exception_info.value.args[0] == "Variable being evaluated does not exist: TestVariable" 67 | -------------------------------------------------------------------------------- /tests/unit/models/activities/test_execute_pipeline_activity_parameters.py: -------------------------------------------------------------------------------- 1 | from data_factory_testing_framework.models import DataFactoryElement 2 | from data_factory_testing_framework.models.activities import ExecutePipelineActivity 3 | from data_factory_testing_framework.state import PipelineRunState, RunParameter, RunParameterType 4 | 5 | 6 | def test_execute_pipeline_activity_evaluates_parameters() -> None: 7 | # Arrange 8 | execute_pipeline_activity = ExecutePipelineActivity( 9 | name="ExecutePipelineActivity", 10 | typeProperties={ 11 | "parameters": { 12 | "url": 
DataFactoryElement("@pipeline().parameters.param1"), 13 | }, 14 | }, 15 | depends_on=[], 16 | ) 17 | state = PipelineRunState( 18 | parameters=[ 19 | RunParameter(name="param1", value="value1", parameter_type=RunParameterType.Pipeline), 20 | ], 21 | ) 22 | 23 | # Act 24 | activity = execute_pipeline_activity.evaluate(state) 25 | 26 | # Assert 27 | assert activity is not None 28 | assert activity.name == "ExecutePipelineActivity" 29 | assert activity.parameters["url"].result == "value1" 30 | 31 | 32 | def test_execute_pipeline_activity_evaluates_no_parameters() -> None: 33 | # Arrange 34 | execute_pipeline_activity = ExecutePipelineActivity( 35 | name="ExecutePipelineActivity", 36 | typeProperties={}, 37 | depends_on=[], 38 | ) 39 | state = PipelineRunState() 40 | 41 | # Act 42 | activity = execute_pipeline_activity.evaluate(state) 43 | 44 | # Assert 45 | assert activity is not None 46 | assert activity.name == "ExecutePipelineActivity" 47 | assert activity.parameters == {} 48 | -------------------------------------------------------------------------------- /tests/unit/models/activities/test_fail_activity.py: -------------------------------------------------------------------------------- 1 | from data_factory_testing_framework.models import DataFactoryElement 2 | from data_factory_testing_framework.models.activities import FailActivity 3 | from data_factory_testing_framework.state import DependencyCondition, PipelineRunState 4 | 5 | 6 | def test_fail_activity_evaluates_to_failed_result() -> None: 7 | # Arrange 8 | fail_activity = FailActivity( 9 | name="FailActivity", 10 | typeProperties={ 11 | "message": DataFactoryElement("@concat('Error code: ', '500')"), 12 | "errorCode": "500", 13 | }, 14 | depends_on=[], 15 | ) 16 | 17 | state = PipelineRunState() 18 | 19 | # Act 20 | activity = fail_activity.evaluate(state) 21 | 22 | # Assert 23 | assert activity is not None 24 | assert activity.name == "FailActivity" 25 | assert activity.status == DependencyCondition.FAILED 26 | assert activity.type_properties["message"].result == "Error code: 500" 27 | assert activity.type_properties["errorCode"] == "500" 28 | -------------------------------------------------------------------------------- /tests/unit/models/activities/test_set_variable_activity.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from data_factory_testing_framework import TestFramework, TestFrameworkType 3 | from data_factory_testing_framework.exceptions import ( 4 | VariableBeingEvaluatedDoesNotExistError, 5 | ) 6 | from data_factory_testing_framework.models import DataFactoryElement 7 | from data_factory_testing_framework.models.activities import SetVariableActivity 8 | from data_factory_testing_framework.state import PipelineRunState, PipelineRunVariable 9 | 10 | 11 | def test_when_string_variable_evaluated_then_state_variable_should_be_set() -> None: 12 | # Arrange 13 | TestFramework(framework_type=TestFrameworkType.Fabric) 14 | variable_name = "TestVariable" 15 | set_variable_activity = SetVariableActivity( 16 | name="SetVariableActivity", 17 | typeProperties={ 18 | "variableName": variable_name, 19 | "value": DataFactoryElement("TestValue"), 20 | }, 21 | ) 22 | state = PipelineRunState( 23 | variables=[ 24 | PipelineRunVariable(name=variable_name, default_value=""), 25 | ], 26 | ) 27 | 28 | # Act 29 | set_variable_activity.evaluate(state) 30 | 31 | # Assert 32 | variable = state.get_variable_by_name(variable_name) 33 | assert variable.value == "TestValue" 34 | 35 | 36 | 
def test_when_unknown_variable_evaluated_then_should_raise_exception() -> None: 37 | # Arrange 38 | TestFramework(framework_type=TestFrameworkType.Fabric) 39 | variable_name = "TestVariable" 40 | set_variable_activity = SetVariableActivity( 41 | name="SetVariableActivity", 42 | typeProperties={ 43 | "variableName": variable_name, 44 | "value": DataFactoryElement("TestValue"), 45 | }, 46 | ) 47 | state = PipelineRunState() 48 | 49 | # Act 50 | with pytest.raises(VariableBeingEvaluatedDoesNotExistError) as exception_info: 51 | set_variable_activity.evaluate(state) 52 | 53 | # Assert 54 | assert exception_info.value.args[0] == "Variable being evaluated does not exist: TestVariable" 55 | -------------------------------------------------------------------------------- /tests/unit/test__test_framework.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from data_factory_testing_framework import TestFramework, TestFrameworkType 3 | from data_factory_testing_framework.exceptions import ( 4 | NoRemainingPipelineActivitiesMeetDependencyConditionsError, 5 | ) 6 | from data_factory_testing_framework.models import DataFactoryElement, Pipeline 7 | from data_factory_testing_framework.models.activities import FailActivity, SetVariableActivity 8 | 9 | 10 | def test_circular_dependency_between_activities_should_throw_error() -> None: 11 | # Arrange 12 | test_framework = TestFramework(TestFrameworkType.Fabric) 13 | pipeline = Pipeline( 14 | pipeline_id="some-id", 15 | name="main", 16 | parameters={}, 17 | variables={}, 18 | activities=[ 19 | SetVariableActivity( 20 | name="setVariable1", 21 | variable_name="variable", 22 | typeProperties={ 23 | "variableName": "variable", 24 | "value": DataFactoryElement("'1'"), 25 | }, 26 | dependsOn=[ 27 | { 28 | "activity": "setVariable2", 29 | "dependencyConditions": [ 30 | "Succeeded", 31 | ], 32 | } 33 | ], 34 | ), 35 | SetVariableActivity( 36 | name="setVariable2", 37 | variable_name="variable", 38 | typeProperties={ 39 | "variableName": "variable", 40 | "value": DataFactoryElement("'1'"), 41 | }, 42 | dependsOn=[ 43 | { 44 | "activity": "setVariable1", 45 | "dependencyConditions": [ 46 | "Succeeded", 47 | ], 48 | } 49 | ], 50 | ), 51 | ], 52 | ) 53 | test_framework._repository.pipelines.append(pipeline) 54 | 55 | # Act & Assert 56 | with pytest.raises(NoRemainingPipelineActivitiesMeetDependencyConditionsError): 57 | next(test_framework.evaluate_pipeline(pipeline, [])) 58 | 59 | 60 | def test_fail_activity_halts_further_evaluation() -> None: 61 | # Arrange 62 | test_framework = TestFramework(TestFrameworkType.Fabric) 63 | pipeline = Pipeline( 64 | pipeline_id="some-id", 65 | name="main", 66 | parameters={}, 67 | variables={}, 68 | activities=[ 69 | SetVariableActivity( 70 | name="setVariable1", 71 | variable_name="variable", 72 | typeProperties={ 73 | "variableName": "variable", 74 | "value": DataFactoryElement("'1'"), 75 | }, 76 | dependsOn=[ 77 | { 78 | "activity": "failActivity", 79 | "dependencyConditions": [ 80 | "Succeeded", 81 | ], 82 | } 83 | ], 84 | ), 85 | FailActivity( 86 | name="failActivity", 87 | typeProperties={ 88 | "message": DataFactoryElement("@concat('Error code: ', '500')"), 89 | "errorCode": "500", 90 | }, 91 | dependsOn=[], 92 | ), 93 | ], 94 | ) 95 | test_framework._repository.pipelines.append(pipeline) 96 | 97 | # Act 98 | activities = test_framework.evaluate_pipeline(pipeline, []) 99 | 100 | # Assert 101 | activity = next(activities) 102 | assert activity is not None 103 | assert 
activity.name == "failActivity" 104 | assert activity.type == "Fail" 105 | assert activity.status == "Failed" 106 | assert activity.type_properties["message"].result == "Error code: 500" 107 | assert activity.type_properties["errorCode"] == "500" 108 | 109 | # Assert that there are no more activities 110 | with pytest.raises(StopIteration): 111 | next(activities) 112 | --------------------------------------------------------------------------------