├── .flake8
├── .gitignore
├── CHANGELOG.md
├── INSTALL.md
├── LICENSE
├── NOTICE.md
├── README.md
├── build-pipeline.yml
├── build-requirements.txt
├── build
    └── version_bumper.py
├── examples
    ├── azure_devops
    │   ├── multistage
    │   │   ├── CD-AzureAgent.yml
    │   │   ├── jobs
    │   │   │   ├── AcceptChangesJob.yaml
    │   │   │   ├── FetchTechnicalDebtJob.yaml
    │   │   │   ├── LifeTimeDeploymentJob.yaml
    │   │   │   ├── PublishTriggerManifestJob.yaml
    │   │   │   ├── RegressionTestingJob.yaml
    │   │   │   ├── TagWorkItemsJob-AzureAgent.yaml
    │   │   │   └── ValidateTechnicalDebtJob.yaml
    │   │   ├── scripts
    │   │   │   └── tech_debt_validation.py
    │   │   └── tasks
    │   │   │   ├── ApplyConfigurationValues.yaml
    │   │   │   └── InstallPythonPackage.yaml
    │   ├── windows_azure_pipeline_ci.yaml
    │   ├── windows_azure_task_group_deploy_to_destination_env.json
    │   └── windows_azure_task_group_deploy_to_destination_env_with_airgap.json
    ├── jenkins
    │   ├── Linux.Jenkinsfile
    │   ├── Windows-GlobalAgent-AirGap.Jenkinsfile
    │   ├── Windows-GlobalAgent.Jenkinsfile
    │   ├── Windows-LocalAgent.Jenkinsfile
    │   └── trigger_manifest
    │   │   ├── Linux-LocalAgent.Jenkinsfile
    │   │   └── Windows-LocalAgent.Jenkinsfile
    └── other_pipelines
    │   ├── Linux
    │       ├── build_test_endpoints.sh
    │       ├── deploy_apps_to_env.sh
    │       ├── fetch_lt_data.sh
    │       ├── install_dependencies.sh
    │       ├── running_test_endpoints.sh
    │       └── send_notifications_slack.sh
    │   ├── README.md
    │   └── Windows
    │       ├── build_test_endpoints.ps1
    │       ├── deploy_apps_to_env.ps1
    │       ├── fetch_lt_data.ps1
    │       ├── install_dependencies.ps1
    │       ├── running_test_endpoints.ps1
    │       └── send_notifications_slack.ps1
├── outsystems
    ├── architecture_dashboard
    │   ├── __init__.py
    │   ├── ad_base.py
    │   └── ad_tech_debt.py
    ├── bdd_framework
    │   ├── __init__.py
    │   ├── bdd_base.py
    │   └── bdd_runner.py
    ├── cicd_probe
    │   ├── __init__.py
    │   ├── cicd_base.py
    │   ├── cicd_dependencies.py
    │   └── cicd_scan.py
    ├── exceptions
    │   ├── __init__.py
    │   ├── app_does_not_exist.py
    │   ├── app_version_error.py
    │   ├── deployment_not_found.py
    │   ├── environment_not_found.py
    │   ├── impossible_action_deployment.py
    │   ├── invalid_json_response.py
    │   ├── invalid_os_package.py
    │   ├── invalid_parameters.py
    │   ├── manifest_does_not_exist.py
    │   ├── no_apps_available.py
    │   ├── no_deployments.py
    │   ├── not_enough_permissions.py
    │   ├── osptool_error.py
    │   └── server_error.py
    ├── file_helpers
    │   ├── __init__.py
    │   └── file.py
    ├── lifetime
    │   ├── __init__.py
    │   ├── lifetime_applications.py
    │   ├── lifetime_base.py
    │   ├── lifetime_deployments.py
    │   ├── lifetime_downloads.py
    │   ├── lifetime_environments.py
    │   └── lifetime_solutions.py
    ├── manifest
    │   ├── __init__.py
    │   └── manifest_base.py
    ├── osp_tool
    │   ├── __init__.py
    │   └── osp_base.py
    ├── pipeline
    │   ├── __init__.py
    │   ├── apply_configuration_values_to_target_env.py
    │   ├── continue_deployment_to_target_env.py
    │   ├── deploy_apps_to_target_env_with_airgap.py
    │   ├── deploy_latest_tags_to_target_env.py
    │   ├── deploy_package_to_target_env.py
    │   ├── deploy_package_to_target_env_with_osptool.py
    │   ├── deploy_tags_to_target_env_with_manifest.py
    │   ├── evaluate_test_results.py
    │   ├── fetch_apps_packages.py
    │   ├── fetch_apps_source_code.py
    │   ├── fetch_lifetime_data.py
    │   ├── fetch_lifetime_solution_from_manifest.py
    │   ├── fetch_tech_debt.py
    │   ├── generate_manifest_file.py
    │   ├── generate_unit_testing_assembly.py
    │   ├── scan_test_endpoints.py
    │   ├── start_deployment_to_target_env.py
    │   ├── tag_apps_based_on_manifest_data.py
    │   ├── tag_modified_apps.py
    │   └── validate_manifest_apps_exist_in_target_env.py
    ├── properties
    │   ├── __init__.py
    │   ├── properties_base.py
    │   └── properties_set_value.py
    └── vars
    │   ├── __init__.py
    │   ├── ad_vars.py
    │   ├── bdd_vars.py
    │   ├── cicd_vars.py
    │   ├── dotnet_vars.py
    │   ├── file_vars.py
    │   ├── lifetime_vars.py
    │   ├── manifest_vars.py
    │   ├── pipeline_vars.py
    │   ├── properties_vars.py
    │   └── vars_base.py
├── outsystems_components
    ├── lifetime
    │   ├── O10
    │   │   └── Trigger Pipeline.oap
    │   └── O11
    │   │   └── Trigger Pipeline.oap
    └── regression_environment
    │   ├── O10
    │       └── CICD Probe.oap
    │   └── O11
    │       └── CICD Probe.oap
├── outsystems_integrations
    ├── architecture_dashboard
    │   ├── __init__.py
    │   ├── fetch_tech_debt_sync.py
    │   └── vars.py
    └── slack
    │   ├── __init__.py
    │   ├── send_pipeline_status_to_slack.py
    │   ├── send_slack_message.py
    │   ├── send_test_results_to_slack.py
    │   └── vars.py
├── setup.py
└── test
    ├── __init__.py
    └── test_deploy_latest_tags.py
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | ignore = E402,E501,E722
3 | max-line-length = 160
4 | exclude = tests/*
5 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
 1 | *.mapping
 2 | # Python files
 3 | __pycache__
 4 | temp_file
 5 | .pytest_cache
 6 | *.bak
 7 | # local testing
 8 | tests/python-tests/*.xml
 9 | Artifacts
10 | .vscode
11 | .idea/*
12 | .venv/*
13 | venv/*
14 | junit
15 | dist/*
16 | VERSION
17 | .venv/
18 | app.py
19 | tests/*
--------------------------------------------------------------------------------
/INSTALL.md:
--------------------------------------------------------------------------------
 1 | # Dependencies to run / test locally
 2 | 
 3 | In order to be able to test locally, there's a few things you'll have to install. For the pipeline itself, you won't need to do this. That is assured by the pipeline code.
 4 | 
 5 | ## Install Python
 6 | 
 7 | * Go to <https://www.python.org/downloads/>
 8 | * Install Python v3.11.x (the code was tested with v3.11.3)
 9 | 
10 | ## Install Python dependencies
11 | 
12 | To install all the dependencies you'll need:
13 | 
14 | * **Pip**
15 |   * Download Pip: <https://bootstrap.pypa.io/get-pip.py>
16 |   * Run the script to install pip: `python get-pip.py`
17 | 
18 | * **Dependencies**
19 |   * To install the dependencies needed, you just need to install the requirements with pip.
20 |   * On the root dir run: `pip install -q -I -r build-requirements.txt`
21 |   
--------------------------------------------------------------------------------
/NOTICE.md:
--------------------------------------------------------------------------------
 1 | # outsystems-pipeline
 2 | 
 3 | Copyright 2019 OutSystems
 4 | 
 5 | Licensed under the Apache License, Version 2.0 (the "License");
 6 | you may not use this file except in compliance with the License.
 7 | You may obtain a copy of the License at
 8 | 
 9 |     http://www.apache.org/licenses/LICENSE-2.0
10 | 
11 | Unless required by applicable law or agreed to in writing, software
12 | distributed under the License is distributed on an "AS IS" BASIS,
13 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | See the License for the specific language governing permissions and
15 | limitations under the License.
16 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
 1 | # outsystems-pipeline
 2 | 
 3 | 
 4 | 
 5 | This project contains the source code of `outsystems-pipeline`, a Python package [distributed on PyPI.org](https://pypi.org/project/outsystems-pipeline/) maintained by [OutSystems](https://www.outsystems.com) to accelerate the creation of OutSystems CI/CD pipelines using your DevOps automation tool of choice. The project also includes example scripts and pipeline templates.
 6 | 
 7 | In the future we intend to open the project repository to accept your contributions by pull requests.
 8 | 
 9 | ## Getting started
10 | 
11 | In the [Wiki](https://github.com/OutSystems/outsystems-pipeline/wiki), you will find all the information on how to use `outsystems-pipeline` and on setting up your OutSystems pipelines.
12 | 
13 | The following are step-by-step instructions on how to set up OutSystems CI/CD pipelines using `outsystems-pipeline`:
14 | 
15 | * [Building an OutSystems pipeline with Jenkins](https://github.com/OutSystems/outsystems-pipeline/wiki/Building-an-OutSystems-pipeline-with-Jenkins)
16 | * [Building an OutSystems pipeline with Azure DevOps Multistage Pipeline](https://github.com/OutSystems/outsystems-pipeline/wiki/Building-an-OutSystems-pipeline-with-Azure-DevOps-Multistage-Pipeline)
17 | 
18 | We also provide [example scripts](https://github.com/OutSystems/outsystems-pipeline/tree/master/examples/other_pipelines) to help bootstrap your custom pipelines.
19 | 
20 | ## Get in touch
21 | 
22 | Help us improve `outsystems-pipeline` by either:
23 | 
24 | * [Submitting an issue](https://github.com/OutSystems/outsystems-pipeline/issues) with detailed information about the problem you're having
25 | * [Sending us an email](mailto:cicd.integrations@outsystems.com) with any feedback or questions that you may have
26 | 
27 | ## Change log
28 | 
29 | See the [change log](CHANGELOG.md) to learn about the latest changes and improvements to `outsystems-pipeline`.
30 | 
31 | ## License
32 | 
33 | OutSystems distributes the project `outsystems-pipeline` under the [Apache License](LICENSE) with no support. For more information, see the [notice file](NOTICE.md).
34 | 
--------------------------------------------------------------------------------
/build-pipeline.yml:
--------------------------------------------------------------------------------
 1 | # Generates a Python package if the code passes the test
 2 | # Create and test a Python code on multiple Python versions
 3 | # Uploads the package with a new version to Pypi.org
 4 | 
 5 | trigger: none
 6 | #  batch: true # Batch the changes if there are many commits in a short burst
 7 | #  branches:
 8 | #    include: # Only use the master branch
 9 | #    - master
10 | #  paths:
11 | #    include: # Only includes the outsystems pipeline code as trigger
12 | #      - outsystems/*
13 | #      - build/*
14 | #      - setup.py
15 | #    exclude: # Excludes everything else
16 | #      - /*
17 | 
18 | # Exclude pull requests
19 | pr: none
20 | 
21 | jobs:
22 | - job: 'Test'
23 |   pool:
24 |     vmImage: 'ubuntu-latest'
25 |   strategy:
26 |     matrix:
27 |       Python38:
28 |         python.version: '3.8'
29 |       Python312:
30 |         python.version: '3.12'
31 |     maxParallel: 2
32 | 
33 |   steps:
34 |   - task: UsePythonVersion@0
35 |     inputs:
36 |       versionSpec: '$(python.version)'
37 |       architecture: 'x64'
38 | 
39 |   - script: python -m pip install --upgrade pip && pip install -r build-requirements.txt
40 |     displayName: 'Install dependencies'
41 | 
42 |   - script: |
43 |      python -m pip install flake8
44 |      flake8 .
45 |     displayName: 'Run lint validation'
46 | 
47 |   - script: |
48 |       pip install pytest
49 |       pytest test --doctest-modules --junitxml=junit/test-results.xml
50 |     displayName: 'pytest'
51 | 
52 |   - task: PublishTestResults@2
53 |     inputs:
54 |       testResultsFiles: '**/test-results.xml'
55 |       testRunTitle: 'Python $(python.version)'
56 |     condition: succeededOrFailed()
57 | 
58 | - job: 'Publish'
59 |   dependsOn: 'Test'
60 |   pool:
61 |     vmImage: 'ubuntu-latest'
62 | 
63 |   steps:
64 |   - checkout: self
65 |     persistCredentials: true
66 | 
67 |   - task: UsePythonVersion@0
68 |     inputs:
69 |       versionSpec: '3.11'
70 |       architecture: 'x64'
71 | 
72 |   - script: pip install requests
73 |     displayName: Installing Requests
74 | 
75 |   - script: |
76 |       python build/version_bumper.py --$(ReleaseType)
77 |       python setup.py sdist
78 |     displayName: 'Build sdist'
79 | 
80 |   - script: pip install twine
81 |     displayName: Installing Twine
82 | 
83 |   - script: twine upload dist/* --disable-progress-bar --skip-existing -u $(TwineUser) -p $(TwinePassword)
84 |     displayName: Uploading to PyPI.org via Twine
--------------------------------------------------------------------------------
/build-requirements.txt:
--------------------------------------------------------------------------------
1 | python-dateutil==2.9.0.post0
2 | requests==2.32.2
3 | unittest-xml-reporting==3.2.0
4 | xunitparser==1.3.4
5 | toposort==1.10
6 | python-dotenv==1.0.1
7 | packaging==24.1
--------------------------------------------------------------------------------
/build/version_bumper.py:
--------------------------------------------------------------------------------
 1 | import argparse
 2 | import fileinput
 3 | import requests
 4 | 
 5 | if __name__ == "__main__":
 6 |     parser = argparse.ArgumentParser()
 7 |     parser.add_argument("--revision", help="Toggle if you're doing a revision version.", action="store_true")
 8 |     parser.add_argument("--minor", help="Toggle if you're doing a minor version.", action="store_true")
 9 |     parser.add_argument("--major", help="Toggle if you're doing a major version.", action="store_true")
10 |     args = parser.parse_args()
11 | 
12 |     url = "https://pypi.org/pypi/outsystems-pipeline/json"
13 |     response = requests.get(url)
14 |     response.raise_for_status()  # Raise an error if the request failed
15 |     version = response.json()['info']['version']
16 |     version_array = version.split('.')
17 | 
18 |     if args.revision:
19 |         if len(version_array) > 2:
20 |             # Increments the previous beta version
21 |             revision_version = int(version_array[-1])
22 |             version_array[-1] = str(revision_version + 1)
23 |         else:
24 |             # no beta version, creates one with 1
25 |             version_array.extend("1")
26 |     elif args.minor:
27 |         if len(version_array) > 2:
28 |             # Removes the previous beta version
29 |             version_array[-1] = "0"
30 |         else:
31 |             # forces 3 part release versions
32 |             while len(version_array) < 3:
33 |                 version_array.extend("0")
34 |         minor_version = int(version_array[-2])
35 |         version_array[-2] = str(minor_version + 1)
36 |     elif args.major:
37 |         major_version = int(version_array[0])
38 |         version_array = [str(major_version + 1), "0", "0"]
39 |     else:
40 |         parser.error("Release type not specified")
41 |     version = ".".join(version_array)
42 | 
43 |     with fileinput.FileInput("setup.py", inplace=True, backup='.bak') as setup_file:
44 |         for line in setup_file:
45 |             print(line.replace("version=''", "version='{}'".format(version)), end='')
46 | #    with open("VERSION", 'w') as version_file:
47 | #        version_file.write(version)
48 | 
--------------------------------------------------------------------------------
/examples/azure_devops/multistage/CD-AzureAgent.yml:
--------------------------------------------------------------------------------
  1 | # ******************************************************************
  2 | # Template: CD-AzureAgent
  3 | # ******************************************************************
  4 | # Baseline Continuous Delivery pipeline template that leverages 
  5 | # Azure-hosted agents (i.e. each Job runs on a dedicated agent)
  6 | # ******************************************************************
  7 | 
  8 | # ******************************************************************
  9 | # Declare parameters
 10 | # ******************************************************************
 11 | parameters:
 12 | - name: TriggerManifest
 13 |   displayName: Trigger Manifest
 14 |   type: string
 15 | - name: TriggeredBy
 16 |   displayName: Triggered By
 17 |   type: string
 18 | 
 19 | # ******************************************************************
 20 | # Declare variables
 21 | # ******************************************************************
 22 | variables:
 23 |   - group: OutSystems-GitHub-Template-SharedVars
 24 | 
 25 | # ******************************************************************
 26 | # Declare triggers
 27 | # ******************************************************************
 28 | trigger: none
 29 | pr: none
 30 | 
 31 | # ******************************************************************
 32 | # Declare agent type
 33 | # ******************************************************************
 34 | pool:
 35 |   vmImage: $(Agent.VMImage)
 36 | 
 37 | # ******************************************************************
 38 | # Declare stages
 39 | # ******************************************************************
 40 | stages:
 41 | 
 42 | # ******************************************************************
 43 | # Stage: Setup
 44 | # ******************************************************************
 45 | - stage: setup
 46 |   displayName: Setup  
 47 |   jobs:
 48 | 
 49 |   # ******************************************************************
 50 |   # Job: Publish Trigger Manifest
 51 |   # ******************************************************************
 52 |   # Publish trigger manifest artifact provided as input
 53 |   # ******************************************************************
 54 |   - template: ./jobs/PublishTriggerManifestJob.yaml
 55 |     parameters:
 56 |       TriggerManifest: ${{ parameters.TriggerManifest }}
 57 | 
 58 | # ******************************************************************
 59 | # Stage: Code Analysis
 60 | # ******************************************************************
 61 | - stage: code_analysis
 62 |   displayName: Code Analysis    
 63 |   jobs:
 64 | 
 65 |   # ******************************************************************
 66 |   # Job: Fetch Technical Debt
 67 |   # ******************************************************************
 68 |   # Fetch technical debt data from AI Mentor Studio
 69 |   # ******************************************************************
 70 |   - template: ./jobs/FetchTechnicalDebtJob.yaml
 71 | 
 72 |   # ******************************************************************
 73 |   # Job: Validate Technical Debt
 74 |   # ******************************************************************
 75 |   # Validate technical debt data using predefined thresholds
 76 |   # ******************************************************************
 77 |   # Sample script to validate TechDebt level and number of 
 78 |   # security findings
 79 |   - template: ./jobs/ValidateTechnicalDebtJob.yaml 
 80 | 
 81 | # ******************************************************************
 82 | # Stage: Regression Testing
 83 | # ******************************************************************
 84 | - stage: regression_testing
 85 |   displayName: Regression Testing  
 86 |   jobs:
 87 | 
 88 |   # ******************************************************************
 89 |   # Job: LifeTime Deployment
 90 |   # ******************************************************************
 91 |   # Deploy list of application versions (including test apps) to REG 
 92 |   # environment
 93 |   # ******************************************************************
 94 |   - template: ./jobs/LifeTimeDeploymentJob.yaml
 95 |     parameters:
 96 |       EnvironmentKey: $(Environment.Regression.Key)
 97 |       SourceEnvironmentLabel: $(Environment.Development.Label)
 98 |       DestinationEnvironmentLabel: $(Environment.Regression.Label)
 99 |       IncludeTestApplications: true
100 | 
101 |   # ******************************************************************
102 |   # Job: Regression Testing (BDD)
103 |   # ******************************************************************
104 |   # Run BDD regression tests and publish test results
105 |   # ******************************************************************
106 |   - template: ./jobs/RegressionTestingJob.yaml
107 | 
108 | # ******************************************************************
109 | # Stage: Release Acceptance
110 | # ******************************************************************
111 | - stage: release_acceptance
112 |   displayName: Release Acceptance
113 |   jobs:
114 | 
115 |   # ******************************************************************
116 |   # Job: LifeTime Deployment
117 |   # ******************************************************************
118 |   # Deploy list of application versions to ACC environment
119 |   # ******************************************************************
120 |   - template: ./jobs/LifeTimeDeploymentJob.yaml
121 |     parameters:
122 |       EnvironmentKey: $(Environment.Acceptance.Key)
123 |       SourceEnvironmentLabel: $(Environment.Regression.Label)
124 |       DestinationEnvironmentLabel: $(Environment.Acceptance.Label)
125 | 
126 |   # ******************************************************************
127 |   # Job: Accept Changes
128 |   # ******************************************************************
129 |   # Accept release candidate before go-live
130 |   # ******************************************************************
131 |   - template: ./jobs/AcceptChangesJob.yaml
132 | 
133 | # ******************************************************************
134 | # Stage: Dry-Run
135 | # ******************************************************************
136 | - stage: dry_run
137 |   displayName: Dry-Run
138 |   jobs:
139 | 
140 |   # ******************************************************************
141 |   # Job: LifeTime Deployment
142 |   # ******************************************************************
143 |   # Deploy list of application versions to PRE environment
144 |   # ******************************************************************
145 |   - template: ./jobs/LifeTimeDeploymentJob.yaml
146 |     parameters:
147 |       EnvironmentKey: $(Environment.PreProduction.Key)
148 |       SourceEnvironmentLabel: $(Environment.Acceptance.Label)
149 |       DestinationEnvironmentLabel: $(Environment.PreProduction.Label)
150 | 
151 | # ******************************************************************
152 | # Stage: Go-Live
153 | # ******************************************************************
154 | - stage: go_live
155 |   displayName: Go-Live
156 |   jobs:
157 | 
158 |   # ******************************************************************
159 |   # Job: LifeTime Deployment
160 |   # ******************************************************************
161 |   # Deploy list of application versions to PRD environment
162 |   # ******************************************************************
163 |   - template: ./jobs/LifeTimeDeploymentJob.yaml
164 |     parameters:
165 |       EnvironmentKey: $(Environment.Production.Key)
166 |       SourceEnvironmentLabel: $(Environment.PreProduction.Label)
167 |       DestinationEnvironmentLabel: $(Environment.Production.Label)
168 |     # To enable 2stage-deploy on this environment uncomment the line below 
169 |     #  Use2StepDeployment: true
--------------------------------------------------------------------------------
/examples/azure_devops/multistage/jobs/AcceptChangesJob.yaml:
--------------------------------------------------------------------------------
 1 | # ******************************************************************
 2 | # Template: AcceptChangesJob-AzureAgent
 3 | # ******************************************************************
 4 | # TBD
 5 | # ******************************************************************
 6 | 
 7 | # ******************************************************************
 8 | # Declare parameters
 9 | # ******************************************************************
10 | 
11 | # ******************************************************************
12 | # Declare jobs
13 | # ******************************************************************
14 | jobs:
15 | 
16 | # ******************************************************************
17 | # Job: Accept release candidate 
18 | # ******************************************************************
19 | # Manual release candidate acceptance before go-live  
20 | # ******************************************************************
21 | - job: accept_changes
22 |   displayName: Accept Changes
23 |   dependsOn: lifetime_deployment
24 |   pool: server
25 |   steps:
26 | 
27 |   # ******************************************************************
28 |   # Step: Manual user validation
29 |   # ******************************************************************
30 |   # Ask user to accept release candidate (i.e. push-button device)
31 |   # ******************************************************************
32 |   - task: ManualValidation@0
33 |     timeoutInMinutes: 1440
34 |     inputs:
35 |       instructions: 'Accept changes and proceed to Production?'
36 |       onTimeout: 'reject'
37 |     displayName: 'Go/No-Go Decision'
38 | 
--------------------------------------------------------------------------------
/examples/azure_devops/multistage/jobs/FetchTechnicalDebtJob.yaml:
--------------------------------------------------------------------------------
 1 | # ******************************************************************
 2 | # FetchTechnicalDebtJob-AzureAgent
 3 | # ******************************************************************
 4 | # TBD
 5 | # ******************************************************************
 6 | 
 7 | # ******************************************************************
 8 | # Declare jobs
 9 | # ******************************************************************
10 | jobs:
11 |         
12 | # ******************************************************************
13 | # Job: Fetch Technical Debt  
14 | # ******************************************************************
15 | # Fetch technical debt data for provided application scope 
16 | # ******************************************************************
17 | - job: fetch_tech_debt
18 |   displayName: Fetch Technical Debt 
19 |   steps:
20 |   - checkout: none # Avoid repository checkout
21 |   - download: current # Download current pipeline artifacts    
22 |   - template: ../tasks/InstallPythonPackage.yaml # Install python package
23 | 
24 |   # ******************************************************************
25 |   # Step: Fetch technical debt data from Architecture Dashboard
26 |   # ******************************************************************
27 |   # Fetch technical debt data for application list by leveraging 
28 |   # Architecture Dashboard API
29 |   # ******************************************************************
30 | 
31 |   - script: >
32 |       python -m outsystems.pipeline.fetch_tech_debt
33 |       --artifacts "$(Artifacts.Folder)" 
34 |       --ad_hostname $(AIMentorStudio.Hostname) 
35 |       --activation_code $(AIMentorStudio.ActivationCode) 
36 |       --api_key $(AIMentorStudio.APIKey)
37 |       --manifest_file "$(Pipeline.Workspace)/$(Manifest.Folder)/$(Manifest.File)"
38 |     displayName: 'Fetch technical debt data from AI Mentor Studio'
39 | 
40 |   # ******************************************************************
41 |   # Step: Publish technical debt artifact
42 |   # ******************************************************************
43 |   # Publish manifest JSON file created in local workspace 
44 |   # ******************************************************************
45 |   - task: PublishBuildArtifacts@1
46 |     inputs:
47 |       PathtoPublish: "$(Artifacts.Folder)/$(AIMentorStudio.Folder)"
48 |       ArtifactName: $(AIMentorStudio.Folder)
49 |     condition: succeeded()
50 |     displayName: 'Publish technical debt artifact'
51 | 
--------------------------------------------------------------------------------
/examples/azure_devops/multistage/jobs/LifeTimeDeploymentJob.yaml:
--------------------------------------------------------------------------------
  1 | # ******************************************************************
  2 | # Template: LifeTimeDeploymentJob-AzureAgent
  3 | # ******************************************************************
  4 | # TBD
  5 | # ******************************************************************
  6 | 
  7 | # ******************************************************************
  8 | # Declare parameters
  9 | # ******************************************************************
 10 | parameters:
 11 | - name: EnvironmentKey # Environment key (in Azure DevOps) 
 12 |   type: string
 13 | - name: SourceEnvironmentLabel # Source Environment (in manifest)
 14 |   type: string
 15 | - name: DestinationEnvironmentLabel # Destination Environment (in manifest)
 16 |   type: string
 17 | - name: IncludeTestApplications # Include test apps in deployment plan  
 18 |   type: boolean
 19 |   default: false
 20 | - name: Use2StepDeployment # Use 2-step deployment process  
 21 |   type: boolean
 22 |   default: false
 23 | 
 24 | # ******************************************************************
 25 | # Declare jobs
 26 | # ******************************************************************
 27 | jobs:
 28 | 
 29 | # ******************************************************************
 30 | # Job: Deploy to target environment
 31 | # ******************************************************************
 32 | # Deploy application tags list to target LifeTime environment
 33 | # ******************************************************************
 34 | - deployment: lifetime_deployment
 35 |   displayName: LifeTime Deployment
 36 |   environment: ${{ parameters.EnvironmentKey }}
 37 |   strategy:
 38 |     runOnce:
 39 |       deploy:
 40 |         steps:
 41 |         - download: current # Download current pipeline artifacts
 42 |         - template: ../tasks/InstallPythonPackage.yaml # Install python package
 43 |         
 44 |         # ******************************************************************
 45 |         # Step: Deploy to target environment (using manifest)
 46 |         # ******************************************************************
 47 |         # Deploy application list to target environment using manifest
 48 |         # ******************************************************************
 49 |         - ${{ if eq(parameters.IncludeTestApplications, true) }}:
 50 |           - script: >
 51 |               python -m outsystems.pipeline.deploy_tags_to_target_env_with_manifest
 52 |               --artifacts "$(Artifacts.Folder)" 
 53 |               --lt_url $(LifeTime.Hostname) 
 54 |               --lt_token $(LifeTime.ServiceAccountToken) 
 55 |               --lt_api_version $(LifeTime.APIVersion) 
 56 |               --source_env_label "${{ parameters.SourceEnvironmentLabel }}"
 57 |               --destination_env_label "${{ parameters.DestinationEnvironmentLabel }}"
 58 |               --include_test_apps
 59 |               --manifest_file "$(Pipeline.Workspace)/$(Manifest.Folder)/$(Manifest.File)"
 60 |             displayName: 'Deploy to ${{ parameters.DestinationEnvironmentLabel }} environment'
 61 | 
 62 |         - ${{ if eq(parameters.IncludeTestApplications, false) }}:
 63 |           - script: >
 64 |               python -m outsystems.pipeline.deploy_tags_to_target_env_with_manifest
 65 |               --artifacts "$(Artifacts.Folder)" 
 66 |               --lt_url $(LifeTime.Hostname) 
 67 |               --lt_token $(LifeTime.ServiceAccountToken) 
 68 |               --lt_api_version $(LifeTime.APIVersion) 
 69 |               --source_env_label "${{ parameters.SourceEnvironmentLabel }}"
 70 |               --destination_env_label "${{ parameters.DestinationEnvironmentLabel }}"
 71 |               --manifest_file "$(Pipeline.Workspace)/$(Manifest.Folder)/$(Manifest.File)"
 72 |             displayName: 'Deploy to ${{ parameters.DestinationEnvironmentLabel }} environment'
 73 | 
 74 |         # ******************************************************************
 75 |         # Step: Apply configuration values
 76 |         # ******************************************************************
 77 |         # Apply configuration values (if any) to target environment
 78 |         # ******************************************************************
 79 |         - ${{ if eq(parameters.Use2StepDeployment, false) }}:
 80 |           - template: ../tasks/ApplyConfigurationValues.yaml
 81 |             parameters:
 82 |               TargetEnvironmentLabel: ${{ parameters.DestinationEnvironmentLabel }}
 83 | 
 84 |         # ******************************************************************
 85 |         # Step: Print deployment conflicts
 86 |         # ******************************************************************
 87 |         # Check if there any Deployment Conflicts and show them in the 
 88 |         # console log
 89 |         # ******************************************************************
 90 |         - task: PowerShell@2
 91 |           inputs:
 92 |             targetType: 'inline'
 93 |             script:  Get-Content -Path "$(Artifacts.Folder)\DeploymentConflicts" | Write-Host
 94 |           condition: failed()
 95 |           displayName: 'Show content of DeploymentConflicts file'
 96 | 
 97 | # ******************************************************************
 98 | # Job: Wait for confirmation 
 99 | # ******************************************************************
100 | # Wait for user confirmation that prepared deployment can resume
101 | # ******************************************************************
# Job only rendered into the pipeline when 2-step deployment is enabled
- ${{ if eq(parameters.Use2StepDeployment, true) }}:
  - job: wait_confirmation
    displayName: Wait for Confirmation
    dependsOn: lifetime_deployment # Run only after deployment has been prepared
    pool: server # Agentless (server) job, required by the ManualValidation task
    steps:

    # ******************************************************************
    # Step: Manual user validation
    # ******************************************************************
    # Ask user to confirm that prepared deployment can resume    
    # ******************************************************************
    - task: ManualValidation@0
      timeoutInMinutes: 1440 # Wait up to 24 hours for a response
      inputs:
        instructions: 'Please confirm that prepared deployment to ${{ parameters.DestinationEnvironmentLabel }} can continue.'
        onTimeout: 'reject' # Fail the validation (and the deployment) on timeout
      displayName: Manual Validation
120 |         
121 | # ******************************************************************
122 | # Job: Resume prepared deployment 
123 | # ******************************************************************
124 | # Resume prepared deployment to target LifeTime environment after
125 | # manual user confirmation 
126 | # ******************************************************************
# Job only rendered into the pipeline when 2-step deployment is enabled
- ${{ if eq(parameters.Use2StepDeployment, true) }}:
  - job: finalize_deployment
    displayName: Finalize Deployment
    dependsOn: wait_confirmation # Run only after the manual confirmation job succeeds
    steps:
    - checkout: none # Avoid repository checkout
    - download: current # Download current pipeline artifacts
    - template: ../tasks/InstallPythonPackage.yaml  # Install python package

    # ******************************************************************
    # Step: Continue prepared deployment in target environment
    # ******************************************************************
    # Continue prepared deployment plan in LifeTime when 2-step 
    # deployment is enabled
    # ******************************************************************

    - script: >
        python -m outsystems.pipeline.continue_deployment_to_target_env
        --artifacts "$(Artifacts.Folder)" 
        --lt_url $(LifeTime.Hostname) 
        --lt_token $(LifeTime.ServiceAccountToken) 
        --lt_api_version $(LifeTime.APIVersion)
        --destination_env "${{ parameters.DestinationEnvironmentLabel }}"
      displayName: 'Continue deployment to ${{ parameters.DestinationEnvironmentLabel }} environment'

    # ******************************************************************
    # Step: Apply configuration values
    # ******************************************************************
    # Apply configuration values (if any) to target environment
    # ******************************************************************
    - template: ../tasks/ApplyConfigurationValues.yaml
      parameters:
        TargetEnvironmentLabel: ${{ parameters.DestinationEnvironmentLabel }}
160 | 
--------------------------------------------------------------------------------
/examples/azure_devops/multistage/jobs/PublishTriggerManifestJob.yaml:
--------------------------------------------------------------------------------
 1 | # ******************************************************************
 2 | # Template: PublishTriggerManifest-AzureAgent
 3 | # ******************************************************************
 4 | # Publish manifest JSON file as Pipeline Artifact
 5 | # ******************************************************************
 6 | 
 7 | # ******************************************************************
 8 | # Declare parameters
 9 | # ******************************************************************
10 | parameters:
11 | - name: TriggerManifest # Trigger manifest content (in JSON format)
12 |   type: string
13 | 
14 | # ******************************************************************
15 | # Declare jobs
16 | # ******************************************************************
17 | jobs:
18 | 
19 | # ******************************************************************
20 | # Job: Publish Trigger Manifest 
21 | # ******************************************************************
22 | # Publish trigger manifest artifact provided as input parameter 
23 | # ******************************************************************
24 | - job: publish_trigger_manifest
25 |   displayName: Publish Trigger Manifest
26 |   steps:
27 |   - checkout: none # Avoid repository checkout
28 |   
29 |   # ******************************************************************
30 |   # Step: Create trigger manifest file  
31 |   # ******************************************************************
32 |   # Create JSON file with trigger manifest contents in local 
33 |   # workspace
34 |   # ******************************************************************
35 |   - task: PowerShell@2
36 |     inputs:
37 |       targetType: 'inline'
38 |       script: |
39 |         mkdir $(Manifest.Folder)
40 |         ('${{ parameters.TriggerManifest }}') | Out-File -FilePath "$(Manifest.Folder)\$(Manifest.File)" -Encoding default
41 |       workingDirectory: $(System.DefaultWorkingDirectory)
42 |     displayName: 'Create trigger manifest file'
43 | 
44 |   # ******************************************************************
45 |   # Step: Publish trigger manifest artifact
46 |   # ******************************************************************
47 |   # Publish manifest JSON file created in local workspace 
48 |   # ******************************************************************
49 |   - task: PublishBuildArtifacts@1
50 |     inputs:
51 |       PathtoPublish: $(Manifest.Folder)
52 |       ArtifactName: $(Manifest.Folder)
53 |     condition: succeeded()
54 |     displayName: 'Publish trigger manifest artifact'
55 | 
--------------------------------------------------------------------------------
/examples/azure_devops/multistage/jobs/RegressionTestingJob.yaml:
--------------------------------------------------------------------------------
 1 | # ******************************************************************
 2 | # Template: RegressionTestingJob-AzureAgent
 3 | # ******************************************************************
 4 | # TBD
 5 | # ******************************************************************
 6 | 
 7 | # ******************************************************************
 8 | # Declare jobs
 9 | # ******************************************************************
10 | jobs:
11 | 
12 | # ******************************************************************
13 | # Job: BDD Regression Testing 
14 | # ******************************************************************
15 | # Run list of BDD test jobs and publish results to the pipeline 
16 | # ******************************************************************
17 | - job: regression_testing_bdd
18 |   displayName: Regression Testing (BDD)
19 |   dependsOn: lifetime_deployment
20 |   steps:
21 |   - download: current
22 |   - template: ../tasks/InstallPythonPackage.yaml # Install python package
23 | 
24 |   # ******************************************************************
25 |   # Step: Get App List from Manifest
26 |   # ******************************************************************
27 |   # Gets a comma-separated list of the application names
28 |   # found on the Trigger Manifest artifact
29 |   # ******************************************************************
30 |   - script: |
31 |       AppList=$(cat "$(Pipeline.Workspace)/$(Manifest.Folder)/$(Manifest.File)" | jq -c '.ApplicationVersions | map(.ApplicationName) | join(",")')
32 |       echo "##vso[task.setvariable variable=AppList;isOutput=true]$AppList"
33 |     displayName: 'Get App List from Manifest'
34 |     name: get_app_list
35 | 
36 |   # ******************************************************************
37 |   # Step: Generate URL endpoints for BDD test suites 
38 |   # ******************************************************************
39 |   # Generate a list of URL endpoints to query the BDD framework
40 |   # ******************************************************************
41 |   - script: >
42 |       python -m outsystems.pipeline.generate_unit_testing_assembly
43 |       --artifacts "$(Artifacts.Folder)" 
44 |       --app_list $(get_app_list.AppList)
45 |       --cicd_probe_env "$(CICDProbe.EnvironmentURL)"
46 |       --bdd_framework_env "$(BDDFramework.EnvironmentURL)"
47 |     displayName: 'Generate URL endpoints for BDD test suites'
48 | 
49 |   # ******************************************************************
50 |   # Step: Run BDD test suites and generate JUnit test report
51 |   # ******************************************************************
52 |   # Calls each BDD test URL and store the results in JUnit format
53 |   # ******************************************************************
54 |   - script: >
55 |       python -m outsystems.pipeline.evaluate_test_results 
56 |       --artifacts "$(Artifacts.Folder)"
57 |     continueOnError: true 
58 |     displayName: 'Run BDD test suites and generate JUnit test report'
59 |   
60 |   # ******************************************************************
61 |   # Step: Publish test results
62 |   # ******************************************************************
63 |   # Publish results from the JUnit test result file
64 |   # ******************************************************************
65 |   - task: PublishTestResults@2
66 |     inputs:
67 |       testRunTitle: 'BDD Tests'
68 |       testResultsFormat: 'JUnit'
69 |       testResultsFiles: 'junit-result.xml' 
70 |       searchFolder: '$(Artifacts.Folder)'
71 |       mergeTestResults: true
72 |       failTaskOnFailedTests: true    
73 |     displayName: 'Publish test results'
74 | 
--------------------------------------------------------------------------------
/examples/azure_devops/multistage/jobs/TagWorkItemsJob-AzureAgent.yaml:
--------------------------------------------------------------------------------
 1 | # ******************************************************************
 2 | # Template: TagWorkItemsJob-AzureAgent
 3 | # ******************************************************************
 4 | # TBD
 5 | # ******************************************************************
 6 | 
 7 | # ******************************************************************
 8 | # Declare jobs
 9 | # ******************************************************************
10 | jobs:
11 |         
12 | # ******************************************************************
13 | # Job: Tag Work Items  
14 | # ******************************************************************
15 | # Tag Workitems retrieved from an ADO predefined query
16 | # ******************************************************************
17 | - job: tag_woktiems
18 |   displayName: Tag Work Items
19 |   pool:
20 |     vmImage: 'windows-latest'
21 |   steps:
22 |   - download: current # Download current pipeline artifacts
23 | 
24 |   - task: PowerShell@2
25 |     inputs:
26 |       targetType: 'inline'
27 |       script: |
28 |         Install-PackageProvider nuget -Scope CurrentUser -force
29 |         Install-Module -Name VSTeam -Scope CurrentUser -Force
30 |         
31 |         Import-Module -Name VSTeam
32 | 
33 |         $projectName = $env:project
34 |         $versiontag = $env:tagversion
35 |         
36 |         Set-VSTeamDefaultProjectCount 1000
37 |         Set-VSTeamAccount -Account $env:azuri -PersonalAccessToken $env:token
38 | 
39 |         $query = Invoke-VSTeamRequest -ProjectName $projectName -resource $env:query -area wit/queries -method Get -version '5.0' -JSON | ConvertFrom-Json
40 | 
41 |         $workItems = Invoke-VSTeamRequest -ProjectName $projectName -resource wit/wiql -method Get -version '5.0' -id $query.id -JSON | ConvertFrom-Json
42 | 
43 |         $manifest = Get-Content "$(Pipeline.Workspace)/$(Manifest.Folder)/$(Manifest.File)" | ConvertFrom-Json
44 |         
45 |         $manifest.ApplicationVersions | ForEach-Object {
46 |             if ( -Not $_.IsTestApplication ) 
47 |             {
48 |                 $new_tag = $_.ApplicationName + "_" + $_.VersionNumber
49 |                 $body = '[
50 |                   {
51 |                     "op": "add",
52 |                     "path": "/fields/System.Tags",
53 |                     "value": "tagged; ' + $versiontag + ';' + $new_tag +'"
54 |                   }
55 |                 ]'
56 |                 $workItems.workItems | ForEach-Object {
57 |                       $wi = Get-VSTeamWorkItem -Id $_.id -Fields 'System.Tags' 
58 |                       $tags = 'tagged; ' + $versiontag + '; ' + ';' + $new_tag + '; '
59 |                       if ($wi.fields) { $tags = $tags + $wi.fields.'System.Tags' }
60 |                       $body = '[
61 |                         {
62 |                           "op": "add",
63 |                           "path": "/fields/System.Tags",
64 |                           "value": "' + $tags + '"
65 |                         }
66 |                       ]'
67 |                       Invoke-VSTeamRequest -ProjectName $projectName -ContentType 'application/json-patch+json' -resource wit/workitems -method Patch -version '5.0' -body $body -id $_.id 
68 |                   }
69 |             }
70 |         }
71 |     displayName: 'Read Query and Tag Work Items'
72 |     env:
73 |       token: $(TagWorkItems.pat)
74 |       azuri: $(TagWorkItems.azuri)
75 |       project: $(TagWorkItems.project)
76 |       query: $(TagWorkItems.wi_query)
77 | 
--------------------------------------------------------------------------------
/examples/azure_devops/multistage/jobs/ValidateTechnicalDebtJob.yaml:
--------------------------------------------------------------------------------
 1 | # ******************************************************************
 2 | # ValidateTechnicalDebtJob-AzureAgent
 3 | # ******************************************************************
 4 | # TBD
 5 | # ******************************************************************
 6 | 
 7 | # ******************************************************************
 8 | # Declare parameters
 9 | # ******************************************************************
10 | 
11 | # ******************************************************************
12 | # Declare jobs
13 | # ******************************************************************
14 | jobs:
15 |         
16 | # ******************************************************************
17 | # Job: Validate Technical Debt  
18 | # ******************************************************************
19 | # Validate technical debt data for provided application scope using 
20 | # predefined thresholds
21 | # ******************************************************************
22 | - job: validate_tech_debt
23 |   displayName: Validate Technical Debt 
24 |   dependsOn: fetch_tech_debt
25 |   steps:
26 |   - download: current # Download current pipeline artifacts
27 |   
28 |   # ******************************************************************
29 |   # Step: Check technical debt data thresholds
30 |   # ******************************************************************
31 |   # Check technical debt data thresholds for application list
32 |   # ****************************************************************** 
33 |   - task: PythonScript@0
34 |     inputs:
35 |       scriptSource: 'filePath'
36 |       scriptPath: './scripts/tech_debt_validation.py'
37 |       arguments: >
38 |         --manifest_file "$(Pipeline.Workspace)/$(Manifest.Folder)/$(Manifest.File)"
39 |         --techdebt_data "$(Pipeline.Workspace)/$(AIMentorStudio.Folder)"
40 |         --max_techdebt_level "$(AIMentorStudio.Thresholds.TechDebtLevel)"
41 |         --max_security_findings "$(AIMentorStudio.Thresholds.SecurityFindingsCount)"
42 |     displayName: 'Check technical debt data thresholds'
43 | 
--------------------------------------------------------------------------------
/examples/azure_devops/multistage/scripts/tech_debt_validation.py:
--------------------------------------------------------------------------------
 1 | # Sample script to validate TechDebt level and number of security findings
 2 | # Python modules
 3 | import argparse
 4 | import json
 5 | import os
 6 | 
 7 | 
 8 | # Custom exceptions
 9 | class TechDebtAnalysisException(Exception):
10 |     pass
11 | 
12 | 
13 | # Local vars
14 | cat_security_guid = "6c87e98f-2ece-4df2-b791-d0c7eae15914"
15 | cat_architecture_guid = "f7fdbb75-f2f3-4199-9761-ae0fd08f0998"
16 | cat_performance_guid = "da5489cc-0102-4de7-8788-a5de6c4b297c"
17 | 
18 | # Argument menu / parsing
19 | parser = argparse.ArgumentParser()
20 | parser.add_argument("-m", "--manifest_file", type=str, required=True,
21 |                     help="Manifest file (with JSON format).")
22 | parser.add_argument("-d", "--techdebt_data", type=str, required=True,
23 |                     help="Technical debt data folder.")
24 | parser.add_argument("-l", "--max_techdebt_level", type=str, default="Medium",
25 |                     help="Technical debt level threshold (per application).")
26 | parser.add_argument("-s", "--max_security_findings", type=int, default=0,
27 |                     help="Number of security findings threshold (per application).")
28 | args = parser.parse_args()
29 | 
30 | techdebt_data_folder = args.techdebt_data
31 | max_techdebt_lvl = args.max_techdebt_level
32 | max_sec_findings_count = args.max_security_findings
33 | trigger_manifest = json.load(open(args.manifest_file, "r"))
34 | levels = json.load(open("{}/TechDebt.levels.cache".format(techdebt_data_folder), "r"))
35 | 
36 | print(
37 |     '''Checking thresholds (per application) for technical debt data:
38 |     >>> Tech Debt Level = {}
39 |     >>> Security Findings (Count) = {}'''.format(max_techdebt_lvl, max_sec_findings_count), flush=True
40 | )
41 | 
42 | # Get max tech debt level index
43 | max_techdebt_lvl_info = next(filter(lambda x: x["Name"] == max_techdebt_lvl, levels["Levels"]), None)
44 | if max_techdebt_lvl_info is None:
45 |     raise TechDebtAnalysisException("Unknown tech debt level: {}".format(max_techdebt_lvl))
46 | max_techdebt_idx = levels["Levels"].index(max_techdebt_lvl_info)
47 | 
48 | # Check if tech debt level of each app in the pipeline scope is below defined threshold
49 | for manifest_app in trigger_manifest["ApplicationVersions"]:
50 |     app_name = manifest_app["ApplicationName"].replace(' ', '_')
51 | 
52 |     findings_file = "{}/TechDebt.{}.application.cache".format(techdebt_data_folder, app_name)
53 |     findings = {}
54 | 
55 |     if os.path.isfile(findings_file):
56 |         findings = json.load(open(findings_file, "r"))
57 |     else:
58 |         print("Validation skipped for {}: No technical debt data found.".format(app_name), flush=True)
59 |         break
60 | 
61 |     for app in findings["Applications"]:
62 |         techdebt_lvl_info = next(filter(lambda x: x["GUID"] == app["LevelGUID"], levels["Levels"]), None)
63 |         techdebt_lvl_idx = levels["Levels"].index(techdebt_lvl_info)
64 |         if techdebt_lvl_idx > max_techdebt_idx:
65 |             raise TechDebtAnalysisException("Technical debt level of application {} is above defined threshold ({}).".format(app["Name"], techdebt_lvl_info["Name"]))
66 | 
67 |         # Check if security findings count of each app in the pipeline scope is below defined threshold
68 |         sec_findings_count = 0
69 |         for module in app["Modules"]:
70 |             sec_findings_only = filter(lambda x: x.get("CategoryGUID") == cat_security_guid, module.get("Findings", []))
71 |             for finding in sec_findings_only:
72 |                 sec_findings_count += finding["Count"]
73 |         if sec_findings_count > max_sec_findings_count:
74 |             raise TechDebtAnalysisException("Security findings count of application {} is above defined threshold ({}).".format(app["Name"], sec_findings_count))
75 | 
76 | print("Technical debt findings are below predefined thresholds.", flush=True)
77 | 
--------------------------------------------------------------------------------
/examples/azure_devops/multistage/tasks/ApplyConfigurationValues.yaml:
--------------------------------------------------------------------------------
 1 | # ******************************************************************
 2 | # Template: ApplyConfigurationValues
 3 | # ******************************************************************
 4 | # TBD
 5 | # ******************************************************************
 6 | 
 7 | # ******************************************************************
 8 | # Declare parameters
 9 | # ******************************************************************
10 | parameters:
11 | - name: TargetEnvironmentLabel # Target Environment (in manifest)
12 |   type: string
13 | 
14 | # ******************************************************************
15 | # Declare steps
16 | # ******************************************************************
17 | steps:
18 | 
19 | # ******************************************************************
20 | # Step: Apply configuration values
21 | # ******************************************************************
22 | # Apply configuration values (if any) to target environment
23 | # ******************************************************************
24 | - script: >
25 |     python -m outsystems.pipeline.apply_configuration_values_to_target_env
26 |     --artifacts "$(ArtifactsBuildFolder)" 
27 |     --lt_url $(LifeTime.Hostname) 
28 |     --lt_token $(LifeTime.ServiceAccountToken)
29 |     --target_env_label "${{ parameters.TargetEnvironmentLabel }}" 
30 |     --manifest_file "$(Pipeline.Workspace)/$(Manifest.Folder)/$(Manifest.File)"
31 |   displayName: 'Apply configuration values in ${{ parameters.TargetEnvironmentLabel }} environment'
32 | 
--------------------------------------------------------------------------------
/examples/azure_devops/multistage/tasks/InstallPythonPackage.yaml:
--------------------------------------------------------------------------------
 1 | # ******************************************************************
 2 | # Template: InstallPythonPackage-AzureAgent
 3 | # ******************************************************************
 4 | # Install outsystems-pipeline package from PyPI repository
 5 | # ******************************************************************
 6 | 
 7 | # ******************************************************************
 8 | # Declare steps
 9 | # ******************************************************************
10 | steps:
11 | 
12 | # ******************************************************************
13 | # Step: Select python version to use
14 | # ******************************************************************
15 | # Select which python version to use in the Pipeline Agents 
16 | # ******************************************************************
17 | # - task: UsePythonVersion@0
18 | #   inputs:
19 | #     versionSpec: '$(Python.Version)'
20 | #   displayName: 'Select python version ($(Python.Version))'
21 | 
22 | # ******************************************************************
23 | # Step: Install outsystems-pipeline package 
24 | # ******************************************************************
25 | # Install python package and its dependencies from PyPI
26 | # ******************************************************************
27 | - script: |
28 |     pip install -U outsystems-pipeline==$(OSPackage.Version)
29 |   displayName: 'Install outsystems-pipeline package'
30 | 
--------------------------------------------------------------------------------
/examples/azure_devops/windows_azure_pipeline_ci.yaml:
--------------------------------------------------------------------------------
 1 | ---
 2 | steps:
 3 | - task: PowerShell@2
 4 |   inputs:
 5 |     targetType: 'inline'
 6 |     script: |
 7 |       Write-Host "Create $(ArtifactsBuildFolder) directory on $(System.DefaultWorkingDirectory)"
 8 |       mkdir "$(ArtifactsBuildFolder)" -Force
 9 |     workingDirectory: $(System.DefaultWorkingDirectory)
10 |   displayName: 'Create Artifacts Folder'
11 | 
12 | - task: PowerShell@2
13 |   inputs:
14 |     targetType: 'inline'
15 |     script: pip install -U outsystems-pipeline==$(OSPackageVersion)
16 |     workingDirectory: $(System.DefaultWorkingDirectory)
17 |   displayName: 'Install OutSystems Pipeline Package'
18 | 
19 | - task: PowerShell@2
20 |   inputs:
21 |     targetType: 'inline'
22 |     script: python -m outsystems.pipeline.fetch_lifetime_data --artifacts "$(ArtifactsBuildFolder)" --lt_url  $(LifeTimeHostname) --lt_token $(LifeTimeServiceAccountToken) --lt_api_version $(LifeTimeAPIVersion)
23 |   displayName: 'Fetch Lifetime Data'
24 | 
25 | - task: PowerShell@2
26 |   inputs:
27 |     targetType: 'inline'
28 |     script: python -m outsystems.pipeline.deploy_latest_tags_to_target_env --artifacts "$(ArtifactsBuildFolder)" --lt_url $(LifeTimeHostname) --lt_token $(LifeTimeServiceAccountToken) --lt_api_version $(LifeTimeAPIVersion) --source_env $(DevelopmentEnvironment) --destination_env $(RegressionEnvironment) --app_list "$(ApplicationScopeWithTests)"
29 |   displayName: 'Deploy to CI Environment'
30 | 
31 | - task: PowerShell@2
32 |   inputs:
33 |     targetType: 'inline'
34 |     script: python -m outsystems.pipeline.generate_unit_testing_assembly --artifacts "$(ArtifactsBuildFolder)" --app_list "$(ApplicationScopeWithTests)" --cicd_probe_env $(ProbeEnvironmentURL) --bdd_framework_env $(BddEnvironmentURL)
35 |   displayName: 'Generate URLs for BDD testing'
36 | 
37 | - task: PowerShell@2
38 |   inputs:
39 |     targetType: 'inline'
40 |     script: |
41 |       python -m outsystems.pipeline.evaluate_test_results --artifacts "$(ArtifactsBuildFolder)"
42 |       exit 0
43 |   displayName: 'Evaluate Test Results'
44 |   continueOnError: true
45 | 
46 | - task: PublishTestResults@2
47 |   displayName: 'Publish Test Results'
48 |   inputs:
49 |     testResultsFiles: '$(ArtifactsBuildFolder)\junit-result.xml'
50 |     mergeTestResults: true
51 |     failTaskOnFailedTests: true
52 | 
53 | - task: PowerShell@2
54 |   inputs:
55 |     targetType: 'inline'
56 |     script: New-Item -Force -Path "$(ArtifactsBuildFolder)\deployment_data" -Name "ApplicationScope.cache" -ItemType "file" -Value "$(ApplicationScope)"
57 |   displayName: 'Write ApplicationScope to File'
58 |   continueOnError: true
59 | 
60 | - task: PublishBuildArtifacts@1
61 |   displayName: 'Publish Artifact: deployment_manifest'
62 |   inputs:
63 |     PathtoPublish: $(ArtifactsBuildFolder)\deployment_data
64 |     ArtifactName: $(ArtifactName)
65 | 
66 | - powershell: |
67 |     Write-Host "Stashing the *.cache generated in the pipeline logs"
68 |     $cache_files = Get-ChildItem -Path "$(ArtifactsBuildFolder)\*.cache" -Recurse
69 |     foreach ($cfile in $cache_files) {
70 |       Write-Host "Stashing $cfile"
71 |       Write-Output "##vso[task.uploadfile]$cfile"
72 |     }
73 |   condition: always()
74 |   displayName: 'Stash generated *.cache files into pipeline logs'
75 | ...
--------------------------------------------------------------------------------
/examples/other_pipelines/Linux/build_test_endpoints.sh:
--------------------------------------------------------------------------------
 1 | #!/bin/bash
 2 | 
 3 | if [[ $# -lt 10 ]] ; then
 4 |     echo -e '\nNot enough parameters! You need to set the necessary parameters:\n'
 5 |     echo -e '-e  \t\t Python environment name, in order to use your pip dependencies.'
 6 |     echo -e '-a  \t\t\t\t Artifacts directory used for cache files between pipeline scripts.'
 7 |     echo -e '-l  \t\t\t\t Comma separeted list of apps you want to test (including the ones with tests).'
 8 |     echo -e '-c  \t\t\t\t URL for the environment containing the CICD probe. You dont need to set the API endpoint.'
 9 |     echo -e '-b  \t\t\t\t URL for the environment containing the BDD Framework. You dont need to set the API endpoint.'
10 |     echo -e '\n\nusage: ./build_test_endpoints.sh -e  -a  -l  -c  -b '
11 |     exit 1
12 | fi
13 | 
14 | while getopts "e:a:l:c:b:" option 
15 | do
16 |     case "${option}"
17 |     in
18 |         e) env_name=${OPTARG};;
19 |         a) artifacts=${OPTARG};;
20 |         l) app_list=${OPTARG};;
21 |         c) cicd_url=${OPTARG};;
22 |         b) bdd_url=${OPTARG};;
23 |     esac
24 | done
25 | 
26 | echo "Switch to Virtual Environment"
27 | source $env_name/bin/activate
28 | 
29 | echo "Building the test endpoints"
30 | python3 -m outsystems.pipeline.generate_unit_testing_assembly --artifacts "$artifacts" --app_list "$app_list" --cicd_probe_env $cicd_url --bdd_framework_env $bdd_url
31 | 
32 | # Store the exit status from the command above, to make it the exit status of this script
33 | status_code=$?
34 | 
35 | echo "Leave the Virtual Environment for now"
36 | deactivate
37 | 
38 | #### For Azure DevOps, uncomment the next lines ####
39 | #echo "Stashing the *.cache generated in the pipeline logs"
40 | #cache_files=$PWD/$artifacts/**/*.cache
41 | #for cfile in $cache_files
42 | #do
43 | #    echo "Stashing $cfile"
44 | #    echo "##vso[task.uploadfile]$cfile"
45 | #done
46 | 
47 | exit $status_code
--------------------------------------------------------------------------------
/examples/other_pipelines/Linux/deploy_apps_to_env.sh:
--------------------------------------------------------------------------------
 1 | #!/bin/bash
 2 | 
 3 | if [[ $# -lt 18 ]] ; then
 4 |     echo -e '\nNot enough parameters! You need to set the necessary parameters:\n'
 5 |     echo -e '-e  \t\t Python environment name, in order to use your pip dependencies.'
 6 |     echo -e '-a  \t\t\t\t Artifacts directory used for cache files between pipeline scripts.'
 7 |     echo -e '-u  \t\t\t\t URL for the environment containing the LifeTime. You dont need to set the API endpoint.'
 8 |     echo -e '-t  \t\t\t\t API Token for the LifeTime service account.'
 9 |     echo -e '-v  \t\t\t\t LifeTime API version number.'
10 |     echo -e '-s  \t\t\t\t Source environment name, where the apps you want to deploy are tagged.'
11 |     echo -e '-d  \t\t\t\t Destination environment name, where the apps you deploy will be.'
12 |     echo -e '-l  \t\t\t\t Comma separated list of applications you want to deploy.'
13 |     echo -e '-m  \t\t\t\t Message you want to set on the deployment plan in LifeTime.'
14 |     echo -e '\n\nusage: ./deploy_apps_to_env.sh -e  -a  -u  -t  -v  -s  -d  -l  -m '
15 |     exit 1
16 | fi
17 | 
18 | while getopts "e:a:u:t:v:s:d:l:m:" option 
19 | do
20 |     case "${option}"
21 |     in
22 |         e) env_name=${OPTARG};;
23 |         a) artifacts=${OPTARG};;
24 |         u) lt_url=${OPTARG};;
25 |         t) lt_token=${OPTARG};;
26 |         v) lt_api=${OPTARG};;
27 |         s) source_env=${OPTARG};;
28 |         d) dest_env=${OPTARG};;
29 |         l) app_list=${OPTARG};;
30 |         m) dep_msg=${OPTARG};;
31 |     esac
32 | done
33 | 
34 | echo "Switch to Virtual Environment"
35 | source $env_name/bin/activate
36 | 
37 | echo "Deploy apps to $dest_env"
38 | python3 -m outsystems.pipeline.deploy_latest_tags_to_target_env --artifacts "$artifacts" --lt_url $lt_url --lt_token $lt_token --lt_api_version $lt_api --source_env "$source_env" --destination_env "$dest_env" --app_list "$app_list" --deploy_msg "$dep_msg"
39 | 
40 | # Store the exit status from the command above, to make it the exit status of this script
41 | status_code=$?
42 | 
43 | echo "Leave the Virtual Environment for now"
44 | deactivate
45 | 
46 | #### For Azure DevOps, uncomment the next lines ####
47 | #echo "Stashing the *.cache generated in the pipeline logs"
48 | #cache_files=$PWD/$artifacts/**/*.cache
49 | #for cfile in $cache_files
50 | #do
51 | #    echo "Stashing $cfile"
52 | #    echo "##vso[task.uploadfile]$cfile"
53 | #done
54 | 
55 | #cache_files=$PWD/$artifacts/*.cache
56 | #for cfile in $cache_files
57 | #do
58 | #    echo "Stashing $cfile"
59 | #    echo "##vso[task.uploadfile]$cfile"
60 | #done
61 | 
62 | #conflicts_file=$PWD/$artifacts/DeploymentConflicts
63 | #if test -f "$conflicts_file"; then
64 | #    echo "Stashing $conflicts_file"
65 | #    echo "##vso[task.uploadfile]$conflicts_file"
66 | #fi
67 | 
68 | exit $status_code
69 | 
--------------------------------------------------------------------------------
/examples/other_pipelines/Linux/fetch_lt_data.sh:
--------------------------------------------------------------------------------
 1 | #!/bin/bash
 2 | 
 3 | if [[ $# -lt 10 ]] ; then
 4 |     echo -e '\nNot enough parameters! You need to set the necessary parameters:\n'
 5 |     echo -e '-e  \t\t Python environment name, in order to use your pip dependencies.'
 6 |     echo -e '-a  \t\t\t\t Artifacts directory used for cache files between pipeline scripts.'
 7 |     echo -e '-u  \t\t\t\t URL for the environment containing the LifeTime. You dont need to set the API endpoint.'
 8 |     echo -e '-t  \t\t\t\t API Token for the LifeTime service account.'
 9 |     echo -e '-v  \t\t\t\t LifeTime API version number.'
10 |     echo -e '\n\nusage: ./fetch_lt_data.sh -e  -a  -u  -t  -v '
11 |     exit 1
12 | fi
13 | 
14 | while getopts "e:a:u:t:v:" option 
15 | do
16 |     case "${option}"
17 |     in
18 |         e) env_name=${OPTARG};;
19 |         a) artifacts=${OPTARG};;
20 |         u) lt_url=${OPTARG};;
21 |         t) lt_token=${OPTARG};;
22 |         v) lt_api=${OPTARG};;
23 |     esac
24 | done
25 | 
26 | echo "Switch to Virtual Environment"
27 | source $env_name/bin/activate
28 | 
29 | echo "Fetch LifeTime data"
30 | python3 -m outsystems.pipeline.fetch_lifetime_data --artifacts "$artifacts" --lt_url $lt_url --lt_token $lt_token --lt_api_version $lt_api
31 | 
32 | # Store the exit status from the command above, to make it the exit status of this script
33 | status_code=$?
34 | 
35 | echo "Leave the Virtual Environment for now"
36 | deactivate
37 | 
38 | #### For Azure DevOps, uncomment the next lines ####
39 | #echo "Stashing the *.cache generated in the pipeline logs"
40 | #cache_files=$PWD/$artifacts/*.cache
41 | #for cfile in $cache_files
42 | #do
43 | #    echo "Stashing $cfile"
44 | #    echo "##vso[task.uploadfile]$cfile"
45 | #done
46 | 
47 | exit $status_code
--------------------------------------------------------------------------------
/examples/other_pipelines/Linux/install_dependencies.sh:
--------------------------------------------------------------------------------
 1 | #!/bin/bash
 2 | 
 3 | if [[ $# -lt 6 ]] ; then
 4 |   echo -e '\nNot enough parameters! You need to set the necessary parameters:\n'
 5 |   echo -e '-e  \t\t Python environment name, in order to use your pip dependencies.'
 6 |   echo -e '-a  \t\t\t\t Artifacts directory used for cache files between pipeline scripts.'
 7 |   echo -e '-f  \t\t\t\t Requirements filepath for pip.'
 8 |   echo -e '\n\nusage: ./install_dependencies.sh -e  -a  -f '
 9 |   exit 1
10 | fi
11 | 
12 | while getopts "e:a:f:" option 
13 | do
14 |   case "${option}"
15 |   in
16 |   e) env_name=${OPTARG};;
17 |   a) artifacts=${OPTARG};;
18 |   f) dep_file=${OPTARG};;
19 |   esac
20 | done
21 | 
22 | echo "Create Artifacts Folder"
23 | mkdir $artifacts
24 | 
25 | echo "Create Python Virtual environment"
26 | python3 -m venv $env_name --clear
27 | 
28 | echo "Switch to Virtual Environment"
29 | source $env_name/bin/activate
30 | 
31 | echo "Installing wheel Package"
32 | pip3 install -U wheel
33 | echo "Install OutSystems Pipeline Package"
34 | pip3 install -U outsystems-pipeline
35 | 
36 | echo "Leave the Virtual Environment for now"
37 | deactivate
--------------------------------------------------------------------------------
/examples/other_pipelines/Linux/running_test_endpoints.sh:
--------------------------------------------------------------------------------
 1 | #!/bin/bash
 2 | 
 3 | if [[ $# -lt 4 ]] ; then
 4 |   echo -e '\nNot enough parameters! You need to set the necessary parameters:\n'
 5 |   echo -e '-e  \t\t Python environment name, in order to use your pip dependencies.'
 6 |   echo -e '-a  \t\t\t\t Artifacts directory used for cache files between pipeline scripts.'
 7 |   echo -e '\n\nusage: ./running_test_endpoints.sh -e  -a '
 8 |   exit 1
 9 | fi
10 | 
11 | while getopts "e:a:" option 
12 | do
13 |   case "${option}"
14 |   in
15 |   e) env_name=${OPTARG};;
16 |   a) artifacts=${OPTARG};;
17 |   esac
18 | done
19 | 
20 | echo "Switch to Virtual Environment"
21 | source $env_name/bin/activate
22 | 
23 | echo "Building the test endpoints"
24 | python3 -m outsystems.pipeline.evaluate_test_results --artifacts "$artifacts"
25 | 
26 | # Store the exit status from the command above, to make it the exit status of this script
27 | status_code=$?
28 | 
29 | echo "Leave the Virtual Environment for now"
30 | deactivate
31 | 
32 | exit $status_code
--------------------------------------------------------------------------------
/examples/other_pipelines/Linux/send_notifications_slack.sh:
--------------------------------------------------------------------------------
 1 | #!/bin/bash
 2 | 
 3 | if [[ $# -lt 14 ]] ; then
 4 |   echo -e '\nNot enough parameters! You need to set the necessary parameters:\n'
 5 |   echo -e '-e  \t\t Python environment name, in order to use your pip dependencies.'
 6 |   echo -e '-a  \t\t\t\t Artifacts directory used for cache files between pipeline scripts.'
 7 |   echo -e '-s  \t\t\t\t URL with the Slack API Hook, for API calls.'
 8 |   echo -e '-c  \t\t\t\t Comma separated list for slack channels.'
 9 |   echo -e '-p  \t\t\t\t Pipeline type. Current Types: azure, jenkins.'
10 |   echo -e '-j  \t\t\t\t Job name you want to show on the notification. Example: Main app name.'
11 |   echo -e '-d  \t\t\t\t URL for the dashboard.'
12 |   echo -e '\n\nusage: ./send_notifications_slack.sh -e  -a  -s  -c  -p  -j  -d '
13 |   exit 1
14 | fi
15 | 
16 | while getopts "e:a:s:c:p:j:d:" option 
17 | do
18 |   case "${option}"
19 |   in
20 |   e) env_name=${OPTARG};;
21 |   a) artifacts=${OPTARG};;
22 |   s) slack_hook=${OPTARG};;
23 |   c) slack_channels=${OPTARG};;
24 |   p) pipeline_type=${OPTARG};;
25 |   j) job_name=${OPTARG};;
26 |   d) dashboard_url=${OPTARG};;
27 |   esac
28 | done
29 | 
30 | echo "Switch to Virtual Environment"
31 | source $env_name/bin/activate
32 | 
33 | echo "Sending test results to Slack"
34 | python3 outsystems_integrations/slack/send_test_results_to_slack.py --artifacts "$artifacts" --slack_hook $slack_hook --slack_channel "$slack_channels" --pipeline "$pipeline_type" --job_name "$job_name" --job_dashboard_url "$dashboard_url"
35 | 
36 | # Store the exit status from the command above, to make it the exit status of this script
37 | status_code=$?
38 | 
39 | echo "Leave the Virtual Environment for now"
40 | deactivate
41 | 
42 | exit $status_code
43 | 
--------------------------------------------------------------------------------
/examples/other_pipelines/README.md:
--------------------------------------------------------------------------------
1 | # Support to other pipeline types
2 | 
The current examples will grow over time, but since it is impractical to create and maintain every possible combination, here you will find shell scripts (in both `bash` and `powershell`) to help you integrate the OutSystems Pipeline with any CI/CD engine of your choice.
4 | 
--------------------------------------------------------------------------------
/examples/other_pipelines/Windows/build_test_endpoints.ps1:
--------------------------------------------------------------------------------
 1 | param(
 2 |     [Parameter(Mandatory = $true)]
 3 |     [string]$PythonEnv,
 4 |     [Parameter(Mandatory = $true)]
 5 |     [string]$ArtifactDir,
 6 |     [Parameter(Mandatory = $true)]
 7 |     [string[]]$AppList,
 8 |     [Parameter(Mandatory = $true)]
 9 |     [string]$BddUrl,
10 |     [Parameter(Mandatory = $true)]
11 |     [string]$CicdUrl
12 | )
13 | 
14 | Write-Host "Switch to Virtual Environment"
15 | . .\$PythonEnv\Scripts\Activate.ps1
16 | 
17 | Write-Host "Building the test endpoints"
18 | python -m outsystems.pipeline.generate_unit_testing_assembly --artifacts "$ArtifactDir" --app_list "$AppList" --cicd_probe_env $CicdUrl --bdd_framework_env $BddUrl
19 | 
20 | # Store the exit status from the command above, to make it the exit status of this script
21 | $status_code = $LASTEXITCODE
22 | 
23 | Write-Host "Leave the Virtual Environment for now"
24 | deactivate
25 | 
26 | #### For Azure DevOps, uncomment the next lines ####
27 | #Write-Host "Stashing the *.cache generated in the pipeline logs"
28 | ## The recurse flag is used to go into each directory (application_data, etc)
29 | #$cache_files = Get-ChildItem -Path "$PWD\$(ArtifactsFolder)\*.cache" -Recurse
30 | #foreach ($cfile in $cache_files) {
31 | #    Write-Host "Stashing $cfile"
32 | #    Write-Output "##vso[task.uploadfile]$cfile"
33 | #}
34 | 
35 | exit $status_code
--------------------------------------------------------------------------------
/examples/other_pipelines/Windows/deploy_apps_to_env.ps1:
--------------------------------------------------------------------------------
 1 | param(
 2 |     [Parameter(Mandatory = $true)]
 3 |     [string]$PythonEnv,
 4 |     [Parameter(Mandatory = $true)]
 5 |     [string]$ArtifactDir,
 6 |     [Parameter(Mandatory = $true)]
 7 |     [string]$LifeTimeUrl,
 8 |     [Parameter(Mandatory = $true)]
 9 |     [string]$LifeTimeToken,
10 |     [Parameter(Mandatory = $true)]
11 |     [int]$LifeTimeApi,
12 |     [Parameter(Mandatory = $true)]
13 |     [string]$SourceEnv,
14 |     [Parameter(Mandatory = $true)]
15 |     [string]$DestEnv,
16 |     [Parameter(Mandatory = $true)]
17 |     [string[]]$AppList,
18 |     [Parameter(Mandatory = $true)]
19 |     [string]$DeployMsg
20 | )
21 | 
22 | Write-Host "Switch to Virtual Environment"
23 | . .\$PythonEnv\Scripts\Activate.ps1
24 | 
25 | Write-Host "Deploy apps to $DestEnv"
26 | python -m outsystems.pipeline.deploy_latest_tags_to_target_env --artifacts "$ArtifactDir" --lt_url $LifeTimeUrl --lt_token $LifeTimeToken --lt_api_version $LifeTimeApi --source_env "$SourceEnv" --destination_env "$DestEnv" --app_list "$AppList" --deploy_msg "$DeployMsg"
27 | 
28 | # Store the exit status from the command above, to make it the exit status of this script
29 | $status_code = $LASTEXITCODE
30 | 
31 | Write-Host "Leave the Virtual Environment for now"
32 | deactivate
33 | 
34 | Write-Host "Stashing the *.cache generated in the pipeline logs"
35 | 
36 | #### For Azure DevOps, uncomment the next lines ####
37 | ## The recurse flag is used to go into each directory (application_data, etc)
38 | #$cache_files = Get-ChildItem -Path "$PWD\$(ArtifactsFolder)\*.cache" -Recurse
39 | #foreach ($cfile in $cache_files) {
40 | #  Write-Host "Stashing $cfile"
41 | #  Write-Output "##vso[task.uploadfile]$cfile"
42 | #}
43 | 
44 | #$conflicts_file = Get-ChildItem -Path $PWD\$(ArtifactsFolder)\DeploymentConflicts
45 | #if(Test-Path $conflicts_file) {
46 | #  Write-Host "Stashing $conflicts_file"
47 | #  Write-Output "##vso[task.uploadfile]$conflicts_file"
48 | #}
49 | 
50 | exit $status_code
--------------------------------------------------------------------------------
/examples/other_pipelines/Windows/fetch_lt_data.ps1:
--------------------------------------------------------------------------------
# Fetches LifeTime environment and application data into the artifacts
# directory, using the outsystems-pipeline package inside a Python virtual
# environment. Exits with the fetch command's status code.
param(
    [Parameter(Mandatory = $true)]
    [string]$PythonEnv,
    [Parameter(Mandatory = $true)]
    [string]$ArtifactDir,
    [Parameter(Mandatory = $true)]
    [string]$LifeTimeUrl,
    [Parameter(Mandatory = $true)]
    [string]$LifeTimeToken,
    [Parameter(Mandatory = $true)]
    [int]$LifeTimeApi
)

Write-Host "Switch to Virtual Environment"
# Dot-source the venv activation script so it affects the current session
. .\$PythonEnv\Scripts\Activate.ps1

Write-Host "Fetch LifeTime data"
python -m outsystems.pipeline.fetch_lifetime_data --artifacts "$ArtifactDir" --lt_url $LifeTimeUrl --lt_token $LifeTimeToken --lt_api_version $LifeTimeApi

# Store the exit status from the command above, to make it the exit status of this script
$status_code = $LASTEXITCODE

Write-Host "Leave the Virtual Environment for now"
deactivate

#### For Azure DevOps, uncomment the next lines ####
# NOTE(review): $(ArtifactsFolder) below is Azure DevOps variable syntax; when
# enabling these lines in this script, $ArtifactDir is presumably intended --
# confirm before uncommenting.
#Write-Host "Stashing the *.cache generated in the pipeline logs"
#$cache_files = Get-ChildItem -Path "$PWD\$(ArtifactsFolder)\*.cache"
#foreach ($cfile in $cache_files) {
#  Write-Host "Stashing $cfile"
#  Write-Output "##vso[task.uploadfile]$cfile"
#}

exit $status_code
--------------------------------------------------------------------------------
/examples/other_pipelines/Windows/install_dependencies.ps1:
--------------------------------------------------------------------------------
 1 | param(
 2 |   [Parameter(Mandatory=$true)]
 3 |   [string]$PythonEnv,
 4 |   [Parameter(Mandatory=$true)]
 5 |   [string]$ArtifactDir,
 6 |   [Parameter(Mandatory=$true)]
 7 |   [string]$RequirementsFile
 8 | )
 9 | 
10 | Write-Host "Create Artifacts Folder"
11 | if(Test-Path -Path $ArtifactDir) {
12 |   Remove-Item –Path $ArtifactDir -Force
13 | } 
14 | New-Item -ItemType Directory -Path $ArtifactDir
15 | 
16 | Write-Host "Create Python Virtual environment"
17 | python -m venv $PythonEnv --clear
18 | 
19 | Write-Host "Switch to Virtual Environment"
20 | . .\$PythonEnv\Scripts\Activate.ps1
21 | 
22 | Write-Host "Installing wheel Package"
23 | pip install -U wheel
24 | Write-Host "Install OutSystems Pipeline Package"
25 | pip install -U outsystems-pipeline
26 | 
27 | Write-Host "Leave the Virtual Environment for now"
28 | deactivate
--------------------------------------------------------------------------------
/examples/other_pipelines/Windows/running_test_endpoints.ps1:
--------------------------------------------------------------------------------
 1 | param(
 2 |     [Parameter(Mandatory = $true)]
 3 |     [string]$PythonEnv,
 4 |     [Parameter(Mandatory = $true)]
 5 |     [string]$ArtifactDir
 6 | )
 7 | 
 8 | 
 9 | Write-Host "Switch to Virtual Environment"
10 | . .\$PythonEnv\Scripts\Activate.ps1
11 | 
12 | Write-Host "Building the test endpoints"
13 | python -m outsystems.pipeline.evaluate_test_results --artifacts "$ArtifactDir"
14 | 
15 | # Store the exit status from the command above, to make it the exit status of this script
16 | $status_code = $LASTEXITCODE
17 | 
18 | Write-Host "Leave the Virtual Environment for now"
19 | deactivate
20 | 
21 | exit $status_code
--------------------------------------------------------------------------------
/examples/other_pipelines/Windows/send_notifications_slack.ps1:
--------------------------------------------------------------------------------
 1 | param(
 2 |   [Parameter(Mandatory=$true)]
 3 |   [string]$PythonEnv,
 4 |   [Parameter(Mandatory=$true)]
 5 |   [string]$ArtifactDir,
 6 |   [Parameter(Mandatory=$true)]
 7 |   [string]$SlackHook,
 8 |   [Parameter(Mandatory=$true)]
 9 |   [string[]]$SlackChannels,
10 |   [Parameter(Mandatory=$true)]
11 |   [ValidateSet("jenkins","azure")]
12 |   [string]$PipelineType,
13 |   [Parameter(Mandatory=$true)]
14 |   [string]$JobName,
15 |   [Parameter(Mandatory=$true)]
16 |   [string]$DashboardUrl
17 | )
18 | 
19 | Write-Host "Switch to Virtual Environment"
20 | . .\$PythonEnv\Scripts\Activate.ps1
21 | 
22 | Write-Host "Building the test endpoints"
23 | python outsystems_integrations/slack/send_test_results_to_slack.py --artifacts "$ArtifactDir" --slack_hook $SlackHook --slack_channel "$SlackChannels" --pipeline "$PipelineType" --job_name "$JobName" --job_dashboard_url "$DashboardUrl"
24 | 
25 | # Store the exit status from the command above, to make it the exit status of this script
26 | $status_code = $LASTEXITCODE
27 | 
28 | Write-Host "Leave the Virtual Environment for now"
29 | deactivate
30 | 
31 | exit $status_code
--------------------------------------------------------------------------------
/outsystems/architecture_dashboard/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems/architecture_dashboard/__init__.py
--------------------------------------------------------------------------------
/outsystems/architecture_dashboard/ad_base.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | import requests
 3 | 
 4 | # Custom Modules
 5 | # Exceptions
 6 | from outsystems.exceptions.invalid_json_response import InvalidJsonResponseError
 7 | 
 8 | 
 9 | # Method that builds the endpoint for Architecture Dashboard API and returns it
10 | def build_ad_endpoint(ad_http_proto: str, ad_api_host: str, ad_api_endpoint: str, ad_api_version: int):
11 |     return "{}://{}/{}/v{}".format(ad_http_proto, ad_api_host, ad_api_endpoint, ad_api_version)
12 | 
13 | 
# Sends a GET request to Architecture Dashboard
def send_get_request(request_string: str, activation_code: str, api_key: str, url_params: dict = None):
    """Send an authenticated GET request to the Architecture Dashboard API.

    Returns a dict with the HTTP status code ("http_status") and the parsed
    JSON payload ("response", {} when the body is empty).
    Raises InvalidJsonResponseError when a non-empty body is not valid JSON.
    """
    # API key + Customer Activation Code
    headers = {'x-api-key': api_key,
               'x-activation-code': activation_code}

    response = requests.get(request_string, params=url_params, headers=headers)
    response_obj = {"http_status": response.status_code, "response": {}}
    if len(response.text) > 0:
        try:
            response_obj["response"] = response.json()
        except ValueError as error:
            # Narrowed from a bare "except:", which would also swallow
            # KeyboardInterrupt/SystemExit. requests raises a ValueError
            # subclass when the body cannot be decoded as JSON.
            raise InvalidJsonResponseError(
                "GET {}: The JSON response could not be parsed. Response: {}".format(request_string, response.text)) from error
    return response_obj
29 | 
--------------------------------------------------------------------------------
/outsystems/architecture_dashboard/ad_tech_debt.py:
--------------------------------------------------------------------------------
  1 | # Python Modules
  2 | import os
  3 | 
  4 | # Custom Modules
  5 | # Exceptions
  6 | from outsystems.exceptions.not_enough_permissions import NotEnoughPermissionsError
  7 | # Functions
  8 | from outsystems.architecture_dashboard.ad_base import send_get_request, build_ad_endpoint
  9 | from outsystems.file_helpers.file import store_data
 10 | 
 11 | # Variables
 12 | from outsystems.vars.ad_vars import AD_API_ENDPOINT, AD_API_SUCCESS_CODE, AD_HTTP_PROTO, AD_API_VERSION, \
 13 |     AD_API_UNAUTHORIZED_CODE, AD_APP_ENDPOINT, AD_APP_LIMIT_DEFAULT, AD_APP_SUCCESS_CODE, \
 14 |     AD_LEVELS_ENDPOINT, AD_LEVELS_SUCCESS_CODE, AD_CATEGORIES_ENDPOINT, AD_CATEGORIES_SUCCESS_CODE, \
 15 |     AD_API_NOT_FOUND_CODE
 16 | from outsystems.vars.file_vars import AD_FOLDER, AD_FILE_PREFIX, AD_INFRA_FILE, AD_APP_FILE, \
 17 |     AD_LEVELS_FILE, AD_CATEGORIES_FILE
 18 | 
 19 | 
 20 | # Returns the infrastructure technical debt summary
 21 | def get_infra_techdebt(artifact_dir: str, ad_api_host: str, activation_code: str, api_key: str):
 22 | 
 23 |     # Format the request URL to include the api endpoint
 24 |     base_url = build_ad_endpoint(AD_HTTP_PROTO, ad_api_host, AD_API_ENDPOINT, AD_API_VERSION)
 25 |     request_string = "{}/{}".format(base_url, AD_APP_ENDPOINT)
 26 |     params = {"Limit": AD_APP_LIMIT_DEFAULT}
 27 | 
 28 |     # Sends the request
 29 |     response = send_get_request(request_string, activation_code, api_key, params)
 30 |     status_code = int(response["http_status"])
 31 | 
 32 |     # Process the response based on the status code returned from the server
 33 |     if status_code == AD_API_SUCCESS_CODE:
 34 |         # Stores the result
 35 |         filename = "{}{}".format(AD_FILE_PREFIX, AD_INFRA_FILE)
 36 |         filename = os.path.join(AD_FOLDER, filename)
 37 |         store_data(artifact_dir, filename, response["response"])
 38 |         return response["response"]
 39 |     elif status_code == AD_API_UNAUTHORIZED_CODE:
 40 |         raise NotEnoughPermissionsError(
 41 |             "You don't have enough permissions to get Tecnical Debt information. Details {}".format(response["response"]))
 42 |     else:
 43 |         raise NotImplementedError(
 44 |             "There was an error. Response from server: {}".format(response))
 45 | 
 46 | 
 47 | # Returns the application technical debt summary
 48 | def get_app_techdebt(artifact_dir: str, ad_api_host: str, activation_code: str, api_key: str, app: dict):
 49 | 
 50 |     # Format the request URL to include the api endpoint
 51 |     base_url = build_ad_endpoint(AD_HTTP_PROTO, ad_api_host, AD_API_ENDPOINT, AD_API_VERSION)
 52 |     request_string = "{}/{}".format(base_url, AD_APP_ENDPOINT)
 53 |     params = {"ApplicationGUID": app["ApplicationKey"]}
 54 | 
 55 |     # Sends the request
 56 |     response = send_get_request(request_string, activation_code, api_key, params)
 57 |     status_code = int(response["http_status"])
 58 | 
 59 |     # Process the response based on the status code returned from the server
 60 |     if status_code == AD_APP_SUCCESS_CODE:
 61 |         # Stores the result
 62 |         filename = "{}.{}{}".format(AD_FILE_PREFIX, app["ApplicationName"], AD_APP_FILE)
 63 |         filename = os.path.join(AD_FOLDER, filename)
 64 |         store_data(artifact_dir, filename, response["response"])
 65 |         return response["response"]
 66 |     # No application found with a key matching the Application input parameter
 67 |     # Probably all modules of the app are ignored"
 68 |     elif status_code == AD_API_NOT_FOUND_CODE:
 69 |         return None
 70 |     elif status_code == AD_API_UNAUTHORIZED_CODE:
 71 |         raise NotEnoughPermissionsError(
 72 |             "You don't have enough permissions to get Tecnical Debt information. Details {}".format(response["response"]))
 73 |     else:
 74 |         raise NotImplementedError(
 75 |             "There was an error. Response from server: {}".format(response))
 76 | 
 77 | 
 78 | # Returns the technical debt levels detail
 79 | def get_techdebt_levels(artifact_dir: str, ad_api_host: str, activation_code: str, api_key: str):
 80 | 
 81 |     # Format the request URL to include the api endpoint
 82 |     base_url = build_ad_endpoint(AD_HTTP_PROTO, ad_api_host, AD_API_ENDPOINT, AD_API_VERSION)
 83 |     request_string = "{}/{}".format(base_url, AD_LEVELS_ENDPOINT)
 84 | 
 85 |     # Sends the request
 86 |     response = send_get_request(request_string, activation_code, api_key)
 87 |     status_code = int(response["http_status"])
 88 | 
 89 |     # Process the response based on the status code returned from the server
 90 |     if status_code == AD_LEVELS_SUCCESS_CODE:
 91 |         # Stores the result
 92 |         filename = "{}{}".format(AD_FILE_PREFIX, AD_LEVELS_FILE)
 93 |         filename = os.path.join(AD_FOLDER, filename)
 94 |         store_data(artifact_dir, filename, response["response"])
 95 |         return response["response"]
 96 |     elif status_code == AD_API_UNAUTHORIZED_CODE:
 97 |         raise NotEnoughPermissionsError(
 98 |             "You don't have enough permissions to get Tecnical Debt information. Details {}".format(response["response"]))
 99 |     else:
100 |         raise NotImplementedError(
101 |             "There was an error. Response from server: {}".format(response))
102 | 
103 | 
# Returns the technical debt categories detail
def get_techdebt_categories(artifact_dir: str, ad_api_host: str, activation_code: str, api_key: str):
    """Fetch the catalog of technical debt categories, cache it under
    ``artifact_dir`` and return the JSON payload.

    Raises NotEnoughPermissionsError on an unauthorized response and
    NotImplementedError for any other unexpected status code.
    """

    # Format the request URL to include the api endpoint
    base_url = build_ad_endpoint(AD_HTTP_PROTO, ad_api_host, AD_API_ENDPOINT, AD_API_VERSION)
    request_string = "{}/{}".format(base_url, AD_CATEGORIES_ENDPOINT)

    # Sends the request
    response = send_get_request(request_string, activation_code, api_key)
    status_code = int(response["http_status"])

    # Process the response based on the status code returned from the server
    if status_code == AD_CATEGORIES_SUCCESS_CODE:
        # Stores the result
        filename = "{}{}".format(AD_FILE_PREFIX, AD_CATEGORIES_FILE)
        filename = os.path.join(AD_FOLDER, filename)
        store_data(artifact_dir, filename, response["response"])
        return response["response"]
    elif status_code == AD_API_UNAUTHORIZED_CODE:
        # Typo fixed in the user-facing message: "Tecnical" -> "Technical"
        raise NotEnoughPermissionsError(
            "You don't have enough permissions to get Technical Debt information. Details {}".format(response["response"]))
    else:
        raise NotImplementedError(
            "There was an error. Response from server: {}".format(response))
128 | 
--------------------------------------------------------------------------------
/outsystems/bdd_framework/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems/bdd_framework/__init__.py
--------------------------------------------------------------------------------
/outsystems/bdd_framework/bdd_base.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | import requests
 3 | 
 4 | # Custom Modules
 5 | # Exceptions
 6 | from outsystems.exceptions.invalid_json_response import InvalidJsonResponseError
 7 | # Variables
 8 | from outsystems.vars.bdd_vars import BDD_TEST_RUNNER_ENDPOINT, BDD_API_SSL_CERT_VERIFY
 9 | # Functions
10 | from outsystems.vars.vars_base import get_configuration_value
11 | 
12 | 
# Builds the base URL for the BDD Framework API on a given environment host
def build_bdd_endpoint(bdd_http_proto: str, bdd_url: str, bdd_api_endpoint: str, bdd_api_version: int) -> str:
    """Return the API base URL in the form ``<proto>://<host>/<endpoint>/v<version>``."""
    return f"{bdd_http_proto}://{bdd_url}/{bdd_api_endpoint}/v{bdd_api_version}"
17 | 
18 | 
# Builds the URL of a single BDD test screen, given the API base URL,
# the eSpace and the webscreen that hosts the test
def build_bdd_test_endpoint(bdd_endpoint: str, espace_name: str, webscreen_name: str) -> str:
    """Return ``<api>/<runner>/<espace>/<webscreen>`` for one BDD test."""
    return "/".join([bdd_endpoint, BDD_TEST_RUNNER_ENDPOINT, espace_name, webscreen_name])
23 | 
24 | 
# Runs the test on the BDD Framework app
def send_bdd_get_request(bdd_api: str, bdd_endpoint: str, url_params: str):
    """Issue a GET against ``<bdd_api>/<bdd_endpoint>``, delegating to send_bdd_get_run_request."""
    full_url = "{}/{}".format(bdd_api, bdd_endpoint)
    return send_bdd_get_run_request(full_url, url_params)
30 | 
31 | 
# Runs the test on the BDD Framework app
def send_bdd_get_run_request(test_endpoint: str, url_params: str):
    """GET the given BDD test endpoint and return a dict with the HTTP status
    code ("http_status") and the parsed JSON payload ("response", {} when the
    body is empty). Raises InvalidJsonResponseError when a non-empty body is
    not valid JSON.
    """
    # Send the request (SSL verification is configurable)
    response = requests.get(test_endpoint, params=url_params, verify=get_configuration_value("BDD_API_SSL_CERT_VERIFY", BDD_API_SSL_CERT_VERIFY))
    response_obj = {"http_status": response.status_code, "response": {}}
    if len(response.text) > 0:
        try:
            response_obj["response"] = response.json()
        except ValueError as error:
            # Narrowed from a bare "except:", which would also swallow
            # KeyboardInterrupt/SystemExit. requests raises a ValueError
            # subclass when the body cannot be decoded as JSON.
            raise InvalidJsonResponseError(
                "GET {}: The JSON response could not be parsed. Response: {}".format(test_endpoint, response.text)) from error
    return response_obj
44 | 
--------------------------------------------------------------------------------
/outsystems/bdd_framework/bdd_runner.py:
--------------------------------------------------------------------------------
 1 | # Custom Modules
 2 | # Functions
 3 | from outsystems.bdd_framework.bdd_base import send_bdd_get_run_request
 4 | # Variables
 5 | from outsystems.vars.bdd_vars import BDD_RUNNER_SUCCESS_CODE
 6 | 
 7 | 
 8 | # Run existing BDD test in the target environment.
 9 | def run_bdd_test(test_url: str):
10 |     # Sends the request
11 |     response = send_bdd_get_run_request(test_url, None)
12 |     status_code = response["http_status"]
13 |     if status_code == BDD_RUNNER_SUCCESS_CODE:
14 |         return response["response"]
15 |     else:
16 |         raise NotImplementedError(
17 |             "There was an error. Response from server: {}".format(response))
18 | 
--------------------------------------------------------------------------------
/outsystems/cicd_probe/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems/cicd_probe/__init__.py
--------------------------------------------------------------------------------
/outsystems/cicd_probe/cicd_base.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | import requests
 3 | 
 4 | # Custom Modules
 5 | from outsystems.exceptions.invalid_json_response import InvalidJsonResponseError
 6 | # Variables
 7 | from outsystems.vars.cicd_vars import PROBE_API_SSL_CERT_VERIFY
 8 | # Functions
 9 | from outsystems.vars.vars_base import get_configuration_value
10 | 
11 | 
# Builds the base URL for the CICD Probe API on a given environment host
def build_probe_endpoint(probe_http_proto: str, probe_url: str, probe_api_endpoint: str, probe_api_version: int) -> str:
    """Return the API base URL in the form ``<proto>://<host>/<endpoint>/v<version>``."""
    return f"{probe_http_proto}://{probe_url}/{probe_api_endpoint}/v{probe_api_version}"
15 | 
16 | 
# Sends a GET request to LT, with url_params
def send_probe_get_request(probe_api: str, probe_endpoint: str, api_key: str, url_params: str):
    """GET ``<probe_api>/<probe_endpoint>`` on the CICD Probe and return a
    dict with the HTTP status code ("http_status") and the parsed JSON
    payload ("response", {} when the body is empty).

    Raises InvalidJsonResponseError when a non-empty body is not valid JSON.
    """
    # Format the request URL to include the api endpoint
    request_string = "{}/{}".format(probe_api, probe_endpoint)
    # Set API key header, when provided
    headers = {"X-CICDProbe-Key": api_key} if api_key else None
    # Send the request (SSL verification is configurable)
    response = requests.get(request_string, params=url_params, headers=headers, verify=get_configuration_value("PROBE_API_SSL_CERT_VERIFY", PROBE_API_SSL_CERT_VERIFY))
    response_obj = {"http_status": response.status_code, "response": {}}
    if len(response.text) > 0:
        try:
            response_obj["response"] = response.json()
        except ValueError as error:
            # Narrowed from a bare "except:", which would also swallow
            # KeyboardInterrupt/SystemExit. requests raises a ValueError
            # subclass when the body cannot be decoded as JSON.
            raise InvalidJsonResponseError(
                "GET {}: The JSON response could not be parsed. Response: {}".format(request_string, response.text)) from error

    return response_obj
34 | 
--------------------------------------------------------------------------------
/outsystems/cicd_probe/cicd_dependencies.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | from toposort import toposort_flatten, CircularDependencyError
 3 | 
 4 | # Custom Modules
 5 | # Functions
 6 | from outsystems.cicd_probe.cicd_base import send_probe_get_request
 7 | # Variables
 8 | from outsystems.vars.cicd_vars import GET_APPLICATION_DEPENDENCIES_ENDPOINT, PROBE_DEPENDENCIES_SUCCESS_CODE
 9 | 
10 | 
# Get a set of applications which are producers for a specified application version.
def get_app_dependencies(artifact_dir: str, probe_endpoint: str, api_key: str, application_version_key: str,
                         application_name: str, application_version: str):
    # Query parameters identifying the target application version
    query_params = {"ApplicationName": application_name, "ApplicationVersion": application_version}

    # Call the CICD Probe dependencies endpoint
    result = send_probe_get_request(
        probe_endpoint, GET_APPLICATION_DEPENDENCIES_ENDPOINT, api_key, query_params)

    if result["http_status"] == PROBE_DEPENDENCIES_SUCCESS_CODE:
        # Collect the producer application keys (set deduplicates them)
        return {dependency["ApplicationKey"] for dependency in result["response"]}
    raise NotImplementedError(
        "There was an error. Response from server: {}".format(result))
31 | 
32 | 
# Topological ordering (linear ordering) of a dependency list
def sort_app_dependencies(dep_list: list):
    # dep_list is the mapping shape toposort expects (item -> set of items
    # it depends on) -- presumably built from get_app_dependencies results;
    # TODO confirm against callers.
    try:
        return toposort_flatten(dep_list)
    except CircularDependencyError as exc:
        # Only a genuine cycle is reported as circular. The previous bare
        # `except` mislabeled ANY failure (e.g. a malformed input) as a
        # circular-dependency error, hiding the real cause.
        raise CircularDependencyError(
            "There are circular dependencies among the list of applications.") from exc
40 | 
--------------------------------------------------------------------------------
/outsystems/cicd_probe/cicd_scan.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | import os
 3 | 
 4 | # Custom Modules
 5 | # Functions
 6 | from outsystems.cicd_probe.cicd_base import send_probe_get_request
 7 | from outsystems.file_helpers.file import store_data
 8 | # Variables
 9 | from outsystems.vars.cicd_vars import SCAN_BDD_TESTS_ENDPOINT, PROBE_SCAN_SUCCESS_CODE, PROBE_API_VERSION
10 | from outsystems.vars.file_vars import PROBE_APPLICATION_SCAN_FILE, PROBE_FOLDER
11 | 
12 | 
# Scan existing BDD test endpoints (i.e. Screens) in the target environment.
def scan_bdd_test_endpoint(artifact_dir: str, endpoint: str, application_name: str, api_key: str = None,
                           exclude_pattern: str = None, probe_version: int = PROBE_API_VERSION):
    # Build the query parameters; the exclude-pattern parameter name
    # depends on the CICD Probe API version in use.
    query_params = {"ApplicationName": application_name}
    if exclude_pattern:
        pattern_param = {1: "WebFlowExcludePattern", 2: "TestFlowExcludePattern"}.get(probe_version)
        if pattern_param:
            query_params[pattern_param] = exclude_pattern
    # Call the scan endpoint
    result = send_probe_get_request(
        endpoint, SCAN_BDD_TESTS_ENDPOINT, api_key, query_params)
    if result["http_status"] == PROBE_SCAN_SUCCESS_CODE:
        # Persist the scan payload under the probe artifacts folder
        scan_file = os.path.join(PROBE_FOLDER, "{}{}".format(application_name, PROBE_APPLICATION_SCAN_FILE))
        store_data(artifact_dir, scan_file, result["response"])
        return result["response"]
    raise NotImplementedError(
        "There was an error. Response from server: {}".format(result))
36 | 
--------------------------------------------------------------------------------
/outsystems/exceptions/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems/exceptions/__init__.py
--------------------------------------------------------------------------------
/outsystems/exceptions/app_does_not_exist.py:
--------------------------------------------------------------------------------
class AppDoesNotExistError(Exception):
    """Raised when a requested application cannot be found."""
    pass
3 | 
--------------------------------------------------------------------------------
/outsystems/exceptions/app_version_error.py:
--------------------------------------------------------------------------------
class AppVersionsError(Exception):
    """Raised on errors related to application versions."""
    pass
3 | 
--------------------------------------------------------------------------------
/outsystems/exceptions/deployment_not_found.py:
--------------------------------------------------------------------------------
class DeploymentNotFoundError(Exception):
    """Raised when a requested deployment cannot be found."""
    pass
3 | 
--------------------------------------------------------------------------------
/outsystems/exceptions/environment_not_found.py:
--------------------------------------------------------------------------------
class EnvironmentNotFoundError(Exception):
    """Raised when a requested environment cannot be found."""
    pass
3 | 
--------------------------------------------------------------------------------
/outsystems/exceptions/impossible_action_deployment.py:
--------------------------------------------------------------------------------
class ImpossibleApplyActionDeploymentError(Exception):
    """Raised when a deployment action cannot be applied."""
    pass
3 | 
--------------------------------------------------------------------------------
/outsystems/exceptions/invalid_json_response.py:
--------------------------------------------------------------------------------
class InvalidJsonResponseError(Exception):
    """Raised when an HTTP response body cannot be parsed as JSON."""
    pass
3 | 
--------------------------------------------------------------------------------
/outsystems/exceptions/invalid_os_package.py:
--------------------------------------------------------------------------------
class InvalidOutSystemsPackage(Exception):
    """Raised when a file is not a valid OutSystems package."""
    pass
3 | 
--------------------------------------------------------------------------------
/outsystems/exceptions/invalid_parameters.py:
--------------------------------------------------------------------------------
class InvalidParametersError(Exception):
    """Raised when invalid parameters are supplied to an operation."""
    pass
3 | 
--------------------------------------------------------------------------------
/outsystems/exceptions/manifest_does_not_exist.py:
--------------------------------------------------------------------------------
class ManifestDoesNotExistError(Exception):
    """Raised when a required manifest cannot be found."""
    pass
3 | 
--------------------------------------------------------------------------------
/outsystems/exceptions/no_apps_available.py:
--------------------------------------------------------------------------------
class NoAppsAvailableError(Exception):
    """Raised when no applications are available for an operation."""
    pass
3 | 
--------------------------------------------------------------------------------
/outsystems/exceptions/no_deployments.py:
--------------------------------------------------------------------------------
class NoDeploymentsError(Exception):
    """Raised when no deployments exist for the requested criteria."""
    pass
3 | 
--------------------------------------------------------------------------------
/outsystems/exceptions/not_enough_permissions.py:
--------------------------------------------------------------------------------
class NotEnoughPermissionsError(Exception):
    """Raised when the caller lacks the permissions for an operation."""
    pass
3 | 
--------------------------------------------------------------------------------
/outsystems/exceptions/osptool_error.py:
--------------------------------------------------------------------------------
class OSPToolDeploymentError(Exception):
    """Raised when an OSPTool deployment fails or times out."""
    pass
3 | 
--------------------------------------------------------------------------------
/outsystems/exceptions/server_error.py:
--------------------------------------------------------------------------------
class ServerError(Exception):
    """Raised when the server reports a failure for a requested operation."""
    pass
3 | 
--------------------------------------------------------------------------------
/outsystems/file_helpers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems/file_helpers/__init__.py
--------------------------------------------------------------------------------
/outsystems/file_helpers/file.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | import json
 3 | import os
 4 | 
 5 | 
 6 | def store_data(artifact_dir: str, filename: str, data: str):
 7 |     filename = os.path.join(artifact_dir, filename)
 8 |     # Remove the spaces in the filename
 9 |     filename = filename.replace(" ", "_")
10 |     # Makes sure that, if a directory is in the filename, that directory exists
11 |     os.makedirs(os.path.dirname(filename), exist_ok=True)
12 |     with open(filename, "w") as outfile:
13 |         json.dump(data, outfile, indent=4)
14 | 
15 | 
def load_data(artifact_dir: str, filename: str):
    # Normalize the filename the same way store_data does
    filename = filename.replace(" ", "_")
    # Guard clause: fail fast when the artifact is missing
    if not check_file(artifact_dir, filename):
        raise FileNotFoundError(
            "The file with filename {} does not exist.".format(filename))
    with open(os.path.join(artifact_dir, filename), "r") as infile:
        return json.load(infile)
25 | 
26 | 
def check_file(artifact_dir: str, filename: str):
    # True when the artifact file already exists on disk
    return os.path.isfile(os.path.join(artifact_dir, filename))
30 | 
31 | 
def clear_cache(artifact_dir: str, filename: str):
    # Delete the cached artifact, if present; a missing file is a no-op
    if check_file(artifact_dir, filename):
        os.remove(os.path.join(artifact_dir, filename))
37 | 
38 | 
# Returns a human readable string representation of bytes
def bytes_human_readable_size(bytes, units=(' bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB')):
    # Recursively shift right by 10 (integer divide by 1024) until the value
    # drops below 1024, consuming one unit label per step.
    # The default is now a tuple to avoid the shared-mutable-default pitfall;
    # slicing a tuple yields a tuple, so the recursion is unaffected.
    # (Parameter name `bytes` shadows the builtin but is kept for
    # backward compatibility with keyword callers.)
    return str(bytes) + units[0] if bytes < 1024 else bytes_human_readable_size(bytes >> 10, units[1:])
42 | 
43 | 
def is_valid_os_package(filename: str):
    # Accept OutSystems solution (.osp) and application (.oap) packages,
    # matching the text after the last dot case-insensitively.
    # (A bare "osp"/"oap" with no dot also matches, as before.)
    extension = filename.lower().rsplit(".", 1)[-1]
    return extension in ("osp", "oap")
46 | 
--------------------------------------------------------------------------------
/outsystems/lifetime/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems/lifetime/__init__.py
--------------------------------------------------------------------------------
/outsystems/lifetime/lifetime_base.py:
--------------------------------------------------------------------------------
  1 | # Python Modules
  2 | import requests
  3 | import json
  4 | 
  5 | # Custom Modules
  6 | # Exceptions
  7 | from outsystems.exceptions.invalid_json_response import InvalidJsonResponseError
  8 | # Variables
  9 | from outsystems.vars.lifetime_vars import LIFETIME_SSL_CERT_VERIFY
 10 | # Functions
 11 | from outsystems.vars.vars_base import get_configuration_value
 12 | from outsystems.file_helpers.file import check_file
 13 | 
 14 | 
 15 | # Method that builds the LifeTime endpoint based on the LT host
 16 | def build_lt_endpoint(lt_http_proto: str, lt_url: str, lt_api_endpoint: str, lt_api_version: int):
 17 |     # Builds the endpoint for LT and returns it
 18 |     return "{}://{}/{}/v{}".format(lt_http_proto, lt_url, lt_api_endpoint, lt_api_version)
 19 | 
 20 | 
 21 | # Sends a GET request to LT, with url_params
 22 | def send_get_request(lt_api: str, token: str, api_endpoint: str, url_params: dict):
 23 |     # Auth token + content type json
 24 |     headers = {'content-type': 'application/json',
 25 |                'authorization': 'Bearer ' + token}
 26 |     # Format the request URL to include the api endpoint
 27 |     request_string = "{}/{}".format(lt_api, api_endpoint)
 28 |     response = requests.get(request_string, params=url_params, headers=headers, verify=get_configuration_value("LIFETIME_SSL_CERT_VERIFY", LIFETIME_SSL_CERT_VERIFY))
 29 |     response_obj = {"http_status": response.status_code, "response": {}}
 30 |     if len(response.text) > 0:
 31 |         try:
 32 |             response_obj["response"] = response.json()
 33 |         except:
 34 |             raise InvalidJsonResponseError(
 35 |                 "GET {}: The JSON response could not be parsed. Response: {}".format(request_string, response.text))
 36 |     return response_obj
 37 | 
 38 | 
 39 | # Sends a POST request to LT, with a payload. The json part is ignored
 40 | def send_post_request(lt_api: str, token: str, api_endpoint: str, payload: str):
 41 |     # Auth token + content type json
 42 |     headers = {'content-type': 'application/json',
 43 |                'authorization': 'Bearer ' + token}
 44 |     # Format the request URL to include the api endpoint
 45 |     request_string = "{}/{}".format(lt_api, api_endpoint)
 46 |     response = requests.post(
 47 |         request_string, data=payload, json=None, headers=headers, verify=get_configuration_value("LIFETIME_SSL_CERT_VERIFY", LIFETIME_SSL_CERT_VERIFY))
 48 |     response_obj = {"http_status": response.status_code, "response": {}}
 49 |     # Since LT API POST requests do not reply with native JSON, we have to make it ourselves
 50 |     if len(response.text) > 0:
 51 |         try:
 52 |             response_obj["response"] = response.json()
 53 |         except:
 54 |             # Workaround for POST /deployments/ since the response is not JSON, just text
 55 |             response_obj["response"] = json.loads('"{}"'.format(response.text))
 56 |     return response_obj
 57 | 
 58 | 
 59 | # Sends a POST request to LT, with binary content.
 60 | def send_binary_post_request(lt_api: str, token: str, api_endpoint: str, dest_env: str, lt_endpont: str, binary_file_path: str):
 61 |     # Auth token + content type octet-stream
 62 |     headers = {'content-type': 'application/octet-stream',
 63 |                'authorization': 'Bearer ' + token}
 64 |     # Format the request URL to include the api endpoint
 65 |     request_string = "{}/{}/{}/{}".format(lt_api, api_endpoint, dest_env, lt_endpont)
 66 | 
 67 |     if check_file("", binary_file_path):
 68 |         with open(binary_file_path, 'rb') as f:
 69 |             data = f.read()
 70 |     response = requests.post(request_string, data=data, headers=headers, verify=get_configuration_value("LIFETIME_SSL_CERT_VERIFY", LIFETIME_SSL_CERT_VERIFY))
 71 |     response_obj = {"http_status": response.status_code, "response": {}}
 72 |     # Since LT API POST requests do not reply with native JSON, we have to make it ourselves
 73 |     if len(response.text) > 0:
 74 |         try:
 75 |             response_obj["response"] = response.json()
 76 |         except:
 77 |             # Workaround for POST /deployments/ since the response is not JSON, just text
 78 |             response_obj["response"] = json.loads('"{}"'.format(response.text))
 79 |     return response_obj
 80 | 
 81 | 
 82 | # Sends a DELETE request to LT
 83 | def send_delete_request(lt_api: str, token: str, api_endpoint: str):
 84 |     # Auth token + content type json
 85 |     headers = {'content-type': 'application/json',
 86 |                'authorization': 'Bearer ' + token}
 87 |     # Format the request URL to include the api endpoint
 88 |     request_string = "{}/{}".format(lt_api, api_endpoint)
 89 |     response = requests.delete(request_string, headers=headers, verify=get_configuration_value("LIFETIME_SSL_CERT_VERIFY", LIFETIME_SSL_CERT_VERIFY))
 90 |     response_obj = {"http_status": response.status_code, "response": {}}
 91 |     if len(response.text) > 0:
 92 |         try:
 93 |             response_obj["response"] = response.json()
 94 |         except:
 95 |             raise InvalidJsonResponseError(
 96 |                 "DELETE {}: The JSON response could not be parsed. Response: {}".format(request_string, response.text))
 97 |     return response_obj
 98 | 
 99 | 
# Downloads binary content from a LifeTime-provided package URL
def send_download_request(pkg_url: str, token: str):
    # Auth token + content type json (token passed as-is, no Bearer prefix here)
    request_headers = {'content-type': 'application/json',
                       'authorization': token}
    result = requests.get(pkg_url, headers=request_headers, verify=get_configuration_value("LIFETIME_SSL_CERT_VERIFY", LIFETIME_SSL_CERT_VERIFY))
    # Return the raw bytes: the payload is a binary package, not JSON
    return {"http_status": result.status_code, "response": result.content}
109 | 
--------------------------------------------------------------------------------
/outsystems/lifetime/lifetime_downloads.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | import os
 3 | 
 4 | # Custom Modules
 5 | # Exceptions
 6 | from outsystems.exceptions.invalid_parameters import InvalidParametersError
 7 | from outsystems.exceptions.environment_not_found import EnvironmentNotFoundError
 8 | from outsystems.exceptions.not_enough_permissions import NotEnoughPermissionsError
 9 | from outsystems.exceptions.server_error import ServerError
10 | # Functions
11 | from outsystems.lifetime.lifetime_base import send_download_request
12 | 
13 | # Variables
14 | from outsystems.vars.lifetime_vars import DOWNLOAD_SUCCESS_CODE, DOWNLOAD_INVALID_KEY_CODE, \
15 |     DOWNLOAD_NO_PERMISSION_CODE, DOWNLOAD_NOT_FOUND, DOWNLOAD_FAILED_CODE
16 | 
17 | 
# Downloads a binary file from a LifeTime download link
def download_package(file_path: str, auth_token: str, pkg_url: str):
    """Download a package from pkg_url and write its bytes to file_path.

    Spaces in file_path are replaced with underscores and missing parent
    directories are created. Raises a specific exception per known HTTP
    error code, or NotImplementedError for anything unexpected.
    """
    # Sends the request
    response = send_download_request(pkg_url, auth_token)
    status_code = int(response["http_status"])

    if status_code == DOWNLOAD_SUCCESS_CODE:
        # Remove the spaces in the filename
        file_path = file_path.replace(" ", "_")
        # Makes sure that, if a directory is in the filename, that directory exists
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        # "response" carries the raw package bytes (see send_download_request)
        with open(file_path, "wb") as f:
            f.write(response["response"])
    elif status_code == DOWNLOAD_INVALID_KEY_CODE:
        raise InvalidParametersError("The required type  is invalid for given keys (EnvironmentKey; ApplicationKey). Details: {}".format(
            response["response"]))
    elif status_code == DOWNLOAD_NO_PERMISSION_CODE:
        raise NotEnoughPermissionsError("User doesn't have permissions for the given keys (EnvironmentKey; ApplicationKey). Details: {}".format(
            response["response"]))
    elif status_code == DOWNLOAD_NOT_FOUND:
        raise EnvironmentNotFoundError("No environment or application found. Please check that the EnvironmentKey and ApplicationKey exist. Details: {}".format(
            response["response"]))
    elif status_code == DOWNLOAD_FAILED_CODE:
        raise ServerError("Failed to start the operation to package. Details: {}".format(
            response["response"]))
    else:
        # Unknown/unmapped status code
        raise NotImplementedError(
            "There was an error. Response from server: {}".format(response))
46 | 
--------------------------------------------------------------------------------
/outsystems/lifetime/lifetime_solutions.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | import json
 3 | import os
 4 | 
 5 | # Custom Modules
 6 | # Exceptions
 7 | from outsystems.exceptions.no_deployments import NoDeploymentsError
 8 | from outsystems.exceptions.not_enough_permissions import NotEnoughPermissionsError
 9 | from outsystems.exceptions.server_error import ServerError
10 | # Functions
11 | from outsystems.lifetime.lifetime_base import send_post_request, send_get_request
12 | from outsystems.file_helpers.file import store_data
13 | # Variables
14 | from outsystems.vars.lifetime_vars import ENVIRONMENTS_ENDPOINT, ENVIRONMENT_SOLUTION_ENDPOINT, ENVIRONMENT_SOLUTION_SUCCESS_CODE, \
15 |     ENVIRONMENT_SOLUTION_STATUS_ENDPOINT, ENVIRONMENT_SOLUTION_STATUS_SUCCESS_CODE, ENVIRONMENT_SOLUTION_STATUS_NOT_STATUS_CODE, ENVIRONMENT_SOLUTION_STATUS_NO_PERMISSION_CODE, \
16 |     ENVIRONMENT_SOLUTION_STATUS_FAILED_CODE, ENVIRONMENT_SOLUTION_LINK_SUCCESS_CODE, ENVIRONMENT_SOLUTION_LINK_FAILED_CODE
17 | from outsystems.vars.file_vars import SOLUTIONS_LINK_FILE, SOLUTIONS_FOLDER  # , SOLUTIONS_STATUS_FILE
18 | 
19 | 
# Sends a request to create a solution, on a target environment, for a specific set of app keys.
# Returns a solution key.
def create_solution(artifact_dir: str, endpoint: str, auth_token: str, environment_key: str, solution_name: str, app_keys: list, include_refs: bool):
    # Endpoint: environments/<environment_key>/<solution endpoint>
    api_query = "{}/{}/{}".format(ENVIRONMENTS_ENDPOINT, environment_key, ENVIRONMENT_SOLUTION_ENDPOINT)

    # JSON payload describing the solution to create
    payload = _create_solution_request(solution_name, app_keys, include_refs)
    result = send_post_request(endpoint, auth_token, api_query, payload)
    if int(result["http_status"]) == ENVIRONMENT_SOLUTION_SUCCESS_CODE:
        return result["response"]
    raise NotImplementedError(
        "There was an error. Response from server: {}".format(result))
37 | 
38 | 
# Returns the status of a given solution key
def get_solution_status(artifact_dir: str, endpoint: str, auth_token: str, environment_key: str, solution_key: str):
    """Return the status payload for a solution in a target environment.

    artifact_dir is currently unused (the store_data call below is commented
    out) but kept for signature parity with the sibling functions.

    Raises:
        NotEnoughPermissionsError: caller lacks permission for the solution.
        NoDeploymentsError: no solution exists with the given key.
        ServerError: LifeTime failed to produce the status.
        NotImplementedError: any other, unexpected HTTP status code.
    """
    # Builds the API call
    query = "{}/{}/{}/{}".format(ENVIRONMENTS_ENDPOINT, environment_key, ENVIRONMENT_SOLUTION_STATUS_ENDPOINT, solution_key)

    # Sends the request
    response = send_get_request(endpoint, auth_token, query, None)
    status_code = int(response["http_status"])
    if status_code == ENVIRONMENT_SOLUTION_STATUS_SUCCESS_CODE:
        # Stores the result
        # filename = "{}{}".format(solution_key, SOLUTIONS_STATUS_FILE)
        # filename = os.path.join(SOLUTIONS_FOLDER, filename)
        # store_data(artifact_dir, filename, response["response"])
        return response["response"]
    elif status_code == ENVIRONMENT_SOLUTION_STATUS_NO_PERMISSION_CODE:
        raise NotEnoughPermissionsError(
            "You don't have enough permissions to see the details of that solution. Details: {}".format(response["response"]))
    elif status_code == ENVIRONMENT_SOLUTION_STATUS_NOT_STATUS_CODE:
        raise NoDeploymentsError("There is no solution with the key {}. Details: {}".format(
            solution_key, response["response"]))
    elif status_code == ENVIRONMENT_SOLUTION_STATUS_FAILED_CODE:
        raise ServerError("Failed to get the status of solution with key {}. Details: {}".format(
            solution_key, response["response"]))
    else:
        # Unknown/unmapped status code
        raise NotImplementedError(
            "There was an error. Response from server: {}".format(response))
65 | 
66 | 
# Returns download link of source code package of the specified application in a given environment.
def get_solution_url(artifact_dir: str, endpoint: str, auth_token: str, environment_key: str, solution_key: str):
    # Endpoint: environments/<environment_key>/<solution endpoint>/<solution_key>
    api_query = "{}/{}/{}/{}".format(ENVIRONMENTS_ENDPOINT, environment_key, ENVIRONMENT_SOLUTION_ENDPOINT, solution_key)

    result = send_get_request(endpoint, auth_token, api_query, None)
    http_code = int(result["http_status"])
    if http_code == ENVIRONMENT_SOLUTION_LINK_SUCCESS_CODE:
        # Persist the link payload as an artifact, then hand back the URL
        link_file = os.path.join(SOLUTIONS_FOLDER, "{}{}".format(solution_key, SOLUTIONS_LINK_FILE))
        store_data(artifact_dir, link_file, result["response"])
        return result["response"]["url"]
    if http_code == ENVIRONMENT_SOLUTION_LINK_FAILED_CODE:
        raise ServerError("Failed to access the solution package link. Details: {}".format(
            result["response"]))
    raise NotImplementedError(
        "There was an error. Response from server: {}".format(result))
87 | 
88 | 
89 | # ---------------------- PRIVATE METHODS ----------------------
90 | def _create_solution_request(solution_name: str, app_keys: str, include_refs: bool):
91 | 
92 |     solution_request = {"SolutionName": solution_name,
93 |                         "ApplicationKeys": app_keys,
94 |                         "IncludeReferences": include_refs}
95 | 
96 |     return json.dumps(solution_request)
97 | 
--------------------------------------------------------------------------------
/outsystems/manifest/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems/manifest/__init__.py
--------------------------------------------------------------------------------
/outsystems/manifest/manifest_base.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | 
 3 | # Custom Modules
 4 | # Exceptions
 5 | from outsystems.exceptions.environment_not_found import EnvironmentNotFoundError
 6 | # Variables
 7 | from outsystems.vars.manifest_vars import MANIFEST_ENVIRONMENT_KEY, MANIFEST_ENVIRONMENT_NAME, MANIFEST_ENVIRONMENT_LABEL, \
 8 |     MANIFEST_ENVIRONMENT_DEFINITIONS, MANIFEST_CONFIGURATION_ITEMS, MANIFEST_CONFIG_ITEM_VALUES, MANIFEST_MODULE_KEY, MANIFEST_MODULE_NAME, \
 9 |     MANIFEST_CONFIG_ITEM_KEY, MANIFEST_CONFIG_ITEM_NAME, MANIFEST_CONFIG_ITEM_TYPE, MANIFEST_CONFIG_ITEM_TARGET_VALUE, MANIFEST_DEPLOYMENT_NOTES
10 | from outsystems.vars.lifetime_vars import DEPLOYMENT_MESSAGE
11 | 
12 | 
# Returns the environment details: tuple(Name, Key)
def get_environment_details(manifest: dict, environment_label: str):
    # Scan the manifest's environment definitions for a matching label
    for env_def in manifest[MANIFEST_ENVIRONMENT_DEFINITIONS]:
        if env_def[MANIFEST_ENVIRONMENT_LABEL] == environment_label:
            return (env_def[MANIFEST_ENVIRONMENT_NAME], env_def[MANIFEST_ENVIRONMENT_KEY])
    raise EnvironmentNotFoundError(
        "Failed to retrieve the environment key from label. Please make sure the label is correct. Environment label: {}".format(environment_label))
21 | 
22 | 
# Returns the configuration items for the target environment key
def get_configuration_items_for_environment(manifest: dict, target_env_key: str):
    matching_items = []
    # Manifests without a configuration-items section yield an empty list
    for cfg_item in manifest.get(MANIFEST_CONFIGURATION_ITEMS, []):
        # Find the value entry defined for the target environment, if any
        env_value = None
        for candidate in cfg_item[MANIFEST_CONFIG_ITEM_VALUES]:
            if candidate[MANIFEST_ENVIRONMENT_KEY] == target_env_key:
                env_value = candidate
                break
        if env_value:
            # Flatten the item plus its environment-specific target value
            matching_items.append({
                MANIFEST_MODULE_KEY: cfg_item[MANIFEST_MODULE_KEY],
                MANIFEST_MODULE_NAME: cfg_item[MANIFEST_MODULE_NAME],
                MANIFEST_CONFIG_ITEM_KEY: cfg_item[MANIFEST_CONFIG_ITEM_KEY],
                MANIFEST_CONFIG_ITEM_NAME: cfg_item[MANIFEST_CONFIG_ITEM_NAME],
                MANIFEST_CONFIG_ITEM_TYPE: cfg_item[MANIFEST_CONFIG_ITEM_TYPE],
                MANIFEST_CONFIG_ITEM_TARGET_VALUE: env_value[MANIFEST_CONFIG_ITEM_TARGET_VALUE],
                MANIFEST_ENVIRONMENT_NAME: env_value[MANIFEST_ENVIRONMENT_NAME]
            })

    return matching_items
42 | 
43 | 
# Returns the deployment notes
def get_deployment_notes(manifest: dict):
    # Fall back to the default LifeTime deployment message when the
    # manifest has no explicit notes entry.
    return manifest.get(MANIFEST_DEPLOYMENT_NOTES, DEPLOYMENT_MESSAGE)
50 | 
--------------------------------------------------------------------------------
/outsystems/osp_tool/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems/osp_tool/__init__.py
--------------------------------------------------------------------------------
/outsystems/osp_tool/osp_base.py:
--------------------------------------------------------------------------------
 1 | import subprocess
 2 | import threading
 3 | 
 4 | # Custom Modules
 5 | # Variables
 6 | from outsystems.vars.pipeline_vars import SOLUTION_TIMEOUT_IN_SECS
 7 | # Functions
 8 | from outsystems.vars.vars_base import get_configuration_value
 9 | # Exceptions
10 | from outsystems.exceptions.osptool_error import OSPToolDeploymentError
11 | 
12 | 
def run_command(command, live_output_callback=None, timeout=None):
    """Run *command* in a subprocess, streaming stdout lines as they arrive.

    live_output_callback, when provided, is invoked with each stripped stdout
    line (the previous code crashed with TypeError when the default None was
    used). Returns (return_code, execution_log) where execution_log is the
    full stdout text. Raises OSPToolDeploymentError if the process exceeds
    *timeout* seconds.
    """
    process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)

    def read_output(pipe, callback, output_list):
        # Stream lines until EOF; iter(pipe.readline, '') stops on the
        # empty string readline returns once the pipe closes.
        with pipe:
            for line in iter(pipe.readline, ''):
                if callback:
                    callback(line.strip())
                output_list.append(line)

    # List to capture the live stdout output
    live_output = []

    # Thread that mirrors stdout to the callback while capturing it
    live_output_thread = threading.Thread(target=read_output, args=(process.stdout, live_output_callback, live_output))
    live_output_thread.start()

    # Drain stderr in the background so a chatty process cannot block on a
    # full stderr pipe buffer; its content is captured but not returned,
    # matching the previous behavior of never surfacing stderr.
    stderr_output = []
    stderr_thread = threading.Thread(target=read_output, args=(process.stderr, None, stderr_output))
    stderr_thread.start()

    # Wait for the process to finish and get the return code
    try:
        return_code = process.wait(timeout=timeout)
    except subprocess.TimeoutExpired:
        # Process has exceeded the timeout: kill it, reap it, and let the
        # reader threads drain the closed pipes before raising.
        process.kill()
        process.wait()
        live_output_thread.join()
        stderr_thread.join()
        raise OSPToolDeploymentError("OSPTool Deployment timed out.")

    # Wait for the reader threads to finish
    live_output_thread.join()
    stderr_thread.join()

    # Combine the live output into a single string (execution log)
    execution_log = ''.join(live_output)

    return return_code, execution_log
44 | 
45 | 
def call_osptool(osp_tool_path: str, package_file_path: str, env_hostname: str, credentials: str, catalogmappings_path: str):
    # Quote the executable, package path and hostname; credentials are passed as-is
    command = '"{}" "{}" "{}" {}'.format(osp_tool_path, package_file_path, env_hostname, credentials)
    # Append the catalog-mappings switch only when a mappings file was given
    if catalogmappings_path:
        command = '{} /catalogmappings "{}"'.format(command, catalogmappings_path)

    # Echo each OSPTool output line to the console as it arrives
    def live_output_callback(output_line):
        print(output_line)

    # Run the command, bounded by the configured solution timeout;
    # returns (return_code, execution_log)
    deployment_timeout = get_configuration_value("SOLUTION_TIMEOUT_IN_SECS", SOLUTION_TIMEOUT_IN_SECS)
    return run_command(command, live_output_callback, timeout=deployment_timeout)
63 | 
--------------------------------------------------------------------------------
/outsystems/pipeline/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems/pipeline/__init__.py
--------------------------------------------------------------------------------
/outsystems/pipeline/apply_configuration_values_to_target_env.py:
--------------------------------------------------------------------------------
  1 | # Python Modules
  2 | import sys
  3 | import os
  4 | import argparse
  5 | import json
  6 | 
  7 | # Workaround for Jenkins:
  8 | # Set the path to include the outsystems module
  9 | # Jenkins exposes the workspace directory through env.
 10 | if "WORKSPACE" in os.environ:
 11 |     sys.path.append(os.environ['WORKSPACE'])
 12 | else:  # Else just add the project dir
 13 |     sys.path.append(os.getcwd())
 14 | 
 15 | # Custom Modules
 16 | # Variables
 17 | from outsystems.vars.manifest_vars import MANIFEST_CONFIG_ITEM_TYPE, MANIFEST_MODULE_KEY, MANIFEST_CONFIG_ITEM_KEY, \
 18 |     MANIFEST_CONFIG_ITEM_TARGET_VALUE, MANIFEST_CONFIG_ITEM_NAME
 19 | from outsystems.vars.properties_vars import PROPERTY_TYPE_SITE_PROPERTY, PROPERTY_TYPE_REST_ENDPOINT, PROPERTY_TYPE_SOAP_ENDPOINT, \
 20 |     PROPERTY_TYPE_TIMER_SCHEDULE
 21 | from outsystems.vars.lifetime_vars import LIFETIME_HTTP_PROTO
 22 | from outsystems.vars.file_vars import ARTIFACT_FOLDER
 23 | # Functions
 24 | from outsystems.file_helpers.file import load_data
 25 | from outsystems.manifest.manifest_base import get_configuration_items_for_environment
 26 | from outsystems.manifest.manifest_base import get_environment_details
 27 | from outsystems.properties.properties_set_value import set_site_property_value, set_rest_endpoint_url, set_soap_endpoint_url, \
 28 |     set_timer_schedule
 29 | from outsystems.vars.vars_base import load_configuration_file
 30 | # Exceptions
 31 | from outsystems.exceptions.manifest_does_not_exist import ManifestDoesNotExistError
 32 | 
 33 | 
 34 | # Function to apply configuration values to a target environment
 35 | def main(artifact_dir: str, lt_http_proto: str, lt_url: str, lt_token: str, target_env_label: str, trigger_manifest: dict):
 36 | 
 37 |     # Tuple with (EnvName, EnvKey): target_env_tuple[0] = EnvName; target_env_tuple[1] = EnvKey
 38 |     target_env_tuple = get_environment_details(trigger_manifest, target_env_label)
 39 | 
 40 |     # Get configuration items defined in the manifest for target environment
 41 |     config_items = get_configuration_items_for_environment(trigger_manifest, target_env_tuple[1])
 42 | 
 43 |     # Check if there are any configuration item values to apply for target environment
 44 |     if len(config_items) == 0:
 45 |         print("No configuration item values were found in the manifest for {} (Label: {}).".format(target_env_tuple[0], target_env_label), flush=True)
 46 |     else:
 47 |         print("Applying new values to configuration items in {} (Label: {})...".format(target_env_tuple[0], target_env_label), flush=True)
 48 | 
 49 |     # Apply target value for each configuration item according to its type
 50 |     for cfg_item in config_items:
 51 |         result = {}
 52 |         if cfg_item[MANIFEST_CONFIG_ITEM_TYPE] == PROPERTY_TYPE_SITE_PROPERTY:
 53 |             result = set_site_property_value(
 54 |                 lt_url, lt_token, cfg_item[MANIFEST_MODULE_KEY], target_env_tuple[1], cfg_item[MANIFEST_CONFIG_ITEM_KEY], cfg_item[MANIFEST_CONFIG_ITEM_TARGET_VALUE])
 55 |         elif cfg_item[MANIFEST_CONFIG_ITEM_TYPE] == PROPERTY_TYPE_REST_ENDPOINT:
 56 |             result = set_rest_endpoint_url(
 57 |                 lt_url, lt_token, cfg_item[MANIFEST_MODULE_KEY], target_env_tuple[1], cfg_item[MANIFEST_CONFIG_ITEM_KEY], cfg_item[MANIFEST_CONFIG_ITEM_TARGET_VALUE])
 58 |         elif cfg_item[MANIFEST_CONFIG_ITEM_TYPE] == PROPERTY_TYPE_SOAP_ENDPOINT:
 59 |             result = set_soap_endpoint_url(
 60 |                 lt_url, lt_token, cfg_item[MANIFEST_MODULE_KEY], target_env_tuple[1], cfg_item[MANIFEST_CONFIG_ITEM_KEY], cfg_item[MANIFEST_CONFIG_ITEM_TARGET_VALUE])
 61 |         elif cfg_item[MANIFEST_CONFIG_ITEM_TYPE] == PROPERTY_TYPE_TIMER_SCHEDULE:
 62 |             result = set_timer_schedule(
 63 |                 lt_url, lt_token, cfg_item[MANIFEST_MODULE_KEY], target_env_tuple[1], cfg_item[MANIFEST_CONFIG_ITEM_KEY], cfg_item[MANIFEST_CONFIG_ITEM_TARGET_VALUE])
 64 |         else:
 65 |             raise NotImplementedError("Configuration item type '{}' not supported.".format(cfg_item[MANIFEST_CONFIG_ITEM_TYPE]))
 66 | 
 67 |         # Check returned result after setting configuration item value
 68 |         if "Success" in result and result["Success"]:
 69 |             print("New value successfully applied to configuration item '{}' ({}).".format(cfg_item[MANIFEST_CONFIG_ITEM_NAME], cfg_item[MANIFEST_CONFIG_ITEM_TYPE]), flush=True)
 70 |         else:
 71 |             print("Unable to apply new value to configuration item '{}' ({}).\nReason: {}".format(cfg_item[MANIFEST_CONFIG_ITEM_NAME], cfg_item[MANIFEST_CONFIG_ITEM_TYPE], result["Message"]), flush=True)
 72 | 
 73 |     # Exit the script to continue with the pipeline
 74 |     sys.exit(0)
 75 | 
 76 | 
 77 | # End of main()
 78 | 
 79 | 
 80 | if __name__ == "__main__":
 81 |     # Argument menu / parsing
 82 |     parser = argparse.ArgumentParser()
 83 |     parser.add_argument("-a", "--artifacts", type=str, default=ARTIFACT_FOLDER,
 84 |                         help="(Optional) Name of the artifacts folder. Default: \"Artifacts\"")
 85 |     parser.add_argument("-u", "--lt_url", type=str, required=True,
 86 |                         help="URL for LifeTime environment, without the API endpoint. Example: \"https://\"")
 87 |     parser.add_argument("-t", "--lt_token", type=str, required=True,
 88 |                         help="Service account token for Properties API calls.")
 89 |     parser.add_argument("-e", "--target_env_label", type=str, required=True,
 90 |                         help="Label, as configured in the manifest, of the target environment where the configuration values will be applied.")
 91 |     parser.add_argument("-m", "--trigger_manifest", type=str,
 92 |                         help="Manifest artifact (in JSON format) received when the pipeline is triggered. Contains required data used throughout the pipeline execution.")
 93 |     parser.add_argument("-f", "--manifest_file", type=str,
 94 |                         help="Manifest file (with JSON format). Contains required data used throughout the pipeline execution.")
 95 |     parser.add_argument("-cf", "--config_file", type=str,
 96 |                         help="Config file path. Contains configuration values to override the default ones.")
 97 | 
 98 |     args = parser.parse_args()
 99 | 
100 |     # Load config file if exists
101 |     if args.config_file:
102 |         load_configuration_file(args.config_file)
103 |     # Parse the artifact directory
104 |     artifact_dir = args.artifacts
105 |     # Parse the LT Url and split the LT hostname from the HTTP protocol
106 |     # Assumes the default HTTP protocol = https
107 |     lt_http_proto = LIFETIME_HTTP_PROTO
108 |     lt_url = args.lt_url
109 |     if lt_url.startswith("http://"):
110 |         lt_http_proto = "http"
111 |         lt_url = lt_url.replace("http://", "")
112 |     else:
113 |         lt_url = lt_url.replace("https://", "")
114 |     if lt_url.endswith("/"):
115 |         lt_url = lt_url[:-1]
116 |     # Parse the LT Token
117 |     lt_token = args.lt_token
118 |     # Parse Destination Environment
119 |     target_env_label = args.target_env_label
120 | 
121 |     # Validate Manifest is being passed either as JSON or as file
122 |     if not args.trigger_manifest and not args.manifest_file:
123 |         raise ManifestDoesNotExistError("The manifest was not provided as JSON or as a file. Aborting!")
124 | 
125 |     # Parse Trigger Manifest artifact
126 |     if args.manifest_file:
127 |         trigger_manifest_path = os.path.split(args.manifest_file)
128 |         trigger_manifest = load_data(trigger_manifest_path[0], trigger_manifest_path[1])
129 |     else:
130 |         trigger_manifest = json.loads(args.trigger_manifest)
131 | 
132 |     # Calls the main script
133 |     main(artifact_dir, lt_http_proto, lt_url, lt_token, target_env_label, trigger_manifest)
134 | 
--------------------------------------------------------------------------------
/outsystems/pipeline/continue_deployment_to_target_env.py:
--------------------------------------------------------------------------------
  1 | # Python Modules
  2 | import sys
  3 | import os
  4 | import argparse
  5 | from time import sleep
  6 | 
  7 | # Workaround for Jenkins:
  8 | # Set the path to include the outsystems module
  9 | # Jenkins exposes the workspace directory through env.
 10 | if "WORKSPACE" in os.environ:
 11 |     sys.path.append(os.environ['WORKSPACE'])
 12 | else:  # Else just add the project dir
 13 |     sys.path.append(os.getcwd())
 14 | 
 15 | # Custom Modules
 16 | # Variables
 17 | from outsystems.vars.file_vars import ARTIFACT_FOLDER
 18 | from outsystems.vars.lifetime_vars import LIFETIME_HTTP_PROTO, LIFETIME_API_ENDPOINT, LIFETIME_API_VERSION
 19 | from outsystems.vars.pipeline_vars import SLEEP_PERIOD_IN_SECS, \
 20 |     DEPLOYMENT_TIMEOUT_IN_SECS, DEPLOYMENT_RUNNING_STATUS, DEPLOYMENT_WAITING_STATUS, \
 21 |     DEPLOYMENT_ERROR_STATUS_LIST, DEPLOY_ERROR_FILE
 22 | # Functions
 23 | from outsystems.lifetime.lifetime_environments import get_environment_key
 24 | from outsystems.lifetime.lifetime_deployments import get_deployment_status, check_deployment_two_step_deploy_status, \
 25 |     continue_deployment, get_running_deployment
 26 | from outsystems.file_helpers.file import store_data
 27 | from outsystems.lifetime.lifetime_base import build_lt_endpoint
 28 | from outsystems.vars.vars_base import get_configuration_value, load_configuration_file
 29 | # Exceptions
 30 | 
 31 | # ############################################################# SCRIPT ##############################################################
 32 | 
 33 | 
 34 | def main(artifact_dir: str, lt_http_proto: str, lt_url: str, lt_api_endpoint: str, lt_api_version: int, lt_token: str, dest_env: str):
 35 | 
 36 |     # Builds the LifeTime endpoint
 37 |     lt_endpoint = build_lt_endpoint(lt_http_proto, lt_url, lt_api_endpoint, lt_api_version)
 38 | 
 39 |     # Gets the environment key for the destination environment
 40 |     dest_env_key = get_environment_key(artifact_dir, lt_endpoint, lt_token, dest_env)
 41 | 
 42 |     # Find running deployment plan in destination environment
 43 |     deployment = get_running_deployment(artifact_dir, lt_endpoint, lt_token, dest_env_key)
 44 |     if len(deployment) == 0:
 45 |         print("Continue skipped because no running deployment plan was found on {} environment.".format(dest_env))
 46 |         sys.exit(0)
 47 | 
 48 |     # Grab the key from the deployment plan found
 49 |     dep_plan_key = deployment[0]["Key"]
 50 |     print("Deployment plan {} was found.".format(dep_plan_key), flush=True)
 51 | 
 52 |     # Check deployment plan status
 53 |     dep_status = get_deployment_status(
 54 |         artifact_dir, lt_endpoint, lt_token, dep_plan_key)
 55 | 
 56 |     if dep_status["DeploymentStatus"] == DEPLOYMENT_WAITING_STATUS and check_deployment_two_step_deploy_status(dep_status):
 57 |         continue_deployment(lt_endpoint, lt_token, dep_plan_key)
 58 |         print("Deployment plan {} resumed execution.".format(dep_plan_key), flush=True)
 59 |     else:
 60 |         print("Deployment plan {} is not in 'Prepared' status".format(dep_plan_key), flush=True)
 61 |         # Previously created deployment plan to target environment will NOT be deleted
 62 |         sys.exit(1)
 63 | 
 64 |     # Sleep thread until deployment has finished
 65 |     wait_counter = 0
 66 |     while wait_counter < get_configuration_value("DEPLOYMENT_TIMEOUT_IN_SECS", DEPLOYMENT_TIMEOUT_IN_SECS):
 67 |         # Check Deployment Plan status.
 68 |         dep_status = get_deployment_status(
 69 |             artifact_dir, lt_endpoint, lt_token, dep_plan_key)
 70 |         if dep_status["DeploymentStatus"] != DEPLOYMENT_RUNNING_STATUS:
 71 |             # Check deployment status is pending approval. Force it to continue (if 2-Step deployment is enabled)
 72 |             if dep_status["DeploymentStatus"] == DEPLOYMENT_WAITING_STATUS:
 73 |                 continue_deployment(lt_endpoint, lt_token, dep_plan_key)
 74 |                 print("Deployment plan {} resumed execution.".format(dep_plan_key), flush=True)
 75 |             elif dep_status["DeploymentStatus"] in DEPLOYMENT_ERROR_STATUS_LIST:
 76 |                 print("Deployment plan finished with status {}.".format(dep_status["DeploymentStatus"]), flush=True)
 77 |                 store_data(artifact_dir, DEPLOY_ERROR_FILE, dep_status)
 78 |                 sys.exit(1)
 79 |             else:
 80 |                 # If it reaches here, it means the deployment was successful
 81 |                 print("Deployment plan finished with status {}.".format(dep_status["DeploymentStatus"]), flush=True)
 82 |                 # Exit the script to continue with the pipeline
 83 |                 sys.exit(0)
 84 |         # Deployment status is still running. Go back to sleep.
 85 |         sleep_value = get_configuration_value("SLEEP_PERIOD_IN_SECS", SLEEP_PERIOD_IN_SECS)
 86 |         sleep(sleep_value)
 87 |         wait_counter += sleep_value
 88 |         print("{} secs have passed since the deployment started...".format(wait_counter), flush=True)
 89 | 
 90 |     # Deployment timeout reached. Exit script with error
 91 |     print("Timeout occurred while deployment plan is still in {} status.".format(DEPLOYMENT_RUNNING_STATUS), flush=True)
 92 |     sys.exit(1)
 93 | 
 94 | # End of main()
 95 | 
 96 | 
 97 | if __name__ == "__main__":
 98 |     # Argument menu / parsing
 99 |     parser = argparse.ArgumentParser()
100 |     parser.add_argument("-a", "--artifacts", type=str, default=ARTIFACT_FOLDER,
101 |                         help="Name of the artifacts folder. Default: \"Artifacts\"")
102 |     parser.add_argument("-u", "--lt_url", type=str, required=True,
103 |                         help="URL for LifeTime environment, without the API endpoint. Example: \"https://\"")
104 |     parser.add_argument("-t", "--lt_token", type=str, required=True,
105 |                         help="Token for LifeTime API calls.")
106 |     parser.add_argument("-v", "--lt_api_version", type=int, default=LIFETIME_API_VERSION,
107 |                         help="LifeTime API version number. If version <= 10, use 1, if version >= 11, use 2. Default: 2")
108 |     parser.add_argument("-e", "--lt_endpoint", type=str, default=LIFETIME_API_ENDPOINT,
109 |                         help="(optional) Used to set the API endpoint for LifeTime, without the version. Default: \"lifetimeapi/rest\"")
110 |     parser.add_argument("-d", "--destination_env", type=str, required=True,
111 |                         help="Name, as displayed in LifeTime, of the destination environment where you want to continue the deployment plan.")
112 |     parser.add_argument("-cf", "--config_file", type=str,
113 |                         help="Config file path. Contains configuration values to override the default ones.")
114 | 
115 |     args = parser.parse_args()
116 | 
117 |     # Load config file if exists
118 |     if args.config_file:
119 |         load_configuration_file(args.config_file)
120 |     # Parse the artifact directory
121 |     artifact_dir = args.artifacts
122 |     # Parse the API endpoint
123 |     lt_api_endpoint = args.lt_endpoint
124 |     # Parse the LT Url and split the LT hostname from the HTTP protocol
125 |     # Assumes the default HTTP protocol = https
126 |     lt_http_proto = LIFETIME_HTTP_PROTO
127 |     lt_url = args.lt_url
128 |     if lt_url.startswith("http://"):
129 |         lt_http_proto = "http"
130 |         lt_url = lt_url.replace("http://", "")
131 |     else:
132 |         lt_url = lt_url.replace("https://", "")
133 |     if lt_url.endswith("/"):
134 |         lt_url = lt_url[:-1]
135 |     # Parte LT API Version
136 |     lt_version = args.lt_api_version
137 |     # Parse the LT Token
138 |     lt_token = args.lt_token
139 |     # Parse Destination Environment
140 |     dest_env = args.destination_env
141 | 
142 |     # Calls the main script
143 |     main(artifact_dir, lt_http_proto, lt_url, lt_api_endpoint, lt_version, lt_token, dest_env)
144 | 
--------------------------------------------------------------------------------
/outsystems/pipeline/deploy_package_to_target_env_with_osptool.py:
--------------------------------------------------------------------------------
  1 | # Python Modules
  2 | import sys
  3 | import os
  4 | import argparse
  5 | 
  6 | # Workaround for Jenkins:
  7 | # Set the path to include the outsystems module
  8 | # Jenkins exposes the workspace directory through env.
  9 | if "WORKSPACE" in os.environ:
 10 |     sys.path.append(os.environ['WORKSPACE'])
 11 | else:  # Else just add the project dir
 12 |     sys.path.append(os.getcwd())
 13 | 
 14 | # Custom Modules
 15 | # Variables
 16 | from outsystems.vars.file_vars import ARTIFACT_FOLDER, SOLUTIONS_FOLDER, SOLUTIONS_DEPLOY_FILE
 17 | # Functions
 18 | from outsystems.osp_tool.osp_base import call_osptool
 19 | from outsystems.vars.vars_base import load_configuration_file
 20 | from outsystems.file_helpers.file import store_data
 21 | # Exceptions
 22 | from outsystems.exceptions.osptool_error import OSPToolDeploymentError
 23 | 
 24 | 
 25 | # ############################################################# SCRIPT ##############################################################
 26 | def main(artifact_dir: str, dest_env: str, package_path: str, catalogmappings_path: str, osp_tool_path: str, credentials: str):
 27 | 
 28 |     # Get solution file name from path
 29 |     solution_file = os.path.split(package_path)[1]
 30 | 
 31 |     print("Starting deployment of '{}' into '{}' environment...".format(solution_file, dest_env), flush=True)
 32 | 
 33 |     # Call OSP Tool
 34 |     return_code, execution_log = call_osptool(osp_tool_path, package_path, dest_env, credentials, catalogmappings_path)
 35 | 
 36 |     # Split the output into lines
 37 |     execution_log = execution_log.splitlines()
 38 | 
 39 |     # Stores the execution log
 40 |     filename = "{}{}".format(solution_file, SOLUTIONS_DEPLOY_FILE)
 41 |     filename = os.path.join(SOLUTIONS_FOLDER, filename)
 42 |     store_data(artifact_dir, filename, execution_log)
 43 | 
 44 |     error_validation_list = ['Incompatible Dependency', 'Execution Plan Abort', 'Outdated Consumer', 'Missing Configuration']
 45 | 
 46 |     # Validate the presence of each error validation
 47 |     deploy_error_flag = False
 48 |     for error_validation in error_validation_list:
 49 |         existing_error_list = [s for s in execution_log if error_validation in s]
 50 |         if existing_error_list:
 51 |             deploy_error_flag = True
 52 |             print(f'\nFound "{error_validation}" validation:')
 53 |             for error in existing_error_list:
 54 |                 print(f' - {error}')
 55 | 
 56 |     if deploy_error_flag:
 57 |         # Exit script with error
 58 |         raise OSPToolDeploymentError(
 59 |             "OSP Tool Deployment finished with errors. Please check the logs for further details.")
 60 | 
 61 | 
 62 | if __name__ == "__main__":
 63 |     # Argument menu / parsing
 64 |     parser = argparse.ArgumentParser()
 65 |     parser.add_argument("-a", "--artifacts", type=str, default=ARTIFACT_FOLDER,
 66 |                         help="Name of the artifacts folder. Default: \"Artifacts\"")
 67 |     parser.add_argument("-d", "--destination_env", type=str, required=True,
 68 |                         help="Name, as displayed in LifeTime, of the destination environment where you want to deploy the apps. (if in Airgap mode should be the hostname of the destination environment where you want to deploy the apps)")
 69 |     parser.add_argument("-p", "--package_path", type=str, required=True,
 70 |                         help="Package file path")
 71 |     parser.add_argument("-c", "--catalogmappings_path", type=str,
 72 |                         help="(Optional) Catalog mappings file path")
 73 |     parser.add_argument("-o", "--osp_tool_path", type=str, required=True,
 74 |                         help="OSP Tool file path")
 75 |     parser.add_argument("-user", "--osptool_user", type=str, required=True,
 76 |                         help="Username with privileges to deploy applications on target environment")
 77 |     parser.add_argument("-pwd", "--osptool_pwd", type=str, required=True,
 78 |                         help="Password of the Username with priveleges to deploy applications on target environment")
 79 |     parser.add_argument("-cf", "--config_file", type=str,
 80 |                         help="Config file path. Contains configuration values to override the default ones.")
 81 | 
 82 |     args = parser.parse_args()
 83 | 
 84 |     # Load config file if exists
 85 |     if args.config_file:
 86 |         load_configuration_file(args.config_file)
 87 |     # Parse the artifact directory
 88 |     artifact_dir = args.artifacts
 89 |     # Parse the package path
 90 |     package_path = args.package_path
 91 |     # Parse the Catalog Mapping path
 92 |     catalogmappings_path = args.catalogmappings_path
 93 |     # Parse Destination Environment
 94 |     dest_env = args.destination_env
 95 |     # Parse OSP Tool path
 96 |     osp_tool_path = args.osp_tool_path
 97 |     # Parse Credentials for OSP Tool
 98 |     credentials = args.osptool_user + " " + args.osptool_pwd
 99 | 
100 |     # Calls the main script
101 |     main(artifact_dir, dest_env, package_path, catalogmappings_path, osp_tool_path, credentials)
102 | 
--------------------------------------------------------------------------------
/outsystems/pipeline/evaluate_test_results.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | import unittest
 3 | import os
 4 | import xmlrunner
 5 | import sys
 6 | import argparse
 7 | 
 8 | # Workaround for Jenkins:
 9 | # Set the path to include the outsystems module
10 | # Jenkins exposes the workspace directory through env.
11 | if "WORKSPACE" in os.environ:
12 |     sys.path.append(os.environ['WORKSPACE'])
13 | else:  # Else just add the project dir
14 |     sys.path.append(os.getcwd())
15 | 
16 | # Custom Modules
17 | from outsystems.vars.file_vars import ARTIFACT_FOLDER, BDD_FRAMEWORK_FOLDER, BDD_FRAMEWORK_TEST_ENDPOINTS_FILE, JUNIT_TEST_RESULTS_FILE
18 | from outsystems.bdd_framework.bdd_runner import run_bdd_test
19 | from outsystems.file_helpers.file import load_data
20 | from outsystems.vars.vars_base import load_configuration_file
21 | 
22 | 
23 | # Functions
24 | # Variables
25 | 
26 | # ---------------------- TEST CLASS ----------------------
27 | # Generator class that will create a unit test for each entry of the test results and print out a XML in tests/python-tests/*.xml
class BDDTestRunner(unittest.TestCase):
    """Empty TestCase shell; test_* methods are attached dynamically below, one per BDD test endpoint."""
    # longMessage=False: assertion failures show only the custom report built
    # by format_error_report, without unittest's default message appended.
    longMessage = False
30 | 
31 | 
def format_error_report(error_obj):
    """Build a human-readable failure description from a BDD framework result object.

    When the suite reported no error message, summarizes the failed scenario
    count and appends each failure report; otherwise, returns the error message.
    """
    if error_obj["ErrorMessage"]:
        return "\nAn error was found in the unit test.\nError: {}".format(error_obj["ErrorMessage"])
    # Total scenarios = failed + successful
    total_scenarios = error_obj["FailedScenarios"] + error_obj["SuccessfulScenarios"]
    report = "\nBDD Test Suite failed {} scenarios (in {})\n".format(error_obj["FailedScenarios"], total_scenarios)
    return report + "".join(error_obj["FailureReports"])
41 | 
42 | 
def bdd_check_generator(url: str):
    """Return a unittest-style test method that runs the BDD suite at *url* and asserts it succeeded."""
    def generated_test(self):
        results = run_bdd_test(url)
        # Fail with the formatted report when the suite did not fully pass
        self.assertTrue(results["SuiteSuccess"], format_error_report(results))

    return generated_test
49 | 
50 | 
51 | # ---------------------- SCRIPT ----------------------
if __name__ == '__main__':
    # Argument menu / parsing
    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--artifacts", type=str, help="Name of the artifacts folder. Default: \"Artifacts\"", default=ARTIFACT_FOLDER)
    parser.add_argument("-cf", "--config_file", type=str,
                        help="Config file path. Contains configuration values to override the default ones.")

    args = parser.parse_args()

    # Load config file if exists
    if args.config_file:
        load_configuration_file(args.config_file)
    # Parse the artifact directory
    # Assumes the default dir = Artifacts
    artifact_dir = args.artifacts
    # unittest.main() below parses sys.argv itself, so the flag/value pairs this
    # script already consumed must be stripped: 3 argv entries means one pair
    # was passed (drop 2), 5 means both pairs were passed (drop 4).
    if len(sys.argv) == 3:  # Workaround to clear the args to avoid messing with the unittest.main()
        sys.argv = sys.argv[:-2]
    elif len(sys.argv) == 5:
        sys.argv = sys.argv[:-4]

    # Load the test endpoints
    filename = os.path.join(BDD_FRAMEWORK_FOLDER, BDD_FRAMEWORK_TEST_ENDPOINTS_FILE)
    test_urls = load_data(artifact_dir, filename)

    # Attach one generated test method per BDD endpoint to the BDDTestRunner class
    for test_endpoint in test_urls:
        test_func = bdd_check_generator(test_endpoint["URL"])
        test_name = "test_{}__{}".format(test_endpoint["TestSuite"], test_endpoint["Name"])
        setattr(BDDTestRunner, test_name, test_func)

    # Runs the test suite and stores the value in a XML file to be used by JUNIT
    filename = os.path.join(artifact_dir, JUNIT_TEST_RESULTS_FILE)
    try:
        with open(filename, 'wb') as output:
            runner = xmlrunner.XMLTestRunner(output=output, failfast=False, buffer=False)
            unittest.main(testRunner=runner)
    except UnboundLocalError:
        # NOTE(review): unittest.main() normally exits via sys.exit(); this
        # guard appears to swallow an xmlrunner teardown quirk and force a
        # success exit code — confirm before changing.
        sys.exit(0)
89 | 
--------------------------------------------------------------------------------
/outsystems/pipeline/fetch_lifetime_data.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | import os
 3 | import sys
 4 | import argparse
 5 | 
 6 | # Workaround for Jenkins:
 7 | # Set the path to include the outsystems module
 8 | # Jenkins exposes the workspace directory through env.
 9 | if "WORKSPACE" in os.environ:
10 |     sys.path.append(os.environ['WORKSPACE'])
11 | else:  # Else just add the project dir
12 |     sys.path.append(os.getcwd())
13 | 
14 | # Custom Modules
15 | from outsystems.lifetime.lifetime_applications import get_applications
16 | from outsystems.lifetime.lifetime_environments import get_environments
17 | from outsystems.lifetime.lifetime_base import build_lt_endpoint
18 | from outsystems.vars.lifetime_vars import LIFETIME_HTTP_PROTO, LIFETIME_API_ENDPOINT, LIFETIME_API_VERSION
19 | from outsystems.vars.file_vars import ARTIFACT_FOLDER
20 | from outsystems.vars.vars_base import load_configuration_file
21 | 
22 | 
23 | # ---------------------- SCRIPT ----------------------
# ---------------------- SCRIPT ----------------------
def main(artifact_dir: str, lt_http_proto: str, lt_url: str, lt_api_endpoint: str, lt_api_version: int, lt_token: str):
    """Fetch environments and applications data from LifeTime and store them as pipeline artifacts."""
    # Assemble the LifeTime API base endpoint
    lt_endpoint = build_lt_endpoint(
        lt_http_proto, lt_url, lt_api_endpoint, lt_api_version)

    # Environments
    get_environments(artifact_dir, lt_endpoint, lt_token)
    print("OS Environments data retrieved successfully.", flush=True)

    # Applications, without extra data
    get_applications(artifact_dir, lt_endpoint, lt_token, False)
    print("OS Applications data retrieved successfully.", flush=True)
35 | 
36 | 
if __name__ == "__main__":
    # Command-line interface definition
    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--artifacts", type=str, help="Name of the artifacts folder. Default: \"Artifacts\"", default=ARTIFACT_FOLDER)
    parser.add_argument("-u", "--lt_url", type=str, help="URL for LifeTime environment, without the API endpoint. Example: \"https://\"", required=True)
    parser.add_argument("-t", "--lt_token", type=str, help="Token for LifeTime API calls.", required=True)
    parser.add_argument("-v", "--lt_api_version", type=int, help="LifeTime API version number. If version <= 10, use 1, if version >= 11, use 2. Default: 2", default=LIFETIME_API_VERSION)
    parser.add_argument("-e", "--lt_endpoint", type=str, help="(optional) Used to set the API endpoint for LifeTime, without the version. Default: \"lifetimeapi/rest\"", default=LIFETIME_API_ENDPOINT)
    parser.add_argument("-cf", "--config_file", type=str, help="Config file path. Contains configuration values to override the default ones.")
    args = parser.parse_args()

    # Apply configuration overrides, when a config file was supplied
    if args.config_file:
        load_configuration_file(args.config_file)

    # Artifacts folder used to persist pipeline data
    artifact_dir = args.artifacts
    # LifeTime API endpoint (without version)
    lt_api_endpoint = args.lt_endpoint

    # Split the HTTP protocol from the LifeTime hostname (https is assumed by default)
    lt_http_proto = LIFETIME_HTTP_PROTO
    lt_url = args.lt_url
    if lt_url.startswith("http://"):
        lt_http_proto = "http"
        lt_url = lt_url.replace("http://", "")
    else:
        lt_url = lt_url.replace("https://", "")
    if lt_url.endswith("/"):
        lt_url = lt_url[:-1]

    # LifeTime API version
    lt_version = args.lt_api_version
    # LifeTime authentication token
    lt_token = args.lt_token

    # Calls the main script
    main(artifact_dir, lt_http_proto, lt_url,
         lt_api_endpoint, lt_version, lt_token)
81 | 
--------------------------------------------------------------------------------
/outsystems/pipeline/fetch_tech_debt.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | import sys
 3 | import os
 4 | import argparse
 5 | 
 6 | # Workaround for Jenkins:
 7 | # Set the path to include the outsystems module
 8 | # Jenkins exposes the workspace directory through env.
 9 | if "WORKSPACE" in os.environ:
10 |     sys.path.append(os.environ['WORKSPACE'])
11 | else:  # Else just add the project dir
12 |     sys.path.append(os.getcwd())
13 | 
14 | # Custom Modules
15 | # Variables
16 | from outsystems.vars.file_vars import ARTIFACT_FOLDER
17 | from outsystems.vars.manifest_vars import MANIFEST_APPLICATION_VERSIONS
18 | from outsystems.vars.ad_vars import AD_API_HOST
19 | 
20 | # Functions
21 | from outsystems.file_helpers.file import load_data
22 | from outsystems.architecture_dashboard.ad_tech_debt import get_infra_techdebt, get_app_techdebt, \
23 |     get_techdebt_levels, get_techdebt_categories
24 | 
25 | 
26 | # ############################################################# SCRIPT ##############################################################
27 | def main(artifact_dir: str, ad_api_host: str, activation_code: str, api_key: str, trigger_manifest: dict):
28 | 
29 |     # Get tech debt reference data (levels)
30 |     get_techdebt_levels(artifact_dir, ad_api_host, activation_code, api_key)
31 |     print("Technical debt levels retrieved successfully.", flush=True)
32 | 
33 |     # Get tech debt reference data (categories)
34 |     get_techdebt_categories(artifact_dir, ad_api_host, activation_code, api_key)
35 |     print("Technical debt categories retrieved successfully.", flush=True)
36 | 
37 |     # If the manifest file is being used, tech debt analysis is made for each app in the manifest
38 |     # Otherwise it runs for the entire infrastructure
39 |     if trigger_manifest and MANIFEST_APPLICATION_VERSIONS in trigger_manifest:
40 |         for app in trigger_manifest[MANIFEST_APPLICATION_VERSIONS]:
41 |             status = get_app_techdebt(artifact_dir, ad_api_host, activation_code, api_key, app)
42 |             if status:
43 |                 print("Technical debt data retrieved successfully for application {}.".format(app["ApplicationName"]), flush=True)
44 |             else:
45 |                 print("No technical debt data found for application {}.".format(app["ApplicationName"]), flush=True)
46 | 
47 |     else:
48 |         get_infra_techdebt(artifact_dir, ad_api_host, activation_code, api_key)
49 |         print("Technical debt data retrieved successfully for infrastructure {}.".format(activation_code), flush=True)
50 | 
51 |     sys.exit(0)
52 | 
53 | # End of main()
54 | 
55 | 
if __name__ == "__main__":
    # Argument menu / parsing
    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--artifacts", type=str, default=ARTIFACT_FOLDER,
                        help="Name of the artifacts folder. Default: \"Artifacts\"")
    parser.add_argument("-n", "--ad_hostname", type=str, default=AD_API_HOST,
                        help="Hostname of Architecture Dashboard, without the API endpoint. Default: \"architecture.outsystems.com\"")
    parser.add_argument("-c", "--activation_code", type=str, required=True,
                        help="Activation code of target infrastructure.")
    parser.add_argument("-k", "--api_key", type=str, required=True,
                        help="Key for Architecture Dashboard API calls.")
    parser.add_argument("-f", "--manifest_file", type=str,
                        help="(Optional) Trigger manifest file path.")

    args = parser.parse_args()

    # Parse the artifact directory
    artifact_dir = args.artifacts
    # Parse the Architecture Dashboard hostname
    ad_api_host = args.ad_hostname
    # Parse the Infrastructure Activation Code
    activation_code = args.activation_code
    # Parse the Architecture Dashboard API Key
    api_key = args.api_key
    # Load the trigger manifest contents, if a path was provided
    # (load_data takes the directory and the filename separately)
    if args.manifest_file:
        manifest_path = os.path.split(args.manifest_file)
        manifest_file = load_data(manifest_path[0], manifest_path[1])
    else:
        manifest_file = None

    # Calls the main script
    main(artifact_dir, ad_api_host, activation_code, api_key, manifest_file)
89 | 
--------------------------------------------------------------------------------
/outsystems/pipeline/generate_manifest_file.py:
--------------------------------------------------------------------------------
  1 | # Python Modules
  2 | import sys
  3 | import os
  4 | import argparse
  5 | 
  6 | # Workaround for Jenkins:
  7 | # Set the path to include the outsystems module
  8 | # Jenkins exposes the workspace directory through env.
  9 | if "WORKSPACE" in os.environ:
 10 |     sys.path.append(os.environ['WORKSPACE'])
 11 | else:  # Else just add the project dir
 12 |     sys.path.append(os.getcwd())
 13 | 
 14 | # Custom Modules
 15 | # Variables
 16 | from outsystems.vars.file_vars import ARTIFACT_FOLDER
 17 | from outsystems.vars.lifetime_vars import LIFETIME_HTTP_PROTO, LIFETIME_API_ENDPOINT, LIFETIME_API_VERSION, DEPLOYMENT_MESSAGE
 18 | from outsystems.vars.manifest_vars import MANIFEST_APPLICATION_VERSIONS, MANIFEST_ENVIRONMENT_DEFINITIONS, MANIFEST_DEPLOYMENT_NOTES, \
 19 |     MANIFEST_APPLICATION_KEY, MANIFEST_APPLICATION_NAME, MANIFEST_APPLICATION_VERSION_KEY, MANIFEST_APPLICATION_VERSION_NUMBER, \
 20 |     MANIFEST_ENVIRONMENT_KEY, MANIFEST_ENVIRONMENT_NAME, MANIFEST_ENVIRONMENT_LABEL, MANIFEST_FLAG_IS_TEST_APPLICATION, \
 21 |     MANIFEST_FOLDER, MANIFEST_FILE
 22 | 
 23 | # Functions
 24 | from outsystems.lifetime.lifetime_environments import get_environments, get_environment_deployment_zones
 25 | from outsystems.lifetime.lifetime_applications import get_running_app_version, get_application_data
 26 | from outsystems.file_helpers.file import store_data
 27 | from outsystems.lifetime.lifetime_base import build_lt_endpoint
 28 | 
 29 | 
 30 | # Function that will build the info required for the environments
 31 | def generate_manifest_env_info(artifact_dir: str, lt_endpoint: str, lt_token: str):
 32 |     # Gets all infra environments information
 33 |     infra_envs = get_environments(artifact_dir, lt_endpoint, lt_token)
 34 | 
 35 |     # Trims info to include only the desired env info (Name and Key)
 36 |     env_info = [{MANIFEST_ENVIRONMENT_KEY: env["Key"], MANIFEST_ENVIRONMENT_NAME: env["Name"], MANIFEST_ENVIRONMENT_LABEL: env["Name"]} for env in infra_envs if "Name" in env and "Key" in env]
 37 | 
 38 |     return env_info
 39 | 
 40 | 
 41 | # Function that will build the info required for a deployment based on the latest versions of the apps in the src environment
 42 | def generate_manifest_app_info(artifact_dir: str, lt_endpoint: str, lt_token: str, src_env_key: str, app_list: list):
 43 |     app_data_list = []  # will contain the applications to deploy details from LT
 44 | 
 45 |     deployment_zones = get_environment_deployment_zones(artifact_dir, lt_endpoint, lt_token, env_key=src_env_key)
 46 | 
 47 |     # Creates a list with the details for the apps you want to deploy
 48 |     for app_name in app_list:
 49 |         # Removes whitespaces in the beginning and end of the string
 50 |         app_name = app_name.strip()
 51 | 
 52 |         # Get the app running version on the source environment. It will only retrieve tagged applications
 53 |         app_info = get_running_app_version(artifact_dir, lt_endpoint, lt_token, src_env_key, app_name=app_name)
 54 | 
 55 |         # Get the module info
 56 |         app_module_data = get_application_data(artifact_dir, lt_endpoint, lt_token, True, app_key=app_info[MANIFEST_APPLICATION_KEY])
 57 | 
 58 |         # Get deployment zone info
 59 |         deployment_zone_key = next((item['DeploymentZoneKey'] for item in app_module_data['AppStatusInEnvs'] if item['EnvironmentKey'] == src_env_key), None)
 60 |         deployment_zone_name = next((item['Name'] for item in deployment_zones if item['Key'] == deployment_zone_key), None)
 61 | 
 62 |         # Add it to the app data list
 63 |         app_data_list.append({MANIFEST_APPLICATION_KEY: app_info[MANIFEST_APPLICATION_KEY], MANIFEST_APPLICATION_NAME: app_info[MANIFEST_APPLICATION_NAME],
 64 |                               MANIFEST_APPLICATION_VERSION_KEY: app_info[MANIFEST_APPLICATION_VERSION_KEY], MANIFEST_APPLICATION_VERSION_NUMBER: app_info["Version"],
 65 |                               'CreatedOn': app_info["CreatedOn"], 'ChangeLog': app_info["ChangeLog"], MANIFEST_FLAG_IS_TEST_APPLICATION: False,
 66 |                               'DeploymentZoneKey': deployment_zone_key, 'DeploymentZoneName': deployment_zone_name})
 67 | 
 68 |     return app_data_list
 69 | 
 70 | 
 71 | # Function that will generate and save the manifest file
 72 | def generate_manifest_file(artifact_dir: str, app_details: list, env_details: list, dep_note: str):
 73 | 
 74 |     manifest_data = {
 75 |         MANIFEST_APPLICATION_VERSIONS: app_details,
 76 |         MANIFEST_ENVIRONMENT_DEFINITIONS: env_details,
 77 |         MANIFEST_DEPLOYMENT_NOTES: dep_note
 78 |     }
 79 | 
 80 |     # Store the manifest to be used in other stages of the pipeline
 81 |     filename = "{}/{}".format(MANIFEST_FOLDER, MANIFEST_FILE)
 82 |     store_data(artifact_dir, filename, manifest_data)
 83 |     return manifest_data
 84 | 
 85 | 
 86 | def main(artifact_dir: str, lt_http_proto: str, lt_url: str, lt_api_endpoint: str, lt_api_version: int, lt_token: str, source_env: str, apps: list, dep_note: str):
 87 | 
 88 |     # Builds the LifeTime endpoint
 89 |     lt_endpoint = build_lt_endpoint(lt_http_proto, lt_url, lt_api_endpoint, lt_api_version)
 90 | 
 91 |     # Save environments info structure for manifest
 92 |     env_details = generate_manifest_env_info(artifact_dir, lt_endpoint, lt_token)
 93 | 
 94 |     # Gets the environment key for the source environment
 95 |     src_env_key = next((env["EnvironmentKey"] for env in env_details if env.get("EnvironmentName") == source_env), None)
 96 | 
 97 |     # Save applications info structure for manifest
 98 |     app_details = generate_manifest_app_info(artifact_dir, lt_endpoint, lt_token, src_env_key, apps)
 99 | 
100 |     generate_manifest_file(artifact_dir, app_details, env_details, dep_note)
101 | 
102 | # End of main()
103 | 
104 | 
if __name__ == "__main__":
    # Argument menu / parsing
    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--artifacts", type=str, default=ARTIFACT_FOLDER,
                        help="Name of the artifacts folder. Default: \"Artifacts\"")
    parser.add_argument("-u", "--lt_url", type=str, required=True,
                        help="URL for LifeTime environment, without the API endpoint. Example: \"https://\"")
    parser.add_argument("-t", "--lt_token", type=str, required=True,
                        help="Token for LifeTime API calls.")
    parser.add_argument("-v", "--lt_api_version", type=int, default=LIFETIME_API_VERSION,
                        help="LifeTime API version number. If version <= 10, use 1, if version >= 11, use 2. Default: 2")
    parser.add_argument("-e", "--lt_endpoint", type=str, default=LIFETIME_API_ENDPOINT,
                        help="(optional) Used to set the API endpoint for LifeTime, without the version. Default: \"lifetimeapi/rest\"")
    parser.add_argument("-s", "--source_env", type=str, required=True,
                        help="Name, as displayed in LifeTime, of the source environment where the apps are.")
    parser.add_argument("-l", "--app_list", type=str, required=True,
                        help="Comma separated list of apps you want to deploy. Example: \"App1,App2 With Spaces,App3_With_Underscores\"")
    parser.add_argument("-m", "--deploy_msg", type=str, default=DEPLOYMENT_MESSAGE,
                        help="Message you want to show on the deployment plans in LifeTime. Default: \"Automated deploy using OS Pipelines\".")
    args = parser.parse_args()

    # Parse the artifact directory
    artifact_dir = args.artifacts
    # Parse the API endpoint
    lt_api_endpoint = args.lt_endpoint
    # Parse the LT Url and split the LT hostname from the HTTP protocol
    # Assumes the default HTTP protocol = https
    lt_http_proto = LIFETIME_HTTP_PROTO
    lt_url = args.lt_url
    if lt_url.startswith("http://"):
        lt_http_proto = "http"
        lt_url = lt_url.replace("http://", "")
    else:
        lt_url = lt_url.replace("https://", "")
    # Drop a single trailing slash so the endpoint builder gets a bare hostname
    if lt_url.endswith("/"):
        lt_url = lt_url[:-1]
    # Parse LT API Version
    lt_version = args.lt_api_version
    # Parse the LT Token
    lt_token = args.lt_token
    # Parse Source Environment
    source_env = args.source_env
    # Parse App list (comma-separated names; per-name whitespace is trimmed downstream)
    _apps = args.app_list
    apps = _apps.split(',')
    # Parse Deployment Message
    dep_note = args.deploy_msg

    # Calls the main script
    main(artifact_dir, lt_http_proto, lt_url, lt_api_endpoint, lt_version, lt_token, source_env, apps, dep_note)
155 | 
--------------------------------------------------------------------------------
/outsystems/pipeline/generate_unit_testing_assembly.py:
--------------------------------------------------------------------------------
  1 | # Python Modules
  2 | import sys
  3 | import os
  4 | import argparse
  5 | 
  6 | # Workaround for Jenkins:
  7 | # Set the path to include the outsystems module
  8 | # Jenkins exposes the workspace directory through env.
  9 | if "WORKSPACE" in os.environ:
 10 |     sys.path.append(os.environ['WORKSPACE'])
 11 | else:  # Else just add the project dir
 12 |     sys.path.append(os.getcwd())
 13 | 
 14 | # Custom Modules
 15 | from outsystems.vars.bdd_vars import BDD_HTTP_PROTO, BDD_API_ENDPOINT, BDD_API_VERSION
 16 | from outsystems.vars.cicd_vars import PROBE_HTTP_PROTO, PROBE_API_ENDPOINT, PROBE_API_VERSION
 17 | from outsystems.vars.file_vars import ARTIFACT_FOLDER, BDD_FRAMEWORK_FOLDER, BDD_FRAMEWORK_TEST_ENDPOINTS_FILE
 18 | from outsystems.bdd_framework.bdd_base import build_bdd_endpoint, build_bdd_test_endpoint
 19 | from outsystems.cicd_probe.cicd_scan import scan_bdd_test_endpoint
 20 | from outsystems.cicd_probe.cicd_base import build_probe_endpoint
 21 | from outsystems.file_helpers.file import store_data
 22 | 
 23 | # Functions
 24 | # Variables
 25 | 
 26 | # ---------------------- VARS ----------------------
 27 | # Set script local variables
 28 | bdd_test = []  # will contain the BDD Framework tests for each app
 29 | bdd_modules = 0  # will count the number of bdd tests
 30 | test_names = []  # will contain the names of the tests to run
 31 | test_list = []  # will contain the webflows output from BDD for the application
 32 | test_urls = []  # will contain the urls for the BDD framework
 33 | 
 34 | 
 35 | # ---------------------- SCRIPT ----------------------
 36 | def main(artifact_dir: str, apps: list, bdd_http_proto: str, bdd_url: str, bdd_api_endpoint: str, bdd_version: int,
 37 |          cicd_http_proto: str, cicd_url: str, cicd_api_endpoint: str, cicd_version: int):
 38 |     # use the script variables
 39 |     global bdd_test, bdd_modules, test_names, test_list, test_urls
 40 | 
 41 |     probe_endpoint = build_probe_endpoint(
 42 |         cicd_http_proto, cicd_url, cicd_api_endpoint, cicd_version)
 43 |     bdd_endpoint = build_bdd_endpoint(
 44 |         bdd_http_proto, bdd_url, bdd_api_endpoint, bdd_version)
 45 | 
 46 |     # Query the CICD probe
 47 |     for app in apps:
 48 |         # Removes whitespaces in the beginning and end of the string
 49 |         app = app.strip()
 50 |         response = scan_bdd_test_endpoint(artifact_dir, probe_endpoint, app)
 51 |         if len(response) == 0:
 52 |             continue  # It has no test suites, continue the loop
 53 |         for test_endpoint in response:
 54 |             # Get the BDD test endpoints information
 55 |             bdd_test += [{"EspaceName": test_endpoint["BDDTestEndpointsInfo"]["EspaceName"],
 56 |                           "WebFlows": test_endpoint["BDDTestEndpointsInfo"]["WebFlows"]}]
 57 |             bdd_modules += len(test_endpoint["BDDTestEndpointsInfo"]
 58 |                                ["WebFlows"])
 59 |     print("{} BDD module(s) found.".format(bdd_modules), flush=True)
 60 | 
 61 |     # Get the tests to run (just for presentation)
 62 |     for bdd in bdd_test:  # For each BDD test
 63 |         if "WebFlows" in bdd:  # Sanity check to see if there are actual webflows in tests
 64 |             for webflow in bdd["WebFlows"]:  # For each webflow
 65 |                 if "WebScreens" in webflow:  # Sanity check to see if there are actual webscreens in tests
 66 |                     test_list += webflow["WebScreens"]
 67 |     print("{} BDD endpoint(s) scanned successfully.".format(len(test_list)), flush=True)
 68 | 
 69 |     # Get the names of the tests to run (just for presentation)
 70 |     for test in test_list:
 71 |         test_names.append(test["Name"])
 72 |     print("Tests to run:{}".format(test_names), flush=True)
 73 | 
 74 |     # For each test, generate the URL to query the BDD framework, to be used in the test class
 75 |     for bdd in bdd_test:  # For each BDD test
 76 |         if "WebFlows" in bdd:  # Sanity check to see if there are actual webflows in tests
 77 |             for webflow in bdd["WebFlows"]:  # For each webflow
 78 |                 if "WebScreens" in webflow:  # Sanity check to see if there are actual webscreens in tests
 79 |                     for webscreen in webflow["WebScreens"]:  # for each webscreen
 80 |                         test_endpoint = build_bdd_test_endpoint(
 81 |                             bdd_endpoint, bdd["EspaceName"], webscreen["Name"])
 82 |                         test_urls.append(
 83 |                             {"TestSuite": bdd["EspaceName"], "Name": webscreen["Name"], "URL": test_endpoint})
 84 | 
 85 |     # Save the test results in a file for later processing
 86 |     filename = os.path.join(BDD_FRAMEWORK_FOLDER,
 87 |                             BDD_FRAMEWORK_TEST_ENDPOINTS_FILE)
 88 |     store_data(artifact_dir, filename, test_urls)
 89 | 
 90 | 
 91 | # end of main()
 92 | 
if __name__ == "__main__":
    # Argument menu / parsing
    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--artifacts", type=str,
                        help="Name of the artifacts folder. Default: \"Artifacts\"", default=ARTIFACT_FOLDER)
    parser.add_argument("-l", "--app_list", type=str,
                        help="Comma separated list of apps you want to deploy. Example: \"App1,App2 With Spaces,App3_With_Underscores\"", required=True)

    parser.add_argument("--cicd_probe_env", type=str,
                        help="URL for CICD Probe, without the API endpoint. Example: \"https://\"", required=True)
    parser.add_argument("--cicd_probe_api", type=str,
                        help="(optional) Used to set the API endpoint for CICD Probe, without the version. Default: \"CI_CDProbe/rest\"", default=PROBE_API_ENDPOINT)
    parser.add_argument("--cicd_probe_version", type=int,
                        help="(optional) CICD Probe API version number. Default: 1", default=PROBE_API_VERSION)

    parser.add_argument("--bdd_framework_env", type=str,
                        help="URL for BDD Framework, without the API endpoint. Example: \"https://\"", required=True)
    parser.add_argument("--bdd_framework_api", type=str,
                        help="(optional) Used to set the API endpoint for BDD Framework, without the version. Default: \"BDDFramework/rest\"", default=BDD_API_ENDPOINT)
    parser.add_argument("--bdd_framework_version", type=int,
                        help="(optional) BDD Framework API version number. Default: 1", default=BDD_API_VERSION)

    args = parser.parse_args()

    # Parse the artifact directory
    artifact_dir = args.artifacts
    # Parse App list (comma-separated names; per-name whitespace is trimmed in main)
    _apps = args.app_list
    apps = _apps.split(',')

    # Parse the BDD API endpoint
    bdd_api_endpoint = args.bdd_framework_api
    # Parse the BDD Url and split the BDD hostname from the HTTP protocol
    # Assumes the default HTTP protocol = "https"
    bdd_http_proto = BDD_HTTP_PROTO
    bdd_url = args.bdd_framework_env
    if bdd_url.startswith("http://"):
        bdd_http_proto = "http"
        bdd_url = bdd_url.replace("http://", "")
    else:
        bdd_url = bdd_url.replace("https://", "")
    # Drop a single trailing slash so the endpoint builder gets a bare hostname
    if bdd_url.endswith("/"):
        bdd_url = bdd_url[:-1]
    # Parse BDD API Version
    bdd_version = args.bdd_framework_version

    # Parse the CICD Probe API endpoint
    cicd_api_endpoint = args.cicd_probe_api
    # Parse the CICD Probe Url and split the CICD Probe hostname from the HTTP protocol
    # Assumes the default HTTP protocol = "https"
    cicd_http_proto = PROBE_HTTP_PROTO
    cicd_url = args.cicd_probe_env
    if cicd_url.startswith("http://"):
        cicd_http_proto = "http"
        cicd_url = cicd_url.replace("http://", "")
    else:
        cicd_url = cicd_url.replace("https://", "")
    # Drop a single trailing slash so the endpoint builder gets a bare hostname
    if cicd_url.endswith("/"):
        cicd_url = cicd_url[:-1]
    # Parse CICD Probe API Version
    cicd_version = args.cicd_probe_version

    # Calls the main script
    main(artifact_dir, apps, bdd_http_proto, bdd_url, bdd_api_endpoint, bdd_version,
         cicd_http_proto, cicd_url, cicd_api_endpoint, cicd_version)
158 | 
--------------------------------------------------------------------------------
/outsystems/pipeline/tag_apps_based_on_manifest_data.py:
--------------------------------------------------------------------------------
  1 | # Python Modules
  2 | import sys
  3 | import os
  4 | import argparse
  5 | from packaging.version import Version
  6 | 
  7 | # Workaround for Jenkins:
  8 | # Set the path to include the outsystems module
  9 | # Jenkins exposes the workspace directory through env.
 10 | if "WORKSPACE" in os.environ:
 11 |     sys.path.append(os.environ['WORKSPACE'])
 12 | else:  # Else just add the project dir
 13 |     sys.path.append(os.getcwd())
 14 | 
 15 | # Custom Modules
 16 | # Variables
 17 | from outsystems.vars.file_vars import ARTIFACT_FOLDER
 18 | from outsystems.vars.lifetime_vars import LIFETIME_HTTP_PROTO, LIFETIME_API_ENDPOINT, LIFETIME_API_VERSION
 19 | 
 20 | # Functions
 21 | from outsystems.file_helpers.file import load_data
 22 | from outsystems.lifetime.lifetime_environments import get_environment_key
 23 | from outsystems.lifetime.lifetime_base import build_lt_endpoint
 24 | from outsystems.lifetime.lifetime_applications import set_application_version, get_running_app_version
 25 | from outsystems.vars.vars_base import load_configuration_file
 26 | # Exceptions
 27 | from outsystems.exceptions.invalid_parameters import InvalidParametersError
 28 | 
 29 | 
 30 | # ############################################################# SCRIPT ##############################################################
 31 | def valid_tag_number(artifact_dir: str, lt_endpoint: str, lt_token: str, env_name: str, env_key: str, app: dict):
 32 |     # Get the app running version on the source environment. It will only retrieve tagged applications
 33 |     running_app = get_running_app_version(artifact_dir, lt_endpoint, lt_token, env_key, app_name=app["ApplicationName"])
 34 | 
 35 |     if Version(running_app["Version"]) < Version(app["VersionNumber"]):
 36 |         return True
 37 | 
 38 |     print("Skipping tag! Application '{}' current tag ({}) on {} is greater than or equal to the manifest data ({}). ".format(app["ApplicationName"], running_app["Version"], env_name, app["VersionNumber"]), flush=True)
 39 |     return False
 40 | 
 41 | 
 42 | def main(artifact_dir: str, lt_http_proto: str, lt_url: str, lt_api_endpoint: str, lt_api_version: int, lt_token: str, dest_env: str, app_list: list, dep_manifest: list, trigger_manifest: dict, include_test_apps: bool):
 43 |     # Builds the LifeTime endpoint
 44 |     lt_endpoint = build_lt_endpoint(lt_http_proto, lt_url, lt_api_endpoint, lt_api_version)
 45 |     # Get the environment keys
 46 |     dest_env_key = get_environment_key(artifact_dir, lt_endpoint, lt_token, dest_env)
 47 | 
 48 |     # the app versions MUST come from that a file
 49 |     # either deployment or trigger manifest file
 50 |     if dep_manifest:
 51 |         for deployed_app in dep_manifest:
 52 |             if deployed_app["ApplicationName"] in app_list:
 53 |                 set_application_version(lt_endpoint, lt_token, dest_env_key, deployed_app["ApplicationKey"], deployed_app["ChangeLog"], deployed_app["Version"], None)
 54 |                 print("{} application successuflly tagged as {} on {}".format(deployed_app["ApplicationName"], deployed_app["Version"], dest_env), flush=True)
 55 |     elif trigger_manifest:
 56 |         for deployed_app in trigger_manifest["ApplicationVersions"]:
 57 |             if not deployed_app["IsTestApplication"] or (deployed_app["IsTestApplication"] and include_test_apps):
 58 |                 if valid_tag_number(artifact_dir, lt_endpoint, lt_token, dest_env, dest_env_key, deployed_app):
 59 |                     set_application_version(lt_endpoint, lt_token, dest_env_key, deployed_app["ApplicationKey"], deployed_app["ChangeLog"], deployed_app["VersionNumber"], None)
 60 |                     print("{} application successuflly tagged as {} on {}".format(deployed_app["ApplicationName"], deployed_app["VersionNumber"], dest_env), flush=True)
 61 |                 else:
 62 |                     continue
 63 | 
 64 | # End of main()
 65 | 
 66 | 
if __name__ == "__main__":
    # Argument menu / parsing
    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--artifacts", type=str, default=ARTIFACT_FOLDER,
                        help="Name of the artifacts folder. Default: \"Artifacts\"")
    parser.add_argument("-u", "--lt_url", type=str, required=True,
                        help="URL for LifeTime environment, without the API endpoint. Example: \"https://\"")
    parser.add_argument("-t", "--lt_token", type=str, required=True,
                        help="Token for LifeTime API calls.")
    parser.add_argument("-v", "--lt_api_version", type=int, default=LIFETIME_API_VERSION,
                        help="LifeTime API version number. If version <= 10, use 1, if version >= 11, use 2. Default: 2")
    parser.add_argument("-e", "--lt_endpoint", type=str, default=LIFETIME_API_ENDPOINT,
                        help="(Optional) Used to set the API endpoint for LifeTime, without the version. Default: \"lifetimeapi/rest\"")
    parser.add_argument("-d", "--destination_env", type=str, required=True,
                        help="Name, as displayed in LifeTime, of the destination environment where you want to deploy the apps. (if in Airgap mode should be the hostname of the destination environment where you want to deploy the apps)")
    parser.add_argument("-l", "--app_list", type=str,
                        help="(Optional) Comma separated list of apps you want to tag. Example: \"App1,App2 With Spaces,App3_With_Underscores\"")
    parser.add_argument("-f", "--manifest_file", type=str, required=True,
                        help="Manifest file path (either deployment or trigger).")
    parser.add_argument("-i", "--include_test_apps", action='store_true',
                        help="(Optional) Flag that indicates if applications marked as \"Test Application\" in the trigger manifest are included for tagging.")
    parser.add_argument("-cf", "--config_file", type=str,
                        help="Config file path. Contains configuration values to override the default ones.")

    args = parser.parse_args()

    # Load config file if exists
    if args.config_file:
        load_configuration_file(args.config_file)
    # Parse the artifact directory
    artifact_dir = args.artifacts
    # Parse the API endpoint
    lt_api_endpoint = args.lt_endpoint
    # Parse the LT Url and split the LT hostname from the HTTP protocol
    # Assumes the default HTTP protocol = https
    lt_http_proto = LIFETIME_HTTP_PROTO
    lt_url = args.lt_url
    if lt_url.startswith("http://"):
        lt_http_proto = "http"
        lt_url = lt_url.replace("http://", "")
    else:
        lt_url = lt_url.replace("https://", "")
    # Drop a single trailing slash so the endpoint builder gets a bare hostname
    if lt_url.endswith("/"):
        lt_url = lt_url[:-1]
    # Parse LT API Version
    lt_version = args.lt_api_version
    # Parse the LT Token
    lt_token = args.lt_token
    # Parse Destination Environment
    dest_env = args.destination_env

    # Parse Manifest file if it exists
    # Based on the file content it can be a deployment manifest (list-based) or trigger manifest (dict-based)
    manifest_file = None
    if args.manifest_file:
        manifest_path = os.path.split(args.manifest_file)
        manifest_file = load_data(manifest_path[0], manifest_path[1])

    # A list payload means a deployment manifest; a dict payload means a trigger manifest
    dep_manifest = manifest_file if type(manifest_file) is list else None
    trigger_manifest = manifest_file if type(manifest_file) is dict else None

    if dep_manifest and not args.app_list:
        raise InvalidParametersError("--app_list parameter is required for Deployment Manifest operation")

    # Parse App list
    apps = None
    if args.app_list:
        _apps = args.app_list
        apps = _apps.split(',')

    # Parse Include Test Apps flag
    include_test_apps = args.include_test_apps
    # Calls the main script
    main(artifact_dir, lt_http_proto, lt_url, lt_api_endpoint, lt_version, lt_token, dest_env, apps, dep_manifest, trigger_manifest, include_test_apps)  # type: ignore
141 | 
--------------------------------------------------------------------------------
/outsystems/pipeline/validate_manifest_apps_exist_in_target_env.py:
--------------------------------------------------------------------------------
  1 | # Python Modules
  2 | import sys
  3 | import os
  4 | import argparse
  5 | import json
  6 | 
  7 | # Workaround for Jenkins:
  8 | # Set the path to include the outsystems module
  9 | # Jenkins exposes the workspace directory through env.
 10 | if "WORKSPACE" in os.environ:
 11 |     sys.path.append(os.environ['WORKSPACE'])
 12 | else:  # Else just add the project dir
 13 |     sys.path.append(os.getcwd())
 14 | 
 15 | # Custom Modules
 16 | # Variables
 17 | from outsystems.vars.file_vars import ARTIFACT_FOLDER
 18 | from outsystems.vars.lifetime_vars import LIFETIME_HTTP_PROTO, LIFETIME_API_ENDPOINT, LIFETIME_API_VERSION
 19 | from outsystems.vars.manifest_vars import MANIFEST_APPLICATION_VERSIONS, MANIFEST_FLAG_IS_TEST_APPLICATION, MANIFEST_APPLICATION_NAME
 20 | # Functions
 21 | from outsystems.lifetime.lifetime_applications import get_applications
 22 | from outsystems.lifetime.lifetime_environments import get_environment_key
 23 | from outsystems.file_helpers.file import load_data
 24 | from outsystems.lifetime.lifetime_base import build_lt_endpoint
 25 | from outsystems.vars.vars_base import load_configuration_file
 26 | # Exceptions
 27 | from outsystems.exceptions.app_does_not_exist import AppDoesNotExistError
 28 | from outsystems.exceptions.manifest_does_not_exist import ManifestDoesNotExistError
 29 | 
 30 | 
 31 | def main(artifact_dir: str, lt_http_proto: str, lt_url: str, lt_api_endpoint: str, lt_api_version: int, lt_token: str, env_label: str, include_test_apps: bool, trigger_manifest: dict):
 32 | 
 33 |     # Builds the LifeTime endpoint
 34 |     lt_endpoint = build_lt_endpoint(lt_http_proto, lt_url, lt_api_endpoint, lt_api_version)
 35 | 
 36 |     # Gets the environment key for the target environment
 37 |     env_key = get_environment_key(artifact_dir, lt_endpoint, lt_token, env_label)
 38 | 
 39 |     # Get Applications without extra data
 40 |     apps = get_applications(artifact_dir, lt_endpoint, lt_token, True)
 41 |     print("OS Applications data retrieved successfully.", flush=True)
 42 | 
 43 |     app_names_to_validate = [app[MANIFEST_APPLICATION_NAME] for app in trigger_manifest.get(MANIFEST_APPLICATION_VERSIONS, []) if include_test_apps or not app.get(MANIFEST_FLAG_IS_TEST_APPLICATION)]
 44 | 
 45 |     # Check if all manifest application names exist in the infra
 46 |     if not all(any(app["Name"] == app_name for app in apps) for app_name in app_names_to_validate):
 47 |         raise AppDoesNotExistError("One or more applications not found in this infra.")
 48 | 
 49 |     # Check if the each manifest application exists in the provided environment
 50 |     for app_info in apps:
 51 |         if app_info["Name"] in app_names_to_validate:
 52 |             app_status_in_envs = app_info.get("AppStatusInEnvs", [])
 53 |             environment_keys = [env["EnvironmentKey"] for env in app_status_in_envs]
 54 |             if env_key not in environment_keys:
 55 |                 raise AppDoesNotExistError("Application '{}' does not exist in '{}' Environment .".format(app_info['Name'], env_key))
 56 | 
 57 |     print("All Trigger Manifest Applications exist in the {} Environment.".format(env_label), flush=True)
 58 | 
 59 | # End of main()
 60 | 
 61 | 
if __name__ == "__main__":
    # Argument menu / parsing
    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--artifacts", type=str, default=ARTIFACT_FOLDER,
                        help="(Optional) Name of the artifacts folder. Default: \"Artifacts\"")
    parser.add_argument("-u", "--lt_url", type=str, required=True,
                        help="URL for LifeTime environment, without the API endpoint. Example: \"https://\"")
    parser.add_argument("-t", "--lt_token", type=str, required=True,
                        help="Token for LifeTime API calls.")
    parser.add_argument("-v", "--lt_api_version", type=int, default=LIFETIME_API_VERSION,
                        help="(Optional) LifeTime API version number. If version <= 10, use 1, if version >= 11, use 2. Default: 2")
    parser.add_argument("-e", "--lt_endpoint", type=str, default=LIFETIME_API_ENDPOINT,
                        help="(Optional) Used to set the API endpoint for LifeTime, without the version. Default: \"lifetimeapi/rest\"")
    parser.add_argument("-s", "--env_label", type=str, required=True,
                        help="Label, as configured in the manifest, of the source environment where the apps are.")
    parser.add_argument("-i", "--include_test_apps", action='store_true',
                        help="Flag that indicates if applications marked as \"Test Application\" in the manifest are included in the deployment plan.")
    parser.add_argument("-m", "--trigger_manifest", type=str,
                        help="Manifest artifact (in JSON format) received when the pipeline is triggered. Contains required data used throughout the pipeline execution.")
    parser.add_argument("-f", "--manifest_file", type=str,
                        help="Manifest file (with JSON format). Contains required data used throughout the pipeline execution.")
    parser.add_argument("-cf", "--config_file", type=str,
                        help="Config file path. Contains configuration values to override the default ones.")

    args = parser.parse_args()

    # Load config file if exists (sets OVERRIDE_CONFIG_IN_USE env vars for later lookups)
    if args.config_file:
        load_configuration_file(args.config_file)
    # Parse the artifact directory
    artifact_dir = args.artifacts
    # Parse the API endpoint
    lt_api_endpoint = args.lt_endpoint
    # Parse the LT Url and split the LT hostname from the HTTP protocol
    # Assumes the default HTTP protocol = https
    lt_http_proto = LIFETIME_HTTP_PROTO
    lt_url = args.lt_url
    if lt_url.startswith("http://"):
        lt_http_proto = "http"
        lt_url = lt_url.replace("http://", "")
    else:
        lt_url = lt_url.replace("https://", "")
    # Strip a single trailing slash so the endpoint builder does not double it
    if lt_url.endswith("/"):
        lt_url = lt_url[:-1]
    # Parse the LT API version
    lt_version = args.lt_api_version
    # Parse the LT Token
    lt_token = args.lt_token
    # Parse Environment
    env_label = args.env_label
    # Parse Include Test Apps flag
    include_test_apps = args.include_test_apps

    # Validate Manifest is being passed either as JSON or as file
    if not args.trigger_manifest and not args.manifest_file:
        raise ManifestDoesNotExistError("The manifest was not provided as JSON or as a file. Aborting!")

    # Parse Trigger Manifest artifact (file takes precedence over inline JSON)
    if args.manifest_file:
        trigger_manifest_path = os.path.split(args.manifest_file)
        trigger_manifest = load_data(trigger_manifest_path[0], trigger_manifest_path[1])
    else:
        trigger_manifest = json.loads(args.trigger_manifest)

    # Calls the main script
    main(artifact_dir, lt_http_proto, lt_url, lt_api_endpoint, lt_version, lt_token, env_label, include_test_apps, trigger_manifest)
128 | 
--------------------------------------------------------------------------------
/outsystems/properties/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems/properties/__init__.py
--------------------------------------------------------------------------------
/outsystems/properties/properties_base.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | import requests
 3 | 
 4 | # Custom Modules
 5 | from outsystems.exceptions.invalid_json_response import InvalidJsonResponseError
 6 | from outsystems.vars.properties_vars import PROPERTIES_API_HTTP_PROTO, PROPERTIES_API_ENDPOINT, PROPERTIES_API_VERSION, PROPERTIES_API_SSL_CERT_VERIFY
 7 | # Functions
 8 | from outsystems.vars.vars_base import get_configuration_value
 9 | 
10 | 
# Method that builds the Properties API endpoint based on the environment host
def build_properties_api_url(properties_http_proto: str, lt_url: str, properties_api_endpoint: str, properties_api_version: int):
    """Assemble the Properties API base URL: <proto>://<host>/<endpoint>/v<version>."""
    return f"{properties_http_proto}://{lt_url}/{properties_api_endpoint}/v{properties_api_version}"
14 | 
15 | 
# Sends a PUT request to Properties API, with a payload. The json part is ignored
def send_properties_put_request(lt_url: str, token: str, api_endpoint: str, payload: str):
    """Send a PUT request to the Properties API and return the parsed result.

    Args:
        lt_url: LifeTime hostname used to build the Properties API URL.
        token: Service account token, sent as a Bearer authorization header.
        api_endpoint: Endpoint path appended to the Properties API base URL.
        payload: Raw request body (sent as-is via `data=`; `json=` is unused).

    Returns:
        dict with "http_status" (int status code) and "response" (parsed JSON
        body, or {} when the response body is empty).

    Raises:
        InvalidJsonResponseError: when a non-empty response body is not valid JSON.
    """
    # Auth token + content type json
    headers = {'content-type': 'application/json',
               'authorization': 'Bearer ' + token}
    # Format the request URL to include the api endpoint
    properties_api_url = build_properties_api_url(PROPERTIES_API_HTTP_PROTO, lt_url, PROPERTIES_API_ENDPOINT, PROPERTIES_API_VERSION)
    request_string = "{}/{}".format(properties_api_url, api_endpoint)
    # SSL verification may be disabled via the configuration override mechanism
    response = requests.put(
        request_string, data=payload, json=None, headers=headers, verify=get_configuration_value("PROPERTIES_API_SSL_CERT_VERIFY", PROPERTIES_API_SSL_CERT_VERIFY))
    response_obj = {"http_status": response.status_code, "response": {}}
    if len(response.text) > 0:
        try:
            response_obj["response"] = response.json()
        except ValueError:
            # response.json() raises a ValueError subclass on malformed JSON;
            # the previous bare "except:" also swallowed SystemExit/KeyboardInterrupt.
            raise InvalidJsonResponseError(
                "PUT {}: The JSON response could not be parsed. Response: {}".format(request_string, response.text))

    return response_obj
35 | 
--------------------------------------------------------------------------------
/outsystems/properties/properties_set_value.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | 
 3 | # Custom Modules
 4 | # Functions
 5 | from outsystems.properties.properties_base import send_properties_put_request
 6 | # Variables
 7 | from outsystems.vars.properties_vars import SET_SITE_PROPERTY_VALUE_SUCCESS_CODE, SET_TIMER_SCHEDULE_SUCCESS_CODE, \
 8 |     SET_REST_ENDPOINT_URL_SUCCESS_CODE, SET_SOAP_ENDPOINT_URL_SUCCESS_CODE
 9 | 
10 | 
def set_site_property_value(lt_url: str, token: str, module_key: str, environment_key: str, site_property_key: str, site_property_value: str):
    """Update a module's site property value in a given environment via the Properties API."""
    # Endpoint for the target site property
    endpoint = "Modules/{}/Environments/{}/SiteProperties/{}/Value/".format(module_key, environment_key, site_property_key)
    # Issue the PUT request and fail fast on any unexpected status code
    result = send_properties_put_request(lt_url, token, endpoint, site_property_value)
    if result["http_status"] != SET_SITE_PROPERTY_VALUE_SUCCESS_CODE:
        raise NotImplementedError(
            "There was an error. Response from server: {}".format(result))
    return result["response"]
23 | 
24 | 
def set_rest_endpoint_url(lt_url: str, token: str, module_key: str, environment_key: str, rest_endpoint_key: str, rest_endpoint_url: str):
    """Update the effective URL of a module's REST reference in a given environment."""
    # Endpoint for the target REST reference
    endpoint = "Modules/{}/Environments/{}/RESTReferences/{}/EffectiveURL/".format(module_key, environment_key, rest_endpoint_key)
    # Issue the PUT request and fail fast on any unexpected status code
    result = send_properties_put_request(lt_url, token, endpoint, rest_endpoint_url)
    if result["http_status"] != SET_REST_ENDPOINT_URL_SUCCESS_CODE:
        raise NotImplementedError(
            "There was an error. Response from server: {}".format(result))
    return result["response"]
37 | 
38 | 
def set_soap_endpoint_url(lt_url: str, token: str, module_key: str, environment_key: str, soap_endpoint_key: str, soap_endpoint_url: str):
    """Update the effective URL of a module's SOAP reference in a given environment."""
    # Endpoint for the target SOAP reference
    endpoint = "Modules/{}/Environments/{}/SOAPReferences/{}/EffectiveURL/".format(module_key, environment_key, soap_endpoint_key)
    # Issue the PUT request and fail fast on any unexpected status code
    result = send_properties_put_request(lt_url, token, endpoint, soap_endpoint_url)
    if result["http_status"] != SET_SOAP_ENDPOINT_URL_SUCCESS_CODE:
        raise NotImplementedError(
            "There was an error. Response from server: {}".format(result))
    return result["response"]
51 | 
52 | 
def set_timer_schedule(lt_url: str, token: str, module_key: str, environment_key: str, timer_key: str, timer_schedule: str):
    """Update a module timer's schedule in a given environment via the Properties API."""
    # Endpoint for the target timer
    endpoint = "Modules/{}/Environments/{}/Timers/{}/Schedule/".format(module_key, environment_key, timer_key)
    # Issue the PUT request and fail fast on any unexpected status code
    result = send_properties_put_request(lt_url, token, endpoint, timer_schedule)
    if result["http_status"] != SET_TIMER_SCHEDULE_SUCCESS_CODE:
        raise NotImplementedError(
            "There was an error. Response from server: {}".format(result))
    return result["response"]
65 | 
--------------------------------------------------------------------------------
/outsystems/vars/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems/vars/__init__.py
--------------------------------------------------------------------------------
/outsystems/vars/ad_vars.py:
--------------------------------------------------------------------------------
# Constants for the Architecture Dashboard (AI Mentor Studio) API.
# Base Architecture Dashboard API Variables
AD_HTTP_PROTO = "https"
AD_API_HOST = "aimentorstudio.outsystems.com"
AD_API_ENDPOINT = "AIMentorStudioAPI/rest"
AD_API_VERSION = "1"  # NOTE: kept as a string (unlike other *_API_VERSION ints in this package)

# Architecture Dashboard API Endpoint Variables
# HTTP status codes returned by the API
AD_API_SUCCESS_CODE = 200
AD_API_BAD_REQUEST_CODE = 400
AD_API_UNAUTHORIZED_CODE = 401
AD_API_FORBIDDEN_CODE = 403
AD_API_NOT_FOUND_CODE = 404
AD_API_TOO_MANY_REQ_CODE = 429
AD_API_INTERNAL_ERROR_CODE = 500

# Application specific
AD_APP_ENDPOINT = "TechnicalDebt_Application"
AD_APP_LIMIT_DEFAULT = 50
AD_APP_SUCCESS_CODE = 200

# Level specific
AD_LEVELS_ENDPOINT = "TechnicalDebt_Level"
AD_LEVELS_SUCCESS_CODE = 200

# Categories specific
AD_CATEGORIES_ENDPOINT = "TechnicalDebt_Category"
AD_CATEGORIES_SUCCESS_CODE = 200
28 | 
--------------------------------------------------------------------------------
/outsystems/vars/bdd_vars.py:
--------------------------------------------------------------------------------
# Base BDD Framework Variables (header previously said "CICD Probe" — copy/paste error)
BDD_HTTP_PROTO = "https"
BDD_API_ENDPOINT = "BDDFramework/rest"
BDD_CLIENT_API_ENDPOINT = "TestRunner_API/rest"
BDD_API_VERSION = 1
BDD_API_SSL_CERT_VERIFY = True
# Test framework flavors: server-side (traditional web) vs client-side (reactive) runners
BDD_FRAMEWORK_TYPE_SERVER = "server"
BDD_FRAMEWORK_TYPE_CLIENT = "client"

# Test Runner Endpoint Variables
BDD_TEST_RUNNER_ENDPOINT = "BDDTestRunner"
BDD_RUNNER_SUCCESS_CODE = 200
13 | 
--------------------------------------------------------------------------------
/outsystems/vars/cicd_vars.py:
--------------------------------------------------------------------------------
# Constants for the CICD Probe REST API.
# Base CICD Probe Variables
PROBE_HTTP_PROTO = "https"
PROBE_API_ENDPOINT = "CI_CDProbe/rest"
PROBE_API_VERSION = 1
PROBE_API_SSL_CERT_VERIFY = True

# Scan Endpoints
SCAN_BDD_TESTS_ENDPOINT = "ScanBDDTestEndpoints"
PROBE_SCAN_SUCCESS_CODE = 200

# Application Dependencies Endpoint
GET_APPLICATION_DEPENDENCIES_ENDPOINT = "GetApplicationDependencies"
PROBE_DEPENDENCIES_SUCCESS_CODE = 200
14 | 
--------------------------------------------------------------------------------
/outsystems/vars/dotnet_vars.py:
--------------------------------------------------------------------------------
# DotNet specific
# XML namespace used when parsing MSBuild project files
MS_BUILD_NAMESPACE = 'http://schemas.microsoft.com/developer/msbuild/2003'
# Assemblies to skip during processing
ASSEMBLY_BLACKLIST = ["System.ComponentModel.Annotations"]
4 | 
--------------------------------------------------------------------------------
/outsystems/vars/file_vars.py:
--------------------------------------------------------------------------------
# File and folder names used for the pipeline's local artifact cache.
# Directory Vars
ARTIFACT_FOLDER = "Artifacts"

# Applications vars
APPLICATIONS_FILE = "applications.cache"
APPLICATION_FILE = ".cache"
APPLICATION_VERSIONS_FILE = ".versions.cache"
APPLICATION_VERSION_FILE = ".version.cache"
APPLICATION_FOLDER = "application_data"
APPLICATION_OAP_FOLDER = "application_oap"
APPLICATION_OAP_FILE = ".oap"

# Modules vars
MODULES_FILE = "modules.cache"
MODULES_FOLDER = "module_data"
MODULE_FILE = ".cache"
MODULE_VERSIONS_FILE = ".versions.cache"
MODULE_VERSION_FILE = ".version.cache"

# Environments vars
ENVIRONMENTS_FILE = "environments.cache"
ENVIRONMENT_FILE = ".cache"
# NOTE: the next two constants intentionally share the same value
ENVIRONMENT_APPS_FILE = ".applications.cache"
ENVIRONMENT_APPLICATION_FILE = ".applications.cache"
ENVIRONMENT_VERSIONS_FILE = ".versions.cache"
ENVIRONMENT_DEPLOYMENT_ZONES_FILE = ".deploymentzones.cache"
ENVIRONMENT_FOLDER = "environment_data"
ENVIRONMENT_SOURCECODE_FOLDER = "sourcecode_data"
ENVIRONMENT_SOURCECODE_STATUS_FILE = ".status.cache"
ENVIRONMENT_SOURCECODE_LINK_FILE = ".link.cache"
ENVIRONMENT_SOURCECODE_DOWNLOAD_FILE = ".source.zip"

# Deployments vars
DEPLOYMENTS_FILE = "deployments.cache"
DEPLOYMENT_FILE = ".cache"
DEPLOYMENT_PLAN_FILE = ".plan.cache"
DEPLOYMENT_MANIFEST_FILE = "deployment_manifest.cache"
DEPLOYMENT_STATUS_FILE = ".status.cache"
DEPLOYMENT_FOLDER = "deployment_data"

# CICD Probe vars
PROBE_APPLICATION_SCAN_FILE = ".probe.cache"
PROBE_APPLICATION_DEPENDENCIES_FILE = ".dependencies.cache"
PROBE_FOLDER = "cicd_probe_data"

# BDD Framework vars
BDD_FRAMEWORK_TEST_RUN_FILE = ".testrun.cache"
BDD_FRAMEWORK_TEST_ENDPOINTS_FILE = "test.endpoints.cache"
BDD_FRAMEWORK_FOLDER = "bdd_data"

# JUNIT vars
JUNIT_TEST_RESULTS_FILE = "junit-result.xml"

# Architecture Dashboard vars
AD_FILE_PREFIX = "TechDebt"
AD_LEVELS_FILE = ".levels.cache"
AD_CATEGORIES_FILE = ".categories.cache"
AD_INFRA_FILE = ".infrastructure.cache"
AD_APP_FILE = ".application.cache"
AD_FOLDER = "techdebt_data"

# AirGap vars
DEPLOYMENT_ORDER_FILE = "sorted_oap.list"

# Solutions vars
SOLUTIONS_OSP_FILE = ".osp"
SOLUTIONS_LINK_FILE = ".link.cache"
SOLUTIONS_STATUS_FILE = ".status.cache"
SOLUTIONS_DEPLOY_FILE = ".deploy.cache"
SOLUTIONS_FOLDER = "solution_data"
71 | 
--------------------------------------------------------------------------------
/outsystems/vars/lifetime_vars.py:
--------------------------------------------------------------------------------
# Constants for the LifeTime REST API: endpoint paths and the HTTP status
# codes each operation can return.
# Base LT Variables
LIFETIME_HTTP_PROTO = "https"
LIFETIME_API_ENDPOINT = "lifetimeapi/rest"
LIFETIME_API_VERSION = 2
LIFETIME_SSL_CERT_VERIFY = True

# Applications Endpoint Variables
# Application list specific
APPLICATIONS_ENDPOINT = "applications"
APPLICATIONS_SUCCESS_CODE = 200
APPLICATIONS_EMPTY_CODE = 204
APPLICATIONS_FLAG_FAILED_CODE = 400
APPLICATIONS_FAILED_CODE = 500
# Application specific
APPLICATION_SUCCESS_CODE = 200
APPLICATION_FLAG_FAILED_CODE = 400
APPLICATION_NO_PERMISSION_CODE = 403
APPLICATION_FAILED_CODE = 404
# Application version specific
APPLICATION_VERSIONS_ENDPOINT = "versions"
APPLICATION_VERSIONS_CONTENT = "content"
APPLICATION_VERSION_SUCCESS_CODE = 200
APPLICATION_VERSIONS_EMPTY_CODE = 204
APPLICATION_VERSION_INVALID_CODE = 400
APPLICATION_VERSION_NO_PERMISSION_CODE = 403
APPLICATION_VERSION_FAILED_CODE = 404
APPLICATION_VERSION_FAILED_LIST_CODE = 500
# Application Create Version specific
APPLICATION_VERSION_CREATE_SUCCESS_CODE = 201
APPLICATION_VERSION_CREATE_INVALID_CODE = 400
APPLICATION_VERSION_CREATE_NO_PERMISSION_CODE = 403
APPLICATION_VERSION_CREATE_NO_ENVIRONMENT_CODE = 404
APPLICATION_VERSION_CREATE_FAILED_CODE = 500

# Deployments Endpoint Variables
# Deployment list specific
DEPLOYMENT_ENDPOINT = "deployment"
DEPLOYMENTS_ENDPOINT = "deployments"
DEPLOYMENTS_SUCCESS_CODE = 200
DEPLOYMENTS_EMPTY_CODE = 204
DEPLOYMENTS_INVALID_CODE = 400
DEPLOYMENTS_NO_PERMISSION_CODE = 403
DEPLOYMENTS_FAILED_CODE = 500
DEPLOYMENT_MESSAGE = "Automated deploy via OutSystems Pipeline"
# Deployment creation specific
# Payload key differs between LifeTime API v1 and v2
DEPLOYMENT_PLAN_V1_API_OPS = "ApplicationVersionKeys"
DEPLOYMENT_PLAN_V2_API_OPS = "ApplicationOperations"
DEPLOYMENT_SUCCESS_CODE = 201
DEPLOYMENT_INVALID_CODE = 400
DEPLOYMENT_NO_PERMISSION_CODE = 403
DEPLOYMENT_NO_ENVIRONMENT_CODE = 404
DEPLOYMENT_FAILED_CODE = 500
# Deployment specific
DEPLOYMENT_GET_SUCCESS_CODE = 200
DEPLOYMENT_GET_NO_PERMISSION_CODE = 403
DEPLOYMENT_GET_NO_DEPLOYMENT_CODE = 404
DEPLOYMENT_GET_FAILED_CODE = 500
# Deployment delete specific
DEPLOYMENT_DELETE_SUCCESS_CODE = 204
DEPLOYMENT_DELETE_IMPOSSIBLE_CODE = 400
DEPLOYMENT_DELETE_NO_PERMISSION_CODE = 403
DEPLOYMENT_DELETE_NO_DEPLOYMENT_CODE = 404
DEPLOYMENT_DELETE_FAILED_CODE = 500
# Deployment status specific
DEPLOYMENT_STATUS_ENDPOINT = "status"
DEPLOYMENT_STATUS_SUCCESS_CODE = 200
DEPLOYMENT_STATUS_NO_PERMISSION_CODE = 403
DEPLOYMENT_STATUS_NO_DEPLOYMENT_CODE = 404
DEPLOYMENT_STATUS_FAILED_CODE = 500
# Deployment execution specific
DEPLOYMENT_START_ENDPOINT = "start"
DEPLOYMENT_ABORT_ENDPOINT = "abort"
DEPLOYMENT_CONTINUE_ENDPOINT = "continue"
DEPLOYMENT_ACTION_SUCCESS_CODE = 202
DEPLOYMENT_ACTION_IMPOSSIBLE_CODE = 400
DEPLOYMENT_ACTION_NO_PERMISSION_CODE = 403
DEPLOYMENT_ACTION_NO_DEPLOYMENT_CODE = 404
DEPLOYMENT_ACTION_FAILED_CODE = 500

# Environments Endpoint Variables
# Environment list specific
ENVIRONMENTS_ENDPOINT = "environments"
ENVIRONMENTS_SUCCESS_CODE = 200
ENVIRONMENTS_NOT_FOUND_CODE = 204
ENVIRONMENTS_FAILED_CODE = 500
# Environment application list specific
ENVIRONMENT_APPLICATIONS_ENDPOINT = "applications"
ENVIRONMENT_APP_SUCCESS_CODE = 200
ENVIRONMENT_APP_NOT_STATUS_CODE = 400
ENVIRONMENT_APP_NO_PERMISSION_CODE = 403
ENVIRONMENT_APP_NOT_FOUND = 404
ENVIRONMENT_APP_FAILED_CODE = 500
# Environment deployment zones specific
ENVIRONMENT_DEPLOYMENT_ZONES_ENDPOINT = "deploymentzones"
ENVIRONMENT_ZONES_SUCCESS_CODE = 200
ENVIRONMENT_ZONES_NOT_STATUS_CODE = 400
ENVIRONMENT_ZONES_NO_PERMISSION_CODE = 403
ENVIRONMENT_ZONES_NOT_FOUND = 404
ENVIRONMENT_ZONES_FAILED_CODE = 500
# Environment application source code specific
ENVIRONMENT_APPLICATIONS_SOURCECODE_ENDPOINT = "sourcecodeaccess"
ENVIRONMENT_SOURCECODE_LINK_SUCCESS_CODE = 200
ENVIRONMENT_SOURCECODE_PACKAGE_SUCCESS_CODE = 201
ENVIRONMENT_SOURCECODE_FAILED_CODE = 500
# Solutions specific
ENVIRONMENT_SOLUTION_ENDPOINT = "solution"
ENVIRONMENT_SOLUTION_SUCCESS_CODE = 200
ENVIRONMENT_SOLUTION_NOT_STATUS_CODE = 400
ENVIRONMENT_SOLUTION_NO_PERMISSION_CODE = 403
ENVIRONMENT_SOLUTION_NOT_FOUND = 404
ENVIRONMENT_SOLUTION_FAILED_CODE = 500
# Solutions status specific
ENVIRONMENT_SOLUTION_STATUS_ENDPOINT = "solutionstatus"
ENVIRONMENT_SOLUTION_STATUS_SUCCESS_CODE = 200
ENVIRONMENT_SOLUTION_STATUS_NOT_STATUS_CODE = 400
ENVIRONMENT_SOLUTION_STATUS_NO_PERMISSION_CODE = 403
ENVIRONMENT_SOLUTION_STATUS_NOT_FOUND = 404
ENVIRONMENT_SOLUTION_STATUS_FAILED_CODE = 500
# Solutions link specific
ENVIRONMENT_SOLUTION_LINK_SUCCESS_CODE = 200
ENVIRONMENT_SOLUTION_LINK_FAILED_CODE = 400

# Downloads Endpoint Variables
DOWNLOADS_ENDPOINT = "downloads"
DOWNLOAD_SUCCESS_CODE = 200
DOWNLOAD_INVALID_KEY_CODE = 400
DOWNLOAD_NO_PERMISSION_CODE = 403
DOWNLOAD_NOT_FOUND = 404
DOWNLOAD_FAILED_CODE = 500
130 | 
--------------------------------------------------------------------------------
/outsystems/vars/manifest_vars.py:
--------------------------------------------------------------------------------
# Key names used in the trigger manifest JSON document.
# Manifest sections
MANIFEST_APPLICATION_VERSIONS = "ApplicationVersions"
MANIFEST_CONFIGURATION_ITEMS = "ConfigurationItems"
MANIFEST_ENVIRONMENT_DEFINITIONS = "EnvironmentDefinitions"

# Application tag attributes
MANIFEST_APPLICATION_KEY = "ApplicationKey"
MANIFEST_APPLICATION_NAME = "ApplicationName"
MANIFEST_APPLICATION_VERSION_KEY = "VersionKey"
MANIFEST_APPLICATION_VERSION_NUMBER = "VersionNumber"
MANIFEST_FLAG_IS_TEST_APPLICATION = "IsTestApplication"

# Configuration item attributes
MANIFEST_MODULE_KEY = "ModuleKey"
MANIFEST_MODULE_NAME = "ModuleName"
MANIFEST_CONFIG_ITEM_KEY = "ConfigurationItemKey"
MANIFEST_CONFIG_ITEM_NAME = "ConfigurationItemName"
MANIFEST_CONFIG_ITEM_TYPE = "ConfigurationItemType"
MANIFEST_CONFIG_ITEM_VALUES = "Values"
MANIFEST_CONFIG_ITEM_TARGET_VALUE = "TargetValue"

# Environment definitions attributes
MANIFEST_ENVIRONMENT_KEY = "EnvironmentKey"
MANIFEST_ENVIRONMENT_NAME = "EnvironmentName"
MANIFEST_ENVIRONMENT_LABEL = "EnvironmentLabel"

# Manifest attributes
MANIFEST_DEPLOYMENT_NOTES = "DeploymentNotes"

# AutoGen Manifest (folder/file names for the auto-generated manifest artifact)
MANIFEST_FOLDER = "trigger_manifest"
MANIFEST_FILE = "auto_gen_trigger_manifest.json"
33 | 
--------------------------------------------------------------------------------
/outsystems/vars/pipeline_vars.py:
--------------------------------------------------------------------------------
# Tunable pipeline behavior: timeouts, polling intervals and status strings.
# Deployment specific variables
QUEUE_TIMEOUT_IN_SECS = 1800
DEPLOYMENT_TIMEOUT_IN_SECS = 3600
SLEEP_PERIOD_IN_SECS = 20
REDEPLOY_OUTDATED_APPS = True
ALLOW_CONTINUE_WITH_ERRORS = False
# Statuses considered "still in progress" while polling a deployment
DEPLOYMENT_STATUS_LIST = ["saved", "running", "needs_user_intervention", "aborting"]
DEPLOYMENT_ERROR_STATUS_LIST = ["aborted", "finished_with_errors"]
DEPLOYMENT_WAITING_STATUS = "needs_user_intervention"
DEPLOYMENT_RUNNING_STATUS = "running"
DEPLOYMENT_SAVED_STATUS = "saved"

# Pipeline files variables
CONFLICTS_FILE = "DeploymentConflicts"
DEPLOY_ERROR_FILE = "DeploymentErrors"

# Application specific variables
MAX_VERSIONS_TO_RETURN = 10
TAG_APP_MAX_RETRIES = 5

# Environment specific variables
SOURCECODE_TIMEOUT_IN_SECS = 3600
SOURCECODE_SLEEP_PERIOD_IN_SECS = 10
SOURCECODE_ONGOING_STATUS = "InProgress"
SOURCECODE_FINISHED_STATUS = "Done"

# Solutions specific variables
SOLUTION_TIMEOUT_IN_SECS = 3600
SOLUTION_SLEEP_PERIOD_IN_SECS = 2
SOLUTION_CREATED_STATUS = "Created"
SOLUTION_READY_STATUS = "Ready"
SOLUTION_GATHERING_DEPENDENCIES_STATUS = "Gathering Dependencies"
SOLUTION_GETTING_BINARIES_STATUS = "Getting Binaries"
SOLUTION_GENERATING_META_MODEL_STATUS = "Generating Meta Model"
SOLUTION_GENERATING_SOLUTION_STATUS = "Generating Solution"
SOLUTION_COMPLETED_STATUS = "Completed"
SOLUTION_ABORTED_STATUS = "Aborted"
38 | 
--------------------------------------------------------------------------------
/outsystems/vars/properties_vars.py:
--------------------------------------------------------------------------------
# Constants for the Properties API.
# Base Properties API Variables
PROPERTIES_API_HTTP_PROTO = "https"
PROPERTIES_API_ENDPOINT = "PropertiesAPI/rest"
PROPERTIES_API_VERSION = 1
PROPERTIES_API_SSL_CERT_VERIFY = True

# Properties API success status codes (all PUT operations return 201 on success)
SET_SITE_PROPERTY_VALUE_SUCCESS_CODE = 201
SET_REST_ENDPOINT_URL_SUCCESS_CODE = 201
SET_SOAP_ENDPOINT_URL_SUCCESS_CODE = 201
SET_TIMER_SCHEDULE_SUCCESS_CODE = 201

# Supported Property types (values of the manifest's ConfigurationItemType)
PROPERTY_TYPE_SITE_PROPERTY = "SiteProperty"
PROPERTY_TYPE_REST_ENDPOINT = "REST_Endpoint"
PROPERTY_TYPE_SOAP_ENDPOINT = "SOAP_Endpoint"
PROPERTY_TYPE_TIMER_SCHEDULE = "TimerSchedule"
18 | 
--------------------------------------------------------------------------------
/outsystems/vars/vars_base.py:
--------------------------------------------------------------------------------
import os
from typing import Any

from dotenv import load_dotenv
 3 | 
 4 | 
 5 | # Evaluates whether there are environment variables that match a global variable
 6 | # If a matching environment variable is found, it returns the environment variable with the correct data type
 7 | # Otherwise, it returns the default value
 8 | def get_configuration_value(variable_name: str, default_value: any):
 9 |     if "OVERRIDE_CONFIG_IN_USE" in os.environ:
10 |         # Verify if there's the variable exists within all env variables
11 |         if (os.environ.get("OVERRIDE_CONFIG_IN_USE") == 'True') and variable_name in os.environ:
12 |             env_value = os.environ[variable_name]
13 |             # Convert env variable type from a string to a int
14 |             if env_value.isnumeric():
15 |                 return int(env_value)
16 |             # Convert env variable type from a string to a boolean
17 |             elif env_value.lower() in ('true', 'false'):
18 |                 return env_value.lower() == 'true'
19 |             else:
20 |                 return env_value
21 |     return default_value
22 | 
23 | 
# loads configuration values from a specified file into the environment variables
def load_configuration_file(config_file_path: str):
    """Load a dotenv-style config file into os.environ and flag overrides as active."""
    # Silently skip when the path does not point to an existing file
    if not os.path.isfile(config_file_path):
        return
    load_dotenv(config_file_path)
    os.environ["OVERRIDE_CONFIG_IN_USE"] = 'True'
    print("Configuration file loaded successfully.", flush=True)
30 | 
--------------------------------------------------------------------------------
/outsystems_components/lifetime/O10/Trigger Pipeline.oap:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems_components/lifetime/O10/Trigger Pipeline.oap
--------------------------------------------------------------------------------
/outsystems_components/lifetime/O11/Trigger Pipeline.oap:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems_components/lifetime/O11/Trigger Pipeline.oap
--------------------------------------------------------------------------------
/outsystems_components/regression_environment/O10/CICD Probe.oap:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems_components/regression_environment/O10/CICD Probe.oap
--------------------------------------------------------------------------------
/outsystems_components/regression_environment/O11/CICD Probe.oap:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems_components/regression_environment/O11/CICD Probe.oap
--------------------------------------------------------------------------------
/outsystems_integrations/architecture_dashboard/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems_integrations/architecture_dashboard/__init__.py
--------------------------------------------------------------------------------
/outsystems_integrations/architecture_dashboard/fetch_tech_debt_sync.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | import sys
 3 | import os
 4 | import argparse
 5 | import dateutil.parser
 6 | from time import sleep
 7 | 
 8 | # Workaround for Jenkins:
 9 | # Set the path to include the outsystems module
10 | # Jenkins exposes the workspace directory through env.
11 | if "WORKSPACE" in os.environ:
12 |     sys.path.append(os.environ['WORKSPACE'])
13 | else:  # Else just add the project dir
14 |     sys.path.append(os.getcwd())
15 | 
16 | # Custom Modules
17 | # Variables
18 | from outsystems.vars.file_vars import ARTIFACT_FOLDER, AD_FOLDER, AD_FILE_PREFIX, AD_APP_FILE
19 | from outsystems_integrations.architecture_dashboard.vars import SLEEP_PERIOD_IN_SECS, MAX_RETRIES
20 | 
21 | # Functions
22 | from outsystems.file_helpers.file import load_data, clear_cache
23 | from outsystems.architecture_dashboard.ad_tech_debt import get_app_techdebt
24 | 
25 | 
def convert_to_date(date_string: str):
    """Parse a timestamp string into a datetime via dateutil's flexible parser."""
    parsed_datetime = dateutil.parser.parse(date_string)
    return parsed_datetime
28 | 
29 | 
30 | # ############################################################# SCRIPT ##############################################################
def main(artifact_dir: str, activation_code: str, api_key: str, dep_manifest: list):
    """Block the pipeline until Architecture Dashboard analysis covers the latest tags.

    Compares each application's tag creation datetime (from the deployment
    manifest) with its last code analysis datetime reported by Architecture
    Dashboard. Exits 0 as soon as the most recent analysis is newer than the
    most recent tag; otherwise retries up to MAX_RETRIES times, sleeping
    SLEEP_PERIOD_IN_SECS between attempts, and exits 1 when retries run out.

    artifact_dir: name of the artifacts folder holding cached API data.
    activation_code: Infrastructure Activation Code.
    api_key: token for Architecture Dashboard API calls.
    dep_manifest: list of application entries; each entry is read for its
        "ApplicationName" and "CreatedOn" keys.
    """
    if not dep_manifest:
        # An empty manifest would otherwise crash below on a None < None
        # datetime comparison; there is nothing to validate, so succeed.
        print("Warning: Empty deployment manifest. Nothing to validate.", flush=True)
        sys.exit(0)

    last_tag_time = None
    last_analysis_time = None

    retry_counter = 0
    while retry_counter < MAX_RETRIES:

        # Compare applications tag creation datetime with Architecture Dashboard's
        # last analysis datetime to assure the analysis includes the last code changes
        for app in dep_manifest:
            app_analysis_time = convert_to_date(get_app_techdebt(artifact_dir, api_key, activation_code, app)["LastAnalysisOn"])
            app_tag_time = convert_to_date(app["CreatedOn"])

            # Save most recent application tag datetime
            if last_tag_time is None or last_tag_time < app_tag_time:
                last_tag_time = app_tag_time

            # Save most recent code analysis datetime
            if last_analysis_time is None or last_analysis_time < app_analysis_time:
                last_analysis_time = app_analysis_time

        if last_tag_time < last_analysis_time:
            print("Success: Code Analysis includes latest code changes.", flush=True)
            sys.exit(0)

        retry_counter += 1
        if retry_counter < MAX_RETRIES:
            # Only wait and invalidate the cache when another attempt will
            # follow; previously the code also slept after the final failed
            # attempt, delaying the error exit by a full sleep period.
            print("Code Analysis does not include the latest code changes. Trying again in {} minutes... (tentative {} out of {})".format(int(SLEEP_PERIOD_IN_SECS / 60), retry_counter, MAX_RETRIES), flush=True)
            sleep(SLEEP_PERIOD_IN_SECS)

            print("Deleting old code analysis cached files...", flush=True)
            # Clear Code Analysis cached data so the next attempt re-fetches it
            for app in dep_manifest:
                filename = "{}.{}{}".format(AD_FILE_PREFIX, app["ApplicationName"], AD_APP_FILE)
                filename = os.path.join(AD_FOLDER, filename)
                clear_cache(artifact_dir, filename)

    print("Error: Max tries reached out.", flush=True)
    sys.exit(1)
70 | 
71 | # End of main()
72 | 
73 | 
if __name__ == "__main__":
    # Command-line interface definition
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("-a", "--artifacts", type=str, default=ARTIFACT_FOLDER,
                            help="Name of the artifacts folder. Default: \"Artifacts\"")
    arg_parser.add_argument("-c", "--activation_code", type=str, required=True,
                            help="Infrastructure Activation Code.")
    arg_parser.add_argument("-t", "--api_key", type=str, required=True,
                            help="Token for Architecture Dashboard API calls.")
    arg_parser.add_argument("-f", "--manifest_file", type=str, required=True,
                            help="Manifest file path.")

    parsed_args = arg_parser.parse_args()

    # Load the deployment manifest from the given path
    manifest_content = load_data("", parsed_args.manifest_file)

    # Hand over to the main script logic (artifacts folder, Infrastructure
    # Activation Code, Architecture Dashboard API key, manifest)
    main(parsed_args.artifacts, parsed_args.activation_code, parsed_args.api_key, manifest_content)
99 | 
--------------------------------------------------------------------------------
/outsystems_integrations/architecture_dashboard/vars.py:
--------------------------------------------------------------------------------
# Architecture Dashboard API Integration Variables
# Sync specific
# Wait time between code-analysis sync attempts (1800 s = 30 minutes)
SLEEP_PERIOD_IN_SECS = 1800
# Maximum number of sync attempts before the sync script gives up
MAX_RETRIES = 3
5 | 
--------------------------------------------------------------------------------
/outsystems_integrations/slack/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/outsystems_integrations/slack/__init__.py
--------------------------------------------------------------------------------
/outsystems_integrations/slack/send_pipeline_status_to_slack.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | import os
 3 | import sys
 4 | import argparse
 5 | 
 6 | # Workaround for Jenkins:
 7 | # Set the path to include the outsystems module
 8 | # Jenkins exposes the workspace directory through env.
 9 | if "WORKSPACE" in os.environ:
10 |     sys.path.append(os.environ['WORKSPACE'])
11 | else:  # Else just add the project dir
12 |     sys.path.append(os.getcwd())
13 | 
14 | # Custom Modules
15 | from outsystems.vars.file_vars import ARTIFACT_FOLDER
16 | from outsystems.file_helpers.file import load_data
17 | from outsystems_integrations.slack.send_slack_message import send_slack_message
18 | 
19 | 
20 | # ---------------------- SCRIPT ----------------------
def main(artifact_dir: str, error_file_name: str, slack_hook: str, slack_channels: list, pipeline_type: str, pipeline_status: bool, msg_title: str, message: str):
    """Send a pipeline status notification to one or more Slack channels.

    If error_file_name is set, its contents are loaded from artifact_dir and
    appended to the message as a "Details" section; failures to read the file
    are reported inside the message instead of aborting the notification.

    artifact_dir: name of the artifacts folder.
    error_file_name: optional filename with error output to embed.
    slack_hook: Slack incoming-webhook URL.
    slack_channels: list of channel names to notify.
    pipeline_type: pipeline flavor key (e.g. "azure" or "jenkins").
    pipeline_status: True for success styling, False for failure.
    msg_title: notification title.
    message: notification body text.
    """
    slack_message = message
    if error_file_name:
        try:
            file_contents = load_data(artifact_dir, error_file_name)
            slack_message += "\n\n*Details:*\n\n`{}`".format(file_contents)
        except FileNotFoundError:
            slack_message += "\nCould not find the file {} in the {} directory".format(error_file_name, artifact_dir)
        except Exception:
            # Was a bare "except:", which would also swallow SystemExit and
            # KeyboardInterrupt; best-effort behavior is kept but scoped.
            slack_message += "\nCould not load the file {} in the {} directory".format(error_file_name, artifact_dir)

    send_slack_message(slack_hook, slack_channels, pipeline_type, msg_title, pipeline_status, slack_message)
33 | 
34 | 
35 | # End of main()
36 | 
if __name__ == "__main__":
    # Command-line interface definition
    cli = argparse.ArgumentParser()
    cli.add_argument("-a", "--artifacts", type=str,
                     help="Name of the artifacts folder. Default: \"Artifacts\"", default=ARTIFACT_FOLDER)
    cli.add_argument("--error_in_file", type=str,
                     help="Filename where the error output is stored", default="")
    cli.add_argument("--slack_hook", type=str,
                     help="Slack hook URL for API calls. Example: \"https://hooks.slack.com/services///\"", required=True)
    cli.add_argument("--slack_channel", type=str,
                     help="Comma separeted list with slack channel names. Example: \"Channel1,Channel-2\"", required=True)
    cli.add_argument("--pipeline", type=str,
                     help="Sets the pipeline type. Currently supported values: \"azure\" or \"jenkins\". Default: \"jenkins\"", default="")
    cli.add_argument("--title", type=str,
                     help="Title of the message that will show up on the notification.", required=True)
    cli.add_argument("--status", type=str,
                     help="Status of the pipeline. True if OK, False if Not OK.", required=True)
    cli.add_argument("--message", type=str,
                     help="Message that will show up on the notification.", required=True)
    parsed = cli.parse_args()

    # Channels arrive as a single comma-separated string
    channel_list = parsed.slack_channel.split(',')
    # The status flag arrives as text; only (case-insensitive) "true" counts as success
    ok_status = (parsed.status.lower() == "true")

    # Hand over to the main script logic
    main(parsed.artifacts, parsed.error_in_file, parsed.slack_hook, channel_list, parsed.pipeline, ok_status, parsed.title, parsed.message)
77 | 
--------------------------------------------------------------------------------
/outsystems_integrations/slack/send_slack_message.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | import json
 3 | import requests
 4 | import sys
 5 | import os
 6 | 
 7 | # Workaround for Jenkins:
 8 | # Set the path to include the outsystems module
 9 | # Jenkins exposes the workspace directory through env.
10 | if "WORKSPACE" in os.environ:
11 |     sys.path.append(os.environ['WORKSPACE'])
12 | else:  # Else just add the project dir
13 |     sys.path.append(os.getcwd())
14 | 
15 | # Custom Modules
16 | from outsystems_integrations.slack.vars import notification_type
17 | 
18 | 
19 | # Sends a slack message for a given channel list
def send_slack_message(slack_hook: str, slack_channels: list, pipeline_type: str, slack_title: str, job_status: bool, slack_message: str):
    """Post a formatted message to each Slack channel via an incoming webhook.

    slack_hook: Slack incoming-webhook URL.
    slack_channels: channel names to post to (one HTTP call per channel).
    pipeline_type: key into notification_type (e.g. "azure"/"jenkins");
        unknown values fall back to a generic username and icon.
    slack_title: message title shown as the main text.
    job_status: True renders a green attachment bar, False a red one.
    slack_message: body text, rendered with Slack markdown.
    """
    if pipeline_type not in notification_type:
        username = "Regression Testing"
        icon = ":outsystems:"
    else:
        username = "{} Regression Testing".format(notification_type[pipeline_type][0])
        icon = notification_type[pipeline_type][1]
    for channel in slack_channels:
        # Build slack post
        post_data = {
            "channel": channel,
            "username": username,
            "text": "{}".format(slack_title),
            "icon_emoji": icon,
            "attachments": [{
                # Green on success, red on failure
                "color": "#49C39E" if job_status else "#D40E0D",
                "text": slack_message,
                "mrkdwn_in": ["text"]
            }]
        }
        # json= serializes the payload and sets the Content-Type header in one
        # go (previously json.dumps(...) was passed positionally as "data" with
        # an explicit None). The timeout prevents the pipeline from hanging
        # forever when Slack is unreachable — requests has no default timeout.
        response = requests.post(slack_hook, json=post_data, timeout=30)
        if response.status_code == 200:
            print("Message sent to slack channel {} successfully...".format(channel))
        else:
            print("Error sending notification to slack channel {}: {}".format(
                channel, response.text))
46 | 
--------------------------------------------------------------------------------
/outsystems_integrations/slack/send_test_results_to_slack.py:
--------------------------------------------------------------------------------
 1 | # Python Modules
 2 | import xunitparser
 3 | import os
 4 | import sys
 5 | import argparse
 6 | 
 7 | # Workaround for Jenkins:
 8 | # Set the path to include the outsystems module
 9 | # Jenkins exposes the workspace directory through env.
10 | if "WORKSPACE" in os.environ:
11 |     sys.path.append(os.environ['WORKSPACE'])
12 | else:  # Else just add the project dir
13 |     sys.path.append(os.getcwd())
14 | 
15 | # Custom Modules
16 | from outsystems.vars.file_vars import ARTIFACT_FOLDER, JUNIT_TEST_RESULTS_FILE
17 | from outsystems_integrations.slack.send_slack_message import send_slack_message
18 | 
19 | 
20 | # ---------------------- SCRIPT ----------------------
def main(artifact_dir: str, slack_hook: str, slack_channels: list, pipeline_type: str, job_name: str, job_url: str):
    """Summarize JUnit BDD test results and post the summary to Slack.

    Reads the JUnit results file from artifact_dir, builds a message with the
    run totals, the dashboard URL, and the list of failed tests, then posts it
    to every channel in slack_channels (green on success, red on failure).

    artifact_dir: name of the artifacts folder containing the results file.
    slack_hook: Slack incoming-webhook URL.
    slack_channels: channel names to notify.
    pipeline_type: pipeline flavor key (e.g. "azure" or "jenkins").
    job_name: job name shown in the notification title.
    job_url: URL of the run dashboard included in the message.
    """
    filename = os.path.join(artifact_dir, JUNIT_TEST_RESULTS_FILE)
    # Close the results file deterministically (the handle was previously leaked)
    with open(filename) as results_file:
        _, tr = xunitparser.parse(results_file)

    message = "*{}* BDD tests run.\n*{}* errors found.".format(tr.testsRun, len(tr.failures))
    # Add test report url
    message += "\n\nGo here for full test report: {}\n\n".format(job_url)

    if len(tr.failures) > 0:
        message += "\nIt failed on the following tests:\n"
        for failure in tr.failures:
            test_info = "{}".format(failure[0])
            # cuts the test_ part and the class name since it's generated by the runner
            # (assumes the runner-generated id contains "test_" — TODO confirm)
            test_info = test_info.split('test_')[1]
            test_info = test_info.split(" ")[0]
            # Splits between espace name and test name
            test_info = test_info.split('__')
            test_module = test_info[0]
            test_name = test_info[1]

            message += "*{} ({})*\n".format(test_name, test_module)

    job_status = (len(tr.failures) <= 0)
    send_slack_message(slack_hook, slack_channels, pipeline_type, "*Test Results for {}:*".format(job_name), job_status, message)
45 | 
46 | 
47 | # End of main()
48 | 
if __name__ == "__main__":
    # Command-line interface definition
    cli = argparse.ArgumentParser()
    cli.add_argument("-a", "--artifacts", type=str,
                     help="Name of the artifacts folder. Default: \"Artifacts\"", default=ARTIFACT_FOLDER)
    cli.add_argument("--slack_hook", type=str,
                     help="Slack hook URL for API calls. Example: \"https://hooks.slack.com/services///\"", required=True)
    cli.add_argument("--slack_channel", type=str,
                     help="Comma separeted list with slack channel names. Example: \"Channel1,Channel-2\"", required=True)
    cli.add_argument("--pipeline", type=str,
                     help="Sets the pipeline type. Currently supported values: \"azure\" or \"jenkins\". Default: \"jenkins\"", default="")
    cli.add_argument("--job_name", type=str,
                     help="Name of the Job that will show up on the notification.", required=True)
    cli.add_argument("--job_dashboard_url", type=str,
                     help="URL for the run dashboard that will show up on the notification.", required=True)
    parsed = cli.parse_args()

    # Channels arrive as one comma-separated string
    channel_list = parsed.slack_channel.split(',')

    # Hand over to the main script logic
    main(parsed.artifacts, parsed.slack_hook, channel_list, parsed.pipeline, parsed.job_name, parsed.job_dashboard_url)
81 | 
--------------------------------------------------------------------------------
/outsystems_integrations/slack/vars.py:
--------------------------------------------------------------------------------
# Maps pipeline type key -> (display name, Slack icon emoji) used when posting notifications
notification_type = {"azure": ("Azure DevOps", ":microsoft:"), "jenkins": ("Jenkins", ":jenkins:")}
2 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
  1 | from distutils.core import setup
  2 | import os
  3 | 
  4 | NAME = 'outsystems-pipeline'
  5 | DESCRIPTION = 'Python package to accelerate the integration of OutSystems with third-party CI/CD tools'
  6 | LONG_DESCRIPTION = '''The outsystems-pipeline Python package provides functions to support the creation of OutSystems CI/CD pipelines using your DevOps automation tool of choice.
  7 | 
  8 | Visit the `project repository `_ on GitHub for instructions on how to build example OutSystems CI/CD pipelines with common DevOps automation tools, as well as documentation that will help you adapt the examples to your particular scenarios.
  9 | 
 10 | 
 11 | What's new
 12 | ==========
 13 | 
 14 | **Parallel Deployments**
 15 | 
 16 |  The following scripts have been updated to enable creating and running parallel deployment plans:
 17 | 
 18 |  * `deploy_latest_tags_to_target_env.py`
 19 |  * `deploy_package_to_target_env.py`
 20 |  * `deploy_tags_to_target_env_with_manifest.py`
 21 | 
 22 |  To enable this feature, use the following parameter:
 23 | 
 24 |  * `--allow_parallel_deployments`: Skips LifeTime validation for active deployment plans.
 25 | 
 26 | **Enhanced Pipeline Operations**
 27 | 
 28 |  New pipeline scripts have been added to streamline operations related to manifest files:
 29 | 
 30 |  * `generate_manifest_file.py`: Generates a trigger manifest file.
 31 |  * `validate_manifest_apps_exist_in_target_env.py`: Verifies that manifest applications exist in the target environment.
 32 | 
 33 | **Updated Package Dependencies**
 34 | 
 35 |  * Updated `requests` dependency to version 2.32.2
 36 |  * Added `packaging` dependency, version 24.1
 37 | 
 38 | Installing and upgrading
 39 | ========================
 40 | 
 41 | Install or upgrade outsystems-pipeline to the latest available version as follows:
 42 | ::
 43 | 
 44 |     pip install -U outsystems-pipeline
 45 | 
 46 | '''
 47 | AUTHOR = u'OutSystems'
 48 | EMAIL = u'cicd.integrations@outsystems.com'
 49 | URL = 'https://github.com/OutSystems/outsystems-pipeline'
 50 | LICENSE = 'Apache License 2.0'
 51 | PYTHON_REQUIRES = '>=3.8'
 52 | KEYWORDS = [
 53 |     '',
 54 | ]
 55 | 
 56 | CLASSIFIERS = [
 57 |     'Development Status :: 4 - Beta',
 58 |     'Intended Audience :: Developers',
 59 |     'Intended Audience :: System Administrators',
 60 |     'License :: OSI Approved :: Apache Software License',
 61 |     'Programming Language :: Python',
 62 |     'Topic :: Software Development :: Build Tools',
 63 |     'Topic :: Software Development :: Quality Assurance',
 64 |     'Topic :: Software Development :: Testing',
 65 |     'Topic :: Software Development :: Testing :: Acceptance',
 66 |     'Topic :: Software Development :: Testing :: BDD',
 67 |     'Topic :: Software Development :: Testing :: Unit',
 68 |     'Topic :: System :: Software Distribution'
 69 | ]
 70 | 
 71 | REQUIREMENTS = [
 72 |     'python-dateutil==2.9.0.post0',
 73 |     'requests==2.32.2',
 74 |     'unittest-xml-reporting==3.2.0',
 75 |     'xunitparser==1.3.4',
 76 |     'toposort==1.10',
 77 |     'python-dotenv==1.0.1',
 78 |     'packaging==24.1'
 79 | ]
 80 | 
 81 | PACKAGES = [
 82 |     'outsystems',
 83 |     'outsystems.architecture_dashboard',
 84 |     'outsystems.bdd_framework',
 85 |     'outsystems.cicd_probe',
 86 |     'outsystems.exceptions',
 87 |     'outsystems.file_helpers',
 88 |     'outsystems.lifetime',
 89 |     'outsystems.manifest',
 90 |     'outsystems.osp_tool',
 91 |     'outsystems.pipeline',
 92 |     'outsystems.properties',
 93 |     'outsystems.vars'
 94 | ]
 95 | 
 96 | if __name__ == '__main__':  # Do not run setup() when we import this module.
 97 |     if os.path.isfile("VERSION"):
 98 |         with open("VERSION", 'r') as version_file:
 99 |             version = version_file.read().replace('\n', '')
100 |     else:
101 |         # dummy version
102 |         version = '1.0.0'
103 | 
104 |     setup(
105 |         name=NAME,
106 |         version='',
107 |         description=DESCRIPTION,
108 |         long_description=LONG_DESCRIPTION,
109 |         keywords=' '.join(KEYWORDS),
110 |         author=AUTHOR,
111 |         author_email=EMAIL,
112 |         url=URL,
113 |         license=LICENSE,
114 |         python_requires=PYTHON_REQUIRES,
115 |         classifiers=CLASSIFIERS,
116 |         packages=PACKAGES,
117 |         install_requires=REQUIREMENTS
118 |     )
119 | 
--------------------------------------------------------------------------------
/test/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OutSystems/outsystems-pipeline/0bea1fc1970f8b99fe86c07a6f403e42988e221d/test/__init__.py
--------------------------------------------------------------------------------
/test/test_deploy_latest_tags.py:
--------------------------------------------------------------------------------
def test_deploy_latest_tags():
    """Placeholder test case; always passes so the suite has one green test."""
    assert True
3 | 
--------------------------------------------------------------------------------