├── tests
│   ├── __init__.py
│   ├── test_base_logger.py
│   ├── test_main.py
│   ├── test_validator.py
│   ├── ci
│   │   └── generate_apigee_edge_access_token.py
│   ├── test_topology.py
│   ├── test_exporter.py
│   ├── test_core_wrappers.py
│   ├── test_rest.py
│   ├── test_nextgen.py
│   ├── test_classic.py
│   └── test_utils.py
├── topology_mapping
│   ├── __init__.py
│   └── pod.py
├── assessment_mapping
│   ├── __init__.py
│   ├── resourcefiles.py
│   └── targetservers.py
├── qualification_report_mapping
│   ├── __init__.py
│   ├── report_summary.py
│   └── header_mapping.py
├── .bandit.yml
├── .dockerignore
├── assets
│   └── visualization.png
├── qualification_report_mapping_json
│   ├── validation_report.json
│   ├── topology_installation_mapping.json
│   ├── org_resourcefiles.json
│   ├── sharded_proxies.json
│   ├── aliases_with_private_keys.json
│   ├── northbound_mtls.json
│   ├── company_and_developers.json
│   ├── cname_anomaly.json
│   ├── apps_without_api_products.json
│   ├── cache_without_expiry.json
│   ├── target_environments.json
│   ├── api_with_multiple_basepath.json
│   ├── env_limits.json
│   ├── org_limits.json
│   ├── proxies_per_env.json
│   ├── api_limits.json
│   ├── json_path_enabled.json
│   ├── anti_patterns.json
│   ├── unsupported_policies.json
│   └── report_summary.json
├── sample
│   ├── outputs
│   │   └── sample_qualification_report.xlsx
│   └── inputs
│       ├── x.input.properties
│       ├── saas.input.properties
│       └── opdk.input.properties
├── .github
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── ISSUE_TEMPLATE
│   │   ├── feature_request.md
│   │   └── bug_report.md
│   ├── config
│   │   └── commitlint.config.mjs
│   └── workflows
│       ├── static-checks.yml
│       └── tests.yml
├── topology_mapping_json
│   └── pod.json
├── assessment_mapping_json
│   ├── targetservers.json
│   └── resourcefiles.json
├── test-requirements.txt
├── requirements.txt
├── backend.properties
├── Dockerfile
├── .lycheeignore
├── CONTRIBUTING.md
├── base_logger.py
├── permissions.json
├── .gitignore
├── main.py
├── unifier.py
├── topology.py
├── LICENSE
├── README.md
├── classic.py
└── rest.py
/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /topology_mapping/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /assessment_mapping/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /qualification_report_mapping/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.bandit.yml: -------------------------------------------------------------------------------- 1 | exclude_dirs: 2 | - 'tests/*' 3 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | # .dockerignore 2 | sample 3 | assets 4 | -------------------------------------------------------------------------------- /assets/visualization.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/apigee/apigee-migration-assessment-tool/HEAD/assets/visualization.png -------------------------------------------------------------------------------- /qualification_report_mapping_json/validation_report.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Env | Resource", "Name", "Importable", "Reason", "Imported to Apigee X/Hybrid"] 3 | } -------------------------------------------------------------------------------- /sample/outputs/sample_qualification_report.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/apigee/apigee-migration-assessment-tool/HEAD/sample/outputs/sample_qualification_report.xlsx -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Fixes # 2 | 3 | > It's a good idea to open an issue first for discussion. 4 | 5 | - [ ] Tests pass 6 | - [ ] Appropriate changes to README are included in PR -------------------------------------------------------------------------------- /topology_mapping_json/pod.json: -------------------------------------------------------------------------------- 1 | { 2 | "gateway": { 3 | "bgcolor": "#d0e2f3" 4 | }, 5 | "central": { 6 | "bgcolor": "#f4cdcc" 7 | }, 8 | "analytics": { 9 | "bgcolor": "#fff2cc" 10 | } 11 | } -------------------------------------------------------------------------------- /qualification_report_mapping_json/topology_installation_mapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Data center", "Pod", "Component", "Internal IP", "Internal Hostname", "Is Up", "Reachable"], 3 | "key_mapping": ["internalIP", "internalHostName", "isUp", "reachable"] 4 | } -------------------------------------------------------------------------------- /sample/inputs/x.input.properties: -------------------------------------------------------------------------------- 1 | [inputs] 2 | SOURCE_URL=https://apigee.googleapis.com/v1 3 | SOURCE_ORG=apigee-org-1234 4 | SOURCE_AUTH_TYPE=oauth 5 | SOURCE_APIGEE_VERSION=SAAS 6 | TARGET_URL=https://apigee.googleapis.com/v1 7 | GCP_PROJECT_ID=apigee-org-4321 8 | TARGET_DIR=target 9 | TARGET_COMPARE=false 10 | SSL_VERIFICATION=true 11 | -------------------------------------------------------------------------------- /assessment_mapping_json/targetservers.json: -------------------------------------------------------------------------------- 1 | { 2 | "host": { 3 | "invalid_values": { 4 | "localhost": { 5 | "message": "localhost is not a valid FQDN value" 6 | }, 7 | "127.0.0.1": { 8 | "message": "127.0.0.1 is not a valid IP for Apigee X" 9 | } 10 | } 11 | } 12 | } -------------------------------------------------------------------------------- /sample/inputs/saas.input.properties: -------------------------------------------------------------------------------- 1 | [inputs] 2 | SOURCE_URL=https://api.enterprise.apigee.com/v1 3 | SOURCE_ORG=sample-saas-project-1 4 | SOURCE_AUTH_TYPE=oauth 5 | SOURCE_APIGEE_VERSION=SAAS 6 | TARGET_URL=https://apigee.googleapis.com/v1 7 | GCP_PROJECT_ID=sample-apigee-project-11 8 | TARGET_DIR=target 9 | TARGET_COMPARE=false 10 | SSL_VERIFICATION=true 11 | 12 | -------------------------------------------------------------------------------- /assessment_mapping_json/resourcefiles.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": { 3 | "invalid_values": { 4 | "node": { 5 | "message": "Files with type node are not supported in Apigee X/Hybrid" 6 | }, 7 | "hosted": { 8 | "message": "Hosted Targets are not 
supported in Apigee X/Hybrid" 9 | } 10 | } 11 | } 12 | } -------------------------------------------------------------------------------- /qualification_report_mapping_json/org_resourcefiles.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Organization", "ResourceFiles"], 3 | "info_block": { 4 | "text": "[Feature Parity] Resourcefiles\n\nOrg Level Resourcefiles are not allowed in Apigee X/Hybrid.\n", 5 | "text_line_no_for_col_count": 2, 6 | "col_merge": 3, 7 | "start_row": 2, 8 | "end_row": 10 9 | } 10 | } -------------------------------------------------------------------------------- /qualification_report_mapping_json/sharded_proxies.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Organization", "Api Name", "New Sharded Proxies"], 3 | "info_block": { 4 | "text": "[Feature Parity] Proxy Unifier\n\nProxy Unifier is required because Apigee X currently supports only 10 proxy endpoints.\n", 5 | "text_line_no_for_col_count": 2, 6 | "col_merge": 3, 7 | "start_row": 2, 8 | "end_row": 10 9 | } 10 | } -------------------------------------------------------------------------------- /qualification_report_mapping_json/aliases_with_private_keys.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Organization", "Env Name", "Keystore", "Alias", "Key Name"], 3 | "info_block": { 4 | "text": "Keystore aliases with private keys cannot be imported directly, \nbecause private keys cannot be exported.", 5 | "text_line_no_for_col_count": 0, 6 | "col_merge": 3, 7 | "start_row": 2, 8 | "end_row": 14 9 | } 10 | } -------------------------------------------------------------------------------- /sample/inputs/opdk.input.properties: -------------------------------------------------------------------------------- 1 | [inputs] 2 | # for OPDK, use the IP or hostname of the management server 3 | SOURCE_URL=https://192.168.56.55/v1 4 | 5 | # Apigee OPDK organization name 6 | SOURCE_ORG=validate 7 | 8 | # auth type can be basic or oauth. 9 | SOURCE_AUTH_TYPE=basic 10 | 11 | SOURCE_APIGEE_VERSION=OPDK 12 | TARGET_URL=https://apigee.googleapis.com/v1 13 | GCP_PROJECT_ID=sample-apigee-project-1 14 | TARGET_DIR=target 15 | TARGET_COMPARE=false 16 | SSL_VERIFICATION=false 17 | -------------------------------------------------------------------------------- /qualification_report_mapping_json/northbound_mtls.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Organization", "Environment", "Vhost Name", "One way TLS", "mTLS", "Keystore"], 3 | "info_block": { 4 | "text": "[Feature Parity] Northbound mTLS\n\nApigee X leverages Google Load Balancer for supporting Northbound mTLS.\n\nWorkaround:\nmTLS in GLB is currently in Public Preview.", 5 | "text_line_no_for_col_count": 2, 6 | "col_merge": 6, 7 | "start_row": 2, 8 | "end_row": 16 9 | } 10 | } -------------------------------------------------------------------------------- /qualification_report_mapping_json/company_and_developers.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Organization", "Companies"], 3 | "info_block": { 4 | "text": "[Feature Parity] Companies\n\nApigee X does not support the Companies feature. 
This lists all Companies.\n\nLong Term Plan:\nAlternate Solution: AppGroups: https://cloud.google.com/apigee/docs/api-platform/publish/organizing-client-app-ownership#what-are-appgroups. ", 5 | "text_line_no_for_col_count": 5, 6 | "col_merge": 3, 7 | "start_row": 2, 8 | "end_row": 10 9 | } 10 | } -------------------------------------------------------------------------------- /qualification_report_mapping_json/cname_anomaly.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Org Name", "Env", "Vhost Name"], 3 | "info_block": { 4 | "text": "[Anti-pattern] Free Trial Cert Usage\n\nPlease refer to this documentation for further information and fix.", 5 | "text_line_no_for_col_count": 2, 6 | "col_merge": 3, 7 | "start_row": 2, 8 | "end_row": 6, 9 | "link":[{ 10 | "link_text": "Free Trial Cert Usage", 11 | "link": "https://docs.apigee.com/migration-to-x/migration-antipatterns#trial-certificate-in-a-virtual-host" 12 | }] 13 | } 14 | } -------------------------------------------------------------------------------- /qualification_report_mapping_json/apps_without_api_products.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Organization", "App Name", "App Id", "Status"], 3 | "info_block": { 4 | "text": "[Anti-pattern] Apps without API Product\n\nPlease refer to this documentation for more information and suggested fix.", 5 | "text_line_no_for_col_count": 2, 6 | "col_merge": 3, 7 | "start_row": 2, 8 | "end_row": 7, 9 | "link":[{ 10 | "link_text": "Apps without API Product", 11 | "link": "https://docs.apigee.com/migration-to-x/migration-antipatterns#apps-without-api-products" 12 | }] 13 | } 14 | } -------------------------------------------------------------------------------- /qualification_report_mapping_json/cache_without_expiry.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Organization", "Api Name", "Policy Name", "Policy Type"], 3 | "info_block": { 4 | "text": "[Anti-pattern] Cache Expiry Limit not set\n\nPlease refer to this documentation for description and suggested fix.", 5 | "text_line_no_for_col_count": 2, 6 | "col_merge": 3, 7 | "start_row": 2, 8 | "end_row": 8, 9 | "link":[{ 10 | "link_text": "Cache without expiry time", 11 | "link": "https://docs.apigee.com/migration-to-x/migration-antipatterns#cache-without-expiry-time" 12 | }] 13 | } 14 | } -------------------------------------------------------------------------------- /test-requirements.txt: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | # See the License for the specific language governing permissions and 13 | # limitations under the License 14 | urllib3==2.6.0 -------------------------------------------------------------------------------- /qualification_report_mapping_json/target_environments.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Organization", "Env Name", "Sharded Env", "Proxies List", "Shared Flows List", "Proxies Count", "Shared Flows Count", "Total Count"], 3 | "info_block": { 4 | "text": "[Proxy Updates] In Apigee X, the total number of proxies and shared flows in an environment can't be more than 60.\nIf any environment exceeds that limit, it needs to be sharded into multiple environments.\nThis sheet presents the target environments after sharding the respective environments.", 5 | "text_line_no_for_col_count": 0, 6 | "col_merge": 3, 7 | "start_row": 2, 8 | "end_row": 2 9 | } 10 | } -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. -------------------------------------------------------------------------------- /qualification_report_mapping_json/api_with_multiple_basepath.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Organization", "Api Name", "Base Paths"], 3 | "info_block": { 4 | "text": "[Anti-pattern] APIs with Multiple Basepaths\n\nApigee X currently supports only 10 proxy endpoints. 
Best practice is to have only one Proxy Endpoint in an API Proxy.", 5 | "text_line_no_for_col_count": 2, 6 | "col_merge": 3, 7 | "start_row": 2, 8 | "end_row": 9, 9 | "link":[{ 10 | "link_text": "APIs with Multiple Basepaths", 11 | "link": "https://docs.apigee.com/migration-to-x/migration-antipatterns#multiple-base-paths-deployed-for-an-api-proxy" 12 | }] 13 | } 14 | } -------------------------------------------------------------------------------- /qualification_report_mapping_json/env_limits.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Organization", "Environment", "Target Servers", "Caches", "Certs", "KVMs", "Encrypted KVM Count", "Non Encrypted KVM Count", "Vhosts", "References"], 3 | "info_block": { 4 | "text": "[Product Updates] Limits per Environment\n\nApigee X enforces, or will start to enforce, limits at the environment level.\n\nFix:\nReduce the number of these items based on the maximum allowable number per environment.", 5 | "text_line_no_for_col_count": 2, 6 | "col_merge": 3, 7 | "start_row": 2, 8 | "end_row": 13, 9 | "link":[{ 10 | "link_text": "Limits per environment", 11 | "link": "https://cloud.google.com/apigee/docs/api-platform/reference/limits" 12 | }] 13 | } 14 | } -------------------------------------------------------------------------------- /qualification_report_mapping_json/org_limits.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Organization", "Developer Count", "KVM Count", "Encrypted KVM Count", "Non Encrypted KVM Count", "Apps Count", "Api Products Count", "Apis Count"], 3 | "info_block": { 4 | "text": "[Anti-pattern] Resources Count per Org\n\nApigee X enforces a maximum of 100 KVMs, 1 million apps, and 5,000 API products per organization.\n\nFix:\nThese resources must be re-organized and reduced to meet these limits.", 5 | "text_line_no_for_col_count": 2, 6 | "col_merge": 3, 7 | "start_row": 2, 8 | "end_row": 8, 9 | "link":[{ 10 | "link_text": "Limits per organization", 11 | "link": "https://cloud.google.com/apigee/docs/api-platform/reference/limits" 12 | }] 13 | } 14 | } -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Desktop (please complete the following information):** 27 | - OS: [e.g. iOS] 28 | - Browser [e.g. chrome, safari] 29 | - Version [e.g. 22] 30 | 31 | **Additional context** 32 | Add any other context about the problem here. -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License 14 | requests==2.32.4 15 | xlsxwriter==3.1.2 16 | pyvis==0.3.2 17 | xmltodict==0.13.0 18 | diagrams>=0.24.0 19 | google-auth==2.38.0 20 | google-cloud-resource-manager==1.14.0 21 | defusedxml==0.7.1 22 | -------------------------------------------------------------------------------- /.github/config/commitlint.config.mjs: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2025 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | export default { 18 | extends: ['@commitlint/config-conventional'], 19 | rules: { 20 | 'subject-case': [1, 'always', ['lower-case']] 21 | } 22 | }; -------------------------------------------------------------------------------- /qualification_report_mapping_json/proxies_per_env.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": [ 3 | "Organization", 4 | "Environment", 5 | "Proxies", 6 | "SharedFlows", 7 | "Proxies Shared Flows" 8 | ], 9 | "info_block": { 10 | "text": "[Feature Parity] Max Proxies + SharedFlows per Environment\n\nUnder the Subscription 2021 plan, Apigee X has a limit of 6,000 Proxies + SharedFlows.\n\nApigee hybrid supports 4,250 Proxies + SharedFlows (Subscription 2021) and 6,000 Proxies + SharedFlows (Subscription 2024).\n", 11 | "text_line_no_for_col_count": 2, 12 | "col_merge": 3, 13 | "start_row": 2, 14 | "end_row": 14, 15 | "link": [ 16 | { 17 | "link_text": "Proxies and Sharedflow limits", 18 | "link": "https://cloud.google.com/apigee/docs/api-platform/reference/limits#api-proxies" 19 | } 20 | ] 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /qualification_report_mapping_json/api_limits.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Organization", "Api Name", "Revisions"], 3 | "info_block": { 4 | "text": "[Anti-pattern] Product Limits - Max Number of Revision History\n\nApigee X strictly enforces a maximum of 250 revisions per API. 
Customers exceeding the limits may experience one or more of these types of symptoms: high API latencies, low API throughput, and failing API calls.\n\nFix:\nThe customer can reduce the number of revisions to meet the recommended maximum, or decide which revisions to migrate over to X.", 5 | "text_line_no_for_col_count": 5, 6 | "col_merge": 3, 7 | "start_row": 2, 8 | "end_row": 13, 9 | "link":[{ 10 | "link_text": "Max Number of Revision History", 11 | "link": "https://cloud.google.com/apigee/docs/api-platform/reference/limits" 12 | }] 13 | } 14 | } -------------------------------------------------------------------------------- /qualification_report_mapping_json/json_path_enabled.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Organization", "Api Name", "Policy", "Usage Count"], 3 | "info_block": { 4 | "text": "[Product Updates] JSONPath Enabled\n\nApigee X's JSONPath version has been upgraded to 2.4.0, which is compliant with the JSONPath specification. As a result, any usage that relies on the Apigee Edge version (0.8.0) will stop working as the API moves to Apigee X.\n\nFix:\nDuring the upgrade process, APIs will have to be updated to support the 2.4.0 JSONPath version.", 5 | "text_line_no_for_col_count": 5, 6 | "col_merge": 3, 7 | "start_row": 2, 8 | "end_row": 12, 9 | "link":[{ 10 | "link_text": "Use this to understand how to switch to latest jsonpath", 11 | "link": "https://cloud.google.com/apigee/docs/api-platform/reference/message-template-intro#json-path-function" 12 | }] 13 | } 14 | } -------------------------------------------------------------------------------- backend.properties: -------------------------------------------------------------------------------- 1 | [inputs] 2 | NO_OF_PROXIES_PER_ENV_LIMITS=50 3 | NO_OF_SHARED_FLOWS_PER_ENV_LIMITS=60 4 | NO_OF_PROXIES_AND_SHARED_FLOWS_PER_ENV_LIMITS=60 5 | NO_OF_API_REVISIONS_IN_API_PROXY=250 6 | NO_OF_KVMS_PER_ENV=100 7 | NO_OF_KVMS_PER_ORG=100 8 | NO_OF_TARGET_SERVERS_PER_ENV=1000 9 | MAX_PROXY_ENDPOINT_LIMIT=10 10 | NO_OF_APPS_PER_ORG=1000000 11 | NO_OF_API_PRODUCTS_PER_ORG=5000 12 | 13 | [unifier] 14 | source_unzipped_apis=/source_unzipped_apis 15 | unifier_output_dir=unifier_output_dir 16 | unifier_zipped_bundles=unifier_zipped_bundles 17 | proxy_endpoint_count=10 18 | debug=false 19 | 20 | [export] 21 | EXPORT_DIR=export 22 | EXPORT_FILE=export_data.json 23 | 24 | [topology] 25 | TOPOLOGY_DIR=topology 26 | NW_TOPOLOGY_MAPPING=pod_component_mapping.json 27 | DATA_CENTER_MAPPING=data_center_mapping.json 28 | 29 | [report] 30 | QUALIFICATION_REPORT=qualification_report.xlsx 31 | 32 | [visualize] 33 | VISUALIZATION_GRAPH_FILE=visualization.html -------------------------------------------------------------------------------- /qualification_report_mapping_json/anti_patterns.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Organization", "Api Name", "Api Policy", "Antip - Quota Distributed", "Antip - Quota Synchronous"], 3 | "link":[{ 4 | "link_text": "Quota Non Synchronous", 5 | "link": "https://docs.apigee.com/api-platform/reference/policies/quota-policy#synchronous" 6 | }, 7 | { 8 | "link_text": "Quota Non Distributed", 9 | "link": "https://docs.apigee.com/api-platform/antipatterns/non-distributed-quota" 10 | } 11 | ], 12 | "info_block": [{ 13 | "text": "[Anti-pattern] Quota Non Distributed\nPlease refer to this documentation for details and suggested fix.", 14 | "col_merge": 1, 15 
| "start_row": 2, 16 | "end_row": 5, 17 | "text_line_no_for_col_count": 1 18 | }, 19 | { 20 | "text": "[Anti-pattern] Quota Non Synchronous\nPlease refer to this documentation for details and suggested fix.", 21 | "col_merge": 1, 22 | "start_row": 7, 23 | "end_row": 10 24 | }] 25 | } -------------------------------------------------------------------------------- /topology_mapping/pod.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2025 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License 16 | 17 | """Loads the pod mapping from a JSON file. 18 | 19 | This module parses the `pod.json` file and stores the mapping 20 | in the `pod_mapping` variable. The `parse_json` utility function 21 | is used to perform the parsing. 22 | """ 23 | 24 | from utils import parse_json # pylint: disable=E0401 25 | 26 | pod_mapping = parse_json( 27 | "./topology_mapping_json/pod.json") 28 | -------------------------------------------------------------------------------- /assessment_mapping/resourcefiles.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2025 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License 16 | 17 | """Loads the resource files mapping from a JSON file. 18 | 19 | This module parses the `resourcefiles.json` file and stores the mapping 20 | in the `resourcefiles_mapping` variable. The `parse_json` utility function 21 | is used to perform the parsing. 22 | """ 23 | 24 | from utils import parse_json # pylint: disable=E0401 25 | 26 | resourcefiles_mapping = parse_json( 27 | "./assessment_mapping_json/resourcefiles.json") 28 | -------------------------------------------------------------------------------- /assessment_mapping/targetservers.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2025 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
14 | # See the License for the specific language governing permissions and 15 | # limitations under the License 16 | 17 | """Loads the Target Servers mapping from a JSON file. 18 | 19 | This module parses the `targetservers.json` file and stores the mapping 20 | in the `targetservers_mapping` variable. The `parse_json` utility 21 | function is used to perform the parsing. 22 | """ 23 | 24 | from utils import parse_json  # pylint: disable=E0401 25 | 26 | targetservers_mapping = parse_json( 27 | "./assessment_mapping_json/targetservers.json") 28 | -------------------------------------------------------------------------------- /qualification_report_mapping/report_summary.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2025 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License 16 | 17 | """Loads the report summary mapping from a JSON file. 18 | 19 | This module parses the `report_summary.json` file and stores the mapping 20 | in the `report_summary` variable. The `parse_json` utility function 21 | is used to perform the parsing. 22 | """ 23 | 24 | 25 | from utils import parse_json  # pylint: disable=E0401 26 | 27 | report_summary = parse_json( 28 | "./qualification_report_mapping_json/report_summary.json") 29 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | 2 | # Copyright 2025 Google LLC 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License 15 | 16 | FROM python:3.12-alpine 17 | 18 | # Create the apigee user and group. 19 | RUN addgroup -S apigee && adduser -S apigee -G apigee && \ 20 | apk add --no-cache --virtual .build-deps build-base && \ 21 | apk add --no-cache graphviz=12.2.1-r0 22 | 23 | # Copy only the requirements file first to leverage Docker's build cache. 24 | # hadolint ignore=DL3045 25 | COPY requirements.txt requirements.txt 26 | 27 | # As root, install the Python dependencies. 28 | RUN python3 -m pip install --no-cache-dir -r requirements.txt && \ 29 | apk del .build-deps 30 | 31 | USER apigee 32 | 33 | WORKDIR /app 34 | 35 | COPY --chown=apigee:apigee . . 
36 | 37 | HEALTHCHECK \ 38 | CMD python -c 'print()' 39 | 40 | # Set the entrypoint to execute the Python script 41 | ENTRYPOINT ["python3", "main.py"] 42 | -------------------------------------------------------------------------------- /qualification_report_mapping_json/unsupported_policies.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": ["Org Name", "Api Name", "Policy", "Policy Type"], 3 | "info_block": { 4 | "text": "[Product Updates] Unsupported Policies\n\nThe following policies are not supported in Apigee X, and the associated alternate policies/suggested fixes must be implemented during the migration:\n1. ConcurrentRatelimit - SpikeArrest Policy\n2. ConnectorCallout - Obtain Google OAuth Token\n3. DeleteOAuthV1Info - OAuth v2 Policy\n4. GetOAuthV1Info - OAuth v2 Policy\n5. OAuthV1 - OAuth v2 Policy\n6. Ldap - Not supported\n7. StatisticsCollector - DataCapture Policy", 5 | "text_line_no_for_col_count": 2, 6 | "col_merge": 3, 7 | "start_row": 2, 8 | "end_row": 13, 9 | "link":[{ 10 | "link_text": "SpikeArrest Policy", 11 | "link": "https://docs.apigee.com/api-platform/develop/comparing-quota-spike-arrest-and-concurrent-rate-limit-policies" 12 | }, 13 | { 14 | "link_text": "Obtain Google OAuth Token", 15 | "link": "https://cloud.google.com/apigee/docs/api-platform/security/google-auth/overview" 16 | }, 17 | { 18 | "link_text": "OAuth v2 Policy", 19 | "link": "https://cloud.google.com/apigee/docs/api-platform/reference/policies/oauthv2-policy" 20 | }, 21 | { 22 | "link_text": "DataCapture Policy", 23 | "link": "https://cloud.google.com/apigee/docs/api-platform/reference/policies/data-capture-policy" 24 | }] 25 | } 26 | } -------------------------------------------------------------------------------- /.lycheeignore: -------------------------------------------------------------------------------- 1 | # These links are ignored by lychee link checker: https://github.com/lycheeverse/lychee 2 | # The file allows you to list multiple regular expressions for exclusion (one pattern per line). 3 | # The `.lycheeignore` file is only used for excluding URLs, not paths. Use the `exclude_path` key in the `lychee.toml` file. 
ref: https://github.com/lycheeverse/lycheeverse.github.io/blob/master/recipes/excluding-paths.md 4 | 5 | https://github.com/cloud-innovations/apigee-migration-assessment-tool/actions/workflows/tests.yml/badge.svg 6 | https://github.com/cloud-innovations/apigee-migration-assessment-tool/actions/workflows/tests.yml 7 | https://docs.apigee.com/api-platform/develop/comparing-quota-spike-arrest-and-concurrent-rate-limit-policies 8 | https://docs.apigee.com/api-platform/system-administration/management-api-overview 9 | https://docs.apigee.com/migration-to-x/migration-antipatterns#multiple-base-paths-deployed-for-an-api-proxy 10 | https://docs.apigee.com/api-platform/antipatterns/non-distributed-quota 11 | https://docs.apigee.com/migration-to-x/migration-antipatterns#cache-without-expiry-time 12 | https://docs.apigee.com/migration-to-x/migration-antipatterns#apps-without-api-products 13 | https://docs.apigee.com/api-platform/reference/policies/quota-policy#synchronous 14 | https://cloud.google.com/apigee/docs/api-platform/reference/limits 15 | https://docs.apigee.com/migration-to-x/migration-antipatterns#trial-certificate-in-a-virtual-host 16 | https://jsonpath-comparison-lwy4scuauq-wl.a.run.app/ 17 | https://cloud.google.com/apigee/docs/api-platform/reference/limits#api-proxies 18 | https://cloud.google.com/apigee/docs/api-platform/security/google-auth/overview 19 | https://cloud.google.com/apigee/docs/api-platform/reference/policies/oauthv2-policy 20 | https://cloud.google.com/apigee/docs/api-platform/reference/policies/data-capture-policy 21 | https://cloud.google.com/iam/docs/understanding-roles#apigee-roles 22 | https://docs.apigee.com/api-platform/system-administration/edge-built-roles 23 | file:///github/workspace/sample/outputs/qualification_report.xlsx 24 | file:///github/workspace/sample/outputs 25 | -------------------------------------------------------------------------------- /.github/workflows/static-checks.yml: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | --- 16 | name: Static Checks Pipeline 17 | on: # yamllint disable-line rule:truthy 18 | push: 19 | pull_request: 20 | branches: 21 | - main 22 | workflow_dispatch: 23 | schedule: 24 | - cron: "0 0 * * *" 25 | 26 | permissions: read-all 27 | 28 | jobs: 29 | linter: 30 | name: Lint Codebase 31 | runs-on: ubuntu-latest 32 | steps: 33 | - name: Checkout Code 34 | uses: actions/checkout@v5 35 | - name: Run Mega Linter 36 | uses: oxsecurity/megalinter@v8.3.0 37 | env: 38 | DEFAULT_BRANCH: main 39 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 40 | VALIDATE_JAVASCRIPT_STANDARD: false 41 | VALIDATE_GHERKIN: false 42 | VALIDATE_JSCPD: false 43 | SPELL_MISSPELL_DISABLE_ERRORS: true 44 | SPELL_CSPELL_DISABLE_ERRORS: true 45 | COPYPASTE_JSCPD_DISABLE_ERRORS: true 46 | LINTER_RULES_PATH: "." 
47 | GROOVY_NPM_GROOVY_LINT_FILTER_REGEX_EXCLUDE: "Jenkinsfile" 48 | MARKDOWN_MARKDOWN_LINK_CHECK_DISABLE_ERRORS: true 49 | PYTHON_MYPY_DISABLE_ERRORS: true 50 | DISABLE_LINTERS: "SPELL_CSPELL,REPOSITORY_KICS,PYTHON_PYRIGHT,REPOSITORY_DEVSKIM" 51 | 52 | commit-messages: 53 | name: Conventional Commits Lint 54 | runs-on: ubuntu-latest 55 | steps: 56 | - uses: actions/checkout@v4 57 | with: 58 | fetch-depth: 0 59 | - uses: wagoid/commitlint-github-action@v6 60 | with: 61 | configFile: .github/config/commitlint.config.mjs 62 | failOnWarnings: false 63 | -------------------------------------------------------------------------------- /tests/test_base_logger.py: -------------------------------------------------------------------------------- 1 | 2 | """ 3 | Tests for the base_logger module. 4 | """ 5 | import unittest 6 | import logging 7 | import os 8 | from unittest.mock import patch 9 | import importlib 10 | import base_logger 11 | 12 | from base_logger import CustomFormatter, logger 13 | 14 | 15 | class TestBaseLogger(unittest.TestCase): 16 | """ 17 | Test cases for the base_logger module. 18 | """ 19 | 20 | def setUp(self): 21 | """ 22 | Set up the test case. 23 | """ 24 | # Clear existing handlers 25 | for handler in logger.handlers[:]: 26 | logger.removeHandler(handler) 27 | 28 | def test_custom_formatter(self): 29 | """ 30 | Test the custom formatter. 31 | """ 32 | formatter = CustomFormatter() 33 | record = logging.LogRecord( 34 | name='test', 35 | level=logging.INFO, 36 | pathname='test.py', 37 | lineno=10, 38 | msg='Test message', 39 | args=(), 40 | exc_info=None 41 | ) 42 | formatted_message = formatter.format(record) 43 | self.assertIn('Test message', formatted_message) 44 | self.assertIn('test.py:10', formatted_message) 45 | 46 | @patch('base_logger.logging.StreamHandler') 47 | def test_logger_stream_handler(self, mock_stream_handler): 48 | """ 49 | Test the logger's stream handler. 50 | """ 51 | mock_stream_handler.return_value.level = logging.DEBUG 52 | with patch.dict(os.environ, {'LOG_HANDLER': 'Stream', 53 | 'LOGLEVEL': 'DEBUG'}): 54 | importlib.reload(base_logger) 55 | self.assertEqual(len(logger.handlers), 1) 56 | self.assertIsInstance(logger.handlers[0], 57 | type(mock_stream_handler.return_value)) 58 | self.assertEqual(logger.level, logging.DEBUG) 59 | 60 | @patch('base_logger.logging.FileHandler') 61 | def test_logger_file_handler(self, mock_file_handler): 62 | """ 63 | Test the logger's file handler. 64 | """ 65 | mock_file_handler.return_value.level = logging.INFO 66 | with patch.dict(os.environ, {'LOG_HANDLER': 'File', 67 | 'LOG_FILE_PATH': 'test.log', 68 | 'LOGLEVEL': 'INFO'}): 69 | importlib.reload(base_logger) 70 | self.assertEqual(len(logger.handlers), 1) 71 | self.assertIsInstance(logger.handlers[0], 72 | type(mock_file_handler.return_value)) 73 | self.assertEqual(logger.level, logging.INFO) 74 | mock_file_handler.assert_called_with('test.log', mode='a') 75 | 76 | 77 | if __name__ == '__main__': 78 | unittest.main() 79 | -------------------------------------------------------------------------------- /tests/test_main.py: -------------------------------------------------------------------------------- 1 | 2 | """ 3 | Tests for the main module. 4 | """ 5 | import unittest 6 | from unittest.mock import patch, MagicMock 7 | 8 | from main import main 9 | 10 | 11 | class TestMain(unittest.TestCase): 12 | """ 13 | Test cases for the main function. 
14 | """ 15 | 16 | @patch('main.argparse.ArgumentParser') 17 | @patch('main.parse_config') 18 | @patch('main.pre_validation_checks') 19 | @patch('main.export_artifacts') 20 | @patch('main.validate_artifacts') 21 | @patch('main.visualize_artifacts') 22 | @patch('main.get_topology') 23 | @patch('main.qualification_report') 24 | @patch('main.parse_json') 25 | @patch('main.write_json') 26 | # noqa pylint: disable=too-many-arguments, too-many-locals, unused-argument, too-many-positional-arguments 27 | def test_main_flow(self, mock_write_json, mock_parse_json, 28 | mock_qualification_report, mock_get_topology, 29 | mock_visualize_artifacts, mock_validate_artifacts, 30 | mock_export_artifacts, mock_pre_validation_checks, 31 | mock_parse_config, mock_arg_parser): 32 | """ 33 | Test the main flow of the script. 34 | """ 35 | # Mock command line arguments 36 | mock_args = MagicMock() 37 | mock_args.resources = 'all' 38 | mock_args.skip_target_validation = False 39 | mock_arg_parser.return_value.parse_args.return_value = mock_args 40 | 41 | # Mock config files 42 | mock_cfg = MagicMock() 43 | mock_cfg.get.return_value = 'OPDK' 44 | mock_backend_cfg = MagicMock() 45 | mock_parse_config.side_effect = [mock_cfg, mock_backend_cfg] 46 | 47 | # Mock pre_validation_checks to return True 48 | mock_pre_validation_checks.return_value = True 49 | 50 | # Mock parse_json to return empty dicts initially 51 | mock_parse_json.side_effect = [{}, {}] 52 | 53 | # Mock export_artifacts to return some data 54 | mock_export_artifacts.return_value = {'export': True} 55 | 56 | # Mock validate_artifacts to return some data 57 | mock_validate_artifacts.return_value = {'report': True} 58 | 59 | # Mock get_topology to return some data 60 | mock_get_topology.return_value = {} 61 | 62 | # Call the main function 63 | main() 64 | 65 | # Assert that the core functions were called 66 | mock_pre_validation_checks.assert_called_once() 67 | mock_export_artifacts.assert_called_once() 68 | mock_validate_artifacts.assert_called_once() 69 | mock_visualize_artifacts.assert_called_once() 70 | mock_get_topology.assert_called_once() 71 | mock_qualification_report.assert_called_once() 72 | 73 | 74 | if __name__ == '__main__': 75 | unittest.main() 76 | -------------------------------------------------------------------------------- /tests/test_validator.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for the validator module. 3 | """ 4 | import unittest 5 | from unittest.mock import MagicMock, patch 6 | from validator import ApigeeValidator 7 | 8 | 9 | class TestApigeeValidator(unittest.TestCase): 10 | """ 11 | Test cases for the ApigeeValidator class. 12 | """ 13 | 14 | def setUp(self): 15 | """ 16 | Set up the test case. 17 | """ 18 | self.mock_apigee_new_gen = MagicMock() 19 | self.validator = ApigeeValidator( 20 | baseurl="https://mock.baseurl", 21 | project_id="mock_project", 22 | token="mock_token", 23 | env_type="hybrid", 24 | target_export_data={}, 25 | target_compare=False, 26 | skip_target_validation=False, 27 | ssl_verify=True 28 | ) 29 | self.validator.xorhybrid = self.mock_apigee_new_gen 30 | 31 | def test_validate_org_resource(self): 32 | """ 33 | Test the validate_org_resource method. 
34 | """ 35 | resources = {"developers": {"dev1": {"name": "dev1"}}} 36 | result = self.validator.validate_org_resource("developers", resources) 37 | self.assertEqual(len(result), 1) 38 | self.assertTrue(result[0]["importable"]) 39 | self.assertEqual(result[0]["imported"], "UNKNOWN") 40 | 41 | def test_validate_kvms(self): 42 | """ 43 | Test the validate_kvms method. 44 | """ 45 | kvms = {"kvm1": {"name": "kvm1"}} 46 | result = self.validator.validate_kvms("test_env", kvms) 47 | self.assertEqual(len(result), 1) 48 | self.assertTrue(result[0]["importable"]) 49 | self.assertEqual(result[0]["imported"], "UNKNOWN") 50 | 51 | @patch('validator.list_dir') 52 | @patch('zipfile.ZipFile') 53 | # pylint: disable=unused-argument 54 | def test_validate_proxy_bundles(self, mock_zipfile, mock_list_dir): 55 | """ 56 | Test the validate_proxy_bundles method. 57 | """ 58 | self.validator.skip_target_validation = True 59 | export_objects = ['api1'] 60 | validation = self.validator.validate_proxy_bundles( 61 | export_objects, 62 | '/tmp', 63 | '/tmp', 64 | 'apis' 65 | ) 66 | self.assertEqual(len(validation['apis']), 1) 67 | self.assertEqual(validation['apis'][0]['name'], 'api1') 68 | self.assertFalse(validation['apis'][0]['importable']) 69 | 70 | def test_validate_env_flowhooks(self): 71 | """ 72 | Test the validate_env_flowhooks method. 73 | """ 74 | flowhooks = {"fh1": {"name": "fh1", "sharedFlow": "sf1"}} 75 | self.mock_apigee_new_gen.get_env_object.return_value = { 76 | "deployments": []} 77 | result = self.validator.validate_env_flowhooks("test_env", flowhooks) 78 | self.assertEqual(len(result), 1) 79 | self.assertFalse(result[0]["importable"]) 80 | self.assertEqual(result[0]["imported"], "UNKNOWN") 81 | 82 | 83 | if __name__ == '__main__': 84 | unittest.main() 85 | -------------------------------------------------------------------------------- /qualification_report_mapping/header_mapping.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2025 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License 16 | 17 | """Loads mappings for the qualification report from JSON files. 18 | 19 | This module parses various JSON files containing mappings 20 | and data used to generate the qualification report. The parsed data 21 | is stored in individual variables, making it accessible for 22 | report generation. The `parse_json` utility function is used for 23 | parsing the JSON data. 
24 | """ 25 | 26 | from utils import parse_json # pylint: disable=E0401 27 | 28 | 29 | topology_installation_mapping = parse_json( 30 | "./qualification_report_mapping_json/topology_installation_mapping.json") 31 | anti_patterns_mapping = parse_json( 32 | "./qualification_report_mapping_json/anti_patterns.json") 33 | api_limits_mapping = parse_json( 34 | "./qualification_report_mapping_json/api_limits.json") 35 | api_with_multiple_basepath_mapping = parse_json( 36 | "./qualification_report_mapping_json/api_with_multiple_basepath.json") 37 | apps_without_api_products_mapping = parse_json( 38 | "./qualification_report_mapping_json/apps_without_api_products.json") 39 | cache_without_expiry_mapping = parse_json( 40 | "./qualification_report_mapping_json/cache_without_expiry.json") 41 | cname_anomaly_mapping = parse_json( 42 | "./qualification_report_mapping_json/cname_anomaly.json") 43 | company_and_developers_mapping = parse_json( 44 | "./qualification_report_mapping_json/company_and_developers.json") 45 | env_limits_mapping = parse_json( 46 | "./qualification_report_mapping_json/env_limits.json") 47 | json_path_enabled_mapping = parse_json( 48 | "./qualification_report_mapping_json/json_path_enabled.json") 49 | northbound_mtls_mapping = parse_json( 50 | "./qualification_report_mapping_json/northbound_mtls.json") 51 | org_limits_mapping = parse_json( 52 | "./qualification_report_mapping_json/org_limits.json") 53 | proxies_per_env_mapping = parse_json( 54 | "./qualification_report_mapping_json/proxies_per_env.json") 55 | unsupported_polices_mapping = parse_json( 56 | "./qualification_report_mapping_json/unsupported_policies.json") 57 | sharding_output = parse_json( 58 | "./qualification_report_mapping_json/target_environments.json") 59 | aliases_with_private_keys = parse_json( 60 | "./qualification_report_mapping_json/aliases_with_private_keys.json") 61 | sharded_proxies = parse_json( 62 | "./qualification_report_mapping_json/sharded_proxies.json") 63 | org_resourcefiles = parse_json( 64 | "./qualification_report_mapping_json/org_resourcefiles.json") 65 | validation_report = parse_json( 66 | "./qualification_report_mapping_json/validation_report.json") 67 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to Apigee Migration Assessment Tooling 2 | 3 | First off, thank you for considering contributing to Apigee Migration Assessment Tooling ! We appreciate your time and effort. 4 | 5 | Here's how you can get involved: 6 | 7 | ## Reporting Bugs 8 | 9 | * **Check existing issues:** Before submitting a new bug report, please search the [Issues](link to your project's issues page) to see if the issue has already been reported. 10 | * **Provide detailed information:** When reporting a bug, please include: 11 | * **Clear and concise description:** Explain the problem you encountered. 12 | * **Steps to reproduce:** Provide detailed steps to reproduce the bug. 13 | * **Expected behavior:** Describe what you expected to happen. 14 | * **Actual behavior:** Describe what actually happened. 15 | * **Environment details:** Include information about your operating system, Python version, and any relevant dependencies. 16 | * **Screenshots or code snippets:** If applicable, include screenshots or code snippets to illustrate the issue. 17 | 18 | ## Suggesting Enhancements 19 | 20 | We welcome suggestions for new features or improvements to existing functionality. 
When suggesting an enhancement: 21 | 22 | * **Provide a clear description:** Explain the enhancement you're proposing. 23 | * **Explain the benefits:** Describe how the enhancement would benefit users. 24 | * **Consider alternatives:** If possible, suggest alternative approaches or solutions. 25 | 26 | ## Submitting Code Changes 27 | 28 | We welcome code contributions! Here's how to submit a pull request: 29 | 30 | 1. **Fork the repository:** Click the "Fork" button in the top right corner of the repository page. 31 | 2. **Clone the forked repository:** `git clone https://github.com/your-username/your-forked-repo.git` 32 | 3. **Create a new branch:** `git checkout -b my-new-branch` 33 | 4. **Make your changes:** Implement your bug fix or enhancement. 34 | 5. **Commit your changes:** `git commit -am "Fix: Issue with [issue number]"` 35 | 6. **Push to your forked repository:** `git push origin my-new-branch` 36 | 7. **Create a pull request:** Go to the original repository and click the "New pull request" button. Select your forked repository and branch. 37 | 8. **Provide a detailed description:** Explain the changes you made and why. 38 | 39 | ## Code Style 40 | 41 | * **Follow PEP 8:** Adhere to the [PEP 8 style guide](https://www.python.org/dev/peps/pep-0008/) for Python code. 42 | * **Use meaningful names:** Choose descriptive names for variables, functions, and classes. 43 | * **Write clear comments:** Explain the purpose and functionality of your code. 44 | * **Keep functions short:** Break down complex tasks into smaller, more manageable functions. 45 | 46 | ## Testing 47 | 48 | * **Write unit tests:** Ensure your code changes are well-tested by writing unit tests. 49 | * **Run existing tests:** Before submitting a pull request, make sure all existing tests pass. 50 | 51 | ## Code of Conduct 52 | 53 | We expect all contributors to adhere to our [Code of Conduct](link to your project's code of conduct). Please treat everyone with respect and courtesy. 54 | 55 | ## Questions 56 | 57 | If you have any questions or need help getting started, feel free to [open an issue](link to your project's issues page) or contact us. 58 | 59 | Thank you for your contributions! -------------------------------------------------------------------------------- /base_logger.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2025 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License 16 | 17 | """Provides a consistent logging setup for the Migratool. 18 | 19 | This module sets up a logger named "Migratool" with configurable handler, level, and format. # noqa 20 | It supports logging to a file or stream, and uses a custom formatter for colored output. # noqa 21 | Environment variables can be used to control the logger's behavior. 22 | 23 | Environment Variables: 24 | 25 | - `EXEC_INFO`: If set to "True", exception information will be included. 26 | - `LOG_HANDLER`: Specifies the logging handler. 
Can be "File" or "Stream". 27 | - `LOG_FILE_PATH`: If LOG_HANDLER is "File", this specifies the file path. 28 | - `LOGLEVEL`: Sets the logging level. 29 | Can be one of 30 | "CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", or "NOTSET". 31 | Defaults to "WARNING". 32 | """ 33 | 34 | import os 35 | import logging 36 | 37 | EXEC_INFO = os.getenv("EXEC_INFO") == "True" 38 | LOG_HANDLER = os.getenv("LOG_HANDLER", "Stream") 39 | LOG_FILE_PATH = os.getenv("LOG_FILE_PATH", "app.log") 40 | LOGLEVEL = os.getenv('LOGLEVEL', 'INFO').upper() 41 | 42 | if LOG_HANDLER not in {"File", "Stream"}: 43 | LOG_HANDLER = "Stream" 44 | 45 | if LOGLEVEL not in {"CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"}: 46 | LOGLEVEL = "WARNING" 47 | 48 | 49 | class CustomFormatter(logging.Formatter): 50 | """A custom formatter for colored logging output. 51 | 52 | Provides colored output for different log levels using ANSI escape codes. 53 | The format includes timestamp, logger name, level, message 54 | and file location. 55 | """ 56 | 57 | grey = "\x1b[38;20m" 58 | yellow = "\x1b[33;20m" 59 | red = "\x1b[31;20m" 60 | bold_red = "\x1b[31;1m" 61 | reset = "\x1b[0m" 62 | format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s (%(filename)s:%(lineno)d)" # noqa pylint: disable=C0301 63 | 64 | FORMATS = { 65 | logging.DEBUG: grey + format + reset, 66 | logging.INFO: grey + format + reset, 67 | logging.WARNING: yellow + format + reset, 68 | logging.ERROR: red + format + reset, 69 | logging.CRITICAL: bold_red + format + reset 70 | } 71 | 72 | def format(self, record): # pylint: disable=E0102 73 | log_fmt = self.FORMATS.get(record.levelno) 74 | formatter = logging.Formatter(log_fmt) 75 | return formatter.format(record) 76 | 77 | 78 | logger = logging.getLogger("Migratool") 79 | 80 | if LOG_HANDLER == "File": 81 | ch = logging.FileHandler(LOG_FILE_PATH, mode="a") 82 | else: 83 | ch = logging.StreamHandler() 84 | 85 | # Set handler and logger to the same level 86 | ch.setLevel(getattr(logging, LOGLEVEL)) 87 | logger.setLevel(ch.level) 88 | 89 | ch.setFormatter(CustomFormatter()) 90 | 91 | logger.addHandler(ch) 92 | -------------------------------------------------------------------------------- /tests/ci/generate_apigee_edge_access_token.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2025 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License 16 | 17 | 18 | """ 19 | Generates Apigee Edge SAAS Access Token 20 | """ 21 | 22 | 23 | import os 24 | import sys 25 | import requests # pylint: disable=E0401 26 | import pyotp # pylint: disable=E0401 27 | 28 | 29 | def fetch_apigee_token(): 30 | """ 31 | Generates an MFA token and uses it to fetch an Apigee OAuth access token. 32 | Reads credentials from environment variables. 
33 | """ 34 | apigee_user = os.getenv('APIGEE_EDGE_USER') 35 | apigee_password = os.getenv('APIGEE_EDGE_PASSWORD') 36 | otp_secret = os.getenv('OTP_SECRET') 37 | 38 | if not all([apigee_user, apigee_password, otp_secret]): 39 | print( 40 | "Error: Please set the APIGEE_EDGE_USER, APIGEE_EDGE_PASSWORD, and OTP_SECRET environment variables.", # noqa pylint: disable=C0301 41 | file=sys.stderr 42 | ) 43 | sys.exit(1) 44 | 45 | try: 46 | totp = pyotp.TOTP(otp_secret) 47 | mfa_token = totp.now() 48 | except Exception as e: # noqa pylint: disable=W0718 49 | print(f"Error generating MFA token: {e}", file=sys.stderr) 50 | sys.exit(1) 51 | 52 | url = "https://login.apigee.com/oauth/token" 53 | headers = { 54 | "Content-Type": "application/x-www-form-urlencoded;charset=utf-8", 55 | "Accept": "application/json;charset=utf-8", 56 | "Authorization": "Basic ZWRnZWNsaTplZGdlY2xpc2VjcmV0", 57 | } 58 | params = {"mfa_token": mfa_token} 59 | data = { 60 | "username": apigee_user, 61 | "password": apigee_password, 62 | "grant_type": "password", 63 | } 64 | 65 | try: 66 | response = requests.post(url, headers=headers, params=params, data=data, timeout=3) # noqa 67 | response.raise_for_status() 68 | access_token = response.json().get("access_token") 69 | if not access_token: 70 | print("Error: 'access_token' not found in the response.", file=sys.stderr) # noqa pylint: disable=C0301 71 | return None 72 | return access_token 73 | except requests.exceptions.HTTPError as http_err: 74 | print(f"HTTP Error occurred: {http_err}", file=sys.stderr) 75 | print(f"Response Body: {http_err.response.text}", file=sys.stderr) 76 | except requests.exceptions.RequestException as req_err: 77 | print(f"A request error occurred: {req_err}", file=sys.stderr) 78 | return None 79 | 80 | 81 | if __name__ == "__main__": 82 | token = fetch_apigee_token() 83 | if token: 84 | print(f"::set-output name=access_token::{token}") 85 | else: 86 | # Exit with a non-zero status code to fail the workflow step on error 87 | print("Failed to retrieve access token.", file=sys.stderr) 88 | sys.exit(1) 89 | -------------------------------------------------------------------------------- /permissions.json: -------------------------------------------------------------------------------- 1 | [ 2 | "apigee.apiproductattributes.get", 3 | "apigee.apiproductattributes.list", 4 | "apigee.apiproducts.get", 5 | "apigee.apiproducts.list", 6 | "apigee.appkeys.get", 7 | "apigee.apps.get", 8 | "apigee.apps.list", 9 | "apigee.archivedeployments.download", 10 | "apigee.archivedeployments.get", 11 | "apigee.archivedeployments.list", 12 | "apigee.caches.list", 13 | "apigee.canaryevaluations.get", 14 | "apigee.datacollectors.get", 15 | "apigee.datacollectors.list", 16 | "apigee.datalocation.get", 17 | "apigee.datastores.get", 18 | "apigee.datastores.list", 19 | "apigee.deployments.get", 20 | "apigee.deployments.list", 21 | "apigee.developerappattributes.get", 22 | "apigee.developerappattributes.list", 23 | "apigee.developerapps.get", 24 | "apigee.developerapps.list", 25 | "apigee.developerattributes.get", 26 | "apigee.developerattributes.list", 27 | "apigee.developerbalances.get", 28 | "apigee.developermonetizationconfigs.get", 29 | "apigee.developers.get", 30 | "apigee.developers.list", 31 | "apigee.developersubscriptions.get", 32 | "apigee.developersubscriptions.list", 33 | "apigee.endpointattachments.get", 34 | "apigee.endpointattachments.list", 35 | "apigee.entitlements.get", 36 | "apigee.envgroupattachments.get", 37 | "apigee.envgroupattachments.list", 38 | 
"apigee.envgroups.get", 39 | "apigee.envgroups.list", 40 | "apigee.environments.get", 41 | "apigee.environments.getDataLocation", 42 | "apigee.environments.getIamPolicy", 43 | "apigee.environments.getStats", 44 | "apigee.environments.list", 45 | "apigee.exports.get", 46 | "apigee.exports.list", 47 | "apigee.flowhooks.getSharedFlow", 48 | "apigee.flowhooks.list", 49 | "apigee.hostqueries.get", 50 | "apigee.hostqueries.list", 51 | "apigee.hostsecurityreports.get", 52 | "apigee.hostsecurityreports.list", 53 | "apigee.hoststats.get", 54 | "apigee.ingressconfigs.get", 55 | "apigee.instanceattachments.get", 56 | "apigee.instanceattachments.list", 57 | "apigee.instances.get", 58 | "apigee.instances.list", 59 | "apigee.keystorealiases.get", 60 | "apigee.keystorealiases.list", 61 | "apigee.keystores.get", 62 | "apigee.keystores.list", 63 | "apigee.keyvaluemapentries.get", 64 | "apigee.keyvaluemapentries.list", 65 | "apigee.keyvaluemaps.list", 66 | "apigee.maskconfigs.get", 67 | "apigee.nataddresses.get", 68 | "apigee.nataddresses.list", 69 | "apigee.operations.get", 70 | "apigee.operations.list", 71 | "apigee.organizations.get", 72 | "apigee.organizations.list", 73 | "apigee.portals.get", 74 | "apigee.portals.list", 75 | "apigee.projectorganizations.get", 76 | "apigee.proxies.get", 77 | "apigee.proxies.list", 78 | "apigee.proxyrevisions.get", 79 | "apigee.proxyrevisions.list", 80 | "apigee.queries.get", 81 | "apigee.queries.list", 82 | "apigee.rateplans.get", 83 | "apigee.rateplans.list", 84 | "apigee.references.get", 85 | "apigee.references.list", 86 | "apigee.reports.get", 87 | "apigee.reports.list", 88 | "apigee.resourcefiles.get", 89 | "apigee.resourcefiles.list", 90 | "apigee.runtimeconfigs.get", 91 | "apigee.sharedflowrevisions.get", 92 | "apigee.sharedflowrevisions.list", 93 | "apigee.sharedflows.get", 94 | "apigee.sharedflows.list", 95 | "apigee.targetservers.get", 96 | "apigee.targetservers.list", 97 | "resourcemanager.projects.get", 98 | "resourcemanager.projects.getIamPolicy", 99 | "apigee.proxies.create" 100 | ] 101 | -------------------------------------------------------------------------------- /tests/test_topology.py: -------------------------------------------------------------------------------- 1 | """Test suite for topology.""" 2 | import os 3 | import shutil 4 | import sys 5 | import unittest 6 | from unittest.mock import MagicMock, patch 7 | 8 | from topology import ApigeeTopology 9 | sys.path.insert(0, '..') 10 | 11 | 12 | class TestApigeeTopology(unittest.TestCase): 13 | """Test class for ApigeeTopology.""" 14 | 15 | def setUp(self): 16 | """Set up.""" 17 | self.mock_cfg = { 18 | 'inputs': { 19 | 'TARGET_DIR': 'target' 20 | } 21 | } 22 | self.mock_backend_cfg = { 23 | 'topology': { 24 | 'TOPOLOGY_DIR': 'topology', 25 | 'NW_TOPOLOGY_MAPPING': 'nw_topology.json', 26 | 'DATA_CENTER_MAPPING': 'dc_mapping.json' 27 | } 28 | } 29 | mock_cfg_instance = MagicMock() 30 | (mock_cfg_instance.get. 
31 | side_effect) = lambda section, key: self.mock_cfg[section][key] 32 | with patch('utils.parse_config') as mock_parse_config, \ 33 | patch('topology.ApigeeClassic') as mock_apigee_classic: 34 | mock_parse_config.side_effect = [self.mock_backend_cfg] 35 | self.topology = ApigeeTopology( 36 | baseurl="https://mock.baseurl", 37 | org="mock_org", 38 | token="mock_token", 39 | auth_type="basic", 40 | cfg=mock_cfg_instance 41 | ) 42 | self.topology.opdk = mock_apigee_classic.return_value 43 | 44 | def tearDown(self): 45 | """Tear down.""" 46 | if hasattr(self, 'topology') and os.path.exists( 47 | self.topology.topology_dir_path): 48 | shutil.rmtree(self.topology.topology_dir_path) 49 | 50 | @patch('topology.write_json') 51 | @patch('topology.pod_mapping', {'test-pod': {}}) 52 | def test_get_topology_mapping(self, mock_write_json): 53 | """Test get_topology_mapping.""" 54 | mock_response = [ 55 | { 56 | "externalHostName": "host1.external", 57 | "externalIP": "1.1.1.1", 58 | "internalHostName": "host1.internal", 59 | "internalIP": "10.1.1.1", 60 | "isUp": True, 61 | "pod": "test-pod", 62 | "reachable": True, 63 | "region": "test-region", 64 | "type": ["test-type"] 65 | } 66 | ] 67 | self.topology.opdk.view_pod_component_details = MagicMock( 68 | return_value=mock_response) 69 | result = self.topology.get_topology_mapping() 70 | self.assertIn('test-pod', result) 71 | self.assertEqual(len(result['test-pod']), 1) 72 | self.assertEqual(result['test-pod'][0]['externalHostName'], 73 | 'host1.external') 74 | mock_write_json.assert_called_once() 75 | 76 | def test_get_data_center_mapping(self): 77 | """Test get_data_center_mapping.""" 78 | pod_component_mapping = { 79 | "test-pod": [ 80 | { 81 | "region": "test-region", 82 | "pod": "test-pod", 83 | "internalIP": "10.1.1.1", 84 | "type": ["test-type"] 85 | } 86 | ] 87 | } 88 | result = self.topology.get_data_center_mapping(pod_component_mapping) 89 | self.assertIn('test-region', result) 90 | self.assertIn('test-pod', result['test-region']) 91 | self.assertEqual(len(result['test-region']['test-pod']), 1) 92 | self.assertEqual(result['test-region']['test-pod'][0]['internalIP'], 93 | '10.1.1.1') 94 | 95 | @patch('topology.Diagram') 96 | @patch('topology.Cluster') 97 | @patch('topology.Blank') 98 | @patch('topology.pod_mapping', {'test-pod': {'bgcolor': '#FFFFFF'}}) 99 | def test_draw_topology_graph_diagram(self, mock_blank, mock_cluster, 100 | mock_diagram): 101 | """Test draw_topology_graph_diagram.""" 102 | data_center = { 103 | "test-region": { 104 | "test-pod": [ 105 | { 106 | "internalIP": "10.1.1.1", 107 | "type": ["test-type"] 108 | } 109 | ] 110 | } 111 | } 112 | self.topology.draw_topology_graph_diagram(data_center) 113 | self.assertEqual(mock_diagram.call_count, 2) 114 | self.assertEqual(mock_cluster.call_count, 5) 115 | mock_blank.assert_called() 116 | 117 | 118 | if __name__ == '__main__': 119 | unittest.main() 120 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | input.properties 2 | visualization.html 3 | qualification_report.xlsx 4 | target/ 5 | *.csv 6 | unzip/ 7 | 8 | # Byte-compiled / optimized / DLL files 9 | __pycache__/ 10 | *.py[cod] 11 | *$py.class 12 | 13 | # C extensions 14 | *.so 15 | 16 | # Distribution / packaging 17 | .Python 18 | build/ 19 | develop-eggs/ 20 | dist/ 21 | downloads/ 22 | eggs/ 23 | .eggs/ 24 | lib/ 25 | lib64/ 26 | parts/ 27 | sdist/ 28 | var/ 29 | wheels/ 30 | share/python-wheels/ 
31 | *.egg-info/ 32 | .installed.cfg 33 | *.egg 34 | MANIFEST 35 | 36 | # PyInstaller 37 | # Usually these files are written by a python script from a template 38 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 39 | *.manifest 40 | *.spec 41 | 42 | # Installer logs 43 | pip-log.txt 44 | pip-delete-this-directory.txt 45 | 46 | # Unit test / coverage reports 47 | htmlcov/ 48 | .tox/ 49 | .nox/ 50 | .coverage 51 | .coverage.* 52 | .cache 53 | nosetests.xml 54 | coverage.xml 55 | *.cover 56 | *.py,cover 57 | .hypothesis/ 58 | .pytest_cache/ 59 | cover/ 60 | 61 | # Translations 62 | *.mo 63 | *.pot 64 | 65 | # Django stuff: 66 | *.log 67 | local_settings.py 68 | db.sqlite3 69 | db.sqlite3-journal 70 | 71 | # Flask stuff: 72 | instance/ 73 | .webassets-cache 74 | 75 | # Scrapy stuff: 76 | .scrapy 77 | 78 | # Sphinx documentation 79 | docs/_build/ 80 | 81 | # PyBuilder 82 | .pybuilder/ 83 | target/ 84 | 85 | # Jupyter Notebook 86 | .ipynb_checkpoints 87 | 88 | # IPython 89 | profile_default/ 90 | ipython_config.py 91 | 92 | # pyenv 93 | # For a library or package, you might want to ignore these files since the code is 94 | # intended to run in multiple environments; otherwise, check them in: 95 | # .python-version 96 | 97 | # pipenv 98 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 99 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 100 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 101 | # install all needed dependencies. 102 | #Pipfile.lock 103 | 104 | # poetry 105 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 106 | # This is especially recommended for binary packages to ensure reproducibility, and is more 107 | # commonly ignored for libraries. 108 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 109 | #poetry.lock 110 | 111 | # pdm 112 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 113 | #pdm.lock 114 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 115 | # in version control. 116 | # https://pdm.fming.dev/#use-with-ide 117 | .pdm.toml 118 | 119 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 120 | __pypackages__/ 121 | 122 | # Celery stuff 123 | celerybeat-schedule 124 | celerybeat.pid 125 | 126 | # SageMath parsed files 127 | *.sage.py 128 | 129 | # Environments 130 | .env 131 | .venv 132 | env/ 133 | venv/ 134 | ENV/ 135 | env.bak/ 136 | venv.bak/ 137 | 138 | # Spyder project settings 139 | .spyderproject 140 | .spyproject 141 | 142 | # Rope project settings 143 | .ropeproject 144 | 145 | # mkdocs documentation 146 | /site 147 | 148 | # mypy 149 | .mypy_cache/ 150 | .dmypy.json 151 | dmypy.json 152 | 153 | # Pyre type checker 154 | .pyre/ 155 | 156 | # pytype static type analyzer 157 | .pytype/ 158 | 159 | # Cython debug symbols 160 | cython_debug/ 161 | 162 | # PyCharm 163 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 164 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 165 | # and can be added to the global gitignore or merged into this file. For a more nuclear 166 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
167 | #.idea/ 168 | 169 | ### https://raw.github.com/github/gitignore/90f149de451a5433aebd94d02d11b0e28843a1af/Terraform.gitignore 170 | 171 | # Local .terraform directories 172 | **/.terraform/* 173 | 174 | # .tfstate files 175 | *.tfstate 176 | *.tfstate.* 177 | 178 | # Crash log files 179 | crash.log 180 | 181 | # Kitchen files 182 | **/inspec.lock 183 | **/.kitchen 184 | **/kitchen.local.yml 185 | **/Gemfile.lock 186 | 187 | # Ignore any .tfvars files that are generated automatically for each Terraform run. Most 188 | # .tfvars files are managed as part of configuration and so should be included in 189 | # version control. 190 | **/*.tfvars 191 | 192 | credentials.json 193 | 194 | # tf lock file 195 | .terraform.lock.hcl 196 | .DS_Store 197 | 198 | qualification_report.xlsx 199 | 200 | unzip 201 | 202 | input.properties.* 203 | dev 204 | target* -------------------------------------------------------------------------------- /qualification_report_mapping_json/report_summary.json: -------------------------------------------------------------------------------- 1 | { 2 | "header_text": "Apigee X Upgrade Qualification Summary", 3 | "col_merge": 2, 4 | "col_width":50, 5 | "header_row": 1, 6 | "blocks":[{ 7 | "header": "Outstanding Feature Parity", 8 | "sheets": [{ 9 | "text_col": "Proxies Per Env", 10 | "link_of_text": "internal:'Proxies Per Env'!A1", 11 | "result_col": "=IF(OR(MAX('Proxies Per Env'!C:C)>50, MAX('Proxies Per Env'!E:E)>60), \"FAILED\", \"PASSED\")" 12 | }, 13 | { 14 | "text_col": "Northbound mTLS", 15 | "link_of_text": "internal:'Northbound mTLS'!A1", 16 | "result_col": "=IF(COUNTIF('Northbound mTLS'!D:D, \"true*\")>0, \"FAILED\",\"PASSED\")" 17 | }, 18 | { 19 | "text_col": "Company And Developers", 20 | "link_of_text": "internal:'Company And Developers'!A1", 21 | "result_col": "=IF(COUNTA('Company And Developers'!B:B)>1, \"FAILED\", \"PASSED\")" 22 | }, 23 | { 24 | "text_col": "Org Level Resourcefiles", 25 | "link_of_text": "internal:'Org Level Resourcefiles'!A1", 26 | "result_col": "=IF(COUNTA('Org Level Resourcefiles'!B:B)>1, \"FAILED\", \"PASSED\")" 27 | }] 28 | }, 29 | { 30 | "header": "Anti-patterns", 31 | "sheets": [{ 32 | "text_col": "Anti Patterns", 33 | "link_of_text": "internal:'Anti Patterns'!A1", 34 | "result_col": "=IF(OR(COUNTIF('Anti Patterns'!D:D, \"false*\")>0,COUNTIF('Anti Patterns'!E:E, \"true*\")>0), \"FAILED\",\"PASSED\")" 35 | }, 36 | { 37 | "text_col": "CName Anomaly", 38 | "link_of_text": "internal:'CName Anomaly'!A1", 39 | "result_col": "=IF(COUNTA('CName Anomaly'!C:C)>1, \"FAILED\", \"PASSED\")" 40 | }, 41 | { 42 | "text_col": "Apps Without API Products", 43 | "link_of_text": "internal:'Apps Without ApiProducts'!A1", 44 | "result_col": "=IF(COUNTA('Apps Without ApiProducts'!C:C)>1, \"FAILED\", \"PASSED\")" 45 | }, 46 | { 47 | "text_col": "Product Limits - API Limits", 48 | "link_of_text": "internal:'Product Limits - API Limits'!A1", 49 | "result_col": "=IF(MAX('Product Limits - API Limits'!C:C)>250, \"FAILED\",\"PASSED\")" 50 | }, 51 | { 52 | "text_col": "Product Limits - Env Limits", 53 | "link_of_text": "internal:'Product Limits - Env Limits'!A1", 54 | "result_col": "=IF(OR(MAX('Product Limits - Env Limits'!C:C)>1000, MAX('Product Limits - Env Limits'!F:F)>100), \"FAILED\", \"PASSED\")" 55 | }, 56 | { 57 | "text_col": "Product Limits - Org Limits", 58 | "link_of_text": "internal:'Product Limits - Org Limits'!A1", 59 | "result_col": "=IF(MAX('Product Limits - Org Limits'!C:C)>100, \"FAILED\", \"PASSED\")" 60 | }, 61 | { 62 | "text_col": "Cache
Without Expiry", 63 | "link_of_text": "internal:'Cache Without Expiry'!A1", 64 | "result_col": "=IF(COUNTA('Cache Without Expiry'!B:B)>1, \"FAILED\", \"PASSED\")" 65 | }, 66 | { 67 | "text_col": "APIs with Multiple BasePaths", 68 | "link_of_text": "internal:'APIs With Multiple BasePaths'!A1", 69 | "result_col": "=IF(ArrayFormula(MAX(COUNTIF('APIs With Multiple BasePaths'!B2:B971, 'APIs With Multiple BasePaths'!B2:B971))) > 5, \"FAILED\", \"PASSED\")" 70 | }] 71 | }, 72 | { 73 | "header": "Product Updates", 74 | "sheets": [{ 75 | "text_col": "Unsupported Policies", 76 | "link_of_text": "internal:'Unsupported Policies'!A1", 77 | "result_col": "=IF(COUNTA('Unsupported Policies'!D:D)>1, \"FAILED\", \"PASSED\")" 78 | }, 79 | { 80 | "text_col": "JSONPath Version Update", 81 | "link_of_text": "internal:'Json Path Enabled'!A1", 82 | "result_col": "=IF(COUNTA('Json Path Enabled'!D:D)>1, \"FAILED\", \"PASSED\")" 83 | }, 84 | { 85 | "text_col": "Keystore aliases with private keys", 86 | "link_of_text": "internal:'Aliases with private keys'!A1", 87 | "result_col": "=IF(COUNTA('Aliases with private keys'!E:E)>1, \"FAILED\", \"PASSED\")" 88 | }] 89 | }, 90 | { 91 | "header": "APIGEE Edge/OPDK (4G) Installation Topology", 92 | "APIGEE_SOURCE": "OPDK", 93 | "sheets": [{ 94 | "text_col": "Apigee (4G) components", 95 | "link_of_text": "internal:'Apigee (4G) components'!A1", 96 | "result_col": "=\"NA\"" 97 | }] 98 | }], 99 | "note_list":{ 100 | "skip_rows": 1, 101 | "notes":[{ 102 | "text": "***NOTE: This report generator is currently in beta version. If you encounter any inaccuracies, we would love to know about them. Please drop a comment into the spreadsheet and we will investigate/fix them on our end.", 103 | "bg_color": "blue" 104 | }] 105 | } 106 | } -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | --- 16 | name: Build and Run Apigee Migration Assessment Image 17 | 18 | on: # yamllint disable-line rule:truthy 19 | push: 20 | branches: 21 | - main 22 | paths-ignore: 23 | - '**.md' 24 | pull_request_target: 25 | paths-ignore: 26 | - '**.md' 27 | branches: 28 | - main 29 | types: 30 | - opened 31 | - synchronize 32 | - reopened 33 | workflow_dispatch: 34 | 35 | permissions: read-all 36 | 37 | env: 38 | REGISTRY: ghcr.io 39 | IMAGE_NAME: ghcr.io/${{ github.repository }}/apigee-migration-assessment-tool 40 | 41 | jobs: 42 | unittest: 43 | runs-on: ubuntu-latest 44 | steps: 45 | - name: Checkout repository 46 | uses: actions/checkout@v5 47 | with: 48 | ref: ${{ github.event.pull_request.head.sha }} 49 | 50 | - name: Set up Python 51 | uses: actions/setup-python@v5 52 | with: 53 | python-version: '3.12' 54 | 55 | - name: Install dependencies 56 | run: | 57 | python -m pip install --upgrade pip 58 | pip install -r requirements.txt 59 | pip install -r test-requirements.txt 60 | 61 | - name: Run unittests 62 | run: python -m unittest discover tests -v 63 | 64 | build-and-push: 65 | runs-on: ubuntu-latest 66 | needs: unittest 67 | permissions: 68 | contents: read 69 | packages: write 70 | outputs: 71 | image_tag: ${{ steps.get_tag.outputs.tag }} 72 | steps: 73 | - name: Checkout repository 74 | uses: actions/checkout@v5 75 | with: 76 | ref: ${{ github.event.pull_request.head.sha }} 77 | 78 | - name: Log into registry ${{ env.REGISTRY }} 79 | uses: docker/login-action@v3 80 | with: 81 | registry: ${{ env.REGISTRY }} 82 | username: ${{ github.actor }} 83 | password: ${{ secrets.GITHUB_TOKEN }} 84 | 85 | - name: Docker meta 86 | id: meta 87 | uses: docker/metadata-action@v5 88 | with: 89 | images: ${{ env.IMAGE_NAME }} 90 | tags: | 91 | type=raw,value=latest,enable={{is_default_branch}} 92 | type=ref,event=branch 93 | type=ref,event=pr 94 | type=sha 95 | 96 | - name: Determine primary image tag 97 | id: get_tag 98 | run: | 99 | if [[ "${{ github.event_name }}" == "pull_request_target" ]]; then 100 | echo "tag=pr-${{ github.event.number }}" >> "$GITHUB_OUTPUT" 101 | elif [[ "${{ github.ref }}" == "refs/heads/main" ]]; then 102 | echo "tag=latest" >> "$GITHUB_OUTPUT" 103 | else 104 | # For feature branches, etc. 105 | echo "tag=${{ github.ref_name }}" | sed 's/\//-/g' >> "$GITHUB_OUTPUT" 106 | fi 107 | 108 | - name: Build and push Docker image 109 | uses: docker/build-push-action@v5 110 | with: 111 | context: . 
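# The tags and labels below reuse the outputs of the docker/metadata-action step ("meta") above.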
112 | push: true 113 | tags: ${{ steps.meta.outputs.tags }} 114 | labels: ${{ steps.meta.outputs.labels }} 115 | 116 | test-opdk: 117 | runs-on: ubuntu-latest 118 | needs: 119 | - build-and-push 120 | permissions: 121 | id-token: write 122 | contents: read 123 | steps: 124 | - name: Checkout repository 125 | uses: actions/checkout@v5 126 | with: 127 | ref: ${{ github.event.pull_request.head.sha }} 128 | 129 | - name: Set up Python 130 | uses: actions/setup-python@v5 131 | with: 132 | python-version: '3.10' 133 | 134 | - name: Install dependencies 135 | run: pip install requests pyotp 136 | 137 | - name: Generate Apigee Token 138 | id: get_token 139 | env: 140 | APIGEE_EDGE_USER: ${{ secrets.APIGEE_EDGE_USER }} 141 | APIGEE_EDGE_PASSWORD: ${{ secrets.APIGEE_EDGE_PASSWORD }} 142 | OTP_SECRET: ${{ secrets.APIGEE_EDGE_OTP_SECRET }} 143 | run: python tests/ci/generate_apigee_edge_access_token.py 144 | 145 | - name: Fetch input.properties 146 | run: | 147 | echo "${{ secrets.APIGEE_EDGE_INPUT_PROPERTIES }}" | base64 -d > "${{ github.workspace }}/input.properties" 148 | 149 | - name: Run tests on Apigee Edge SAAS. 150 | run: | 151 | mkdir ${{ github.workspace }}/output 152 | sudo chmod 777 ${{ github.workspace }}/output 153 | docker run \ 154 | -v "${{ github.workspace }}/input.properties:/app/input.properties" \ 155 | -v "${{ github.workspace }}/output:/app/target" \ 156 | -e "SOURCE_AUTH_TOKEN=${{ steps.get_token.outputs.access_token }}" \ 157 | "${{ env.IMAGE_NAME }}:${{ needs.build-and-push.outputs.image_tag }}" \ 158 | --skip-target-validation \ 159 | --resources all 160 | 161 | - name: Upload latest assessment results 162 | uses: actions/upload-artifact@v4 163 | with: 164 | name: assessment-report 165 | path: "${{ github.workspace }}/output/qualification_report.xlsx" 166 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2025 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License 16 | 17 | """Main module for the Apigee Migration Assessment Tool. 18 | 19 | This module orchestrates the assessment process: 20 | 1. Parses input configurations. 21 | 2. Performs pre-validation checks. 22 | 3. Exports Apigee artifacts. 23 | 4. Validates the exported artifacts against Apigee X requirements. 24 | 5. Visualizes the assessment results. 25 | 6. Retrieves Apigee topology information (for on-prem). 26 | 7. Generates a qualification report. 27 | """ 28 | 29 | import argparse 30 | import os 31 | 32 | from base_logger import logger 33 | from core_wrappers import ( 34 | export_artifacts, 35 | get_topology, 36 | pre_validation_checks, 37 | qualification_report, 38 | validate_artifacts, 39 | visualize_artifacts, 40 | ) 41 | from utils import parse_config, parse_json, write_json 42 | 43 | 44 | def main(): 45 | """Main function to execute the assessment workflow. 
46 | 47 | Parses command-line arguments for resource selection, 48 | then executes the steps of the Apigee migration assessment: 49 | export, validation, visualization, topology retrieval (on-prem), 50 | and qualification report generation. Handles caching of results 51 | between steps to avoid redundant operations. 52 | """ 53 | # Parse Input 54 | parser = argparse.ArgumentParser( 55 | description="Apigee Migration Assessment Tool", 56 | usage='use "%(prog)s --help" for more information', 57 | formatter_class=argparse.RawTextHelpFormatter, 58 | ) 59 | 60 | parser.add_argument( 61 | "--resources", 62 | type=str, 63 | dest="resources", 64 | # help='Resources to be exported', 65 | help=""" 66 | resources can be one of, or a comma-separated list of: 67 | 68 | * targetservers 69 | * keyvaluemaps 70 | * references 71 | * resourcefiles 72 | * keystores 73 | * flowhooks 74 | * developers 75 | * apiproducts 76 | * apis 77 | * apps 78 | 79 | For Apigee Environment level objects choose 80 | -> targetservers,keyvaluemaps,references, 81 | resourcefiles,keystores,flowhooks 82 | 83 | For Apigee Organization level objects choose 84 | -> org_keyvaluemaps,developers,apiproducts, 85 | apis,apps,sharedflows 86 | 87 | Example1: --resources targetservers,keyvaluemaps 88 | Example2: --resources keystores,apps 89 | """, 90 | ) 91 | parser.add_argument( 92 | "--skip-target-validation", 93 | action="store_true", 94 | default=False, 95 | dest="skip_target_validation", 96 | help=( 97 | "Skip validation of APIs and SharedFlows " 98 | "against the target environment." 99 | ), 100 | ) 101 | 102 | args = parser.parse_args() 103 | resources_list = ( 104 | [r.strip() for r in args.resources.split(",")] 105 | if args.resources else [] 106 | ) 107 | 108 | # Pre validation checks 109 | cfg = parse_config("input.properties") 110 | backend_cfg = parse_config("backend.properties") 111 | 112 | source_apigee_version = cfg.get("inputs", "SOURCE_APIGEE_VERSION").upper() 113 | if (args.skip_target_validation and 114 | source_apigee_version in ["X", "HYBRID"]): 115 | logger.error( 116 | "--skip-target-validation is not applicable " 117 | "when source is Apigee X/Hybrid." 118 | ) 119 | return 120 | 121 | if not pre_validation_checks(cfg, args.skip_target_validation): 122 | logger.error("Pre-validation checks failed. Please check...") 123 | return 124 | 125 | topology_mapping = {} 126 | target_dir = cfg.get("inputs", "TARGET_DIR") 127 | export_dir = backend_cfg.get("export", "EXPORT_DIR") 128 | export_file = backend_cfg.get("export", "EXPORT_FILE") 129 | export_data_file = f"{target_dir}/{export_dir}/{export_file}" 130 | export_data = parse_json(export_data_file) 131 | 132 | report_data_file = f"{target_dir}/{export_dir}/report.json" 133 | report = parse_json(report_data_file) 134 | 135 | if not export_data.get("export", False): 136 | export_data["export"] = False 137 | topology_mapping = {} 138 | 139 | # Export Artifacts from Apigee OPDK/Edge (4G) 140 | if resources_list == []: 141 | logger.error( 142 | """Please specify --resources argument. 
143 | Use -h with the script for help""" 144 | ) 145 | return 146 | 147 | export_data = export_artifacts(cfg, resources_list) 148 | export_data["export"] = True 149 | write_json(export_data_file, export_data) 150 | 151 | if not report.get("report", False) or not export_data.get( 152 | "validation_report", False 153 | ): 154 | report = validate_artifacts( 155 | cfg, resources_list, export_data, args.skip_target_validation 156 | ) 157 | report["report"] = True 158 | export_data["validation_report"] = report 159 | write_json(export_data_file, export_data) 160 | write_json(report_data_file, report) 161 | # Visualize artifacts 162 | if not os.environ.get("IGNORE_VIZ") == "true": 163 | visualize_artifacts(cfg, export_data, report) 164 | 165 | # get Apigee OPDK/Edge (4G) topology mapping 166 | if not os.environ.get("IGNORE_OPDK_TOPOLOGY") == "true": 167 | if source_apigee_version == "OPDK": 168 | topology_mapping = get_topology(cfg) 169 | 170 | # Qualification report 171 | qualification_report(cfg, backend_cfg, export_data, topology_mapping) 172 | 173 | 174 | if __name__ == "__main__": 175 | main() 176 | -------------------------------------------------------------------------------- /tests/test_exporter.py: -------------------------------------------------------------------------------- 1 | 2 | """ 3 | Tests for the exporter module. 4 | """ 5 | import unittest 6 | from unittest.mock import MagicMock, patch, mock_open 7 | 8 | from exporter import ApigeeExporter 9 | 10 | 11 | class TestApigeeExporter(unittest.TestCase): 12 | """ 13 | Test cases for the ApigeeExporter class. 14 | """ 15 | 16 | def setUp(self): 17 | """ 18 | Set up the test case. 19 | """ 20 | self.baseurl = "https://api.enterprise.apigee.com/v1" 21 | self.org = "test_org" 22 | self.token = "test_token" 23 | self.auth_type = "oauth" 24 | self.ssl_verify = True 25 | with patch('exporter.ApigeeClassic'): 26 | self.exporter = ApigeeExporter(self.baseurl, self.org, self.token, 27 | self.auth_type, self.ssl_verify) 28 | self.exporter.apigee = MagicMock() 29 | 30 | def test_export_env(self): 31 | """ 32 | Test the export_env method. 33 | """ 34 | self.exporter.apigee.list_environments.return_value = ["test", "prod"] 35 | self.exporter.export_env() 36 | self.assertIn("test", self.exporter.export_data['envConfig']) 37 | self.assertIn("prod", self.exporter.export_data['envConfig']) 38 | 39 | def test_export_vhosts_edge(self): 40 | """ 41 | Test the export_vhosts method for Edge. 42 | """ 43 | self.exporter.apigee_type = 'edge' 44 | self.exporter.export_data['envConfig'] = {"test": {}} 45 | self.exporter.apigee.list_env_vhosts.return_value = ["default", 46 | "secure"] 47 | self.exporter.apigee.get_env_vhost.return_value = {"name": "default"} 48 | self.exporter.export_vhosts() 49 | self.assertIn("vhosts", self.exporter.export_data['envConfig']['test']) 50 | self.assertEqual( 51 | len(self.exporter.export_data['envConfig']['test']['vhosts']), 2) 52 | 53 | def test_export_vhosts_x(self): 54 | """ 55 | Test the export_vhosts method for X/Hybrid. 56 | """ 57 | self.exporter.apigee_type = 'x' 58 | self.exporter.apigee.list_env_groups.return_value = {"default": {}} 59 | self.exporter.export_vhosts() 60 | self.assertIn("envgroups", self.exporter.export_data['orgConfig']) 61 | 62 | @patch('exporter.create_dir') 63 | @patch('exporter.write_file') 64 | def test_export_env_objects_resourcefiles(self, mock_write_file, 65 | mock_create_dir): 66 | """ 67 | Test the export_env_objects method for resourcefiles. 
68 | """ 69 | self.exporter.export_data['envConfig'] = {"test": 70 | {"resourcefiles": {}}} 71 | self.exporter.apigee.list_env_objects.return_value = { 72 | 'resourceFile': [{'name': 'test.js', 'type': 'jsc'}]} 73 | self.exporter.apigee.get_env_object.return_value = b"content" 74 | self.exporter.export_env_objects(['resourcefiles'], 'export_dir') 75 | mock_create_dir.assert_called_with('export_dir/resourceFiles/jsc') 76 | mock_write_file.assert_called_with( 77 | 'export_dir/resourceFiles/jsc/test.js', b"content") 78 | 79 | def test_export_org_objects(self): 80 | """ 81 | Test the export_org_objects method. 82 | """ 83 | self.exporter.apigee.list_org_objects.return_value = ["dev1"] 84 | self.exporter.apigee.get_org_object.return_value = { 85 | "email": "a@a.com"} 86 | self.exporter.export_org_objects(['developers']) 87 | self.assertIn("developers", self.exporter.export_data['orgConfig']) 88 | self.assertIn("dev1", 89 | self.exporter.export_data['orgConfig']['developers']) 90 | 91 | def test_export_api_metadata(self): 92 | """ 93 | Test the export_api_metadata method. 94 | """ 95 | self.exporter.export_data['envConfig'] = {"test": {}} 96 | self.exporter.export_data['orgConfig'] = {} 97 | self.exporter.apigee.list_org_objects.return_value = ["api1"] 98 | self.exporter.apigee.list_api_revisions.return_value = ["1", "2"] 99 | self.exporter.apigee.api_env_mapping.return_value = { 100 | "environment": [{"name": "test", "revision": [{"name": "1"}]}]} 101 | self.exporter.export_api_metadata(['apis']) 102 | self.assertIn("apis", self.exporter.export_data['orgConfig']) 103 | self.assertIn("api1", self.exporter.export_data['orgConfig']['apis']) 104 | self.assertIn("apis", self.exporter.export_data['envConfig']['test']) 105 | self.assertIn( 106 | "api1", 107 | self.exporter.export_data['envConfig']['test']['apis'] 108 | ) 109 | 110 | @patch('exporter.run_parallel') 111 | def test_export_api_proxy_bundles(self, mock_run_parallel): 112 | """ 113 | Test the export_api_proxy_bundles method. 114 | """ 115 | self.exporter.export_data['orgConfig'] = {"apis": {"api1": {}}} 116 | self.exporter.export_api_proxy_bundles('export_dir', ['apis']) 117 | mock_run_parallel.assert_called_once() 118 | 119 | @patch('os.path.isdir') 120 | @patch('os.listdir') 121 | @patch('builtins.open', new_callable=mock_open, 122 | read_data='{"key": "value"}') 123 | # pylint: disable=unused-argument 124 | def test_read_export_state(self, mock_open_file, mock_listdir, mock_isdir): 125 | """ 126 | Test the read_export_state method. 127 | """ 128 | mock_isdir.side_effect = [True, False] 129 | mock_listdir.side_effect = [['orgConfig'], ['file.json']] 130 | data = self.exporter.read_export_state('folder_path') 131 | self.assertIn('orgConfig', data) 132 | self.assertIn('file', data['orgConfig']) 133 | 134 | def test_get_dependencies_data(self): 135 | """ 136 | Test the get_dependencies_data method. 
137 | """ 138 | self.exporter.apigee.list_environments.return_value = ["test"] 139 | self.exporter.apigee.list_env_objects.return_value = ["ref1"] 140 | self.exporter.apigee.get_env_object.return_value = {"name": "ref1"} 141 | data = self.exporter.get_dependencies_data(['references']) 142 | self.assertIn('references', data) 143 | self.assertIn('test', data['references']) 144 | self.assertIn('ref1', data['references']['test']) 145 | 146 | 147 | if __name__ == '__main__': 148 | unittest.main() 149 | -------------------------------------------------------------------------------- /unifier.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2025 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | """Handles proxy unification and splitting logic. 18 | 19 | This module provides functionalities for 20 | unifying and splitting Apigee proxies based on 21 | configuration and path analysis. It processes 22 | proxy artifacts, analyzes their dependencies, 23 | and generates modified proxy bundles. 24 | """ 25 | 26 | import utils # pylint: disable=import-error 27 | from base_logger import logger 28 | 29 | DEFAULT_GCP_ENV_TYPE = 'BASE' 30 | 31 | 32 | def proxy_unifier(proxy_dir_name): # noqa pylint: disable=R0914 33 | """Unifies and splits proxies based on path analysis. 34 | 35 | Processes proxy artifacts, analyzes 36 | dependencies, and generates modified proxy 37 | bundles based on path grouping and 38 | configuration. 39 | 40 | Args: 41 | proxy_dir_name (str): The name of the 42 | proxy directory. 43 | 44 | Returns: 45 | dict: A dictionary containing merged 46 | proxy objects information. 
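Keys take the form "<proxy>_<index>"; each value carries the merged
        'Name', 'Policies', 'TargetEndpoints' and 'ProxyEndpoints' for
        one split bundle (see the merge loop below).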
47 | """ 48 | try: 49 | inputs_cfg = utils.parse_config('input.properties') 50 | 51 | cfg = utils.parse_config('backend.properties') 52 | proxy_dir = f"./{inputs_cfg.get('inputs', 'TARGET_DIR')}/{cfg.get('export','EXPORT_DIR')}{cfg['unifier']['source_unzipped_apis']}" # noqa pylint: disable=C0301 53 | proxy_dest_dir = f"./{inputs_cfg.get('inputs', 'TARGET_DIR')}/{cfg.get('export','EXPORT_DIR')}/{cfg['unifier']['unifier_output_dir']}" # noqa pylint: disable=C0301 54 | proxy_bundle_directory = f"./{inputs_cfg.get('inputs', 'TARGET_DIR')}/{cfg.get('export','EXPORT_DIR')}/{cfg['unifier']['unifier_zipped_bundles']}" # noqa pylint: disable=C0301 55 | 56 | export_debug_file = cfg.getboolean('unifier', 'debug') 57 | 58 | utils.create_dir(proxy_bundle_directory) 59 | proxy_endpoint_count = utils.get_proxy_endpoint_count(cfg) 60 | 61 | final_dict = {} 62 | processed_dict = {} 63 | each_dir = proxy_dir_name 64 | 65 | each_proxy_dict = utils.read_proxy_artifacts( 66 | f"{proxy_dir}/{each_dir}/apiproxy", 67 | utils.parse_proxy_root( 68 | f"{proxy_dir}/{each_dir}/apiproxy") 69 | ) 70 | 71 | if len(each_proxy_dict) > 0: 72 | each_proxy_rel = utils.get_proxy_objects_relationships( 73 | each_proxy_dict) 74 | final_dict[each_dir] = each_proxy_dict 75 | processed_dict[each_dir] = each_proxy_rel 76 | 77 | processing_final_dict = final_dict.copy() 78 | 79 | path_group_map = {} 80 | for each_api, each_api_info in processed_dict.items(): 81 | path_group_map[each_api] = utils.get_api_path_groups(each_api_info) # noqa 82 | 83 | grouped_apis = {} 84 | for each_api, base_path_info in path_group_map.items(): 85 | grouped_apis[each_api] = utils.group_paths_by_path( 86 | base_path_info, proxy_endpoint_count) 87 | 88 | bundled_group = {} 89 | for each_api, grouped_api in grouped_apis.items(): 90 | bundled_group[each_api] = utils.bundle_path(grouped_api) 91 | 92 | merged_pes = {} 93 | merged_objects = {} 94 | for each_api, grouped_api in bundled_group.items(): 95 | for index, each_group in enumerate(grouped_api): 96 | merged_objects[f"{each_api}_{index}"] = { 97 | 'Policies': [], 98 | 'TargetEndpoints': [], 99 | 'ProxyEndpoints': [] 100 | } 101 | for each_path, pes in each_group.items(): 102 | each_pe = '-'.join(pes) 103 | merged_pes[each_pe] = utils.merge_proxy_endpoints( 104 | processing_final_dict[each_api], 105 | each_path, 106 | pes 107 | ) 108 | merged_objects[f"{each_api}_{index}"]['Name'] = f"{final_dict[each_api]['proxyName']}_{index}" # noqa pylint: disable=C0301 109 | merged_objects[f"{each_api}_{index}"]['Policies'].extend( # noqa 110 | [item for pe in pes for item in processed_dict[each_api][pe]['Policies']]) # noqa pylint: disable=C0301 111 | merged_objects[f"{each_api}_{index}"]['TargetEndpoints'].extend( # noqa 112 | [item for pe in pes for item in processed_dict[each_api][pe]['TargetEndpoints']]) # noqa pylint: disable=C0301 113 | merged_objects[f"{each_api}_{index}"]['Policies'] = list(set(merged_objects[f"{each_api}_{index}"]['Policies'])) # noqa pylint: disable=C0301 114 | merged_objects[f"{each_api}_{index}"]['TargetEndpoints'] = list(set(merged_objects[f"{each_api}_{index}"]['TargetEndpoints'])) # noqa pylint: disable=C0301 115 | merged_objects[f"{each_api}_{index}"]['ProxyEndpoints'].append(each_pe) # noqa 116 | 117 | for each_api, grouped_api in bundled_group.items(): 118 | for index, each_group in enumerate(grouped_api): 119 | 120 | utils.clone_proxies( 121 | f"{proxy_dir}/{each_api}/apiproxy", 122 | f"{proxy_dest_dir}/{each_api}_{index}", 123 | merged_objects[f"{each_api}_{index}"], 124 | 
merged_pes, 125 | proxy_bundle_directory 126 | ) 127 | 128 | files = { 129 | 'final_dict': final_dict, 130 | 'processed_dict': processed_dict, 131 | 'path_group_map': path_group_map, 132 | 'grouped_apis': grouped_apis, 133 | 'bundled_group': bundled_group, 134 | 'merged_pes': merged_pes, 135 | 'merged_objects': merged_objects, 136 | } 137 | if export_debug_file: 138 | utils.export_debug_log(files) 139 | 140 | except Exception as error: # noqa pylint: disable=W0718 141 | logger.error( # noqa pylint: disable=W1203 142 | f"ERROR : Some error occurred in unifier module. ERROR-INFO - {error}") # noqa pylint: disable=W1203 143 | return {} # merged_objects may not be bound if the failure happened early; avoid a NameError 144 | return merged_objects 145 | -------------------------------------------------------------------------------- /tests/test_core_wrappers.py: -------------------------------------------------------------------------------- 1 | 2 | """ 3 | Tests for the core_wrappers module. 4 | """ 5 | import unittest 6 | from unittest.mock import patch 7 | from configparser import ConfigParser 8 | 9 | from core_wrappers import ( 10 | pre_validation_checks, 11 | export_artifacts, 12 | validate_artifacts, 13 | visualize_artifacts, 14 | qualification_report, 15 | get_topology, 16 | ) 17 | 18 | 19 | class TestCoreWrappers(unittest.TestCase): 20 | """ 21 | Test cases for the core_wrappers module. 22 | """ 23 | 24 | def setUp(self): 25 | """ 26 | Set up the test case. 27 | """ 28 | self.cfg = ConfigParser() 29 | self.cfg.add_section('inputs') 30 | self.cfg.set('inputs', 'SOURCE_URL', 'http://source.com') 31 | self.cfg.set('inputs', 'SOURCE_ORG', 'source_org') 32 | self.cfg.set('inputs', 'SOURCE_AUTH_TYPE', 'basic') 33 | self.cfg.set('inputs', 'SOURCE_APIGEE_VERSION', 'OPDK') 34 | self.cfg.set('inputs', 'TARGET_DIR', '/tmp') 35 | self.cfg.set('inputs', 'SSL_VERIFICATION', 'False') 36 | self.cfg.set('inputs', 'TARGET_URL', 'http://target.com') 37 | self.cfg.set('inputs', 'GCP_PROJECT_ID', 'gcp_project') 38 | self.cfg.set('inputs', 'TARGET_COMPARE', 'False') 39 | 40 | @patch('core_wrappers.get_source_auth_token') 41 | @patch('core_wrappers.ApigeeClassic') 42 | @patch('core_wrappers.get_access_token') 43 | @patch('core_wrappers.ApigeeNewGen') 44 | def test_pre_validation_checks_success(self, mock_newgen, 45 | mock_get_access_token, 46 | mock_classic, 47 | mock_get_source_auth_token): 48 | """ 49 | Test the pre_validation_checks function for success. 50 | """ 51 | mock_get_source_auth_token.return_value = 'source_token' 52 | mock_classic.return_value.get_org.return_value = {'name': 'source_org'} 53 | mock_get_access_token.return_value = 'gcp_token' 54 | mock_newgen.return_value.validate_permissions.return_value = [] 55 | mock_newgen.return_value.get_org.return_value = { 56 | 'name': 'gcp_project'} 57 | self.assertTrue(pre_validation_checks(self.cfg)) 58 | 59 | @patch('core_wrappers.parse_config') 60 | @patch('core_wrappers.get_source_auth_token') 61 | @patch('core_wrappers.create_dir') 62 | @patch('core_wrappers.ApigeeExporter') 63 | @patch('core_wrappers.sharding') 64 | # noqa pylint: disable=too-many-arguments, unused-argument, too-many-positional-arguments 65 | def test_export_artifacts(self, mock_sharding, mock_exporter, 66 | mock_create_dir, 67 | mock_get_source_auth_token, 68 | mock_parse_config): 69 | """ 70 | Test the export_artifacts function. 
71 | """ 72 | mock_parse_config.return_value.get.return_value = 'export' 73 | mock_get_source_auth_token.return_value = 'source_token' 74 | mock_exporter.return_value.get_export_data.return_value = { 75 | 'orgConfig': {}, 'envConfig': {}} 76 | mock_sharding.proxy_dependency_map.return_value = {} 77 | result = export_artifacts(self.cfg, ['all']) 78 | self.assertIn('proxy_dependency_map', result) 79 | 80 | @patch('core_wrappers.parse_config') 81 | @patch('core_wrappers.create_dir') 82 | @patch('core_wrappers.get_access_token') 83 | @patch('core_wrappers.parse_json') 84 | @patch('core_wrappers.ApigeeValidator') 85 | # noqa pylint: disable=too-many-arguments, unused-argument, too-many-positional-arguments 86 | def test_validate_artifacts(self, mock_validator, mock_parse_json, 87 | mock_get_access_token, mock_create_dir, 88 | mock_parse_config): 89 | """ 90 | Test the validate_artifacts function. 91 | """ 92 | mock_parse_config.return_value.get.return_value = 'export' 93 | mock_get_access_token.return_value = 'gcp_token' 94 | mock_parse_json.return_value = {} 95 | (mock_validator.return_value. 96 | validate_env_targetservers.return_value) = [] 97 | export_data = { 98 | 'envConfig': {'test': {'targetServers': {}, 99 | 'resourcefiles': {}, 100 | 'flowhooks': {}, 101 | 'kvms': {}}}, 102 | 'orgConfig': {'kvms': {}, 'developers': {}, 103 | 'apiProducts': {}, 'apps': {}} 104 | } 105 | result = validate_artifacts(self.cfg, ['all'], export_data) 106 | self.assertIsInstance(result, dict) 107 | 108 | @patch('core_wrappers.parse_config') 109 | @patch('core_wrappers.nx.DiGraph') 110 | @patch('core_wrappers.Network') 111 | # pylint: disable=unused-argument 112 | def test_visualize_artifacts(self, mock_network, mock_digraph, 113 | mock_parse_config): 114 | """ 115 | Test the visualize_artifacts function. 116 | """ 117 | mock_parse_config.return_value.get.return_value = 'visualization.html' 118 | export_data = {'orgConfig': {}, 'envConfig': {}} 119 | report = {'report': {}} 120 | visualize_artifacts(self.cfg, export_data, report) 121 | mock_network.return_value.show.assert_called_with( 122 | '/tmp/visualization.html') 123 | 124 | @patch('core_wrappers.QualificationReport') 125 | def test_qualification_report(self, mock_qualification_report): 126 | """ 127 | Test the qualification_report function. 128 | """ 129 | backend_cfg = ConfigParser() 130 | backend_cfg.add_section('report') 131 | backend_cfg.set('report', 'QUALIFICATION_REPORT', 'report.xlsx') 132 | export_data = {} 133 | topology_mapping = {} 134 | qualification_report( 135 | self.cfg, backend_cfg, 136 | export_data, topology_mapping 137 | ) 138 | mock_qualification_report.return_value.close.assert_called_once() 139 | 140 | @patch('core_wrappers.get_source_auth_token') 141 | @patch('core_wrappers.ApigeeTopology') 142 | def test_get_topology(self, mock_topology, mock_get_source_auth_token): 143 | """ 144 | Test the get_topology function. 
145 | """ 146 | mock_get_source_auth_token.return_value = 'source_token' 147 | mock_topology.return_value.get_topology_mapping.return_value = {} 148 | mock_topology.return_value.get_data_center_mapping.return_value = {} 149 | result = get_topology(self.cfg) 150 | self.assertIn('pod_component_mapping', result) 151 | 152 | 153 | if __name__ == '__main__': 154 | unittest.main() 155 | -------------------------------------------------------------------------------- /tests/test_rest.py: -------------------------------------------------------------------------------- 1 | """Test suite for rest.""" 2 | import json 3 | import sys 4 | import unittest 5 | from unittest.mock import Mock, patch 6 | from rest import (ApigeeError, EmptyResponse, JsonResponse, PlainResponse, # noqa 7 | RawResponse, RestClient) 8 | sys.path.insert(0, '..') 9 | 10 | 11 | class TestRestClient(unittest.TestCase): 12 | """Test class for RestClient.""" 13 | 14 | def setUp(self): 15 | """Set up.""" 16 | self.mock_session = Mock() 17 | self.patcher = patch('rest.requests.Session', 18 | return_value=self.mock_session) 19 | self.mock_session_class = self.patcher.start() 20 | 21 | def tearDown(self): 22 | """Tear down.""" 23 | self.patcher.stop() 24 | 25 | def test_init_valid_auth_type(self): 26 | """Test init valid auth type.""" 27 | client = RestClient(auth_type='basic', token='test_token') 28 | self.assertEqual(client.auth_type, 'basic') 29 | self.assertEqual(client.base_headers['Authorization'], 30 | 'Basic test_token') 31 | 32 | client = RestClient(auth_type='oauth', token='test_token') 33 | self.assertEqual(client.auth_type, 'oauth') 34 | self.assertEqual(client.base_headers['Authorization'], 35 | 'Bearer test_token') 36 | 37 | def test_init_invalid_auth_type(self): 38 | """Test init invalid auth type.""" 39 | with self.assertRaises(ValueError): 40 | RestClient(auth_type='invalid', token='test_token') 41 | 42 | def _prepare_mock_response(self, status_code, content, 43 | content_type='application/json'): 44 | mock_response = Mock() 45 | mock_response.status_code = status_code 46 | mock_response.text = json.dumps( 47 | content) if content_type == 'application/json' else content 48 | mock_response.content = bytes(mock_response.text, 49 | 'utf-8') if isinstance( 50 | content, str) else content 51 | mock_response.headers = {'Content-Type': content_type} 52 | mock_response.request = Mock() 53 | mock_response.request.method = 'GET' 54 | mock_response.request.url = 'http://example.com' 55 | return mock_response 56 | 57 | def test_get_success(self): 58 | """Test get success.""" 59 | mock_response = self._prepare_mock_response(200, {'key': 'value'}) 60 | self.mock_session.get.return_value = mock_response 61 | 62 | client = RestClient(auth_type='basic', token='test') 63 | response = client.get('http://example.com') 64 | 65 | self.assertEqual(response, {'key': 'value'}) 66 | self.mock_session.get.assert_called_once_with( 67 | 'http://example.com', params=None, 68 | headers={'Authorization': 'Basic test'} 69 | ) 70 | 71 | def test_post_success(self): 72 | """Test post success.""" 73 | mock_response = self._prepare_mock_response(200, 74 | {'status': 'created'}) 75 | self.mock_session.post.return_value = mock_response 76 | 77 | client = RestClient(auth_type='basic', token='test') 78 | response = client.post('http://example.com', data={'name': 'test'}) 79 | 80 | self.assertEqual(response, {'status': 'created'}) 81 | self.mock_session.post.assert_called_once_with( 82 | 'http://example.com', data='{"name": "test"}', 83 | 
headers={'Authorization': 'Basic test'} 84 | ) 85 | 86 | def test_file_get_success(self): 87 | """Test file get success.""" 88 | mock_response = self._prepare_mock_response( 89 | 200, b'file_content', 'application/octet-stream') 90 | self.mock_session.get.return_value = mock_response 91 | 92 | client = RestClient(auth_type='basic', token='test') 93 | response = client.file_get('http://example.com/file') 94 | 95 | self.assertEqual(response, b'file_content') 96 | self.mock_session.get.assert_called_once() 97 | 98 | def test_file_post_success(self): 99 | """Test file post success.""" 100 | mock_response = self._prepare_mock_response(200, 101 | {'status': 'uploaded'}) 102 | self.mock_session.post.return_value = mock_response 103 | 104 | client = RestClient(auth_type='basic', token='test') 105 | response = client.file_post( 106 | 'http://example.com/upload', 107 | data={'field': 'value'}, 108 | files={'file': b'content'} 109 | ) 110 | 111 | self.assertEqual(response, {'status': 'uploaded'}) 112 | self.mock_session.post.assert_called_once() 113 | 114 | def test_403_forbidden(self): 115 | """Test 403 forbidden.""" 116 | mock_response = self._prepare_mock_response(403, 'Forbidden') 117 | self.mock_session.get.return_value = mock_response 118 | 119 | client = RestClient(auth_type='basic', token='test') 120 | response = client.get('http://example.com') 121 | 122 | self.assertEqual(response, 'Forbidden') 123 | 124 | def test_apigee_error(self): 125 | """Test apigee error.""" 126 | error_content = {'errorCode': 'some.error', 127 | 'message': 'An error occurred'} 128 | mock_response = self._prepare_mock_response(400, error_content) 129 | self.mock_session.get.return_value = mock_response 130 | # Force _is_error to return True 131 | with patch('rest.Response._is_error', return_value=True): 132 | client = RestClient(auth_type='basic', token='test') 133 | with self.assertRaises(ApigeeError) as cm: 134 | client.get('http://example.com') 135 | 136 | self.assertEqual(cm.exception.status_code, 400) 137 | self.assertEqual(cm.exception.error_code, 'some.error') 138 | self.assertEqual(cm.exception.message, 'An error occurred') 139 | 140 | 141 | class TestResponseClasses(unittest.TestCase): 142 | """Test class for ResponseClasses.""" 143 | 144 | def test_json_response(self): 145 | """Test json response.""" 146 | mock_response = Mock() 147 | mock_response.status_code = 200 148 | mock_response.text = '{"key": "value"}' 149 | response = JsonResponse(mock_response) 150 | self.assertEqual(response.content(), {"key": "value"}) 151 | 152 | def test_plain_response(self): 153 | """Test plain response.""" 154 | mock_response = Mock() 155 | mock_response.status_code = 200 156 | mock_response.text = 'OK' 157 | response = PlainResponse(mock_response) 158 | self.assertEqual(response.content(), 'OK') 159 | 160 | def test_empty_response(self): 161 | """Test empty response.""" 162 | response = EmptyResponse(204) 163 | self.assertEqual(response.content(), '') 164 | 165 | def test_raw_response(self): 166 | """Test raw response.""" 167 | mock_response = Mock() 168 | mock_response.status_code = 200 169 | mock_response.content = b'raw_data' 170 | response = RawResponse(mock_response) 171 | self.assertEqual(response.content(), b'raw_data') 172 | 173 | 174 | if __name__ == '__main__': 175 | unittest.main() 176 | -------------------------------------------------------------------------------- /tests/test_nextgen.py: -------------------------------------------------------------------------------- 1 | """Module docstring.""" 2 | 3 | import 
unittest 4 | from unittest.mock import MagicMock, mock_open, patch 5 | 6 | from nextgen import ApigeeNewGen 7 | 8 | 9 | class TestApigeeNewGen(unittest.TestCase): 10 | """Test class for ApigeeNewGen.""" 11 | 12 | def setUp(self): 13 | """Set up.""" 14 | self.baseurl = "https://apigee.googleapis.com/v1" 15 | self.project_id = "test_project" 16 | self.token = "test_token" 17 | self.env_type = "hybrid" 18 | self.ssl_verify = True 19 | self.nextgen_client = ApigeeNewGen(self.baseurl, self.project_id, 20 | self.token, self.env_type, 21 | self.ssl_verify) 22 | self.nextgen_client.client = MagicMock() 23 | 24 | @patch('nextgen.Credentials') 25 | @patch('nextgen.resourcemanager_v3.ProjectsClient') 26 | def test_validate_permissions(self, mock_projects_client, 27 | mock_credentials): 28 | """Test validate permissions.""" 29 | mock_credentials.return_value = "dummy_credentials" 30 | (mock_projects_client.return_value. 31 | test_iam_permissions.return_value.permissions) = ["permission1"] 32 | with patch('nextgen.parse_json') as mock_parse_json: 33 | mock_parse_json.return_value = ["permission1", "permission2"] 34 | missing = self.nextgen_client.validate_permissions() 35 | self.assertEqual(missing, ["permission2"]) 36 | 37 | def test_get_org(self): 38 | """Test get org.""" 39 | self.nextgen_client.client.get.return_value = {"name": "test_project"} 40 | result = self.nextgen_client.get_org() 41 | self.assertEqual(result, {"name": "test_project"}) 42 | self.nextgen_client.client.get.assert_called_with( 43 | f"{self.baseurl}/organizations/{self.project_id}") 44 | 45 | def test_list_environments(self): 46 | """Test list environments.""" 47 | with patch.object(self.nextgen_client, 48 | 'list_org_objects') as mock_list_org_objects: 49 | mock_list_org_objects.return_value = ["test", "prod"] 50 | result = self.nextgen_client.list_environments() 51 | self.assertEqual(result, ["test", "prod"]) 52 | mock_list_org_objects.assert_called_with('environments') 53 | 54 | def test_list_org_objects_paginated(self): 55 | """Test list org objects paginated.""" 56 | self.nextgen_client.client.get.side_effect = [ 57 | {"developer": [{"email": "a@a.com"}, {"email": "b@b.com"}]}, 58 | {"developer": [{"email": "b@b.com"}, {"email": "c@c.com"}]}, 59 | {} 60 | ] 61 | result = self.nextgen_client.list_org_objects("developers") 62 | self.assertEqual(result, ["a@a.com", "b@b.com", "c@c.com"]) 63 | 64 | def test_list_org_objects_not_paginated(self): 65 | """Test list org objects not paginated.""" 66 | self.nextgen_client.client.get.return_value = [{"name": "test"}, 67 | {"name": "prod"}] 68 | result = self.nextgen_client.list_org_objects("environments") 69 | self.assertEqual(result, [{"name": "test"}, {"name": "prod"}]) 70 | 71 | def test_list_org_objects_expand(self): 72 | """Test list org objects expand.""" 73 | self.nextgen_client.client.get.side_effect = [ 74 | {"apiProduct": [{"name": "p1"}, {"name": "p2"}]}, 75 | {"apiProduct": [{"name": "p2"}, {"name": "p3"}]}, 76 | {} 77 | ] 78 | result = self.nextgen_client.list_org_objects_expand("apiproducts") 79 | self.assertEqual(list(result.keys()), ["p1", "p2", "p3"]) 80 | 81 | def test_get_org_object(self): 82 | """Test get org object.""" 83 | self.nextgen_client.client.get.return_value = { 84 | "name": "test_developer"} 85 | result = self.nextgen_client.get_org_object("developers", "a@a.com") 86 | self.assertEqual(result, {"name": "test_developer"}) 87 | 88 | def test_list_env_objects(self): 89 | """Test list env objects.""" 90 | self.nextgen_client.client.get.return_value = 
["target1", "target2"] 91 | result = self.nextgen_client.list_env_objects("test", "targetservers") 92 | self.assertEqual(result, ["target1", "target2"]) 93 | 94 | def test_get_env_object(self): 95 | """Test get env object.""" 96 | self.nextgen_client.client.get.return_value = {"name": "test_target"} 97 | result = self.nextgen_client.get_env_object("test", "targetservers", 98 | "test_target") 99 | self.assertEqual(result, {"name": "test_target"}) 100 | 101 | def test_list_env_groups(self): 102 | """Test list env groups.""" 103 | with patch.object( 104 | self.nextgen_client, 105 | 'list_org_objects_expand') as mock_list_org_objects_expand: 106 | mock_list_org_objects_expand.return_value = {"default": {}} 107 | result = self.nextgen_client.list_env_groups() 108 | self.assertEqual(result, {"default": {}}) 109 | mock_list_org_objects_expand.assert_called_with('envgroups') 110 | 111 | def test_get_env_groups(self): 112 | """Test get env groups.""" 113 | with patch.object(self.nextgen_client, 114 | 'get_org_object') as mock_get_org_object: 115 | mock_get_org_object.return_value = {"name": "default"} 116 | result = self.nextgen_client.get_env_groups("default") 117 | self.assertEqual(result, {"name": "default"}) 118 | mock_get_org_object.assert_called_with('envgroups', 'default') 119 | 120 | def test_list_apis(self): 121 | """Test list apis.""" 122 | with patch.object(self.nextgen_client, 123 | 'list_org_objects') as mock_list_org_objects: 124 | mock_list_org_objects.return_value = ["api1", "api2"] 125 | result = self.nextgen_client.list_apis("apis") 126 | self.assertEqual(result, ["api1", "api2"]) 127 | mock_list_org_objects.assert_called_with('apis') 128 | 129 | def test_list_api_revisions(self): 130 | """Test list api revisions.""" 131 | self.nextgen_client.client.get.return_value = ["1", "2"] 132 | result = self.nextgen_client.list_api_revisions("apis", "test_api") 133 | self.assertEqual(result, ["1", "2"]) 134 | 135 | def test_api_env_mapping(self): 136 | """Test api env mapping.""" 137 | self.nextgen_client.client.get.return_value = { 138 | "deployments": [{"environment": "test", "revision": "1"}]} 139 | result = self.nextgen_client.api_env_mapping("apis", "test_api") 140 | self.assertEqual( 141 | result, 142 | {'environment': [{'name': 'test', 'revision': [{'name': '1'}]}]}) 143 | 144 | def test_list_apis_env(self): 145 | """Test list apis env.""" 146 | self.nextgen_client.client.get.return_value = { 147 | "deployments": [{"apiProxy": "api1"}, {"apiProxy": "api2"}]} 148 | result = self.nextgen_client.list_apis_env("test") 149 | self.assertEqual(result, ["api1", "api2"]) 150 | 151 | @patch("builtins.open", new_callable=mock_open, read_data=b"data") 152 | def test_fetch_api_revision(self, mock_file): 153 | """Test fetch api revision.""" 154 | self.nextgen_client.client.file_get.return_value = b"bundle_data" 155 | self.nextgen_client.fetch_api_revision("apis", "test_api", "1", 156 | "export_dir") 157 | mock_file.assert_called_with("./export_dir/test_api.zip", "wb") 158 | mock_file().write.assert_called_with(b"bundle_data") 159 | 160 | @patch("builtins.open", new_callable=mock_open, read_data=b"data") 161 | def test_create_api(self, _): 162 | """Test create api.""" 163 | self.nextgen_client.client.file_post.return_value = { 164 | "name": "test_api"} 165 | result = self.nextgen_client.create_api("apis", "test_api", 166 | "bundle.zip", "create") 167 | self.assertEqual(result, {"name": "test_api"}) 168 | 169 | 170 | if __name__ == '__main__': 171 | unittest.main() 172 | 
-------------------------------------------------------------------------------- /tests/test_classic.py: -------------------------------------------------------------------------------- 1 | 2 | """ 3 | Tests for the classic module. 4 | """ 5 | import unittest 6 | from unittest.mock import MagicMock, patch 7 | 8 | from classic import ApigeeClassic 9 | 10 | 11 | class TestApigeeClassic(unittest.TestCase): 12 | """ 13 | Test cases for the ApigeeClassic class. 14 | """ 15 | 16 | def setUp(self): 17 | """ 18 | Set up the test case. 19 | """ 20 | self.baseurl = "https://api.enterprise.apigee.com/v1" 21 | self.org = "test_org" 22 | self.token = "test_token" 23 | self.auth_type = "oauth" 24 | self.ssl_verify = True 25 | self.classic_client = ApigeeClassic(self.baseurl, self.org, self.token, 26 | self.auth_type, self.ssl_verify) 27 | self.classic_client.client = MagicMock() 28 | 29 | def test_get_org(self): 30 | """ 31 | Test the get_org method. 32 | """ 33 | self.classic_client.client.get.return_value = {"name": "test_org"} 34 | result = self.classic_client.get_org() 35 | self.assertEqual(result, {"name": "test_org"}) 36 | self.classic_client.client.get.assert_called_with( 37 | f"{self.baseurl}/organizations/{self.org}") 38 | 39 | def test_list_environments(self): 40 | """ 41 | Test the list_environments method. 42 | """ 43 | self.classic_client.client.get.return_value = ["test", "prod"] 44 | result = self.classic_client.list_environments() 45 | self.assertEqual(result, ["test", "prod"]) 46 | self.classic_client.client.get.assert_called_with( 47 | f"{self.baseurl}/organizations/{self.org}/environments") 48 | 49 | def test_list_org_objects_paginated(self): 50 | """ 51 | Test the list_org_objects method with paginated results. 52 | """ 53 | self.classic_client.client.get.side_effect = [ 54 | ["item1", "item2"], 55 | ["item2", "item3"], 56 | [] 57 | ] 58 | result = self.classic_client.list_org_objects("apis") 59 | self.assertEqual(result, ["item1", "item2", "item3"]) 60 | 61 | def test_list_org_objects_not_paginated(self): 62 | """ 63 | Test the list_org_objects method with non-paginated results. 64 | """ 65 | self.classic_client.client.get.return_value = ["item1", "item2"] 66 | result = self.classic_client.list_org_objects("kvms") 67 | self.assertEqual(result, ["item1", "item2"]) 68 | 69 | def test_list_org_objects_expand(self): 70 | """ 71 | Test the list_org_objects_expand method. 72 | """ 73 | self.classic_client.client.get.side_effect = [ 74 | {"app": [{"appId": "1"}, {"appId": "2"}]}, 75 | {"app": [{"appId": "2"}, {"appId": "3"}]}, 76 | {} 77 | ] 78 | result = self.classic_client.list_org_objects_expand("apps") 79 | self.assertEqual(list(result.keys()), ["1", "2", "3"]) 80 | 81 | def test_get_org_object(self): 82 | """ 83 | Test the get_org_object method. 84 | """ 85 | self.classic_client.client.get.return_value = {"name": "test_object"} 86 | result = self.classic_client.get_org_object("developers", 87 | "test_developer") 88 | self.assertEqual(result, {"name": "test_object"}) 89 | 90 | def test_get_org_object_resourcefiles(self): 91 | """ 92 | Test the get_org_object method for resourcefiles. 93 | """ 94 | self.classic_client.client.get.return_value = "resource_data" 95 | result = self.classic_client.get_org_object( 96 | "resourcefiles", {"type": "jsc", "name": "test.js"}) 97 | self.assertEqual(result, "resource_data") 98 | 99 | def test_list_env_objects(self): 100 | """ 101 | Test the list_env_objects method. 
102 | """ 103 | self.classic_client.client.get.return_value = ["target1", "target2"] 104 | result = self.classic_client.list_env_objects("test", "targetservers") 105 | self.assertEqual(result, ["target1", "target2"]) 106 | 107 | def test_get_env_object(self): 108 | """ 109 | Test the get_env_object method. 110 | """ 111 | self.classic_client.client.get.return_value = {"name": "test_target"} 112 | result = self.classic_client.get_env_object("test", "targetservers", 113 | "test_target") 114 | self.assertEqual(result, {"name": "test_target"}) 115 | 116 | def test_list_env_vhosts(self): 117 | """ 118 | Test the list_env_vhosts method. 119 | """ 120 | self.classic_client.client.get.return_value = ["default", "secure"] 121 | result = self.classic_client.list_env_vhosts("test") 122 | self.assertEqual(result, ["default", "secure"]) 123 | 124 | def test_get_env_vhost(self): 125 | """ 126 | Test the get_env_vhost method. 127 | """ 128 | self.classic_client.client.get.return_value = {"name": "secure"} 129 | result = self.classic_client.get_env_vhost("test", "secure") 130 | self.assertEqual(result, {"name": "secure"}) 131 | 132 | def test_list_apis(self): 133 | """ 134 | Test the list_apis method. 135 | """ 136 | self.classic_client.client.get.return_value = ["api1", "api2"] 137 | result = self.classic_client.list_apis("apis") 138 | self.assertEqual(result, ["api1", "api2"]) 139 | 140 | def test_list_api_revisions(self): 141 | """ 142 | Test the list_api_revisions method. 143 | """ 144 | self.classic_client.client.get.return_value = ["1", "2"] 145 | result = self.classic_client.list_api_revisions("apis", "test_api") 146 | self.assertEqual(result, ["1", "2"]) 147 | 148 | def test_api_env_mapping(self): 149 | """ 150 | Test the api_env_mapping method. 151 | """ 152 | self.classic_client.client.get.return_value = {"environment": [ 153 | {"name": "test"}]} 154 | result = self.classic_client.api_env_mapping("apis", "test_api") 155 | self.assertEqual(result, {"environment": [{"name": "test"}]}) 156 | 157 | def test_list_apis_env(self): 158 | """ 159 | Test the list_apis_env method. 160 | """ 161 | self.classic_client.client.get.return_value = { 162 | "aPIProxy": [{"name": "api1"}, {"name": "api2"}]} 163 | result = self.classic_client.list_apis_env("test") 164 | self.assertEqual(result, ["api1", "api2"]) 165 | 166 | @patch("builtins.open", new_callable=unittest.mock.mock_open) 167 | def test_fetch_api_revision(self, mock_open): 168 | """ 169 | Test the fetch_api_revision method. 170 | """ 171 | self.classic_client.client.file_get.return_value = b"test_bundle_data" 172 | self.classic_client.fetch_api_revision("apis", "test_api", "1", 173 | "export_dir") 174 | mock_open.assert_called_with("./export_dir/test_api.zip", "wb") 175 | mock_open().write.assert_called_with(b"test_bundle_data") 176 | 177 | @patch("builtins.open", new_callable=unittest.mock.mock_open) 178 | def test_write_proxy_bundle(self, mock_open): 179 | """ 180 | Test the write_proxy_bundle method. 181 | """ 182 | self.classic_client.write_proxy_bundle("export_dir", "test_api", 183 | b"test_data") 184 | mock_open.assert_called_with("./export_dir/test_api.zip", "wb") 185 | mock_open().write.assert_called_with(b"test_data") 186 | 187 | @patch.object(ApigeeClassic, 'list_api_revisions') 188 | @patch.object(ApigeeClassic, 'fetch_api_revision') 189 | def test_fetch_proxy(self, mock_fetch_api_revision, 190 | mock_list_api_revisions): 191 | """ 192 | Test the fetch_proxy method. 
193 | """ 194 | mock_list_api_revisions.return_value = ["1", "2"] 195 | self.classic_client.fetch_proxy(("apis", "test_api", "export_dir")) 196 | mock_list_api_revisions.assert_called_with("apis", "test_api") 197 | mock_fetch_api_revision.assert_called_with("apis", "test_api", "2", 198 | "export_dir") 199 | 200 | def test_view_pod_component_details(self): 201 | """ 202 | Test the view_pod_component_details method. 203 | """ 204 | self.classic_client.client.get.return_value = [{"uUID": "1234"}] 205 | result = self.classic_client.view_pod_component_details("gateway") 206 | self.assertEqual(result, [{"uUID": "1234"}]) 207 | 208 | 209 | if __name__ == '__main__': 210 | unittest.main() 211 | -------------------------------------------------------------------------------- /topology.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2025 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License 16 | 17 | """Generates and visualizes Apigee topology diagrams. 18 | 19 | This module interacts with the Apigee Management API to retrieve topology 20 | information, including pod and component details. 21 | It then processes this information to create data center mappings and 22 | generates visual representations of the topology using diagrams. 23 | """ 24 | import os 25 | from diagrams import Diagram, Cluster # noqa pylint: disable=E0401 26 | from diagrams.generic.blank import Blank # noqa pylint: disable=E0401 27 | from classic import ApigeeClassic 28 | from topology_mapping.pod import pod_mapping 29 | from utils import write_json, parse_config 30 | from base_logger import logger 31 | 32 | 33 | class ApigeeTopology(): # noqa pylint: disable=R0902 34 | """Represents and visualizes Apigee topology. 35 | 36 | Retrieves topology information from Apigee, creates data center 37 | mappings, and generates topology diagrams. 38 | """ 39 | 40 | def __init__(self, baseurl, org, token, auth_type, cfg): # noqa pylint: disable=R0913,R0917 41 | """Initializes ApigeeTopology. 42 | 43 | Args: 44 | baseurl (str): The base URL for the Apigee Management API. 45 | org (str): The Apigee organization name. 46 | token (str): The authentication token. 47 | auth_type (str): The authentication type. 48 | cfg (configparser.ConfigParser): The configuration object. 
49 |         """
50 |         self.baseurl = baseurl
51 |         self.org = org
52 |         self.token = token
53 |         self.auth_type = auth_type
54 |         self.cfg = cfg
55 |         self.backend_cfg = parse_config('backend.properties')
56 |         target_dir = self.cfg.get('inputs', 'TARGET_DIR')
57 |         topology_dir = self.backend_cfg.get('topology', 'TOPOLOGY_DIR')
58 | 
59 |         self.topology_dir_path = f"{target_dir}/{topology_dir}"
60 | 
61 |         if not os.path.isdir(self.topology_dir_path):
62 |             os.makedirs(self.topology_dir_path)
63 |         try:
64 |             ssl_verification = cfg.getboolean('inputs', 'SSL_VERIFICATION')
65 |         except ValueError:
66 |             ssl_verification = True
67 |         self.opdk = ApigeeClassic(baseurl, org, token, self.auth_type, ssl_verification)  # noqa pylint: disable=C0301
68 | 
69 |     def get_topology_mapping(self):
70 |         """Retrieves and maps Apigee topology components.
71 | 
72 |         Retrieves pod and component details from the Apigee Management API
73 |         and creates a mapping.
74 | 
75 |         Returns:
76 |             dict: A dictionary containing the topology mapping.
77 |         """
78 | 
79 |         logger.info('In get APIGEE edge network topology mapping')
80 |         pod_component_result = {}
81 | 
82 |         for pod_name in pod_mapping:
83 |             component_type_resp = []
84 |             result_arr = self.opdk.view_pod_component_details(pod_name)
85 | 
86 |             for result in result_arr:
87 |                 component_type_resp.append({
88 |                     "externalHostName": result.get("externalHostName", ""),
89 |                     "externalIP": result.get("externalIP", ""),
90 |                     "internalHostName": result.get("internalHostName", ""),
91 |                     "internalIP": result.get("internalIP", ""),
92 |                     "isUp": result.get("isUp", ""),
93 |                     "pod": result.get("pod", ""),
94 |                     "reachable": result.get("reachable", ""),
95 |                     "region": result.get("region", ""),
96 |                     "type": result.get("type", "")
97 |                 })
98 | 
99 |             pod_component_result[pod_name] = component_type_resp
100 | 
101 |         nw_topology_mapping = self.backend_cfg.get('topology', 'NW_TOPOLOGY_MAPPING')  # noqa
102 |         write_json(
103 |             f"{self.topology_dir_path}/{nw_topology_mapping}", pod_component_result)  # noqa
104 | 
105 |         return pod_component_result
106 | 
107 |     def get_data_center_mapping(self, pod_component_mapping):
108 |         """Creates a data center mapping from pod component information.
109 | 
110 |         Processes the pod component mapping to create a data center mapping.
111 | 
112 |         Args:
113 |             pod_component_mapping (dict): The pod component mapping.
114 | 
115 |         Returns:
116 |             dict: A dictionary containing the data center mapping.
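
        Example (illustrative shapes only; the values are hypothetical and
        the call also writes the mapping to a JSON file under the topology
        directory):

            >>> mapping = {
            ...     "gateway": [
            ...         {"region": "dc-1", "pod": "gateway",
            ...          "internalIP": "10.0.0.1", "type": ["router"]}
            ...     ]
            ... }
            >>> topology.get_data_center_mapping(mapping)
            {'dc-1': {'gateway': [{'region': 'dc-1', 'pod': 'gateway',
                                   'internalIP': '10.0.0.1',
                                   'type': ['router']}]}}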
117 | """ 118 | 119 | logger.info('In get data center mapping from network topology mapping') # noqa 120 | data_center = {} 121 | 122 | for pod in pod_component_mapping: 123 | for component_instance in pod_component_mapping[pod]: 124 | 125 | if component_instance['region'] not in data_center: 126 | data_center[component_instance['region']] = {} 127 | 128 | if component_instance['pod'] not in data_center[component_instance['region']]: # noqa pylint: disable=C0301 129 | data_center[component_instance['region'] 130 | ][component_instance['pod']] = [] 131 | 132 | data_center[component_instance['region']][component_instance['pod']].append( # noqa pylint: disable=C0301 133 | component_instance) 134 | 135 | datacenter_mapping = self.backend_cfg.get('topology', 'DATA_CENTER_MAPPING') # noqa 136 | write_json( 137 | f'{self.topology_dir_path}/{datacenter_mapping}', data_center) # noqa 138 | 139 | return data_center 140 | 141 | def draw_topology_graph_diagram(self, data_center): # noqa pylint: disable=R0914,R0912 142 | """Draws a topology graph diagram. 143 | 144 | Generates a visual representation of the Apigee topology 145 | using diagrams. 146 | 147 | Args: 148 | data_center (dict): The data center mapping. 149 | """ 150 | 151 | logger.info('Draw network topology mapping graph diagram') 152 | main_graph_attr = { 153 | "nodesep": "1", 154 | "fontsize": "70", 155 | } 156 | 157 | data_center_attr = { 158 | "bgcolor": "#f3f3f3", 159 | "style": "ortho", 160 | "ranksep": "1", 161 | "fontsize": "25", 162 | } 163 | 164 | pod_attr = { 165 | "fontsize": "30", 166 | } 167 | 168 | ip_attr = { 169 | "nodesep": "1", 170 | "fontsize": "25", 171 | } 172 | with Diagram("Edge Installation Topology with Pod and IP Clustering", filename=f"{self.topology_dir_path}/Edge_Installation_Topology_With_Pod_IPs", show=False, graph_attr=main_graph_attr, node_attr=main_graph_attr, outformat=["png"]): # noqa pylint: disable=C0301 173 | internal_ip_clusters = {} 174 | for dc in data_center: 175 | with Cluster(dc, graph_attr=data_center_attr): 176 | for pod in data_center[dc]: 177 | with Cluster(pod, graph_attr=pod_attr): 178 | for pod_instance in data_center[dc][pod]: 179 | if not pod_instance['internalIP'] in internal_ip_clusters: # noqa 180 | internal_ip_clusters[pod_instance['internalIP']] = [ # noqa 181 | ] 182 | internal_ip_clusters[pod_instance['internalIP']].append( # noqa 183 | pod_instance) 184 | 185 | svc_group = [] 186 | for ip_grp, ip_grp_value in internal_ip_clusters.items(): # noqa pylint: disable=C0301 187 | ip_attr['bgcolor'] = pod_mapping[pod]["bgcolor"] # noqa 188 | with Cluster(ip_grp, graph_attr=ip_attr): # noqa 189 | for int_ip in ip_grp_value: # noqa 190 | for component in int_ip['type']: # noqa 191 | svc_group.append( 192 | Blank(f"{component}", height="0.0001", width="20", fontsize="35")) # noqa pylint: disable=C0301 193 | 194 | with Diagram("Edge Installation Topology with IPs Clustering", filename=f"{self.topology_dir_path}/Edge_Installation_Topology_With_IPs", show=False, graph_attr=main_graph_attr, node_attr=main_graph_attr, outformat=["png"]): # noqa pylint: disable=C0301 195 | internal_ip_clusters = {} 196 | for dc in data_center: 197 | with Cluster(dc, graph_attr=data_center_attr): 198 | for pod in data_center[dc]: 199 | for pod_instance in data_center[dc][pod]: 200 | if not pod_instance['internalIP'] in internal_ip_clusters: # noqa 201 | internal_ip_clusters[pod_instance['internalIP']] = [ # noqa 202 | ] 203 | internal_ip_clusters[pod_instance['internalIP']].append( # noqa 204 | pod_instance) 205 | 
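                # The loop above buckets every component instance by its
                # internal IP. Each IP then becomes its own sub-cluster below,
                # drawn with one Blank node per entry in the instance's
                # 'type' list (e.g. ['router', 'message-processor'] renders
                # as two nodes inside that IP's cluster).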
206 | svc_group = [] 207 | for ip_grp, ip_grp_value in internal_ip_clusters.items(): 208 | with Cluster(ip_grp, graph_attr=ip_attr): 209 | for int_ip in ip_grp_value: # noqa 210 | for component in int_ip['type']: 211 | svc_group.append( 212 | Blank(f"{component}", height="0.0001", width="20", fontsize="35")) # noqa pylint: disable=C0301 213 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. 
For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 
-------------------------------------------------------------------------------- /README.md: --------------------------------------------------------------------------------
1 | # Apigee Migration Assessment Tool
2 | 
3 | [![Static Checks](https://github.com/apigee/apigee-migration-assessment-tool/actions/workflows/static-checks.yml/badge.svg)](https://github.com/apigee/apigee-migration-assessment-tool/actions/workflows/static-checks.yml)
4 | [![Build Status](https://github.com/apigee/apigee-migration-assessment-tool/actions/workflows/tests.yml/badge.svg)](https://github.com/apigee/apigee-migration-assessment-tool/actions/workflows/tests.yml)
5 | 
6 | This tool helps you plan your migration from Apigee Edge, Apigee X, or Apigee Hybrid to a target Apigee X or Apigee Hybrid environment by analyzing your source Apigee setup and generating a comprehensive assessment report.
7 | 
8 | ## Assessment Scenarios
9 | 
10 | The following migration paths are supported:
11 | 
12 | | Source Apigee Platform | Target Apigee Platform |
13 | | :--------------------- | :--------------------- |
14 | | Apigee Edge (SaaS)     | Apigee X               |
15 | | Apigee Edge (SaaS)     | Apigee Hybrid          |
16 | | Apigee Edge (OPDK)     | Apigee X               |
17 | | Apigee Edge (OPDK)     | Apigee Hybrid          |
18 | | Apigee Hybrid          | Apigee X               |
19 | | Apigee X               | Apigee Hybrid          |
20 | 
21 | ## About `--skip-target-validation`
22 | 
23 | The `--skip-target-validation` flag allows you to run the assessment tool without connecting to or validating against a target Apigee X/Hybrid Organization. This is useful for early discovery or when you do not have credentials for the target.
24 | 
25 | > **Important:**
26 | > This flag is **only supported when your source is Apigee Edge (SaaS or OPDK)**.
27 | > If your source is Apigee X/Hybrid, using this flag will cause the tool to exit with an error.
28 | 
29 | ## Prerequisites
30 | 
31 | You can run this tool locally using Python or via a Docker container.
32 | 
33 | ### Local Setup
34 | 
35 | 1. **Install Graphviz:** Follow the official installation instructions for your operating system from [graphviz.org/download/](https://graphviz.org/download/).
36 | 2. **Create and activate a Python virtual environment:**
37 |    ```bash
38 |    python3 -m venv dev
39 |    source dev/bin/activate
40 |    ```
41 | 3. **Install Python dependencies:**
42 |    ```bash
43 |    pip install -r requirements.txt
44 |    ```
45 | 
46 | ### Docker Setup
47 | 
48 | 1. **Use a pre-built Docker image (recommended):**
49 |    ```bash
50 |    docker pull ghcr.io/apigee/apigee-migration-assessment-tool/apigee-migration-assessment-tool:latest
51 |    ```
52 |    **OR**
53 | 
54 |    **Build the Docker image from source:**
55 |    ```bash
56 |    # Build the image
57 |    docker build -t <image-name>:<tag> .
58 | 
59 |    # (Optional) Push to your container registry
60 |    # docker push <image-name>:<tag>
61 |    ```
62 | 
63 | 2. **Run the Docker image:**
64 |    *(See the "Running with Docker" section below for detailed examples.)*
65 | 
66 | ## Apigee Permissions
67 | 
68 | The tool requires specific permissions to access and analyze your Apigee environments.
69 | 
70 | * **Source: Apigee Edge (SaaS/OPDK)**
71 |     * Assign the `Read-only Organization Administrator` role to the user account that will be used by the tool.
72 |     * Refer to the [Apigee Edge built-in roles documentation](https://docs.apigee.com/api-platform/system-administration/edge-built-roles) for more details.
73 | 
74 | * **Source/Target: Apigee X/Hybrid**
75 |     * The tool needs read-only access to organization and environment-level objects, plus permissions to validate APIs.
Assign the following to the relevant user or service account:
76 |         * The built-in role: `roles/apigee.readOnlyAdmin`
77 |         * A custom role with the `apigee.proxies.create` permission. You can create this role using gcloud:
78 |           ```bash
79 |           gcloud iam roles create ApigeeAPIValidator --project=<project-id> \
80 |             --title="Apigee API Validator" --description="Allows Apigee API proxy creation for validation" \
81 |             --permissions="apigee.proxies.create" --stage=GA
82 |           ```
83 |     * Refer to the [Apigee roles documentation on GCP](https://cloud.google.com/iam/docs/understanding-roles#apigee-roles) for more information.
84 | 
85 | ## Tool Configuration
86 | 
87 | 1. **Prepare `input.properties`:**
88 |    Create an `input.properties` file in the **root directory** of the project. This file contains essential configuration for the tool to connect to your Apigee environments.
89 |    Sample input files are available in the `sample/inputs/` directory:
90 |    * [Edge OPDK sample: `sample/inputs/opdk.input.properties`](sample/inputs/opdk.input.properties)
91 |    * [Edge SaaS sample: `sample/inputs/saas.input.properties`](sample/inputs/saas.input.properties)
92 |    * [X/Hybrid sample: `sample/inputs/x.input.properties`](sample/inputs/x.input.properties)
93 | 
94 |    The following table details the required properties within the `[inputs]` section of your `input.properties` file:
95 | 
96 |    | Section  | Property                | Description                                                                  |
97 |    | :------- | :---------------------- | :--------------------------------------------------------------------------- |
98 |    | `inputs` | `SOURCE_URL`            | Management URL of your source Apigee environment (OPDK, Edge, X, or Hybrid). |
99 |    | `inputs` | `SOURCE_ORG`            | Name of your source Apigee organization.                                     |
100 |    | `inputs` | `SOURCE_AUTH_TYPE`      | Authentication type for the source: `basic` or `oauth`.                      |
101 |    | `inputs` | `SOURCE_APIGEE_VERSION` | Flavor of your source Apigee: `OPDK`, `SAAS`, `X`, or `HYBRID`.              |
102 |    | `inputs` | `TARGET_URL`            | Management URL of your target Apigee X/Hybrid environment (Global or DRZ URL).<br>
**Not required if using `--skip-target-validation`.** |
103 |    | `inputs` | `GCP_PROJECT_ID`        | GCP Project ID where your target Apigee X/Hybrid instance is running.<br>
**Not required if using `--skip-target-validation`.** |
104 |    | `inputs` | `TARGET_DIR`            | Name of the directory where exported Apigee objects and reports will be saved (e.g., `output`). |
105 |    | `inputs` | `TARGET_COMPARE`        | Set to `true` to export Apigee objects from the target environment and compare them with the source, including a comparison of the API proxy bundles. Set to `false` to skip the export and comparison. |
106 |    | `inputs` | `SSL_VERIFICATION`      | Set to `false` to ignore SSL certificate verification, or `true` to enforce it. |
107 | 
108 | 2. **Authentication Tokens:**
109 |    Export the necessary authentication tokens as environment variables before running the tool.
110 | 
111 |    > **Note:**
112 |    > If you use the `--skip-target-validation` flag, you do **not** need to provide the `APIGEE_ACCESS_TOKEN` environment variable, and you can omit `TARGET_URL` and `GCP_PROJECT_ID` from your `input.properties`.
113 | 
114 |    * **For Source: Apigee Edge (SaaS/OPDK) or Apigee X/Hybrid:**
115 |        * **Basic Auth (Edge):**
116 |          ```bash
117 |          export SOURCE_AUTH_TOKEN=$(echo -n '<username>:<password>' | base64)
118 |          ```
119 |        * **OAuth2/SAML (Edge):**
120 |          Refer to the [Apigee Edge Management API documentation](https://docs.apigee.com/api-platform/system-administration/management-api-overview) for generating OAuth2 tokens.
121 |          ```bash
122 |          # Example using a helper script like get_token
123 |          export SSO_LOGIN_URL=https://login.apigee.com # Adjust if necessary
124 |          export SOURCE_AUTH_TOKEN=$(get_token -u <username>:<password> -m <mfa-code>)
125 |          ```
126 |        * **OAuth2 (Apigee X/Hybrid as Source):**
127 |          ```bash
128 |          export SOURCE_AUTH_TOKEN=$(gcloud auth print-access-token)
129 |          ```
130 | 
131 |    * **For Target: Apigee X/Hybrid:**
132 |      ```bash
133 |      export APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token)
134 |      ```
135 |      > **Note:** This token is not required if you use the `--skip-target-validation` flag.
136 | 
137 | ## Running the Tool
138 | 
139 | The primary script for running the assessment is `main.py`.
140 | 
141 | ### Command-Line Arguments
142 | 
143 | * `--resources <resources>`: Specifies the Apigee objects to assess.
144 |     * Use `all` to assess all supported resources.
145 |     * Provide a comma-separated list for selective assessment.
146 |     * **Available Environment-Level Resources:** `targetservers`, `keyvaluemaps`, `references`, `resourcefiles`, `keystores`, `flowhooks`
147 |     * **Available Organization-Level Resources:** `org_keyvaluemaps`, `developers`, `apiproducts`, `apis`, `apps`, `sharedflows`
148 | 
149 | * `--skip-target-validation`:
150 |   (Optional, **only for Apigee Edge (SaaS/OPDK) sources**)
151 |   Skips validation of API Proxies and SharedFlows against the target Apigee X/Hybrid Organization.
152 |   - **Do not use this flag if your source is Apigee X/Hybrid. The tool will exit with an error.**
153 |   - When this flag is set, you do not need to provide `TARGET_URL` or `GCP_PROJECT_ID` in your `input.properties`, nor the `APIGEE_ACCESS_TOKEN` environment variable.
154 | 
155 | **Examples:**
156 | ```bash
157 | # Assess all resources
158 | python3 main.py --resources all
159 | 
160 | # Assess only TargetServers and KVMs
161 | python3 main.py --resources targetservers,keyvaluemaps
162 | 
163 | # Assess Keystores and Apps
164 | python3 main.py --resources keystores,apps
165 | 
166 | # Assess all resources without validating against a target environment
167 | python3 main.py --resources all --skip-target-validation
168 | ```
169 | 
170 | ### Running Locally
171 | 
172 | Ensure you have configured `input.properties` and exported authentication tokens as described above.
173 | ```bash
174 | python3 main.py --resources <resources>
175 | ```
176 | > Note: `export IGNORE_VIZ="true"` can be used to skip generating the graph visualization for the migration artifacts.
177 | 
178 | ### Running with Docker
179 | 
180 | 1. **Create an output directory on your host machine:** This directory will be mounted into the container to store the assessment results.
181 |    ```bash
182 |    mkdir output
183 |    sudo chmod 777 output # Ensure the container can write to this directory
184 |    ```
185 | 2. **Set the Docker image variable:**
186 |    ```bash
187 |    # If using the pre-built image
188 |    export DOCKER_IMAGE="ghcr.io/apigee/apigee-migration-assessment-tool/apigee-migration-assessment-tool:latest"
189 |    # Or, if you built your own
190 |    # export DOCKER_IMAGE="<image-name>:<tag>"
191 |    ```
192 | 3. **Run the Docker container:**
193 |    ```bash
194 |    docker run --rm \
195 |      -v "$(pwd)/output:/app/target" \
196 |      -v "$(pwd)/input.properties:/app/input.properties" \
197 |      -e SOURCE_AUTH_TOKEN="$SOURCE_AUTH_TOKEN" \
198 |      -e APIGEE_ACCESS_TOKEN="$APIGEE_ACCESS_TOKEN" \
199 |      "$DOCKER_IMAGE" --resources all
200 |    ```
201 |    *(Adjust `--resources` as needed.)*
202 | 
203 | > Note: `-e IGNORE_VIZ="true"` can be used to skip generating the graph visualization for the migration artifacts.
204 | 
205 | To run without target validation (and without the `APIGEE_ACCESS_TOKEN`):
206 | ```bash
207 | docker run --rm \
208 |   -v "$(pwd)/output:/app/target" \
209 |   -v "$(pwd)/input.properties:/app/input.properties" \
210 |   -e SOURCE_AUTH_TOKEN="$SOURCE_AUTH_TOKEN" \
211 |   "$DOCKER_IMAGE" --resources all --skip-target-validation
212 | ```
213 | 
214 | ## Accessing the Report and Visualization
215 | 
216 | The tool generates the following outputs in the directory specified by `TARGET_DIR` in your `input.properties` (e.g., `./output/`):
217 | 
218 | 1. **Assessment Report:**
219 |    * Filename: `qualification_report.xlsx`
220 |    * This Excel file contains the detailed findings of the assessment.
221 |    * A sample report can be found at [`sample/outputs/sample_qualification_report.xlsx`](sample/outputs/sample_qualification_report.xlsx).
222 | 
223 | 2. **Topology Visualization (for OPDK source):**
224 |    * Filename: `visualization.html`
225 |    * Open this HTML file in a web browser to view a diagram of your Apigee OPDK topology.
226 |    * A sample visualization is shown below:
227 |      ![Sample Apigee Topology Visualization](assets/visualization.png)
228 | 
229 | ## Project Structure Overview
230 | 
231 | * `main.py`: The main executable script for the tool.
232 | * `input.properties`: Main configuration file (user-created).
233 | * `backend.properties`: Internal configuration for the tool.
234 | * `requirements.txt`: Python dependencies.
235 | * `Dockerfile`: For building the Docker image.
236 | * `assessment_mapping/`, `assessment_mapping_json/`: Contains mappings and definitions for assessing various Apigee resources.
237 | * `qualification_report_mapping/`, `qualification_report_mapping_json/`: Defines the structure and content of the Excel qualification report. 238 | * `sample/`: Contains sample input files and an example output report. 239 | * `sample/inputs/`: Example `input.properties` files for different Apigee platforms. 240 | * `sample/outputs/`: An example of the `qualification_report.xlsx`. 241 | * `assets/`: Contains static assets like images for documentation. 242 | * `.github/`: GitHub Actions workflows for CI/CD (linting, testing, publishing). 243 | 244 | ## Contributing 245 | 246 | We welcome contributions from the community! If you would like to contribute to this project, please see our [Contribution Guidelines](./CONTRIBUTING.md). 247 | 248 | ## License 249 | 250 | All solutions within this repository are provided under the [Apache 2.0 License](https://www.apache.org/licenses/LICENSE-2.0). Please see the [LICENSE](./LICENSE) file for more detailed terms and conditions. 251 | 252 | ## Disclaimer 253 | 254 | This repository and its contents are not an official Google product. 255 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | """Test suite for utils.""" 2 | import json 3 | import os 4 | import shutil 5 | import unittest 6 | import zipfile 7 | from configparser import ConfigParser 8 | from unittest.mock import MagicMock, mock_open, patch 9 | import utils 10 | 11 | 12 | # pylint: disable=too-many-public-methods 13 | class TestUtils(unittest.TestCase): 14 | """Test class for utils.""" 15 | 16 | def setUp(self): 17 | """Set up.""" 18 | self.test_dir = "test_dir" 19 | os.makedirs(self.test_dir, exist_ok=True) 20 | 21 | def tearDown(self): 22 | """Tear down.""" 23 | shutil.rmtree(self.test_dir) 24 | 25 | def test_parse_config(self): 26 | """Test parse config.""" 27 | config_content = "[test]\nkey=value" 28 | with patch("builtins.open", mock_open(read_data=config_content)): 29 | with patch("configparser.ConfigParser.read"): 30 | with patch("configparser.ConfigParser.sections", 31 | return_value=["test"]): 32 | config = utils.parse_config("dummy_path") 33 | self.assertIsInstance(config, ConfigParser) 34 | 35 | def test_get_env_variable(self): 36 | """Test get env variable.""" 37 | with patch.dict(os.environ, {"TEST_KEY": "test_value"}): 38 | self.assertEqual(utils.get_env_variable("TEST_KEY"), "test_value") 39 | self.assertIsNone(utils.get_env_variable("NON_EXISTENT_KEY")) 40 | 41 | @patch("requests.get") 42 | def test_is_token_valid(self, mock_get): 43 | """Test is token valid.""" 44 | mock_response = MagicMock() 45 | mock_response.status_code = 200 46 | mock_response.json.return_value = {"email": "test@example.com"} 47 | mock_get.return_value = mock_response 48 | self.assertTrue(utils.is_token_valid("valid_token")) 49 | 50 | mock_response.status_code = 400 51 | self.assertFalse(utils.is_token_valid("invalid_token")) 52 | 53 | @patch('utils.is_token_valid', return_value=True) 54 | def test_get_access_token(self, _): 55 | """Test get access token.""" 56 | with patch.dict(os.environ, {'APIGEE_ACCESS_TOKEN': 'test_token'}): 57 | self.assertEqual(utils.get_access_token(), 'test_token') 58 | 59 | @patch('sys.exit') 60 | @patch('utils.is_token_valid', return_value=False) 61 | def test_get_access_token_invalid(self, _, mock_exit): 62 | """Test get access token invalid.""" 63 | with patch.dict(os.environ, {'APIGEE_ACCESS_TOKEN': 'test_token'}): 64 | 
utils.get_access_token() 65 | mock_exit.assert_called_with(1) 66 | 67 | def test_get_source_auth_token(self): 68 | """Test get source auth token.""" 69 | with patch.dict(os.environ, {'SOURCE_AUTH_TOKEN': 'test_token'}): 70 | self.assertEqual(utils.get_source_auth_token(), 'test_token') 71 | 72 | @patch('sys.exit') 73 | def test_get_source_auth_token_not_set(self, mock_exit): 74 | """Test get source auth token not set.""" 75 | with patch.dict(os.environ, {}, clear=True): 76 | utils.get_source_auth_token() 77 | mock_exit.assert_called_with(1) 78 | 79 | def test_create_dir(self): 80 | """Test create dir.""" 81 | dir_path = os.path.join(self.test_dir, "new_dir") 82 | utils.create_dir(dir_path) 83 | self.assertTrue(os.path.exists(dir_path)) 84 | 85 | def test_list_dir(self): 86 | """Test list dir.""" 87 | test_subdir = os.path.join(self.test_dir, "list_dir_test") 88 | os.makedirs(test_subdir) 89 | file_path = os.path.join(test_subdir, "test_file.txt") 90 | with open(file_path, "w", encoding='utf-8') as f: 91 | f.write("test") 92 | self.assertEqual(utils.list_dir(test_subdir), ["test_file.txt"]) 93 | 94 | def test_delete_folder(self): 95 | """Test delete folder.""" 96 | dir_path = os.path.join(self.test_dir, "delete_me") 97 | os.makedirs(dir_path) 98 | utils.delete_folder(dir_path) 99 | self.assertFalse(os.path.exists(dir_path)) 100 | 101 | @patch('utils.logger.info') 102 | def test_print_json(self, mock_logger_info): 103 | """Test print json.""" 104 | test_data = {"key": "value"} 105 | utils.print_json(test_data) 106 | mock_logger_info.assert_called_with(json.dumps(test_data, indent=2)) 107 | 108 | def test_parse_json(self): 109 | """Test parse json.""" 110 | json_content = '{"key": "value"}' 111 | with patch("builtins.open", mock_open(read_data=json_content)): 112 | self.assertEqual(utils.parse_json("dummy.json"), {"key": "value"}) 113 | 114 | def test_write_json(self): 115 | """Test write json.""" 116 | file_path = os.path.join(self.test_dir, "test.json") 117 | data = {"key": "value"} 118 | utils.write_json(file_path, data) 119 | with open(file_path, "r", encoding='utf-8') as f: 120 | self.assertEqual(json.load(f), data) 121 | 122 | def test_read_file(self): 123 | """Test read file.""" 124 | file_path = os.path.join(self.test_dir, "test.txt") 125 | content = b"hello world" 126 | with open(file_path, "wb") as f: 127 | f.write(content) 128 | self.assertEqual(utils.read_file(file_path), content) 129 | 130 | def test_write_file(self): 131 | """Test write file.""" 132 | file_path = os.path.join(self.test_dir, "test_write.txt") 133 | data = b"some data" 134 | utils.write_file(file_path, data) 135 | with open(file_path, "rb") as f: 136 | self.assertEqual(f.read(), data) 137 | 138 | def test_compare_hash(self): 139 | """Test compare hash.""" 140 | self.assertTrue(utils.compare_hash(b"data1", b"data1")) 141 | self.assertFalse(utils.compare_hash(b"data1", b"data2")) 142 | 143 | def test_get_proxy_endpoint_count(self): 144 | """Test get proxy endpoint count.""" 145 | config = ConfigParser() 146 | config['unifier'] = {'proxy_endpoint_count': '5'} 147 | config['inputs'] = {'MAX_PROXY_ENDPOINT_LIMIT': '10'} 148 | self.assertEqual(utils.get_proxy_endpoint_count(config), 5) 149 | 150 | @patch('sys.exit') 151 | def test_get_proxy_endpoint_count_negative(self, mock_exit): 152 | """Test get proxy endpoint count negative.""" 153 | config = ConfigParser() 154 | config['unifier'] = {'proxy_endpoint_count': '-1'} 155 | config['inputs'] = {'MAX_PROXY_ENDPOINT_LIMIT': '10'} 156 | 
utils.get_proxy_endpoint_count(config)
157 |         mock_exit.assert_called_with(1)
158 | 
159 |     @patch('sys.exit')
160 |     def test_get_proxy_endpoint_count_exceeds_limit(self, mock_exit):
161 |         """Test get proxy endpoint count exceeds limit."""
162 |         config = ConfigParser()
163 |         config['unifier'] = {'proxy_endpoint_count': '11'}
164 |         config['inputs'] = {'MAX_PROXY_ENDPOINT_LIMIT': '10'}
165 |         utils.get_proxy_endpoint_count(config)
166 |         mock_exit.assert_called_with(1)
167 | 
168 |     @patch('sys.exit')
169 |     def test_get_proxy_endpoint_count_value_error(self, mock_exit):
170 |         """Test get proxy endpoint count value error."""
171 |         config = ConfigParser()
172 |         config['unifier'] = {'proxy_endpoint_count': 'abc'}
173 |         config['inputs'] = {'MAX_PROXY_ENDPOINT_LIMIT': '10'}
174 |         utils.get_proxy_endpoint_count(config)
175 |         mock_exit.assert_called_with(1)
176 | 
177 |     def test_generate_env_groups_tfvars(self):
178 |         """Test generate env groups tfvars."""
179 |         project_id = "test-project"
180 |         env_config = {
181 |             "prod": {
182 |                 "vhosts": {
183 |                     "secure": {
184 |                         "hostAliases": ["api.example.com"]
185 |                     }
186 |                 }
187 |             }
188 |         }
189 |         expected_tfvars = {
190 |             'project_id': 'test-project',
191 |             'envgroups': {
192 |                 'prod-secure': ['api.example.com']
193 |             },
194 |             'environments': {
195 |                 'prod': {
196 |                     'display_name': 'prod',
197 |                     'description': 'Apis for environment prod',
198 |                     'envgroups': ['prod-secure']
199 |                 }
200 |             }
201 |         }
202 |         self.assertEqual(
203 |             utils.generate_env_groups_tfvars(project_id, env_config),
204 |             expected_tfvars)
205 | 
206 |     def test_write_csv_report(self):
207 |         """Test write csv report."""
208 |         file_path = os.path.join(self.test_dir, "report.csv")
209 |         header = ["col1", "col2"]
210 |         rows = [["a", "b"], ["c", "d"]]
211 |         utils.write_csv_report(file_path, header, rows)
212 |         with open(file_path, "r", encoding='utf-8') as f:
213 |             lines = f.readlines()
214 |         self.assertEqual(lines[0].strip(), "col1,col2")
215 |         self.assertEqual(lines[1].strip(), "a,b")
216 |         self.assertEqual(lines[2].strip(), "c,d")
217 | 
218 |     def test_get_proxy_entrypoint(self):
219 |         """Test get proxy entrypoint."""
220 |         xml_file = os.path.join(self.test_dir, "proxy.xml")
221 |         with open(xml_file, "w", encoding='utf-8') as f:
222 |             f.write("")
223 |         self.assertEqual(utils.get_proxy_entrypoint(self.test_dir), xml_file)
224 | 
225 |     def test_parse_xml(self):
226 |         """Test parse xml."""
227 |         xml_content = "<root><key>value</key></root>"
228 |         with patch("builtins.open", mock_open(read_data=xml_content)):
229 |             self.assertEqual(utils.parse_xml("dummy.xml"),
230 |                              {"root": {"key": "value"}})
231 | 
232 |     def test_get_proxy_files(self):
233 |         """Test get proxy files."""
234 |         proxies_dir = os.path.join(self.test_dir, "proxies")
235 |         os.makedirs(proxies_dir)
236 |         with open(os.path.join(proxies_dir, "a.xml"), "w",
237 |                   encoding='utf-8') as f:
238 |             f.write("")
239 |         with open(os.path.join(proxies_dir, "b.xml"), "w",
240 |                   encoding='utf-8') as f:
241 |             f.write("")
242 |         self.assertEqual(sorted(utils.get_proxy_files(self.test_dir)),
243 |                          sorted(["a", "b"]))
244 | 
245 |     def test_delete_file(self):
246 |         """Test delete file."""
247 |         file_path = os.path.join(self.test_dir, "delete_me.txt")
248 |         with open(file_path, "w", encoding='utf-8') as f:
249 |             f.write("test")
250 |         utils.delete_file(file_path)
251 |         self.assertFalse(os.path.exists(file_path))
252 | 
253 |     def test_write_xml_from_dict(self):
254 |         """Test write xml from dict."""
255 |         file_path = os.path.join(self.test_dir, "test.xml")
256 |         data = {"root": {"key": "value"}}
257 |         utils.write_xml_from_dict(file_path, data)
258 |         with open(file_path, "r", encoding='utf-8') as f:
259 |             content = f.read()
260 |         self.assertIn("<root>", content)
261 |         self.assertIn("<key>value</key>", content)
262 |         self.assertIn("</root>", content)
263 | 
264 |     def test_copy_folder(self):
265 |         """Test copy folder."""
266 |         src_dir = os.path.join(self.test_dir, "src")
267 |         dst_dir = os.path.join(self.test_dir, "dst")
268 |         os.makedirs(src_dir)
269 |         with open(os.path.join(src_dir, "test.txt"), "w",
270 |                   encoding='utf-8') as f:
271 |             f.write("test")
272 |         utils.copy_folder(src_dir, dst_dir)
273 |         self.assertTrue(os.path.exists(dst_dir))
274 |         self.assertTrue(os.path.exists(os.path.join(dst_dir, "test.txt")))
275 | 
276 |     def test_clean_up_artifacts(self):
277 |         """Test clean up artifacts."""
278 |         target_dir = os.path.join(self.test_dir, "artifacts")
279 |         os.makedirs(target_dir, exist_ok=True)
280 |         with open(os.path.join(target_dir, "a.xml"), "w",
281 |                   encoding='utf-8') as f:
282 |             f.write("")
283 |         with open(os.path.join(target_dir, "b.xml"), "w",
284 |                   encoding='utf-8') as f:
285 |             f.write("")
286 |         utils.clean_up_artifacts(target_dir, ["a"])
287 |         self.assertTrue(os.path.exists(os.path.join(target_dir, "a.xml")))
288 |         self.assertFalse(os.path.exists(os.path.join(target_dir, "b.xml")))
289 | 
290 |     def test_filter_objects(self):
291 |         """Test filter objects."""
292 |         obj_data = {"Policy": ["a", "b", "c"]}
293 |         self.assertEqual(utils.filter_objects(obj_data, "Policy", ["a", "c"]),
294 |                          {"Policy": ["a", "c"]})
295 |         self.assertEqual(utils.filter_objects(obj_data, "Policy", ["d"]),
296 |                          {"Policy": []})
297 | 
298 |     def test_zipdir(self):
299 |         """Test zipdir."""
300 |         zip_path = os.path.join(self.test_dir, "test.zip")
301 |         with open(os.path.join(self.test_dir, "file1.txt"), "w",
302 |                   encoding='utf-8') as f:
303 |             f.write("file1")
304 |         sub_dir = os.path.join(self.test_dir, "sub")
305 |         os.makedirs(sub_dir)
306 |         with open(os.path.join(sub_dir, "file2.txt"), "w",
307 |                   encoding='utf-8') as f:
308 |             f.write("file2")
309 | 
310 |         with zipfile.ZipFile(zip_path, 'w') as zipf:
311 |             utils.zipdir(self.test_dir, zipf)
312 | 
313 |         with zipfile.ZipFile(zip_path, 'r') as zipf:
314 |             self.assertEqual(len(zipf.namelist()),
315 |                              3)
316 |             self.assertIn('test_dir/file1.txt', zipf.namelist())
317 |             self.assertIn('test_dir/sub/file2.txt', zipf.namelist())
318 | 
319 |     @patch('utils.copy_folder')
320 |     @patch('utils.get_proxy_entrypoint')
321 |     @patch('utils.parse_proxy_root')
322 |     @patch('utils.delete_file')
323 |     @patch('utils.filter_objects')
324 |     @patch('utils.clean_up_artifacts')
325 |     @patch('utils.write_xml_from_dict')
326 |     @patch('zipfile.ZipFile')
327 |     # noqa pylint: disable=too-many-arguments, too-many-locals, unused-argument, too-many-positional-arguments
328 |     def test_clone_proxies(self, mock_zipfile, mock_write_xml, mock_clean_up,
329 |                            mock_filter, mock_delete_file, mock_parse,
330 |                            mock_get_entrypoint, mock_copy):
331 |         """Test clone proxies."""
332 |         source_dir = "source"
333 |         target_dir = "target"
334 |         objects = {
335 |             "Name": "test_proxy",
336 |             "Policies": ["policy1"],
337 |             "TargetEndpoints": ["target1"],
338 |             "ProxyEndpoints": ["proxy1"]
339 |         }
340 |         merged_pes = {"proxy1": {"ProxyEndpoint": {}}}
341 |         proxy_bundle_directory = "bundles"
342 | 
343 |         mock_get_entrypoint.return_value = "source/apiproxy/test_proxy.xml"
344 |         mock_parse.return_value = {
345 |             "APIProxy": {
346 |                 "@name": "old_name",
347 |                 "Policies": {"Policy": ["policy1", "policy2"]},
348 |                 "TargetEndpoints":
{"TargetEndpoint": ["target1", "target2"]}, 349 | "ProxyEndpoints": {"ProxyEndpoint": ["proxy1", "proxy2"]} 350 | } 351 | } 352 | mock_filter.side_effect = [ 353 | {"Policy": ["policy1"]}, 354 | {"TargetEndpoint": ["target1"]} 355 | ] 356 | 357 | utils.clone_proxies( 358 | source_dir, target_dir, objects, merged_pes, 359 | proxy_bundle_directory 360 | ) 361 | mock_copy.assert_called_with(source_dir, f"{target_dir}/apiproxy") 362 | mock_write_xml.assert_called() 363 | self.assertEqual(mock_clean_up.call_count, 3) 364 | self.assertEqual(mock_filter.call_count, 2) 365 | (mock_zipfile.assert_called_with( 366 | f"{proxy_bundle_directory}/test_proxy.zip", 367 | 'w', 368 | zipfile.ZIP_DEFLATED)) 369 | 370 | 371 | if __name__ == "__main__": 372 | unittest.main() 373 | -------------------------------------------------------------------------------- /classic.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python # noqa pylint: disable=R0801 2 | 3 | # Copyright 2025 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License 16 | 17 | """Interacts with Apigee Edge (Classic) using the Management API. 18 | 19 | This module provides a client for interacting with a classic 20 | Apigee Edge organization via its Management API. 21 | It offers methods for retrieving organization details, environments, 22 | various entities (APIs, apps, developers, API products, etc.), 23 | and their configurations. It handles pagination for certain 24 | entity types and allows exporting API proxy bundles. 25 | """ 26 | 27 | import os 28 | from requests.utils import quote as urlencode # pylint: disable=E0401 29 | from base_logger import logger 30 | from rest import RestClient 31 | 32 | 33 | class ApigeeClassic(): 34 | """A client for interacting with Apigee Edge (Classic) 35 | via the Management API. 36 | 37 | Provides methods for retrieving organization details, environments, 38 | various entities (APIs, apps, developers, API products, etc.), 39 | and their configurations. Handles pagination for certain entity types 40 | and allows exporting API proxy bundles. 41 | """ 42 | 43 | def __init__(self, baseurl, org, token, auth_type, ssl_verify): # noqa pylint: disable=R0913,R0917 44 | self.baseurl = baseurl 45 | self.org = org 46 | self.token = token 47 | self.auth_type = auth_type 48 | self.client = RestClient(self.auth_type, token, ssl_verify) 49 | self.requires_pagination = ['apis', 'apps', 'developers', 50 | 'apiproducts'] 51 | self.can_expand = { 52 | 'apps': {'expand_key': 'app', 'id': 'appId'}, 53 | 'developers': {'expand_key': 'developer', 'id': 'email'}, 54 | 'apiproducts': {'expand_key': 'apiProduct', 'id': 'name'} 55 | } 56 | 57 | def get_org(self): 58 | """Retrieves details of the Apigee organization. 59 | 60 | Returns: 61 | dict: A dictionary containing the organization details. 
62 | """ 63 | url = f"{self.baseurl}/organizations/{self.org}" 64 | org = self.client.get(url) 65 | return org 66 | 67 | def list_environments(self): 68 | """Lists all environments in the Apigee organization. 69 | 70 | Returns: 71 | list: A list of environment names. 72 | """ 73 | url = f"{self.baseurl}/organizations/{self.org}/environments" 74 | envs = self.client.get(url) 75 | return envs 76 | 77 | def list_org_objects(self, org_object): 78 | """Lists organization-level objects of a specific type. 79 | 80 | Handles pagination for certain object types. 81 | 82 | Args: 83 | org_object (str): The type of organization object to list 84 | (e.g., 'apis', 'apps', 'developers'). 85 | 86 | Returns: 87 | list: A list of organization object names or details, 88 | depending on the object type. 89 | """ 90 | org_objects = [] 91 | object_count = int(os.getenv('PAGE_SIZE', '100')) 92 | if org_object in self.requires_pagination: 93 | start_url = (f"{self.baseurl}/organizations/{self.org}/" 94 | f"{org_object}?count={object_count}") 95 | each_org_object = self.client.get(start_url) 96 | org_objects.extend(each_org_object) 97 | while len(each_org_object) > 0: 98 | start_key = each_org_object[-1] 99 | params = {'startKey': start_key} 100 | each_org_object = self.client.get(start_url, params=params) 101 | # Capture the type of the response for diagnostic purposes. 102 | logger.debug(f"For '{org_object}' paginated API call, " 103 | f"received {type(each_org_object)}.") 104 | 105 | # Safely handle the API response 106 | if isinstance(each_org_object, list): 107 | if start_key in each_org_object: 108 | logger.debug(f"Successfully received next page for " 109 | f"'{org_object}'; removing start_key.") 110 | each_org_object.remove(start_key) 111 | else: 112 | # This is the final page or an unexpected list, 113 | # which is a valid state. 114 | logger.debug(f"Received final page or non-overlapping " 115 | f"list for '{org_object}'.") 116 | else: 117 | # This handles the customer's error case 118 | logger.error(f"For '{org_object}' paginated API call, " 119 | f"expected a list but received " 120 | f"{type(each_org_object)} with value: " 121 | f"{each_org_object}") 122 | # We must clear the list to break the while loop 123 | each_org_object = [] 124 | org_objects.extend(each_org_object) 125 | else: 126 | url = f"{self.baseurl}/organizations/{self.org}/{org_object}" 127 | org_objects = self.client.get(url) 128 | return org_objects 129 | 130 | def list_org_objects_expand(self, org_object): 131 | """Lists organization-level objects with expanded details. 132 | 133 | Handles pagination and expands details for supported object types. 134 | 135 | Args: 136 | org_object (str): The type of organization object to list 137 | (e.g., 'apps', 'developers', 'apiproducts'). 138 | 139 | Returns: 140 | dict: A dictionary of organization objects, 141 | keyed by their ID, with expanded details. 
142 | """ 143 | org_objects = {} 144 | object_count = int(os.getenv('PAGE_SIZE', '100')) 145 | expand_key = self.can_expand.get(org_object).get('expand_key') 146 | id_key = self.can_expand.get(org_object).get('id') 147 | start_url = f"{self.baseurl}/organizations/{self.org}/{org_object}?count={object_count}&expand=true" # noqa pylint: disable=C0301 148 | each_org_object = self.client.get(start_url) 149 | each_org_object = each_org_object.get(expand_key, {}) 150 | for each_item in each_org_object: 151 | org_objects[each_item[id_key]] = each_item 152 | while len(each_org_object) > 0: 153 | start_key = each_org_object[-1].get(id_key) 154 | params = {'startKey': start_key} 155 | each_org_object = self.client.get(start_url, params=params) 156 | each_org_object = each_org_object.get(expand_key, {}) 157 | if each_org_object: 158 | each_org_object.pop(0) 159 | for each_item in each_org_object: 160 | org_objects[each_item[id_key]] = each_item 161 | return org_objects 162 | 163 | def get_org_object(self, org_object, org_object_name): 164 | """Retrieves details of a specific organization-level object. 165 | 166 | Args: 167 | org_object (str): The type of organization object 168 | (e.g., 'developers', 'apiproducts'). 169 | org_object_name (str or dict): The name or identifier 170 | (including type for resourcefiles) of the object. 171 | 172 | Returns: 173 | dict: A dictionary containing the object details. 174 | """ 175 | if len(org_object_name) == 0: 176 | return {'name': 'EMPTY_OBJECT_NAME'} 177 | if org_object == "resourcefiles": 178 | resource_type = org_object_name["type"] 179 | name = org_object_name["name"] 180 | url = f"{self.baseurl}/organizations/{self.org}/{org_object}/{resource_type}/{name}" # noqa pylint: disable=C0301 181 | data = self.client.get(url) 182 | return data 183 | org_object_name = urlencode(org_object_name) 184 | url = f"{self.baseurl}/organizations/{self.org}/{org_object}/{org_object_name}" # noqa 185 | org_object = self.client.get(url) 186 | return org_object 187 | 188 | def list_env_objects(self, env, env_object): 189 | """Lists environment-level objects of a specific type. 190 | 191 | Args: 192 | env (str): The environment name. 193 | env_object (str): The type of environment object to list 194 | (e.g., 'targetservers', 'caches'). 195 | 196 | Returns: 197 | list: A list of environment object names or details. 198 | """ 199 | url = f"{self.baseurl}/organizations/{self.org}/environments/{env}/{env_object}" # noqa 200 | env_objects = self.client.get(url) 201 | return env_objects 202 | 203 | def get_env_object(self, env, env_object, env_object_name): 204 | """Retrieves details of a specific environment-level object. 205 | 206 | Args: 207 | env (str): The environment name. 208 | env_object (str): The type of environment object 209 | (e.g., 'targetservers', 'caches'). 210 | env_object_name (str or dict): The name or identifier 211 | (including type for resourcefiles) of the object. 212 | 213 | Returns: 214 | dict: A dictionary containing the object details. 
215 | """ 216 | if len(env_object_name) == 0: 217 | return {'name': 'EMPTY_OBJECT_NAME'} 218 | if env_object == "resourcefiles": 219 | resource_type = env_object_name["type"] 220 | name = env_object_name["name"] 221 | url = f"{self.baseurl}/organizations/{self.org}/environments/{env}/{env_object}/{resource_type}/{name}" # noqa pylint: disable=C0301 222 | data = self.client.get(url) 223 | else: 224 | env_object_name = urlencode(env_object_name) 225 | url = f"{self.baseurl}/organizations/{self.org}/environments/{env}/{env_object}/{env_object_name}" # noqa pylint: disable=C0301 226 | data = self.client.get(url) 227 | return data 228 | 229 | def list_env_vhosts(self, env): 230 | """Lists virtual hosts in a specific environment. 231 | 232 | Args: 233 | env (str): The environment name. 234 | 235 | Returns: 236 | list: A list of virtual host names. 237 | """ 238 | url = f"{self.baseurl}/organizations/{self.org}/environments/{env}/virtualhosts" # noqa 239 | env_objects = self.client.get(url) 240 | return env_objects 241 | 242 | def get_env_vhost(self, env, vhost): 243 | """Retrieves details of a specific virtual host in an environment. 244 | 245 | Args: 246 | env (str): The environment name. 247 | vhost (str): The virtual host name. 248 | 249 | Returns: 250 | dict: A dictionary containing the virtual host details. 251 | """ 252 | url = f"{self.baseurl}/organizations/{self.org}/environments/{env}/virtualhosts/{vhost}" # noqa pylint: disable=C0301 253 | env_object = self.client.get(url) 254 | return env_object 255 | 256 | def list_apis(self, api_type): 257 | """Lists APIs or Sharedflows of a given type. 258 | 259 | Args: 260 | api_type (str): The type of API - 'apis' or 'sharedflows' 261 | 262 | Returns: 263 | list: A list of API or Sharedflow names 264 | """ 265 | url = f"{self.baseurl}/organizations/{self.org}/{api_type}" 266 | apis = self.client.get(url) 267 | return apis 268 | 269 | def list_api_revisions(self, api_type, api_name): 270 | """Lists revisions of a specific API or Sharedflow. 271 | 272 | Args: 273 | api_type (str): The type of API - 'apis' or 'sharedflows'. 274 | api_name (str): The name of the API or Sharedflow. 275 | 276 | Returns: 277 | list: A list of revision numbers. 278 | """ 279 | url = f"{self.baseurl}/organizations/{self.org}/{api_type}/{api_name}/revisions" # noqa 280 | revisions = self.client.get(url) 281 | return revisions 282 | 283 | def api_env_mapping(self, api_type, api_name): 284 | """Retrieves the environment deployment mapping for an API 285 | or Sharedflow. 286 | 287 | Args: 288 | api_type (str): The type of API - 'apis' or 'sharedflows'. 289 | api_name (str): The name of the API or Sharedflow. 290 | 291 | Returns: 292 | dict: A dictionary containing the deployment mapping. 293 | """ 294 | url = f"{self.baseurl}/organizations/{self.org}/{api_type}/{api_name}/deployments" # noqa 295 | deployments = self.client.get(url) 296 | return deployments 297 | 298 | def list_apis_env(self, env_name): 299 | """Lists APIs deployed in a specific environment. 300 | 301 | Args: 302 | env_name (str): The environment name. 303 | 304 | Returns: 305 | list: A list of API names deployed in the environment. 
306 | """ 307 | url = f"{self.baseurl}/organizations/{self.org}/environments/{env_name}/deployments" # noqa 308 | deployments = self.client.get(url) 309 | apis_list = [api["name"] for api in deployments["aPIProxy"]] 310 | return apis_list 311 | 312 | def fetch_api_revision(self, api_type, api_name, revision, export_dir): 313 | """Downloads the bundle for a specific API or Sharedflow revision. 314 | 315 | Args: 316 | api_type (str): The type of API - 'apis' or 'sharedflows'. 317 | api_name (str): The name of the API or Sharedflow. 318 | revision (str): The revision number. 319 | export_dir (str): The directory to save the bundle to. 320 | """ 321 | url = f"{self.baseurl}/organizations/{self.org}/{api_type}/{api_name}/revisions/{revision}?format=bundle" # noqa pylint: disable=C0301 322 | bundle = self.client.file_get(url) 323 | self.write_proxy_bundle(export_dir, api_name, bundle) 324 | 325 | def write_proxy_bundle(self, export_dir, file_name, data): 326 | """Writes a proxy bundle to a file. 327 | 328 | Args: 329 | export_dir (str): The directory to write the file to. 330 | file_name (str): The name of the file. 331 | data (bytes): The bundle data. 332 | """ 333 | file_path = f"./{export_dir}/{file_name}.zip" 334 | with open(file_path, 'wb') as fl: 335 | fl.write(data) 336 | 337 | def fetch_proxy(self, arg_tuple): 338 | """Fetches the latest revision of an API proxy bundle. 339 | 340 | Args: 341 | arg_tuple (tuple): A tuple containing 342 | (api_type, api_name, export_dir). 343 | """ 344 | revisions = self.list_api_revisions(arg_tuple[0], arg_tuple[1]) 345 | if len(revisions) > 0: 346 | self.fetch_api_revision( 347 | arg_tuple[0], arg_tuple[1], revisions[-1], arg_tuple[2]) 348 | 349 | def view_pod_component_details(self, pod): 350 | """Retrieves the details of components within a specific pod. 351 | 352 | Args: 353 | pod (str): The name of the pod. 354 | 355 | Returns: 356 | list: A list of dictionaries, each containing 357 | details of a component. 358 | """ 359 | url = f"{self.baseurl}/servers?pod={pod}" 360 | view_pod_response = self.client.get(url) 361 | return view_pod_response 362 | -------------------------------------------------------------------------------- /rest.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2025 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License 16 | 17 | """Provides a REST client for interacting with 18 | Apigee Management APIs. 19 | 20 | This module offers a versatile `RestClient` class 21 | for making HTTP requests to Apigee Management APIs. 22 | It supports various authentication methods, handles 23 | different response formats (JSON, plain text, 24 | raw bytes), and provides error handling for 25 | Apigee-specific error codes. It simplifies 26 | interaction with Apigee by abstracting away 27 | low-level request details and providing a 28 | Pythonic interface. 
29 | """
30 |
31 | import json
32 | import requests # pylint: disable=E0401
33 | from urllib3.exceptions import InsecureRequestWarning # pylint: disable=E0401
34 | from base_logger import logger, EXEC_INFO
35 |
36 | # Suppress the warnings from urllib3
37 | requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning) # noqa pylint: disable=E1101
38 |
39 | UNKNOWN_ERROR = 'internal.unknown'
40 |
41 |
42 | class ApigeeError(Exception):
43 | """Represents an error during interaction with
44 | the Apigee management API.
45 | """
46 | def __init__(self, status_code, error_code, message):
47 | """Initializes an ApigeeError.
48 |
49 | Args:
50 | status_code (int): The HTTP status code.
51 | error_code (str): The Apigee error code.
52 | message (str): A descriptive error message.
53 | """
54 | self.status_code = status_code
55 | self.error_code = error_code
56 | self.message = message
57 |
58 | def __str__(self):
59 | """Returns a string representation of the error.
60 |
61 | Returns:
62 | str: A string containing the status
63 | code and message.
64 | """
65 | return f'{self.status_code}: {self.message}'
66 |
67 |
68 | class RestClient(object): # noqa pylint: disable=R0205
69 | """A client for making HTTP requests to RESTful
70 | APIs, especially Apigee.
71 |
72 | This client simplifies interaction with REST APIs
73 | by providing methods for common HTTP operations
74 | (GET, POST, PUT, PATCH, DELETE) with support for
75 | different authentication mechanisms (Basic,
76 | OAuth2), various content types, and streamlined
77 | error handling. It's particularly useful for
78 | working with Apigee Management APIs.
79 |
80 | Attributes:
81 | auth_type (str): The authentication type
82 | ('basic' or 'oauth').
83 | session (requests.Session): The underlying
84 | requests session object; SSL verification
85 | is configured on it from the `ssl_verify`
86 | argument (default: True).
87 | base_headers (dict): Default headers for
88 | all requests, including the Authorization
89 | header built from the `token` argument
90 | (Basic auth credentials or OAuth2 token).
91 | """
92 |
93 | def __init__(self, auth_type, token, ssl_verify=True):
94 | self._allowed_auth_types = ['basic', 'oauth']
95 | self.session = requests.Session()
96 | self.session.verify = ssl_verify
97 | if auth_type not in self._allowed_auth_types:
98 | raise ValueError(
99 | f'Unknown auth type. Allowed types are {", ".join(self._allowed_auth_types)}') # noqa pylint: disable=C0301
100 | self.auth_type = auth_type
101 |
102 | self.base_headers = {
103 | 'Authorization': f'Basic {token}' if auth_type == 'basic' else f'Bearer {token}' # noqa pylint: disable=C0301
104 | }
105 |
106 | def get(self, url, params=None):
107 | """Makes a GET request.
108 |
109 | Args:
110 | url (str): The URL to send the request to.
111 | params (dict, optional): Query parameters.
112 |
113 | Returns:
114 | The response content.
115 |
116 | Raises:
117 | ApigeeError: If the API request returns
118 | an error.
119 | """
120 | headers = self.base_headers.copy()
121 | response = self.session.get(url, params=params, headers=headers)
122 | logger.debug(f"Response: {response.content}") # noqa pylint: disable=W1203
123 | return self._process_response(response)
124 |
125 | def file_get(self, url, params=None):
126 | """Makes a GET request for file download.
127 |
128 | Args:
129 | url (str): The URL.
130 | params (dict, optional): Query parameters.
131 |
132 | Returns:
133 | The raw response content.
134 |
135 | Raises:
136 | ApigeeError: If an error occurs.
137 | """ 138 | headers = self.base_headers.copy() 139 | response = self.session.get( 140 | url, params=params, headers=headers, stream=True) 141 | logger.debug(f"Response: {response.content}") # noqa pylint: disable=W1203 142 | return self._process_response(response) 143 | 144 | def post(self, url, data=None): 145 | """Makes a POST request. 146 | 147 | Args: 148 | url (str): The URL. 149 | data (dict, optional): Request body data. 150 | 151 | Returns: 152 | The response content. 153 | 154 | Raises: 155 | ApigeeError: If an error occurs. 156 | """ 157 | headers = self.base_headers.copy() 158 | response = self.session.post( 159 | url, data=json.dumps(data or {}), headers=headers) 160 | logger.debug(f"Response: {response.content}") # noqa pylint: disable=W1203 161 | return self._process_response(response) 162 | 163 | def file_post(self, url, params=None, data=None, files=None): 164 | """Makes a file upload POST request. 165 | 166 | Args: 167 | url (str): The URL. 168 | params (dict, optional): Query params. 169 | data (dict, optional): Request body. 170 | files (dict, optional): Files to upload. 171 | 172 | Returns: 173 | The response content. 174 | 175 | Raises: 176 | ApigeeError: If an error occurs. 177 | 178 | """ 179 | headers = self.base_headers.copy() 180 | headers['Content-Type'] = 'application/octet-stream' 181 | response = self.session.post( 182 | url, data=data, files=files, headers=headers, params=params) 183 | logger.debug(f"Response: {response.content}") # noqa pylint: disable=W1203 184 | return self._process_response(response) 185 | 186 | def patch(self, url, data=None): 187 | """Makes a PATCH request. 188 | 189 | Args: 190 | url (str): The URL. 191 | data (dict, optional): Request body data. 192 | 193 | Returns: 194 | The response content. 195 | 196 | Raises: 197 | ApigeeError: If an error occurs. 198 | """ 199 | headers = self.base_headers.copy() 200 | response = self.session.patch( 201 | url, data=json.dumps(data or {}), headers=headers) 202 | logger.debug(f"Response: {response.content}") # noqa pylint: disable=W1203 203 | return self._process_response(response) 204 | 205 | def put(self, url, data=None): 206 | """Makes a PUT request. 207 | 208 | Args: 209 | url (str): The URL. 210 | data (dict, optional): The request body. 211 | 212 | Returns: 213 | The response content. 214 | 215 | Raises: 216 | ApigeeError: If an error occurs. 217 | """ 218 | headers = self.base_headers.copy() 219 | response = self.session.put( 220 | url, data=json.dumps(data or {}), headers=headers) 221 | logger.debug(f"Response: {response.content}") # noqa pylint: disable=W1203 222 | return self._process_response(response) 223 | 224 | def delete(self, url, params=None): 225 | """Makes a DELETE request. 226 | 227 | Args: 228 | url (str): The URL. 229 | params (dict, optional): Query parameters. 230 | 231 | Returns: 232 | The response content. 233 | 234 | Raises: 235 | ApigeeError: If an error occurs. 236 | """ 237 | headers = self.base_headers.copy() 238 | response = self.session.delete(url, headers=headers, params=params or {}) # noqa pylint: disable=C0301 239 | logger.debug(f"Response: {response.content}") # noqa pylint: disable=W1203 240 | return self._process_response(response) 241 | 242 | def _process_response(self, response): 243 | """Processes the response from an HTTP request. 244 | 245 | Args: 246 | response: The HTTP response object. 247 | 248 | Returns: 249 | The content of the response. 
250 | """
251 | status_code = response.status_code
252 | if status_code >= 400:
253 | logger.warning(f"{response.request.method} request to {response.request.url} returned {status_code}") # noqa pylint: disable=C0301,W1203
254 | return self._parse(response).content()
255 |
256 | def _parse(self, response):
257 | """Parses the response content based on content type.
258 |
259 | Args:
260 | response: The HTTP response object.
261 |
262 | Returns:
263 | A Response object (JsonResponse, PlainResponse,
264 | EmptyResponse, or RawResponse).
265 | """
266 | if not response.text:
267 | return EmptyResponse(response.status_code)
268 | try:
269 | if response.headers.get('Content-Type', '') == 'application/octet-stream': # noqa pylint: disable=C0301
270 | return RawResponse(response)
271 | if response.headers.get('Content-Type', '').startswith('application/json'): # noqa pylint: disable=C0301
272 | return JsonResponse(response)
273 | return PlainResponse(response)
274 | except ValueError:
275 | logger.error('Unable to parse response as JSON',
276 | exc_info=EXEC_INFO)
277 | return PlainResponse(response) # fall back to raw text so callers still get a Response # noqa pylint: disable=C0301
278 |
279 |
280 | class Response(object): # noqa pylint: disable=R0205,R0903
281 | """Represents an HTTP response.
282 |
283 | Base class for different response types
284 | (JSON, plain text, raw, etc.). Handles
285 | common response processing like status code
286 | checking and error raising.
287 |
288 | Attributes:
289 | _status_code (int): The HTTP status code.
290 | _content: The response content.
291 | """
292 | def __init__(self, status_code, content):
293 | """Initializes a Response object.
294 |
295 | Args:
296 | status_code (int): The HTTP status code.
297 | content: The response content.
298 | """
299 | self._status_code = status_code
300 | self._content = content
301 |
302 | def content(self):
303 | """Returns the response content.
304 |
305 | Raises:
306 | ApigeeError: If the response represents
307 | an error.
308 |
309 | Returns:
310 | The response content if successful.
311 | """
312 | if self._is_error():
313 | raise ApigeeError(status_code=self._status_code,
314 | error_code=self._error_code(),
315 | message=self._error_message())
316 | return self._content
317 |
318 | def _is_error(self):
319 | """Checks if the response is an error.
320 |
321 | Returns:
322 | True if error, False otherwise.
323 | """
324 | return self._status_code is None # or self._status_code >= 400 # noqa pylint: disable=C0301
325 |
326 | # Adding these methods to force implementation in subclasses because they are referenced in this parent class # noqa pylint: disable=C0301
327 | def _error_code(self):
328 | """Returns the error code.
329 |
330 | Must be implemented by subclasses.
331 |
332 | Raises:
333 | NotImplementedError: If not
334 | implemented.
335 | """
336 | raise NotImplementedError
337 |
338 | def _error_message(self):
339 | """Returns the error message.
340 |
341 | Must be implemented by subclasses.
342 |
343 | Raises:
344 | NotImplementedError: If not
345 | implemented.
346 | """
347 | raise NotImplementedError
348 |
349 |
350 | class JsonResponse(Response): # noqa pylint: disable=R0903
351 | """Represents a JSON HTTP response.
352 |
353 | Parses and handles JSON response content,
354 | including error checking. Inherits from
355 | the `Response` base class.
356 | """
357 | def __init__(self, response):
358 | """Initializes a JsonResponse.
359 |
360 | Args:
361 | response: The HTTP response object.
362 | """ 363 | content = json.loads(response.text) 364 | super(JsonResponse, self).__init__(response.status_code, content) # noqa pylint: disable=R1725 365 | 366 | def _error_code(self): 367 | """Returns the JSON error code. 368 | 369 | Checks various keys for the error code 370 | in the JSON content. 371 | 372 | Returns: 373 | str: The error code, or a default 374 | if not found. 375 | """ 376 | 377 | if 'errorCode' in self._content: 378 | return self._content.get('errorCode') 379 | if 'error' in self._content: 380 | return self._content.get('error') 381 | return UNKNOWN_ERROR 382 | 383 | def _error_message(self): 384 | """Returns the JSON error message. 385 | 386 | Extracts the error message from the 387 | JSON content. 388 | 389 | Returns: 390 | str: The error message or an empty string. 391 | """ 392 | message = self._content.get('message', '') 393 | if message is not None and message != '': 394 | return message 395 | return self._content.get('error', '') 396 | 397 | 398 | class PlainResponse(Response): # noqa pylint: disable=R0903 399 | """Represents a plain text HTTP response. 400 | 401 | Handles plain text content. Inherits from 402 | the `Response` base class. 403 | """ 404 | def __init__(self, response): 405 | """Initializes a PlainResponse. 406 | 407 | Args: 408 | response: The HTTP response object. 409 | """ 410 | super(PlainResponse, self).__init__( # noqa pylint: disable=R1725 411 | response.status_code, response.text) 412 | 413 | def _error_code(self): 414 | """Returns the error code. 415 | 416 | Returns: 417 | str: The default unknown error code. 418 | """ 419 | return UNKNOWN_ERROR 420 | 421 | def _error_message(self): 422 | """Returns the error message. 423 | 424 | Returns: 425 | str: The plain text content as the error message. 426 | """ 427 | return self._content 428 | 429 | 430 | class EmptyResponse(Response): # noqa pylint: disable=R0903 431 | """Represents an empty HTTP response. 432 | 433 | Handles responses with no content. Inherits 434 | from the `Response` base class. 435 | """ 436 | def __init__(self, status_code): 437 | """Initializes an EmptyResponse. 438 | 439 | Args: 440 | status_code (int): The HTTP status code. 441 | """ 442 | super(EmptyResponse, self).__init__(status_code, '') # noqa pylint: disable=R1725 443 | 444 | def _error_code(self): 445 | """Returns the error code. 446 | 447 | Returns: 448 | str: The default unknown error code. 449 | """ 450 | return UNKNOWN_ERROR 451 | 452 | def _error_message(self): 453 | """Returns the error message. 454 | 455 | Returns: 456 | str: An empty string. 457 | """ 458 | return '' 459 | 460 | 461 | class RawResponse(Response): # noqa pylint: disable=R0903 462 | """Represents a raw byte HTTP response. 463 | 464 | Handles responses with raw byte content, 465 | typically for file downloads. Inherits 466 | from the `Response` base class. 467 | """ 468 | def __init__(self, response): 469 | """Initializes a RawResponse. 470 | 471 | Args: 472 | response: The HTTP response object. 473 | """ 474 | content = response.content 475 | super(RawResponse, self).__init__(response.status_code, content) # noqa pylint: disable=R1725 476 | 477 | def _error_code(self): 478 | """Returns the error code. 479 | 480 | Returns: 481 | str: The default unknown error code. 482 | """ 483 | return UNKNOWN_ERROR 484 | 485 | def _error_message(self): 486 | """Returns the error message. 487 | 488 | Returns: 489 | bytes: The raw byte content as the error message. 
# noqa 490 | """ 491 | return self._content 492 | --------------------------------------------------------------------------------
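A minimal usage sketch, not a file in the repository: it shows how `ApigeeClassic` (which wraps `RestClient` internally) is constructed and called, based only on the signatures above. The management URL, org name, and token values are illustrative assumptions; a real token must be obtained out of band, and any API failure surfaces as `ApigeeError`.

    from classic import ApigeeClassic

    # Hypothetical inputs -- substitute your own Apigee Edge details.
    client = ApigeeClassic(
        baseurl="https://api.enterprise.apigee.com/v1",  # assumption: Edge SaaS management API
        org="my-edge-org",       # assumption: source organization name
        token="<access-token>",  # assumption: Basic credentials or OAuth2 bearer token
        auth_type="oauth",       # 'basic' or 'oauth'; RestClient rejects anything else
        ssl_verify=True,
    )

    envs = client.list_environments()          # e.g. ['test', 'prod']
    proxies = client.list_org_objects('apis')  # paginated via startKey under the hood
    # Export the latest revision of one proxy bundle to an existing directory.
    client.fetch_proxy(('apis', proxies[0], 'target'))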