├── requirements-dev.txt ├── requirements.txt ├── Dockerfile ├── tox.ini ├── .github ├── dependabot.yml └── workflows │ └── docker-image.yml ├── config.json.tmpl ├── .pre-commit-config.yaml ├── .gitignore ├── README.md ├── contrast_application_licenses.py └── contrast_api.py /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | #black==22.6.0 2 | #click==8.1.3 3 | #mypy-extensions==0.4.3 4 | #pathspec==0.9.0 5 | #platformdirs==2.5.2 6 | #tomli==2.0.1 7 | 8 | pre-commit==2.20.0 9 | 10 | -r requirements.txt 11 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | certifi==2023.7.22 2 | charset-normalizer==3.1.0 3 | idna==3.4 4 | prometheus-client==0.16.0 5 | PyYAML==6.0.1 6 | requests==2.31.0 7 | schedule==1.2.0 8 | urllib3==2.0.2 9 | voluptuous==0.13.1 10 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.10-alpine 2 | 3 | WORKDIR /usr/src/app 4 | RUN mkdir output 5 | 6 | COPY requirements.txt ./ 7 | RUN pip install --no-cache-dir -r requirements.txt 8 | COPY *.py ./ 9 | 10 | ENTRYPOINT [ "python", "contrast_application_licenses.py" ] 11 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [flake8] 2 | # To work with Black 3 | ignore = 4 | E203, #Whitespace before ':' 5 | E501, #Line too long 6 | W503, #Line break occurred before a binary operator 7 | 8 | #Imported but unused 9 | per-file-ignores = __init__.py:F401 10 | 11 | [isort] 12 | profile = black 13 | -------------------------------------------------------------------------------- /.github/dependabot.yml: 
-------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # Maintain dependencies for Docker 4 | - package-ecosystem: "docker" 5 | directory: "/" 6 | schedule: 7 | interval: "daily" 8 | 9 | # Maintain dependencies for GitHub Actions 10 | - package-ecosystem: "github-actions" 11 | directory: "/" 12 | schedule: 13 | interval: "daily" 14 | 15 | # Maintain dependencies for pip 16 | - package-ecosystem: "pip" 17 | directory: "/" 18 | schedule: 19 | interval: "daily" 20 | -------------------------------------------------------------------------------- /config.json.tmpl: -------------------------------------------------------------------------------- 1 | { 2 | "environments": [ 3 | { 4 | "teamserverUrl": "http://environment1.my.tld/Contrast/api/ng/", 5 | "apiKey": "org-api-key", 6 | "authorizationHeader": "view-user-secret", 7 | "name": "Environment1", 8 | "orgId": "org-id-here" 9 | }, 10 | { 11 | "teamserverUrl": "http://environment2.eu.my.tld/Contrast/api/ng/", 12 | "apiKey": "org-api-key", 13 | "authorizationHeader": "view-user-secret", 14 | "name": "Environment2-EU", 15 | "orgId": "org-id-here" 16 | } 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | repos: 4 | - repo: https://github.com/asottile/pyupgrade 5 | rev: v2.37.3 6 | hooks: 7 | - id: pyupgrade 8 | args: [--py39-plus] 9 | - repo: https://github.com/hadialqattan/pycln 10 | rev: v2.1.1 11 | hooks: 12 | - id: pycln 13 | - repo: https://github.com/PyCQA/isort 14 | rev: 5.10.1 15 | hooks: 16 | - id: isort 17 | - repo: https://github.com/pre-commit/pre-commit-hooks 18 | rev: v4.3.0 19 | hooks: 20 | - id: trailing-whitespace 21 | - id: end-of-file-fixer 22 | - id: check-json 23 | - id: 
check-yaml 24 | - id: check-added-large-files 25 | - repo: https://github.com/PyCQA/flake8 26 | rev: 5.0.4 27 | hooks: 28 | - id: flake8 29 | - repo: https://github.com/psf/black 30 | rev: 22.6.0 31 | hooks: 32 | - id: black 33 | - repo: https://github.com/kumaraditya303/mirrors-pyright 34 | rev: v1.1.267 35 | hooks: 36 | - id: pyright 37 | -------------------------------------------------------------------------------- /.github/workflows/docker-image.yml: -------------------------------------------------------------------------------- 1 | name: Docker Image CI 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | tags: ["v*"] 7 | pull_request: 8 | branches: ["main"] 9 | 10 | env: 11 | REGISTRY: ghcr.io 12 | IMAGE_NAME: contrast-security-oss/assess-application-counter 13 | 14 | jobs: 15 | build_image: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: Checkout repository 19 | uses: actions/checkout@v3 20 | 21 | - name: Set up QEMU 22 | uses: docker/setup-qemu-action@v2 23 | 24 | - name: Setup Docker Buildx 25 | uses: docker/setup-buildx-action@v2 26 | 27 | - name: Log in to the Container registry 28 | uses: docker/login-action@v2 29 | with: 30 | registry: ${{ env.REGISTRY }} 31 | username: ${{ github.actor }} 32 | password: ${{ secrets.GITHUB_TOKEN }} 33 | 34 | - name: Extract metadata (tags, labels) for Docker 35 | id: meta 36 | uses: docker/metadata-action@v4 37 | with: 38 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} 39 | tags: | 40 | type=ref,event=branch 41 | type=semver,pattern={{version}} 42 | type=semver,pattern={{major}}.{{minor}} 43 | type=semver,pattern={{major}},enable=${{ !startsWith(github.ref, 'refs/tags/v0.') }} 44 | 45 | - name: Build and push Docker image 46 | uses: docker/build-push-action@v4 47 | with: 48 | context: . 
49 | platforms: linux/amd64, linux/arm64 50 | push: ${{ github.event_name == 'push' }} 51 | tags: ${{ steps.meta.outputs.tags }} 52 | labels: ${{ steps.meta.outputs.labels }} 53 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | config.json 2 | run.sh 3 | 4 | #Python .gitignore 5 | # Byte-compiled / optimized / DLL files 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # C extensions 11 | *.so 12 | 13 | # Distribution / packaging 14 | .Python 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | share/python-wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .nox/ 47 | .coverage 48 | .coverage.* 49 | .cache 50 | nosetests.xml 51 | coverage.xml 52 | *.cover 53 | *.py,cover 54 | .hypothesis/ 55 | .pytest_cache/ 56 | cover/ 57 | 58 | # Translations 59 | *.mo 60 | *.pot 61 | 62 | # Django stuff: 63 | *.log 64 | local_settings.py 65 | db.sqlite3 66 | db.sqlite3-journal 67 | 68 | # Flask stuff: 69 | instance/ 70 | .webassets-cache 71 | 72 | # Scrapy stuff: 73 | .scrapy 74 | 75 | # Sphinx documentation 76 | docs/_build/ 77 | 78 | # PyBuilder 79 | .pybuilder/ 80 | target/ 81 | 82 | # Jupyter Notebook 83 | .ipynb_checkpoints 84 | 85 | # IPython 86 | profile_default/ 87 | ipython_config.py 88 | 89 | # pyenv 90 | # For a library or package, you might want to ignore these files since the code is 91 | # intended to run in multiple environments; otherwise, check them in: 92 | # .python-version 93 | 94 | # pipenv 95 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 96 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 97 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 98 | # install all needed dependencies. 99 | #Pipfile.lock 100 | 101 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 102 | __pypackages__/ 103 | 104 | # Celery stuff 105 | celerybeat-schedule 106 | celerybeat.pid 107 | 108 | # SageMath parsed files 109 | *.sage.py 110 | 111 | # Environments 112 | .env 113 | .venv 114 | env/ 115 | venv/ 116 | ENV/ 117 | env.bak/ 118 | venv.bak/ 119 | 120 | # Spyder project settings 121 | .spyderproject 122 | .spyproject 123 | 124 | # Rope project settings 125 | .ropeproject 126 | 127 | # mkdocs documentation 128 | /site 129 | 130 | # mypy 131 | .mypy_cache/ 132 | .dmypy.json 133 | dmypy.json 134 | 135 | # Pyre type checker 136 | .pyre/ 137 | 138 | # pytype static type analyzer 139 | .pytype/ 140 | 141 | # Cython debug symbols 142 | cython_debug/ 143 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Contrast Assess Count License Usage 2 | 3 | Script to count licensed Contrast Assess Applications across environments, de-duplicating them by name, language, and metadata. 4 | Intended for use with Prometheus. 5 | 6 | A total unique application count metric is emitted, as well as used license counts for each environment. 7 | 8 | ## Requirements 9 | - Python 3.10 (other versions _may_ work but are untested) 10 | - Ability to install Python libraries from `requirements.txt` 11 | 12 | ## Setup 13 | You can run this script locally with a Python install, or, in a container with the provided `Dockerfile` 14 | 15 | ### Container use 16 | 17 | #### Pre-built 18 | ```bash 19 | docker run -it -v $PWD/config.json:/usr/src/app/config.json ghcr.io/contrast-security-oss/assess-application-counter:main <...args...> 20 | ``` 21 | 22 | #### Local build 23 | ```bash 24 | docker build . 
--tag contrast-count-assess-licenses # Build the container 25 | docker run -it -v $PWD/config.json:/usr/src/app/config.json contrast-count-assess-licenses <...args...> # Run the container 26 | ``` 27 | 28 | ### Local use 29 | Use of a virtual environment is encouraged 30 | ```bash 31 | python3 -m venv venv # Create the virtual environment 32 | . venv/bin/activate # Activate the virtual environment 33 | pip3 install -r requirements.txt # Install dependencies 34 | python3 contrast_application_licenses.py # Run script 35 | ``` 36 | 37 | ## Connection and Authentication 38 | 39 | Connection details for your environments should be specified in the format described in [`config.json.tmpl`](config.json.tmpl). 40 | 41 | Each environment must be distinctly named. 42 | 43 | ## Running 44 | 45 | Full usage information: 46 | 47 | ``` 48 | usage: contrast_application_licenses.py [-h] [-c CONFIG_FILE] [-i UPDATE_INTERVAL] [-l {CRITICAL,ERROR,WARN,INFO,DEBUG}] [-p PROMETHEUS_LISTEN_PORT | -u PROMETHEUS_PUSH_GATEWAY] 49 | 50 | Utility to count licensed Contrast Assess Applications across environments, de-duplicating them by name, language, and metadata. 51 | 52 | options: 53 | -h, --help show this help message and exit 54 | -c CONFIG_FILE, --config_file CONFIG_FILE, --config-file CONFIG_FILE 55 | Path to JSON config or - to read it from stdin, defaults to config.json 56 | -i UPDATE_INTERVAL, --update-interval UPDATE_INTERVAL, --update_interval UPDATE_INTERVAL 57 | Number of minutes to wait between polls of the configured environments for licensed applications. Only used when serving prometheus data with -p. 58 | -l {CRITICAL,ERROR,WARN,INFO,DEBUG}, --log-level {CRITICAL,ERROR,WARN,INFO,DEBUG}, --log_level {CRITICAL,ERROR,WARN,INFO,DEBUG} 59 | Log level 60 | -p PROMETHEUS_LISTEN_PORT, --prometheus-listen-port PROMETHEUS_LISTEN_PORT, --prometheus_listen_port PROMETHEUS_LISTEN_PORT 61 | Port to serve metrics on. 
62 | -u PROMETHEUS_PUSH_GATEWAY, --prometheus-push-gateway PROMETHEUS_PUSH_GATEWAY, --prometheus_push_gateway PROMETHEUS_PUSH_GATEWAY 63 | URL for a Prometheus push gateway where metrics will be sent. 64 | ``` 65 | 66 | If used with `-p`, the license data will be periodically refreshed (default every 5 minutes), and served on the specified port (daemon mode). 67 | 68 | If used with `-u`, the license data is retrieved once and sent to the specified push gateway URL. This is good for cron-style environments. 69 | 70 | Both options may not be used together. 71 | 72 | If neither option is provided, counts are logged at the default info level. 73 | 74 | ## Output 75 | 76 | ``` 77 | # HELP contrast_assess_unique_licensed_applications Number of unique licensed Contrast Assess applications, de-duplicated by name, language and metadata values. 78 | # TYPE contrast_assess_unique_licensed_applications gauge 79 | contrast_assess_unique_licensed_applications 6.0 80 | # HELP contrast_assess_licensed_applications_total Number of licensed Contrast Assess applications on an environment. 81 | # TYPE contrast_assess_licensed_applications_total gauge 82 | contrast_assess_licensed_applications_total{environment="Environment1"} 5.0 83 | contrast_assess_licensed_applications_total{environment="Environment2-EU"} 3.0 84 | # HELP contrast_assess_licensed_applications Number of licensed Contrast Assess applications in a specific language. 85 | # TYPE contrast_assess_licensed_applications gauge 86 | contrast_assess_licensed_applications{environment="Environment1",language="Node"} 3.0 87 | contrast_assess_licensed_applications{environment="Environment1",language="Java"} 1.0 88 | contrast_assess_licensed_applications{environment="Environment1",language="PHP"} 1.0 89 | contrast_assess_licensed_applications{environment="Environment2-EU",language="Node"} 3.0 90 | # HELP contrast_assess_licensed_applications_updated_unixtime Time the Contrast Assess licensed applications were last polled. 
"""Count licensed Contrast Assess applications across environments.

De-duplicates applications by name, language, and metadata, and exposes the
counts as Prometheus metrics (served over HTTP, pushed to a gateway, or just
logged).
"""
import argparse
import logging
import sys
from collections import defaultdict
from dataclasses import dataclass
from time import sleep

from contrast_api import contrast_instance_from_json, load_config

# Command-line interface. Every long option accepts both kebab-case and
# snake_case spellings for convenience.
args_parser = argparse.ArgumentParser(
    description="Utility to count licensed Contrast Assess Applications across environments, de-duplicating them by name, language, and metadata."
)
args_parser.add_argument(
    "-c",
    "--config_file",
    "--config-file",
    help="Path to JSON config or - to read it from stdin, defaults to config.json",
    default="config.json",
    type=argparse.FileType("r"),
)
args_parser.add_argument(
    "-i",
    "--update-interval",
    "--update_interval",
    help="Number of minutes to wait between polls of the configured environments for licensed applications. Only used when serving prometheus data with -p.",
    type=int,
    default=5,
)
args_parser.add_argument(
    "-l",
    "--log-level",
    "--log_level",
    help="Log level",
    choices=["CRITICAL", "ERROR", "WARN", "INFO", "DEBUG"],
    type=str.upper,
    default="INFO",
)
# Serving metrics (-p) and pushing metrics (-u) are mutually exclusive modes.
group = args_parser.add_mutually_exclusive_group()
group.add_argument(
    "-p",
    "--prometheus-listen-port",
    "--prometheus_listen_port",
    help="Port to serve metrics on.",
    type=int,
)
group.add_argument(
    "-u",
    "--prometheus-push-gateway",
    "--prometheus_push_gateway",
    help="URL for a Prometheus push gateway where metrics will be sent.",
)
args = args_parser.parse_args()

logging.basicConfig(level=args.log_level, format="%(levelname)s: %(message)s")
logger = logging.getLogger(__file__)

# Third-party imports happen after logging is configured so a missing package
# produces a clean fatal log line instead of a raw traceback.
try:
    from prometheus_client import (
        CollectorRegistry,
        Gauge,
        push_to_gateway,
        start_http_server,
    )
except ImportError:
    logger.fatal("prometheus-client is not installed")
    sys.exit(1)

try:
    import schedule
except ImportError:
    logger.fatal("schedule is not installed")
    sys.exit(1)


config = load_config(file=args.config_file)

# Build one ContrastTeamServer client per configured environment, keyed by its
# (required-unique) name, verifying connectivity and org access up-front.
environments = {}
for iteration, org in enumerate(config):
    environment = contrast_instance_from_json(org)
    name = org["name"]

    if name in environments:
        logger.error(
            f"Environment named '{name}' was already added prior to environment[{iteration}], please use distinct names"
        )
        sys.exit(1)

    if not environment.test_connection():
        logger.error(f"Test connection failed for Environment '{name}'")
        sys.exit(1)
    if not environment.test_org_access():
        logger.error(f"Organization access failed for environment '{name}'")
        sys.exit(1)

    environments[name] = environment


@dataclass(eq=True, frozen=True)
class Application:
    """Dataclass to represent an application using name, language and metadata as composite unique key."""

    name: str
    language: str
    metadata: str


def metadata_to_str(metadata_entities: list[dict]) -> str:
    """Convert the metadata entities from the application response into a string like the YAML format used to provide application metadata.

    Entries are sorted by field name so equal metadata always renders to an
    identical string — required for de-duplication via Application equality.
    """
    return ",".join(
        f"{meta['fieldName']}={meta['fieldValue']}"
        for meta in sorted(metadata_entities, key=lambda meta: meta["fieldName"])
    )


@dataclass
class AppCounts:
    """Result of one licensed-application count across all environments."""

    # Count of applications de-duplicated by name/language/metadata.
    unique_licensed_applications: int
    # Map of environment name -> (language -> application count).
    environment_applications: dict[str, dict]


def count_licensed_applications() -> AppCounts:
    """Poll every configured environment and count its licensed applications.

    :return: an AppCounts holding the globally de-duplicated total plus a
        per-environment breakdown by language.
    """
    apps: set[Application] = set()
    # map of environment -> language -> application count
    environment_apps_by_language: dict[str, dict[str, int]] = {}

    for environment_name, environment in environments.items():
        # map of language -> application count, defaulting at 0 for new keys
        language_count = defaultdict(int)
        logger.info(f"Listing applications for environment '{environment_name}'...")
        applications = environment.list_org_apps(
            environment._org_uuid,
            include_archived=True,
            include_merged=False,
            quick_filter="LICENSED",
        )
        for application in applications:
            metadata = metadata_to_str(application["metadataEntities"])

            app = Application(
                application["name"],
                application["language"],
                metadata,
            )
            apps.add(app)
            language_count[app.language] += 1

        logger.debug(f"Unique application count is now: {len(apps)}")

        environment_app_count = len(applications)
        logger.info(
            f"Environment '{environment_name}' app count: {environment_app_count}"
        )
        environment_apps_by_language[environment_name] = language_count

    unique_license_count = len(apps)
    logger.info(f"Unique license count: {unique_license_count}")
    return AppCounts(unique_license_count, environment_apps_by_language)


# Prometheus metrics. A dedicated registry is used so only these metrics are
# exposed (no default process/platform collectors).
registry = CollectorRegistry()
unique_gauge = Gauge(
    "contrast_assess_unique_licensed_applications",
    "Number of unique licensed Contrast Assess applications, de-duplicated by name, language and metadata values.",
    registry=registry,
)
environment_gauge = Gauge(
    "contrast_assess_licensed_applications_total",
    "Number of licensed Contrast Assess applications on an environment.",
    ["environment"],
    registry=registry,
)
language_gauge = Gauge(
    "contrast_assess_licensed_applications",
    "Number of licensed Contrast Assess applications in a specific language.",
    ["language", "environment"],
    registry=registry,
)
poll_gauge = Gauge(
    "contrast_assess_licensed_applications_updated_unixtime",
    "Time the Contrast Assess licensed applications were last polled.",
    registry=registry,
)
update_timer = Gauge(
    "contrast_assess_licensed_applications_update_duration_seconds",
    "Time it took to update license counts.",
    registry=registry,
)


@update_timer.time()
def update_registry():
    """Refresh every gauge from a fresh licensed-application count."""
    data = count_licensed_applications()
    unique_gauge.set(data.unique_licensed_applications)

    for environment, language_counts in data.environment_applications.items():
        total_apps_in_environment = 0

        for language, count in language_counts.items():
            total_apps_in_environment += count
            gauge = language_gauge.labels(environment=environment, language=language)
            gauge.set(count)

        gauge = environment_gauge.labels(environment=environment)
        gauge.set(total_apps_in_environment)

    poll_gauge.set_to_current_time()


# Always perform one initial collection, regardless of mode.
update_registry()

# Daemon mode: serve the registry over HTTP and refresh on a schedule forever.
if listen_port := args.prometheus_listen_port:
    start_http_server(listen_port, registry=registry)
    logger.info(f"Listening on port {listen_port}")
    logger.info(f"Scheduling update every {args.update_interval} minute(s)")
    schedule.every(args.update_interval).minutes.do(update_registry)
    while True:
        schedule.run_pending()
        sleep(1)

# One-shot mode: push the collected metrics to a Prometheus push gateway.
if url := args.prometheus_push_gateway:
    logger.info(f"Pushing data to gateway at {url}...")
    push_to_gateway(url, job="contrast_assess_licenses_used", registry=registry)
    logger.info("Successfully pushed data to gateway.")
"""Configuration loading and a small client for the Contrast TeamServer REST API."""
import datetime
import json
import logging
import os
import sys
from json import JSONDecodeError
from typing import Any
from urllib.parse import urlparse

logger = logging.getLogger(__file__)

# Third-party imports are guarded so a missing package produces a clean fatal
# log line instead of a raw traceback.
try:
    import requests
except ImportError:
    logger.fatal("requests module is not installed (see README)")
    sys.exit(1)


try:
    from voluptuous import All, Length, Schema, Url
    from voluptuous.error import MultipleInvalid
except ImportError:
    logger.fatal("voluptuous module is not installed (see README)")
    sys.exit(1)


def load_config(file_path="config.json", file=None) -> list[dict]:
    """Load and schema-validate the JSON configuration.

    :param file_path: path to a JSON config file; used only when `file` is None.
    :param file: optional already-open file object (takes precedence over
        `file_path`); it is closed after reading.
    :return: the validated list of environment dictionaries.

    Exits the process with status 1 when the config cannot be read, decoded,
    or does not match the expected schema.
    """
    try:
        if file:
            config = json.load(file)
            file.close()
        else:
            try:
                with open(file_path) as config_file:
                    config = json.load(config_file)
            except OSError as e:
                # BUGFIX: the exception was previously passed as a stray
                # positional logging argument and never rendered.
                logger.error("Could not load config: %s", e)
                sys.exit(1)
    except JSONDecodeError as e:
        logger.error("Could not decode JSON from config: %s", e)
        sys.exit(1)

    # Validate environments in the config.json have expected keys
    config_schema = Schema(
        {
            "environments": All(
                [
                    {
                        "name": All(str, Length(1)),
                        "teamserverUrl": Url(),
                        "apiKey": All(str, Length(1)),
                        "authorizationHeader": All(str, Length(1)),
                        "orgId": All(str, Length(1)),
                    }
                ],
                Length(1),
                required=True,
            )
        },
        required=True,
    )

    try:
        config_schema(config)
    except MultipleInvalid as e:
        logger.error("config invalid - " + str(e))
        for error in e.errors:
            logger.error(f"\t{error.path[0]}: {error.msg}")
        sys.exit(1)

    return config["environments"]


def load_config_from_env() -> dict[str, str]:
    """Validate configuration has been supplied by environment variables."""
    config_schema = Schema(
        {
            "CONTRAST__API__URL": Url(),  # type: ignore
            "CONTRAST__API__API_KEY": All(Length(1)),
            "CONTRAST__API__AUTH_HEADER": All(str, Length(1)),
        }
    )
    environment_config = {k: os.getenv(k) for (k) in config_schema.schema.keys()}

    try:
        config_schema(environment_config)
    except MultipleInvalid as e:
        logger.error("ERROR: configuration invalid, check environment variables:")
        for error in e.errors:
            logger.error(f"\t{error.path[0]}: {error.msg}")
        sys.exit(1)

    return environment_config  # type: ignore - voluptuous has validated values are set


class ContrastTeamServer:
    """Thin client for one Contrast TeamServer instance/organization."""

    def __init__(
        self,
        teamserver_url,
        api_key,
        authorization_header,
        org_uuid,
    ):
        """
        :param teamserver_url: base API URL ending in ``/Contrast/api/ng/``.
        :param api_key: organization API key sent as the ``Api-Key`` header.
        :param authorization_header: pre-computed ``Authorization`` header value.
        :param org_uuid: UUID of the organization to operate on.
        """
        self._teamserver_url = teamserver_url
        self._api_key = api_key
        self._authorization_header = authorization_header
        self._org_uuid = org_uuid

        self._is_superadmin = False
        self._connection_checked = False
        # Escape hatch for self-signed certificates; opt-in via environment.
        self._skip_certificate_validation = False
        if "INSECURE_SKIP_CERT_VALIDATION" in os.environ:
            self._skip_certificate_validation = os.environ[
                "INSECURE_SKIP_CERT_VALIDATION"
            ].lower() in ["1", "true"]

        self._title_cwe_cache = {}

    def request_headers(self, api_key) -> dict[str, str]:
        """Build the standard JSON request headers for the given API key."""
        return {
            "Accept": "application/json",
            "Api-Key": api_key,
            "Authorization": self._authorization_header,
            "Content-Type": "application/json",
        }

    def api_request(
        self,
        path: str,
        method: str = "GET",
        body: Any | None = None,
        should_retry: bool = False,
        api_key: str | None = None,
    ):
        """
        Make a HTTP request to TeamServer.
        Raises :class:`HTTPError`, if one occurred (after retrying, if `should_retry` is True).

        :param path: URI for the request.
        :param method: method for the Request: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.
        :param body: (optional) A JSON serializable Python object to send in the body of the Request.
        :param should_retry: Boolean. Enable/disable retrying when an exception occurs with the request.
        :param api_key: (optional) TeamServer API key if you wish to override the class `api_key`.
        :return: JSON body of the response.
        """
        # Lazily verify credentials once; the "profile" call IS that check, so
        # it must not recurse into itself.
        if not self._connection_checked and path != "profile":
            self.test_connection()

        logger.debug(method + " " + path)
        if api_key is None:
            api_key = self._api_key

        def make_request():
            return requests.request(
                method,
                self._teamserver_url + path,
                headers=self.request_headers(api_key),
                json=body,
                verify=not self._skip_certificate_validation,
            )

        response = make_request()
        if not should_retry:
            # Non-retry path deliberately skips the status check; callers
            # inspect the JSON body (or catch JSONDecodeError) themselves.
            return response.json()

        # Retry path: up to two retries after the initial attempt (three
        # requests total). BUGFIX: the previous loop exhausted its retries and
        # then returned the JSON of the last *failed* response instead of
        # raising — its `raise e` branch was unreachable.
        retries_remaining = 2
        while True:
            try:
                response.raise_for_status()
                return response.json()
            except Exception:
                if retries_remaining == 0:
                    raise
                retries_remaining -= 1
                response = make_request()

    def paginate_through_all(
        self, path, response_key, api_key, body=None, method="GET"
    ):
        """Helper to retrieve all pages of a paginated call.

        :param path: query path already containing a ``?``/``&`` query string;
            an ``&offset=N`` parameter is appended per page.
        :param response_key: key in each response holding the page's items.
        :return: concatenated items from every page.
        """
        output = []

        call = path
        offset = 0
        more = True

        # TeamServer signals the end of pagination with an empty page.
        while more:
            url = f"{call}&offset={offset}"
            response = self.api_request(
                url, method, body, should_retry=True, api_key=api_key
            )

            output.extend(response[response_key])
            count = len(response[response_key])

            offset += count
            more = count > 0

        return output

    def list_orgs(self):
        """Superadmin API call to retrieve all organizations."""
        if self._is_superadmin:
            # fetch both and return only those we are allowed to access, as access could be limited for this specific user
            all_orgs = self.api_request("superadmin/organizations?limit=100")[
                "organizations"
            ]
            profile_orgs = self.api_request("profile/organizations")["organizations"]
            allowed_orgs_ids = list(
                map(lambda org: org["organization_uuid"], profile_orgs)
            )

            orgs_allowed_access_to = []
            for org in all_orgs:
                if org["organization_uuid"] in allowed_orgs_ids:
                    orgs_allowed_access_to.append(org)

            return orgs_allowed_access_to
        else:
            return self.api_request("profile/organizations")["organizations"]

    def org_api_key(self, org_id) -> str | None:
        """Retrieve the API key for a specific organization (superadmin or own-user route), or None if absent from the response."""
        if self._is_superadmin:
            call = f"superadmin/organizations/{org_id}/apiKey"
        else:
            call = f"{org_id}/users/keys/apikey"

        response = self.api_request(call)

        if "api_key" in response:
            return response["api_key"]
        else:
            return None

    def list_org_apps(
        self,
        org_id,
        api_key=None,
        include_merged=True,
        include_archived=False,
        quick_filter="ALL",
    ) -> list[dict]:
        """Organization specific API call to list all applications.

        :param quick_filter: TeamServer quick filter, e.g. ``ALL`` or ``LICENSED``.
        """
        call = org_id + "/applications/filter?expand=license,metadata&sort=appName"
        body = {"quickFilter": quick_filter}
        if include_merged:
            call += "&includeMerged=true"
        if include_archived:
            body["includeArchived"] = "true"

        return self.paginate_through_all(
            call, "applications", api_key, body=body, method="POST"
        )

    def org_traces_app(self, org_id, app_id, api_key):
        """Organization specific API call to list all traces (vulnerabilities) for an application."""
        call = org_id + "/traces/" + app_id + "/filter?expand=application&limit=500"

        traces = self.paginate_through_all(call, "traces", api_key=api_key)

        return traces

    @staticmethod
    def format_time(timestamp):
        """Format a TeamServer timestamp (milliseconds since epoch, UTC) as a naive ISO-8601 string; empty string for falsy input."""
        if not timestamp:
            return ""
        # datetime.utcfromtimestamp() is deprecated since Python 3.12; this
        # produces the identical naive-UTC ISO string without it.
        return (
            datetime.datetime.fromtimestamp(timestamp / 1000, tz=datetime.timezone.utc)
            .replace(tzinfo=None)
            .isoformat()
        )

    def test_connection(self):
        """Check TeamServer Connection and Credentials.

        :return: True when the profile call succeeds (also records whether the
            account has superadmin/server-admin rights), False otherwise.
        """
        try:
            response = self.api_request("profile")
        except JSONDecodeError as e:
            # BUGFIX: the exception was previously passed as a stray
            # positional logging argument and never rendered.
            logger.error("Profile check failed - check credentials: %s", e)
            return False
        if "success" in response and not response["success"]:
            logger.error("Profile check failed - check credentials: %s", response)
            return False
        self._connection_checked = True

        # Superadmin/server-admin accounts may enumerate all organizations;
        # everyone else is limited to the organizations on their profile.
        if response["user"].get("superadmin_role") not in [
            "SUPERADMIN",
            "SERVER_ADMIN",
        ]:
            logger.warning(
                "Account is not superadmin or serveradmin, access will be limited to a subset of organizations"
            )
            self._is_superadmin = False
            return True

        self._is_superadmin = True
        return True

    def test_org_access(self, orgId=None):
        """Check we can access the specified organization (defaults to the instance's org)."""
        if not orgId:
            orgId = self._org_uuid
        response = self.api_request(f"{orgId}/organizations")
        return response["success"]


def contrast_instance_from_json(json: dict) -> ContrastTeamServer:
    """Build a ContrastTeamServer from one config environment entry, normalising the URL to the ``/Contrast/api/ng/`` base path."""
    url = json["teamserverUrl"]
    url_parts = urlparse(url)
    if url_parts.path != "/Contrast/api/ng/":
        url = f"{url_parts.scheme}://{url_parts.netloc}/Contrast/api/ng/"

    logger.debug(f"Base URL: {url}")

    return ContrastTeamServer(
        url, json["apiKey"], json["authorizationHeader"], json["orgId"]
    )