├── .coverage ├── .github └── workflows │ ├── build.yml │ ├── manual-deploy.yml │ ├── pc-cli-example.yml │ └── release.yml ├── .gitignore ├── .pylintrc ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── Makefile ├── README.md ├── SUPPORT.md ├── bin └── pc ├── docs ├── how-to-enable-or-disable-policies-at-scale-via-csv.md └── how-to-use-in-pipelines.md ├── entrypoint.sh ├── help.png ├── mapping.yaml ├── prismacloud ├── __init__.py └── cli │ ├── __init__.py │ ├── api.py │ ├── cspm │ ├── __init__.py │ ├── cmd_alert.py │ ├── cmd_check.py │ ├── cmd_cloud.py │ ├── cmd_compliance.py │ ├── cmd_current.py │ ├── cmd_iam.py │ ├── cmd_inventory.py │ ├── cmd_licenses.py │ ├── cmd_policy.py │ ├── cmd_pov.py │ ├── cmd_resource.py │ ├── cmd_rql.py │ ├── cmd_saas_version.py │ └── cmd_usage.py │ ├── cwpp │ ├── __init__.py │ ├── cmd_audits.py │ ├── cmd_containers.py │ ├── cmd_credentials.py │ ├── cmd_defenders.py │ ├── cmd_discovery.py │ ├── cmd_host_auto_deploy.py │ ├── cmd_hosts.py │ ├── cmd_images.py │ ├── cmd_incidents.py │ ├── cmd_intelligence.py │ ├── cmd_license.py │ ├── cmd_logs.py │ ├── cmd_monitor.py │ ├── cmd_policies.py │ ├── cmd_registry.py │ ├── cmd_scans.py │ ├── cmd_serverless_auto_deploy.py │ ├── cmd_settings.py │ ├── cmd_stats.py │ ├── cmd_tags.py │ ├── cmd_users.py │ └── cmd_version.py │ ├── pccs │ ├── __init__.py │ ├── cmd_repositories.py │ ├── cmd_reviews.py │ └── cmd_suppressions.py │ └── version.py ├── pytest.ini ├── requirements.txt ├── results.png ├── screenshot.png ├── setup.cfg ├── setup.py ├── tests └── test_cli.py └── use-cases ├── README.md ├── custom-query-reports.md ├── june-2022.md └── use-rql.md /.coverage: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PaloAltoNetworks/prismacloud-cli/ad41f2754cc53b49d2dd3ae6aeca67b8b76857c2/.coverage -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Prisma Cloud CLI Build Workflow 2 | 3 | on: 4 | push: 5 | pull_request: 6 | branches: 7 | - main 8 | 9 | env: 10 | PC_ACCESS_KEY: ${{ secrets.PC_ACCESS_KEY }} 11 | PC_SAAS_API_ENDPOINT: ${{ secrets.PC_SAAS_API_ENDPOINT }} 12 | PC_SECRET_KEY: ${{ secrets.PC_SECRET_KEY }} 13 | 14 | jobs: 15 | qa: 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3 20 | 21 | - name: Set up Python 3.11 22 | uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4 23 | with: 24 | python-version: "3.11" 25 | 26 | - name: Install dependencies 27 | run: | 28 | python -m pip install --upgrade pip 29 | pip install build 30 | pip install pylint 31 | pip install -r requirements.txt 32 | 33 | - name: Analysing the code with pylint 34 | run: | 35 | pylint $(git ls-files '*.py') 36 | 37 | - name: Install flake8 38 | run: | 39 | pip install flake8 40 | 41 | - name: Analysing the code with flake8 42 | run: | 43 | # stop the build if there are Python syntax errors or undefined names 44 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 45 | # execute overall flake8 scan 46 | flake8 . --count --statistics 47 | 48 | test: 49 | needs: qa 50 | runs-on: ubuntu-latest 51 | strategy: 52 | matrix: 53 | python-version: ["3.8", "3.9", "3.10", "3.11"] 54 | 55 | steps: 56 | - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3 57 | - name: Set up Python 58 | uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4 59 | with: 60 | python-version: ${{ matrix.python-version }} 61 | 62 | - name: Check for required environment variables 63 | run: | 64 | required_env_vars=("PC_ACCESS_KEY" "PC_SAAS_API_ENDPOINT" "PC_SECRET_KEY") 65 | for env_var in "${required_env_vars[@]}"; do 66 | if [[ -z "${!env_var}" ]]; then 67 | echo "Error: $env_var is not set" 68 | exit 1 69 | fi 70 | done 71 | 72 | - name: Install dependencies 73 | run: | 74 | python -m pip install --upgrade pip 75 | pip install build 76 | pip install pylint 77 | pip install -r requirements.txt 78 | 79 | - name: Build package 80 | run: python -m build 81 | 82 | - name: Install package 83 | run: pip install . 84 | 85 | - name: Accept Support Message 86 | run: | 87 | mkdir ~/.prismacloud 88 | touch ~/.prismacloud/.community_supported_accepted 89 | 90 | - name: Run tests 91 | run: SKIP_BENCHMARK=1 pytest tests 92 | 93 | - name: Upload test results 94 | if: always() 95 | uses: actions/upload-artifact@ff15f0306b3f739f7b6fd43fb5d26cd321bd4de5 # v3 96 | with: 97 | name: test-results 98 | path: .pytest_cache 99 | 100 | 101 | -------------------------------------------------------------------------------- /.github/workflows/manual-deploy.yml: -------------------------------------------------------------------------------- 1 | 2 | --- 3 | name: Manual Deploy 4 | on: workflow_dispatch 5 | 6 | jobs: 7 | pypireleaser: 8 | runs-on: ubuntu-latest 9 | permissions: write-all 10 | needs: 11 | - create-github-release 12 | steps: 13 | - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3 14 | - name: Set up Python 15 | uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4 16 | with: 17 | python-version: "3.8" 18 | 19 | - name: Install dependencies 20 | run: | 21 | python -m pip install --upgrade pip 22 | pip install build 23 | pip install pylint 24 | pip install -r requirements.txt 25 | 26 | - name: Analysing the code with pylint 27 | run: | 28 | pylint $(git ls-files '*.py') 29 | 30 | - name: Install flake8 31 | run: | 32 | pip install flake8 33 | 34 | - name: Analysing the code with flake8 35 | run: | 36 | # stop the build if there are Python syntax errors or undefined names 37 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 38 | # execute overall flake8 scan 39 | flake8 . --count --statistics 40 | 41 | - name: Build package 42 | run: python -m build 43 | 44 | - name: Publish package 45 | uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 46 | with: 47 | user: __token__ 48 | password: ${{ secrets.PYPI_API_TOKEN }} 49 | 50 | publish-dockerhub: 51 | permissions: write-all 52 | runs-on: ubuntu-latest 53 | needs: 54 | - pypireleaser 55 | - create-github-release 56 | steps: 57 | - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3 58 | - name: Publish to Registry 59 | uses: elgohr/Publish-Docker-Github-Action@219613003b08f4d049f34cb56c92e84345e1bb3f # v5 60 | with: 61 | name: simonpanw/prismacloud-cli 62 | username: ${{ secrets.DOCKERHUB_USERNAME }} 63 | password: ${{ secrets.DOCKERHUB_PASSWORD }} 64 | tags: "latest,${{ env.PRISMA_CLOUD_CLI_VERSION }}" 65 | - name: Update Docker Hub README 66 | uses: peter-evans/dockerhub-description@dc67fad7001ef9e8e3c124cb7a64e16d0a63d864 # v3.4.2 67 | with: 68 | username: ${{ secrets.DOCKERHUB_USERNAME }} 69 | password: ${{ secrets.DOCKERHUB_PASSWORD }} 70 | repository: simonpanw/prismacloud-cli 71 | short-description: ${{ github.event.repository.description }} 72 | readme-filepath: ./README.md 73 | -------------------------------------------------------------------------------- /.github/workflows/pc-cli-example.yml: -------------------------------------------------------------------------------- 1 | name: Prisma Cloud CLI run 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | prismacloud-cli: 8 | runs-on: ubuntu-latest 9 | env: 10 | PC_ACCESS_KEY: ${{ secrets.PC_ACCESS_KEY }} 11 | PC_SAAS_API_ENDPOINT: ${{ secrets.PC_SAAS_API_ENDPOINT }} 12 | PC_SECRET_KEY: ${{ secrets.PC_SECRET_KEY }} 13 | 14 | steps: 15 | - name: Set up Python 16 | uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4 17 | with: 18 | python-version: "3.11" 19 | 20 | - name: Check for required environment variables 21 | run: | 22 | required_env_vars=("PC_ACCESS_KEY" "PC_SAAS_API_ENDPOINT" "PC_SECRET_KEY") 23 | for env_var in "${required_env_vars[@]}"; do 24 | if [[ -z "${!env_var}" ]]; then 25 | echo "Error: $env_var is not set" 26 | exit 1 27 | fi 28 | done 29 | 30 | - name: Cache pip dependencies 31 | uses: actions/cache@2f8e54208210a422b2efd51efaa6bd6d7ca8920f # v3 32 | with: 33 | path: ~/.cache/pip 34 | key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} 35 | restore-keys: | 36 | ${{ runner.os }}-pip- 37 | 38 | - name: Install or Upgrade pip 39 | run: | 40 | if [[ ! -d "~/.cache/pip" ]]; then 41 | python -m pip install --upgrade pip 42 | fi 43 | 44 | - name: Environment Setup 45 | run: | 46 | if [[ ! -d "~/.cache/pip" ]]; then 47 | sudo pip3 install prismacloud-cli -U 48 | mkdir ~/.prismacloud 49 | touch ~/.prismacloud/.community_supported_accepted 50 | fi 51 | 52 | - name: Run command 53 | run: pc --config environment version 54 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | 2 | --- 3 | name: Release new version on PyPI and DockerHub 4 | on: 5 | workflow_dispatch: 6 | pull_request: 7 | types: 8 | - closed 9 | 10 | jobs: 11 | create-github-release: 12 | if: github.event.pull_request.merged == true 13 | runs-on: ubuntu-latest 14 | permissions: write-all 15 | steps: 16 | - name: Check out code 17 | uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3 18 | 19 | - name: Extract version from prismacloud/cli/version.py 20 | run: | 21 | version=$(grep 'version = ' prismacloud/cli/version.py | sed -E "s/version = \"([^\"]+)\"/\1/") 22 | echo "PRISMA_CLOUD_CLI_VERSION=$version" >> $GITHUB_ENV 23 | 24 | - name: Create GitHub Release 25 | run: | 26 | gh release create ${{ env.PRISMA_CLOUD_CLI_VERSION }} --generate-notes --latest 27 | env: 28 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 29 | 30 | pypireleaser: 31 | runs-on: ubuntu-latest 32 | permissions: write-all 33 | needs: 34 | - create-github-release 35 | steps: 36 | - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3 37 | - name: Set up Python 38 | uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # v4 39 | with: 40 | python-version: "3.8" 41 | 42 | - name: Install dependencies 43 | run: | 44 | python -m pip install --upgrade pip 45 | pip install build 46 | pip install pylint 47 | pip install -r requirements.txt 48 | 49 | - name: Analysing the code with pylint 50 | run: | 51 | pylint $(git ls-files '*.py') 52 | 53 | - name: Install flake8 54 | run: | 55 | pip install flake8 56 | 57 | - name: Analysing the code with flake8 58 | run: | 59 | # stop the build if there are Python syntax errors or undefined names 60 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 61 | # execute overall flake8 scan 62 | flake8 . --count --statistics 63 | 64 | - name: Build package 65 | run: python -m build 66 | 67 | - name: Publish package 68 | uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 69 | with: 70 | user: __token__ 71 | password: ${{ secrets.PYPI_API_TOKEN }} 72 | 73 | publish-dockerhub: 74 | permissions: write-all 75 | runs-on: ubuntu-latest 76 | needs: 77 | - pypireleaser 78 | - create-github-release 79 | steps: 80 | - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3 81 | - name: Publish to Registry 82 | uses: elgohr/Publish-Docker-Github-Action@219613003b08f4d049f34cb56c92e84345e1bb3f # v5 83 | with: 84 | name: simonpanw/prismacloud-cli 85 | username: ${{ secrets.DOCKERHUB_USERNAME }} 86 | password: ${{ secrets.DOCKERHUB_PASSWORD }} 87 | tags: "latest,${{ env.PRISMA_CLOUD_CLI_VERSION }}" 88 | - name: Update Docker Hub README 89 | uses: peter-evans/dockerhub-description@dc67fad7001ef9e8e3c124cb7a64e16d0a63d864 # v3.4.2 90 | with: 91 | username: ${{ secrets.DOCKERHUB_USERNAME }} 92 | password: ${{ secrets.DOCKERHUB_PASSWORD }} 93 | repository: simonpanw/prismacloud-cli 94 | short-description: ${{ github.event.repository.description }} 95 | readme-filepath: ./README.md 96 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Distribution / packaging 2 | 3 | .Python 4 | env/ 5 | venv/ 6 | build/ 7 | develop-egs/ 8 | dist/ 9 | downloads/ 10 | eggs/ 11 | .eggs/ 12 | lib/ 13 | lib64/ 14 | parts/ 15 | sdist/ 16 | var/ 17 | .vscode/ 18 | *.egg-info/ 19 | prismacloud_cli.egg-info/ 20 | .installed.cfg 21 | *.egg 22 | *.pyc 23 | *.html 24 | *cache*/ 25 | *$py.class 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Test Output 34 | test-results.* 35 | error.log 36 | 37 | # Configuration Files 38 | 39 | *.conf 40 | 41 | # OSX 42 | 43 | .DS_Store 44 | output.html 45 | output.json -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | 3 | # Use multiple processes to speed up Pylint. 4 | # Specifying 0 will auto-detect the number of processors available to use. 5 | # 6 | jobs=1 7 | fail-under=9.0 8 | disable= 9 | C0114, # missing-module-docstring 10 | E1102, # pc_api.get_endpoint is not callable (not-callable) 11 | W0613, # Unused argument 'ctx' (unused-argument) 12 | 13 | # Workaround for the following issue generating inconsistent errors: 14 | # https://github.com/PyCQA/pylint/issues/5319 15 | init-hook='import sys; sys.path.append(".")' 16 | 17 | [BASIC] 18 | 19 | # Regular expression matching correct constant names. Overrides const-naming-style. 20 | # 21 | const-rgx=(([A-Z_][A-Z0-9_]*)|([a-z_][a-z0-9_]*)|(__.*__))$ 22 | 23 | [DESIGN] 24 | 25 | # Minimum number of public methods for a class (see R0903). 26 | # 27 | min-public-methods=1 28 | max-public-methods=24 29 | max-args=8 30 | max-branches=21 31 | max-nested-blocks=8 32 | max-statements=64 33 | 34 | [FORMAT] 35 | 36 | # Maximum number of characters on a single line. 37 | # 38 | max-line-length=255 39 | 40 | [MESSAGES CONTROL] 41 | 42 | # Disable the message, report, category or checker with the given id(s). 43 | # You can either give multiple identifiers separated by comma (,) 44 | # or put this option multiple times (only on the command line, 45 | # not in the configuration file where it should appear only once). 46 | # 47 | # You can also use "--disable=all" to 48 | # disable everything first and then reenable specific checks. 49 | # 50 | # For example, if you want to run only the similarities checker, 51 | # you can use "--disable=all --enable=similarities". 52 | # 53 | # If you want to run only the classes checker, but have no Warning level messages displayed, 54 | # use "--disable=all --enable=classes --disable=W". 55 | # 56 | disable= 57 | consider-using-dict-items, 58 | consider-using-f-string, 59 | consider-using-with, 60 | duplicate-code, 61 | fixme, 62 | missing-function-docstring, 63 | pointless-string-statement, 64 | unspecified-encoding 65 | 66 | [REPORTS] 67 | 68 | # Set the output format. 69 | # Available formats are text, parseable, colorized, json and msvs (visual studio). 70 | # 71 | output-format=colorized 72 | 73 | [TYPECHECK] 74 | ignored-modules=socket 75 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # How to Contribute 2 | 3 | :+1::tada: First off, thanks for taking the time to contribute! :tada::+1: 4 | 5 | Following these guidelines helps keep the project maintainable, easy to contribute to, and more secure. 6 | Thank you for taking the time to read this. Thank you for taking the time to follow this guide. 7 | 8 | ## Where To Start 9 | 10 | There are many ways to contribute. 11 | You can fix a bug, improve the documentation, submit feature requests and issues, or work on a feature you need for yourself. 12 | 13 | Pull requests are necessary for all contributions of code or documentation. 14 | If you are new to open source and not sure what a pull request is ... welcome, we're glad to have you! 15 | All of us once had a contribution to make and didn't know where to start. 16 | 17 | Even if you don't write code for your job, don't worry, the skills you learn during your first contribution to open source can be applied in so many ways, you'll wonder what you ever did before you had this knowledge. 18 | 19 | Here are a few resources on how to contribute to open source for the first time. 20 | 21 | - [First Contributions](https://github.com/firstcontributions/first-contributions/blob/master/README.md) 22 | - [Public Learning Paths](https://lab.github.com/githubtraining/paths) 23 | 24 | ## Pull Requests 25 | 26 | If you want to contribute to the project, go through the process 27 | of making a fork and pull request yourself: 28 | 29 | > 1. Create your own fork of the code 30 | > 2. Clone the fork locally 31 | > 3. Make the changes in your local clone 32 | > 4. Push the changes from local to your fork 33 | > 5. Create a pull request to pull the changes from your fork back into the 34 | > upstream repository 35 | 36 | Please use clear commit messages so we can understand what each commit does. 37 | We'll review every PR and might offer feedback or request changes before 38 | merging. 39 | 40 | - Validate your code using `pylint` as per below, and test your changes 41 | - We might offer feedback or request modifications before merging 42 | 43 | ``` 44 | pylint pc_lib/*.py pc_lib/*/*.py scripts/*.py 45 | ``` 46 | 47 | ## Validate your code before submitting 48 | 49 | ``` 50 | python3 -m virtualenv venv && source venv/bin/activate 51 | pip install -r requirements.txt 52 | pip install flake8 53 | pip install black 54 | flake8 $(git ls-files '*.py') 55 | black $(git ls-files '*.py') --line-length=127 56 | ``` -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.10-alpine 2 | 3 | ENV RUN_IN_DOCKER=True 4 | 5 | # Metadata as described above 6 | LABEL maintainer="Simon Melotte " \ 7 | description="Docker image for prismacloud-cli" 8 | 9 | RUN apk --no-cache add build-base git curl jq bash 10 | RUN pip3 install --no-cache-dir -U prismacloud-cli 11 | 12 | # Copy your entrypoint script 13 | COPY ./entrypoint.sh /entrypoint.sh 14 | RUN chmod +x /entrypoint.sh 15 | 16 | # Default command when container runs 17 | ENTRYPOINT ["/entrypoint.sh"] -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Prisma Cloud EMEA SE Team 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: all clean build install upload test 2 | 3 | .SILENT: 4 | test: 5 | @pc -o csv policy >> /dev/null 2>&1 && echo "success!" || echo "failure!" 6 | @pc stats vulnerabilities --cve CVE-2022-0847 >> /dev/null 2>&1 && echo "success!" || echo "failure!" 7 | @pc -o json policy list | jq >> /dev/null 2>&1 && echo "success!" || echo "failure!" 8 | @pc tags >> /dev/null 2>&1 && echo "success!" || echo "failure!" 9 | @pc stats dashboard >> /dev/null 2>&1 && echo "success!" || echo "failure!" 10 | @pc -o json stats dashboard >> /dev/null 2>&1 && echo "success!" || echo "failure!" 11 | @pc cloud names >> /dev/null 2>&1 && echo "success!" || echo "failure!" 12 | @pc cloud type >> /dev/null 2>&1 && echo "success!" || echo "failure!" 13 | @pc --columns defendersSummary.host stats dashboard >> /dev/null 2>&1 && echo "success!" || echo "failure!" 14 | 15 | clean: 16 | @echo "Clean dist folder" 17 | @rm -rf dist/* 18 | @echo "Clean done" 19 | 20 | build: 21 | @make clean 22 | @python3 -m build 23 | 24 | install: 25 | @pip3 install . 26 | 27 | upload: 28 | @twine upload dist/* 29 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Prisma Cloud CLI 2 | 3 | [![Code Quality Check](https://github.com/PaloAltoNetworks/prismacloud-cli/actions/workflows/build.yml/badge.svg)](https://github.com/PaloAltoNetworks/prismacloud-cli/actions/workflows/build.yml) 4 | 5 | The Prisma Cloud CLI is a command line interface for [Prisma Cloud](https://www.paloaltonetworks.com/prisma/cloud) by [Palo Alto Networks](https://www.paloaltonetworks.com/). 6 | 7 | # Community Supported 8 | This template/solution is released under an as-is, best effort, support policy. These scripts should be seen as community supported and Palo Alto Networks will contribute our expertise as and when possible. We do not provide technical support or help in using or troubleshooting the components of the project through our normal support options such as Palo Alto Networks support teams, or ASC (Authorized Support Centers) partners and backline support options. The underlying product used (Prisma Cloud) by the scripts or templates are still supported, but the support is only for the product functionality and not for help in deploying or using the template or script itself. 9 | 10 | Unless explicitly tagged, all projects or work posted in our GitHub repository (at https://github.com/PaloAltoNetworks) or sites other than our official Downloads page on https://support.paloaltonetworks.com are provided under the best effort policy. 11 | 12 | # Getting started 13 | 14 | ### Requirements 15 | * Python >= 3.8 16 | * Pip3 17 | 18 | ### Installation 19 | 20 | ```sh 21 | pip3 install prismacloud-cli 22 | ``` 23 | 24 | Installation on Alpine: 25 | ```sh 26 | sudo pip3 install --upgrade pip && pip3 install --upgrade setuptools 27 | sudo pip3 install prismacloud-cli 28 | ``` 29 | 30 | Installation on Ubuntu: 31 | ```sh 32 | sudo apt update 33 | sudo apt install -y python3-venv python3-pip jq 34 | mkdir python_virtual_environments/ 35 | cd python_virtual_enviornments/ 36 | python3 -m venv prisma_cli_env 37 | source prisma_cli_env/bin/activate 38 | pip3 install prismacloud-cli 39 | ``` 40 | 41 | ### Run the script 42 | 43 | Run the pc cli script. If you don't have a config file yet, it will help you to create one. 44 | 45 | ``` 46 | pc version 47 | ``` 48 | 49 | This process looks like the screenshot below. the prismacloud-cli asks you for some details, stores it in the credentials file and uses that file when it is already available. 50 | 51 | ![First run](https://raw.githubusercontent.com/PaloAltoNetworks/prismacloud-cli/main/screenshot.png) 52 | 53 | ### Create your own configuration 54 | 55 | Create an access key from Settings then Access key 56 | Get the path to console from Compute tab, System, Utilities 57 | 58 | Create a file into home directory .prismacloud/credentials.json with the following structure. 59 | 60 | ```json 61 | { 62 | "url": "__REDACTED__", 63 | "identity": "__REDACTED__", 64 | "secret": "__REDACTED__" 65 | } 66 | ``` 67 | 68 | You can add additional configurations which you can call by using --config. For example, create a file 69 | called ~/.prismacloud/demo.json with the contents above. 70 | 71 | Add ```--config demo``` to your cli commands. 72 | 73 | For example: 74 | 75 | ``` 76 | pc --config demo -o csv policy 77 | ``` 78 | 79 | ### Use environment variables for configuration 80 | 81 | By setting the environment variables: 82 | 83 | ``` 84 | PC_ACCESS_KEY 85 | PC_SAAS_API_ENDPOINT 86 | PC_SECRET_KEY 87 | ``` 88 | 89 | And then run pc referring to a configuration called environment: 90 | 91 | `pc --config environment ` 92 | 93 | 94 | ## How to use the Prisma Cloud CLI in pipelines (e.g. Github Actions) 95 | See [Prisma Cloud CLI in GitHub Actions](docs/how-to-use-in-pipelines.md) 96 | 97 | ## How to enable or disable policies at scale via CSV 98 | See [How to enable or disable policies at scale via CSV](docs/how-to-enable-or-disable-policies-at-scale-via-csv.md) 99 | 100 | 101 | 102 | ## Examples 103 | ``` 104 | pc -o csv policy 105 | pc -o json policy | jq 106 | pc tags 107 | pc stats dashboard 108 | pc -o json stats dashboard 109 | pc cloud name 110 | pc --columns defendersSummary.host stats dashboard 111 | ``` 112 | 113 | ## Global options 114 | The following global options are available 115 | 116 | ``` 117 | Options: 118 | -v, --verbose Enables verbose mode. 119 | -vv, --very_verbose Enables very verbose mode. 120 | -o, --output [text|csv|json|html|clipboard|markdown|columns] 121 | -c, --config TEXT Select configuration 122 | ~/.prismacloud/[CONFIGURATION].json 123 | --columns TEXT Select columns for output 124 | --help Show this message and exit. 125 | ``` 126 | 127 | Use -o columns to get a list of columns available for --columns, e.g.: 128 | 129 | ``` 130 | pc -o columns images 131 | pc --columns hostname,repoTag.repo,osDistro -o csv images -l 1 132 | ``` 133 | 134 | ## Environment variables 135 | 136 | To overwrite the default output settings, use environment variables MAX_WIDTH (console output), MAX_ROWS, MAX_COLUMNS and MAX_LINES. 137 | 138 | - MAX_LINES is used to defined the maximum number of lines within a cell when wrapping the contents. 139 | 140 | ## Commands 141 | The cli has several commands to work with, see the screenshot below for an example, but use ```pc --help``` to see the latest list for your version. 142 | 143 | ![Help](https://raw.githubusercontent.com/PaloAltoNetworks/prismacloud-cli/main/help.png) 144 | 145 | ## Use cases 146 | 147 | ### Log4J Impacted Resources 148 | ``` 149 | pc -o json stats vulnerabilities --cve CVE-2021-44228 | jq 150 | pc stats vulnerabilities --cve CVE-2021-44228 151 | ``` 152 | 153 | Use something similar for getting the *Spring Shell* impacted resources. 154 | 155 | ### Search scan reports for images scanned by the Jenkins plugin or twistcli. 156 | ``` 157 | pc scans --help 158 | ``` 159 | 160 | Select only specific columns for the output: 161 | 162 | ``` 163 | pc --columns entityInfo.repoTag.registry,entityInfo.repoTag.repo,entityInfo.repoTag.tag,entityInfo.vulnerabilitiesCount scans -l 20 -s nginx 164 | ``` 165 | 166 | You might also want to add some additional columns and save the output as html: 167 | 168 | ``` 169 | pc --config local -o html --columns entityInfo.repoTag.registry,entityInfo.repoTag.repo,entityInfo.repoTag.tag,entityInfo.vulnerabilitiesCount,entityInfo.vulnerabilityDistribution.critical,entityInfo.vulnerabilityDistribution.high,entityInfo.vulnerabilityDistribution.medium scans -l 20 -s nginx > /tmp/results.html 170 | ``` 171 | 172 | Then, open /tmp/results.html: 173 | 174 | ![Results](https://raw.githubusercontent.com/PaloAltoNetworks/prismacloud-cli/main/results.png) 175 | 176 | 177 | ### Enable CSPM policies with Prisma Cloud CLI 178 | 179 | ``` 180 | pc policy set --help 181 | pc -vv policy set --status enable --compliance_standard 'CIS v1.4.0 (AWS)' 182 | ``` 183 | 184 | ### Disable CSPM policies with Prisma Cloud CLI 185 | 186 | ``` 187 | pc -vv policy set --status disable --compliance_standard 'CIS v1.4.0 (AWS)' 188 | ``` 189 | 190 | ### Code Security 191 | 192 | The below examples are using Github as integration but it works as well with other integration: 193 | - Bitbucket 194 | - Gitlab 195 | - AzureRepos 196 | - Github Enterprise 197 | - Gitlab Enterprise 198 | - Bitbucket Enterprise 199 | 200 | Count the number of unique git authors across all Github repositories: 201 | ``` 202 | pc -ojson repositories count-git-authors -i Github | jq . 203 | ``` 204 | 205 | Get the details of all CVE across all Github repositories: 206 | ``` 207 | pc -o json repositories search -i Github -c Vulnerabilities -t packageCve --details | jq . 208 | ``` 209 | 210 | Get all secrets across all Github repositories: 211 | ``` 212 | pc -o json repositories search -i Github -c Secrets -t violation | jq . 213 | ``` 214 | 215 | Get all drift across all Github repositories: 216 | ``` 217 | pc repositories search --integration_type Github --categories Drift 218 | ``` 219 | 220 | 221 | ### Container registries 222 | 223 | #### Listing registries 224 | To list all container registries: 225 | 226 | ```bash 227 | pc registry list 228 | ``` 229 | 230 | #### Triggering Scans 231 | To trigger scans on all registries: 232 | 233 | ```bash 234 | pc registry scan 235 | ``` 236 | 237 | #### Including Specific Registries 238 | To include specific registries or repositories in the scan: 239 | 240 | ```bash 241 | pc registry scan --include "registry_name/repo_name" --i "another_registry" 242 | ``` 243 | 244 | #### Excluding Specific Registries 245 | To exclude specific registries or repositories from the scan: 246 | 247 | ```bash 248 | pc registry scan --exclude "registry_name/repo_name" --e "another_registry" 249 | ``` -------------------------------------------------------------------------------- /SUPPORT.md: -------------------------------------------------------------------------------- 1 | Community Supported 2 | 3 | The software and templates in the repo are released under an as-is, best effort, support policy. This software should be seen as community supported and Palo Alto Networks will contribute our expertise as and when possible. We do not provide technical support or help in using or troubleshooting the components of the project through our normal support options such as Palo Alto Networks support teams, or ASC (Authorized Support Centers) partners and backline support options. The underlying product used (the VM-Series firewall) by the scripts or templates are still supported, but the support is only for the product functionality and not for help in deploying or using the template or script itself. Unless explicitly tagged, all projects or work posted in our GitHub repository (at https://github.com/PaloAltoNetworks) or sites other than our official Downloads page on https://support.paloaltonetworks.com are provided under the best effort policy. 4 | -------------------------------------------------------------------------------- /bin/pc: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import os 3 | import re 4 | import sys 5 | 6 | # Import from the local prismacloud.cli namespace. 7 | sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) 8 | from prismacloud.cli import cli 9 | 10 | if __name__ == '__main__': 11 | #print(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) 12 | sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) 13 | sys.exit(cli()) 14 | -------------------------------------------------------------------------------- /docs/how-to-enable-or-disable-policies-at-scale-via-csv.md: -------------------------------------------------------------------------------- 1 | # How to enable or disable policies at scale via CSV 2 | 3 | ## 📋 Description 4 | 5 | - Added `--csv` option to the `policy set` command. 6 | - Enable or disable policies at scale using a CSV file. 7 | 8 | ## 🎯 Motivation and Context 9 | 10 | - Update out-of-the-box policy statuses using a CSV. 11 | - Flexibility to only update specific policies. 12 | 13 | ## 📝 How to Use 14 | 15 | 1️⃣ **Generate Example CSV:** 16 | ```bash 17 | pc -o csv --columns policyId,enabled,^name$ policy list|head -n 5 > ~/policies.csv 18 | ``` 19 | 20 | 2️⃣ **Edit the CSV:** 21 | ```csv 22 | policyId,name,enabled 23 | 82908c8a-6bb8-4c63-b4b5-24967c9f7145,S3 bucket MFA Delete is not enabled,True 24 | ... 25 | ``` 26 | 27 | 3️⃣ **Run Command:** 28 | ```bash 29 | python3 bin/pc -v policy set --csv ~/policies.csv 30 | ``` 31 | 32 | ## 🧪 How Has This Been Tested? 33 | 34 | - Dry-run option available: `--dry-run` 35 | ```bash 36 | python3 bin/pc -v policy set --csv ~/policies.csv --dry-run 37 | ``` 38 | 39 | ## 📸 Screenshots 40 | 41 | ![Overview](https://github.com/PaloAltoNetworks/prismacloud-cli/assets/96180461/eb6af137-37a7-4d32-9773-8319d78a81ef) 42 | -------------------------------------------------------------------------------- /docs/how-to-use-in-pipelines.md: -------------------------------------------------------------------------------- 1 | # Prisma Cloud CLI in GitHub Actions 2 | 3 | ## How to Use 4 | 1. **Setup GitHub Secrets** for `PC_ACCESS_KEY`, `PC_SAAS_API_ENDPOINT`, and `PC_SECRET_KEY`. 5 | 2. Add the [pc-cli-example.yml](/.github/workflows/pc-cli-example.yml) to your `.github/workflows` directory, or see the example below. 6 | 3. Trigger the workflow manually or automatically. 7 | 8 | **Make sure to run the pc command with ```--config environment```** 9 | 10 | ## Example 11 | 12 | ``` 13 | name: Prisma Cloud CLI run 14 | 15 | on: 16 | workflow_dispatch: 17 | 18 | jobs: 19 | prismacloud-cli: 20 | runs-on: ubuntu-latest 21 | env: 22 | PC_ACCESS_KEY: ${{ secrets.PC_ACCESS_KEY }} 23 | PC_SAAS_API_ENDPOINT: ${{ secrets.PC_SAAS_API_ENDPOINT }} 24 | PC_SECRET_KEY: ${{ secrets.PC_SECRET_KEY }} 25 | 26 | steps: 27 | - name: Set up Python 28 | uses: actions/setup-python@v4 29 | with: 30 | python-version: "3.11" 31 | 32 | - name: Check for required environment variables 33 | run: | 34 | required_env_vars=("PC_ACCESS_KEY" "PC_SAAS_API_ENDPOINT" "PC_SECRET_KEY") 35 | for env_var in "${required_env_vars[@]}"; do 36 | if [[ -z "${!env_var}" ]]; then 37 | echo "Error: $env_var is not set" 38 | exit 1 39 | fi 40 | done 41 | 42 | - name: Cache pip dependencies 43 | uses: actions/cache@v3 44 | with: 45 | path: ~/.cache/pip 46 | key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} 47 | restore-keys: | 48 | ${{ runner.os }}-pip- 49 | 50 | - name: Install or Upgrade pip 51 | run: | 52 | if [[ ! -d "~/.cache/pip" ]]; then 53 | python -m pip install --upgrade pip 54 | fi 55 | 56 | - name: Environment Setup 57 | run: | 58 | if [[ ! -d "~/.cache/pip" ]]; then 59 | sudo pip3 install prismacloud-cli -U 60 | mkdir ~/.prismacloud 61 | touch ~/.prismacloud/.community_supported_accepted 62 | fi 63 | 64 | - name: Run command 65 | run: pc --config environment version 66 | ``` 67 | -------------------------------------------------------------------------------- /entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Accept Support Message 4 | mkdir ~/.prismacloud 5 | touch ~/.prismacloud/.community_supported_accepted 6 | 7 | # Leverage the default env variables as described in: 8 | # https://docs.github.com/en/actions/reference/environment-variables#default-environment-variables 9 | if [[ $GITHUB_ACTIONS != "true" ]] 10 | then 11 | pc --config environment "$@" 12 | exit $? 13 | fi 14 | -------------------------------------------------------------------------------- /help.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PaloAltoNetworks/prismacloud-cli/ad41f2754cc53b49d2dd3ae6aeca67b8b76857c2/help.png -------------------------------------------------------------------------------- /mapping.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | - prismacloud: "EC2 Classic Instance" 3 | terraform: "aws_instance" 4 | - prismacloud: "EC2 Client VPN Endpoint" 5 | terraform: "aws_ec2_client_vpn_endpoint" 6 | - prismacloud: "EC2 Elastic Address" 7 | terraform: "aws_eip" 8 | - prismacloud: "EC2 Image" 9 | terraform: "aws_ami" 10 | - prismacloud: "EC2 Instance" 11 | terraform: "aws_instance" 12 | - prismacloud: "EC2 Internet Gateway" 13 | terraform: "aws_internet_gateway" 14 | - prismacloud: "EC2 Key Pair" 15 | terraform: "aws_key_pair" 16 | - prismacloud: "EC2 Managed Prefix List" 17 | terraform: "aws_managed_prefix_list" 18 | - prismacloud: "EC2 NAT Gateway" 19 | terraform: "aws_nat_gateway" 20 | - prismacloud: "EC2 Network ACL" 21 | terraform: "aws_network_acl" 22 | - prismacloud: "EC2 Network Interface" 23 | terraform: "aws_network_interface" 24 | - prismacloud: "EC2 Security Group" 25 | terraform: "aws_security_group" 26 | - prismacloud: "EC2 Subnet" 27 | terraform: "aws_subnet" 28 | - prismacloud: "EC2 Transit Gateway" 29 | terraform: "aws_transit_gateway" 30 | - prismacloud: "EC2 Transit Gateway Attachment" 31 | terraform: "aws_transit_gateway_attachment" 32 | - prismacloud: "EC2 Transit Gateway Route Table" 33 | terraform: "aws_transit_gateway_route_table" 34 | - prismacloud: "EC2 VPC" 35 | terraform: "aws_vpc" 36 | - prismacloud: "EC2 VPC DHCP OPTIONS" 37 | terraform: "aws_vpc_dhcp_options" 38 | - prismacloud: "EC2 VPC Endpoint" 39 | terraform: "aws_vpc_endpoint" 40 | - prismacloud: "EC2 VPC Endpoint Service Configuration" 41 | terraform: "aws_vpc_endpoint_service_configuration" 42 | - prismacloud: "EC2 VPC Peering Connection" 43 | terraform: "aws_vpc_peering_connection" 44 | - prismacloud: "EC2 VPC Route Table" 45 | terraform: "aws_vpc_route_table" 46 | - prismacloud: "EC2 VPC Stats" 47 | terraform: "aws_vpc" 48 | - prismacloud: "Amazon VPC Flow Logs" 49 | terraform: "aws_flow_log" 50 | -------------------------------------------------------------------------------- /prismacloud/__init__.py: -------------------------------------------------------------------------------- 1 | __import__("pkg_resources").declare_namespace(__name__) 2 | -------------------------------------------------------------------------------- /prismacloud/cli/__init__.py: -------------------------------------------------------------------------------- 1 | """ Prisma Cloud CLI Configuration and Output """ 2 | 3 | import logging 4 | import os 5 | import sys 6 | import warnings 7 | import re 8 | import textwrap 9 | import json 10 | import ast 11 | 12 | 13 | import click 14 | import click_completion 15 | import coloredlogs 16 | import pandas as pd 17 | from click_help_colors import HelpColorsMultiCommand 18 | from pydantic_settings import BaseSettings 19 | from typing import Optional 20 | from tabulate import tabulate 21 | from update_checker import UpdateChecker 22 | 23 | import prismacloud.cli.version as cli_version 24 | 25 | click_completion.init() 26 | 27 | # Set defaults 28 | pd.set_option("display.max_rows", None) 29 | pd.set_option("display.max_columns", 400) 30 | pd.set_option("display.width", 1000) 31 | pd.set_option("display.colheader_justify", "center") 32 | pd.set_option("display.precision", 3) 33 | warnings.simplefilter(action="ignore", category=FutureWarning) 34 | 35 | 36 | def get_available_version(): 37 | # Get available python package version 38 | try: 39 | checker = UpdateChecker() 40 | result = checker.check("prismacloud-cli", cli_version.version) 41 | # Show available version 42 | logging.debug("Available version: %s", result.available_version) 43 | update_available = result.available_version 44 | except Exception: # pylint:disable=broad-except 45 | update_available = False 46 | 47 | if update_available: 48 | update_available_text_block = """\b 49 | Update available: {} -> {} 50 | Run {} to update 51 | """.format( 52 | cli_version.version, update_available, click.style("pip3 install -U prismacloud-cli", fg="red") 53 | ) 54 | else: 55 | update_available_text_block = "" 56 | 57 | return update_available_text_block 58 | 59 | 60 | class Settings(BaseSettings): # pylint:disable=too-few-public-methods 61 | """Prisma Cloud CLI Settings""" 62 | 63 | app_name: str = "Prisma Cloud CLI" 64 | max_columns: int = 7 65 | max_rows: int = 1000000 66 | max_width: int = 25 67 | max_levels: int = 2 68 | max_lines: int = 10 69 | 70 | url: Optional[str] = None 71 | identity: Optional[str] = None 72 | secret: Optional[str] = None 73 | 74 | 75 | settings = Settings() 76 | 77 | 78 | CONTEXT_SETTINGS = dict(auto_envvar_prefix="PC") 79 | 80 | 81 | class Environment: 82 | """Initialize environment and define logging""" 83 | 84 | def __init__(self): 85 | """Initialize environment""" 86 | self.verbose = False 87 | self.home = os.getcwd() 88 | 89 | def log(self, msg, *args): 90 | """Logs a message to stderr""" 91 | if args: 92 | msg %= args 93 | click.echo(msg, file=sys.stderr) 94 | 95 | def vlog(self, msg, *args): 96 | """Logs a message to stderr only if verbose is enabled""" 97 | if self.verbose: 98 | self.log(msg, *args) 99 | 100 | 101 | pass_environment = click.make_pass_decorator(Environment, ensure=True) 102 | cwpp_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), "cwpp")) 103 | cspm_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), "cspm")) 104 | pccs_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), "pccs")) 105 | 106 | 107 | class PrismaCloudCLI(HelpColorsMultiCommand): 108 | """Collect commands""" 109 | 110 | def list_commands(self, ctx): 111 | """Read commands from command files""" 112 | commands = [] 113 | 114 | # Iterate through cwpp commands 115 | for filename in os.listdir(cwpp_folder): 116 | if filename.endswith(".py") and filename.startswith("cmd_"): 117 | commands.append(filename[4:-3]) 118 | # Iterate through cspm commands 119 | for filename in os.listdir(cspm_folder): 120 | if filename.endswith(".py") and filename.startswith("cmd_"): 121 | commands.append(filename[4:-3]) 122 | # Iterate through pccs commands 123 | for filename in os.listdir(pccs_folder): 124 | if filename.endswith(".py") and filename.startswith("cmd_"): 125 | commands.append(filename[4:-3]) 126 | 127 | commands.sort() 128 | return commands 129 | 130 | # pylint: disable=R1710 131 | def get_command(self, ctx, cmd_name): 132 | """Import command""" 133 | 134 | # Find the command file and import it. 135 | # This file can be in the cwpp folder, cspm folder, or pccs folder. 136 | module_types = ["cwpp", "cspm", "pccs"] 137 | 138 | for module_type in module_types: 139 | try: 140 | mod = __import__(f"prismacloud.cli.{module_type}.cmd_{cmd_name}", None, None, ["cli"]) 141 | except ImportError: 142 | continue 143 | return mod.cli 144 | 145 | 146 | @click.command( 147 | cls=PrismaCloudCLI, 148 | context_settings=CONTEXT_SETTINGS, 149 | help_headers_color="yellow", 150 | help_options_color="green", 151 | help=""" 152 | \b 153 | Prisma Cloud CLI (version: {0}) 154 | {1} 155 | """.format( 156 | cli_version.version, get_available_version() 157 | ), 158 | ) 159 | @click.option("-v", "--verbose", is_flag=True, help="Enables verbose mode") 160 | @click.option("-vv", "--very_verbose", is_flag=True, help="Enables very verbose mode") 161 | @click.option("--filter", "query_filter", help="Add search filter") 162 | @click.option( 163 | "-o", 164 | "--output", 165 | type=click.Choice(["text", "csv", "json", "html", "clipboard", "markdown", "columns", "raw", "count"]), 166 | default="text", 167 | ) 168 | @click.option( 169 | "-c", 170 | "--config", 171 | "configuration", 172 | help="Select configuration file in ~/.prismacloud/[CONFIGURATION].json", 173 | default="credentials", 174 | ) 175 | @click.option("--columns", "columns", help="Select columns for output", default=None) 176 | @pass_environment 177 | # pylint: disable=W0613 178 | def cli(ctx, very_verbose, verbose, configuration, output, query_filter, columns=None): 179 | """Define the command line""" 180 | ctx.configuration = configuration 181 | ctx.output = output 182 | log_format = "%(asctime)s - %(levelname)s - %(message)s" 183 | 184 | if verbose: 185 | coloredlogs.install(level="INFO", fmt=log_format) 186 | elif very_verbose: 187 | coloredlogs.install(level="DEBUG", fmt=log_format) 188 | else: 189 | coloredlogs.install(level="ERROR", fmt=log_format) 190 | 191 | 192 | def get_parameters(): 193 | """Get parameters from command line""" 194 | 195 | # Retrieve parameters 196 | params = click.get_current_context().find_root().params 197 | 198 | # If there is a parameter columns, split it into a list 199 | if params["columns"]: 200 | columns = params["columns"].split(",") 201 | else: 202 | columns = False 203 | 204 | return params, columns 205 | 206 | 207 | def log_settings(): 208 | """Log settings""" 209 | logging.debug("Settings:") 210 | logging.debug(" Max columns: %s", settings.max_columns) 211 | logging.debug(" Max rows: %s", settings.max_rows) 212 | logging.debug(" Max width: %s", settings.max_width) 213 | logging.debug(" Max levels: %s", settings.max_levels) 214 | 215 | 216 | def process_data_frame(data): 217 | params, columns = get_parameters() 218 | # https://pandas.pydata.org/docs/reference/api/pandas.json_normalize.html 219 | # json_normalize() requires a dictionary or list of dictionaries 220 | # normalize = False 221 | # if isinstance(data, dict): 222 | # normalize = True 223 | # if isinstance(data, list): 224 | # if all(isinstance(item, dict) for item in data) 225 | # normalize = True 226 | try: 227 | data_frame_normalized = pd.json_normalize(data) 228 | except Exception as _exc: # pylint:disable=broad-except 229 | logging.error("Error converting data via json_normalize(): %s", _exc) 230 | sys.exit(1) 231 | 232 | # If the size of our normalized data is 0, something went wrong but no exception was raised. 233 | if data_frame_normalized.size > 0: 234 | logging.debug("Using json_normalize() data") 235 | # data_frame = flatten_nested_json_df(data_frame_normalized) 236 | data_frame = data_frame_normalized 237 | else: 238 | try: 239 | data_frame = pd.DataFrame(data) 240 | except Exception as _exc: # pylint:disable=broad-except 241 | logging.error("Error converting data via DataFrame(): %s", _exc) 242 | sys.exit(1) 243 | 244 | # If a column contains time, try convert it to datetime 245 | for column in str(data_frame.columns): 246 | if column.lower() in ["time", "lastmodified", "availableasof"]: 247 | try: 248 | data_frame[column] = pd.to_datetime(data_frame[column], unit="ms") 249 | except Exception as _exc2: # pylint:disable=broad-except 250 | logging.debug("Error converting column to milliseconds: %s", _exc2) 251 | try: 252 | data_frame[column] = pd.to_datetime(data_frame[column], unit="s") 253 | except Exception as _exc3: # pylint:disable=broad-except 254 | logging.debug("Error converting column to seconds: %s", _exc3) 255 | 256 | data_frame.fillna("", inplace=True) 257 | 258 | # If a filter is set, try to apply it 259 | if params["query_filter"]: 260 | try: 261 | data_frame = data_frame.query(params["query_filter"]) 262 | except Exception as _exc: # pylint:disable=broad-except 263 | logging.error("Error applying query filter: %s", _exc) 264 | logging.error("You might be filtering on a dynamic column.") 265 | logging.error("For example, if a certain tag does not exist, there is no way to filter on it.") 266 | logging.error("The given filter has not been applied.") 267 | 268 | # The usage command generates columns starting with dataPoints 269 | try: 270 | # If we have one or more columns with dataPoints.counts, 271 | # calculate the sum of all columns starting with dataPoints.counts 272 | if len(data_frame.filter(regex="dataPoints.counts").columns) > 0: 273 | data_frame["used"] = data_frame.filter(regex="dataPoints.counts").sum(axis=1) 274 | # Calculate a new column usage based as percentage on column used and column workloadsPurchased 275 | # If we have a column named workloadsPurchased, we can calculate the percentage 276 | if "workloadsPurchased" in data_frame.columns: 277 | data_frame["usage"] = data_frame["used"] / data_frame["workloadsPurchased"] * 100 278 | # Extra columns are added, proceed. 279 | except Exception as _exc: # pylint:disable=broad-except 280 | logging.debug("Error calculating columns: %s", _exc) 281 | 282 | # Change all nan values to empty string 283 | data_frame = data_frame.fillna("") 284 | 285 | # We have a dataframe, output here after we have dropped all but the selected columns 286 | if params["columns"]: 287 | # logging.debug("Dropping these columns: %s", data_frame.columns.difference(columns)) 288 | # data_frame.drop(columns=data_frame.columns.difference(columns), 289 | # axis=1, inplace=True, errors="ignore") 290 | 291 | # Find columns in data_frame whose name contains one of the 292 | # values of parameter columns and filter on the resulting columns 293 | regex_ = r"(" + "|".join(columns) + ")" 294 | logging.debug("Filtering columns based on case-insensitive regex: %s", regex_) 295 | data_frame = data_frame.filter(regex=re.compile("(" + "|".join(columns) + ")", re.I)) 296 | 297 | # Before we show the output, remove the index column (which is not data_frame.index), 298 | # but only if the column exists. 299 | if "index" in data_frame.columns: 300 | data_frame.drop(columns=["index"], inplace=True) 301 | 302 | # Before we show the output, try to remove duplicate rows 303 | try: 304 | # Convert all columns to string 305 | data_frame = data_frame.applymap(str) 306 | data_frame = data_frame.drop_duplicates() 307 | except Exception as _exc: # pylint:disable=broad-except 308 | logging.debug("Error dropping duplicates: %s", _exc) 309 | 310 | return data_frame 311 | 312 | 313 | def cli_output(data, sort_values=False): 314 | """Parse data and formay output, except if we""" 315 | """want to see raw json.""" 316 | params = get_parameters()[0] 317 | log_settings() # Log settings in debug level 318 | 319 | if params["output"] == "raw": 320 | click.secho(json.dumps(data)) 321 | sys.exit(1) 322 | 323 | # Read data, convert to dataframe and process it 324 | data_frame = process_data_frame(data) 325 | 326 | # Generate and show the output 327 | show_output(data_frame, params, data) 328 | 329 | 330 | def json_parse(json_data, level=0): 331 | if isinstance(json_data, (dict, list)): 332 | json_obj = json.loads(json.dumps(json_data)) 333 | else: 334 | json_obj = json.loads(json_data) 335 | 336 | output_str = "" 337 | indent = " " * level 338 | if isinstance(json_obj, list): 339 | for item in json_obj: 340 | output_str += json_parse(item, level) 341 | elif isinstance(json_obj, dict): 342 | for key, value in json_obj.items(): 343 | if isinstance(value, (dict, list)): 344 | output_str += f"{indent}{key}:\n" 345 | output_str += json_parse(value, level + 1) 346 | else: 347 | output_str += f"{indent}{key}: {value}\n" 348 | 349 | return output_str + "\n" * (level == 0) 350 | 351 | 352 | def wrap_text(text): 353 | """Truncate a string to max_width characters""" 354 | 355 | try: 356 | data = json.loads(text) 357 | if isinstance(data, (list, dict)): 358 | text = json_parse(data) 359 | except json.decoder.JSONDecodeError: 360 | pass 361 | 362 | try: 363 | data = ast.literal_eval(text) 364 | if isinstance(data, (list, dict)): 365 | text = json_parse(data) 366 | except (SyntaxError, ValueError): 367 | pass 368 | wrapped_text = textwrap.fill(text=text, width=settings.max_width, max_lines=settings.max_lines, replace_whitespace=False) 369 | return wrapped_text 370 | 371 | 372 | def show_output(data_frame, params, data): 373 | try: 374 | if params["output"] == "count": 375 | click.secho(data_frame.shape[0], fg="red") 376 | if params["output"] == "text": 377 | # Drop all rows after max_rows 378 | data_frame = data_frame.iloc[: settings.max_rows] 379 | 380 | # Drop all but first settings.max_columns columns from data_frame 381 | data_frame = data_frame.iloc[:, : settings.max_columns] 382 | 383 | # Wrap all cells 384 | data_frame_truncated = data_frame.applymap(wrap_text, na_action="ignore") 385 | 386 | # Wrap column names 387 | data_frame_truncated.columns = list(map(wrap_text, data_frame_truncated.columns)) 388 | 389 | table_output = tabulate(data_frame_truncated, headers="keys", tablefmt="fancy_grid", showindex=False) 390 | click.secho(table_output, fg="green") 391 | if params["output"] == "json": 392 | # Cannot use 'index=False' here, otherwise '.to_json' returns a hash instead of an array of hashes. 393 | # But '.to_json' does not output the index anyway. 394 | click.secho(data_frame.to_json(orient="records"), fg="green") 395 | if params["output"] == "csv": 396 | click.secho(data_frame.to_csv(index=False), fg="green") 397 | if params["output"] == "clipboard": 398 | click.secho(data_frame.to_clipboard(index=False), fg="green") 399 | if params["output"] == "markdown": 400 | # Drop all rows after max_rows 401 | data_frame = data_frame.iloc[: settings.max_rows] 402 | 403 | # Drop all but first settings.max_columns columns from data_frame 404 | data_frame = data_frame.iloc[:, : settings.max_columns] 405 | 406 | # Wrap all cells 407 | data_frame_truncated = data_frame.applymap(wrap_text, na_action="ignore") 408 | 409 | # Wrap column names 410 | data_frame_truncated.columns = list(map(wrap_text, data_frame_truncated.columns)) 411 | 412 | click.secho(data_frame_truncated.to_markdown(index=False), fg="green") 413 | if params["output"] == "html": 414 | # pre-table-html 415 | pre_table_html = """ 416 | 417 | 418 | 419 | 420 | 421 | 422 | 423 | """ # noqa 424 | click.secho(pre_table_html) 425 | click.secho( 426 | data_frame.to_html( 427 | index=False, 428 | max_cols=settings.max_columns, 429 | na_rep="", 430 | classes="table table-sm table-striped text-left", 431 | justify="left", 432 | ), 433 | fg="green", 434 | ) 435 | # post-table-html 436 | post_table_html = """ 437 | 438 | 439 | 440 | 441 | 442 | """ # noqa 443 | click.secho(post_table_html) 444 | if params["output"] == "columns": 445 | for column in data_frame.columns: 446 | click.secho(column, fg="green") 447 | except Exception as _exc: # pylint:disable=broad-except 448 | # There is no dataframe, might be just a single value, like version. 449 | click.echo(data) 450 | logging.debug("Error: %s", _exc) 451 | # We have shown normal data through this exception. 452 | # Exit with code 0 instead of 1. 453 | sys.exit(0) 454 | 455 | 456 | if __name__ == "__main__": 457 | try: 458 | # Get update_available_text 459 | update_available_text = get_available_version() 460 | 461 | # pylint: disable=E1120 462 | cli() 463 | except Exception as exc: # pylint:disable=broad-except 464 | logging.error("An error has occured: %s", exc) 465 | -------------------------------------------------------------------------------- /prismacloud/cli/api.py: -------------------------------------------------------------------------------- 1 | """ CLI Configuration and Prisma Cloud API Library Wrapper """ 2 | 3 | import json 4 | import logging 5 | import os 6 | import sys 7 | import types 8 | 9 | try: 10 | from pathlib import Path 11 | 12 | home_directory = str(Path.home()) 13 | except Exception as _exc: # pylint:disable=broad-except 14 | logging.debug("Error identifying home directory: %s", _exc) 15 | if "USERPROFILE" in os.environ: 16 | home_directory = os.environ["USERPROFILE"] 17 | else: 18 | home_directory = os.environ["HOME"] 19 | 20 | import click 21 | 22 | # pylint: disable=import-error,no-name-in-module 23 | from prismacloud.api import pc_api, PrismaCloudUtility as pc_util 24 | import prismacloud.api.version as api_version 25 | import prismacloud.cli.version as cli_version 26 | 27 | 28 | """ CLI Configuration """ 29 | 30 | # Set the User Agent for accessing the APIs 31 | pc_api.user_agent = f"PrismaCloudCLI/{cli_version.version}" # Dynamically set default User-Agent 32 | 33 | 34 | def community_supported(): 35 | """If the community supported message has not been accepted yet, 36 | it must be shown with the possibility to accept.""" 37 | 38 | community_supported_message = """ 39 | # Community Supported 40 | 41 | This solution is released under an as-is, best-effort, support policy. 42 | 43 | This solution should be seen as community-supported, and Palo Alto Networks will 44 | contribute our expertise as and when possible. We do not provide technical support 45 | or help in using or troubleshooting the components of this solution through our normal 46 | support options such as Palo Alto Networks support teams, Authorized Support Centers, 47 | partners, and backline support options. The underlying product (Prisma Cloud) used 48 | by these scripts is still supported, but the support is only for the product 49 | functionality itself and not for help in using this solution itself. 50 | 51 | Unless explicitly tagged, all projects or work posted in our GitHub organization 52 | (at https://github.com/PaloAltoNetworks) or sites other than our official Downloads page 53 | (on https://support.paloaltonetworks.com) are provided under this policy. 54 | """ 55 | 56 | # Check if the message already has been accepted. 57 | config_directory = home_directory + "/.prismacloud/" 58 | community_support_accepted = config_directory + ".community_supported_accepted" 59 | if os.path.exists(community_support_accepted): 60 | return True 61 | print(community_supported_message) 62 | answer = input("Enter 'y' or 'yes' to confirm you have read the message above: ") 63 | print() 64 | if any(answer.lower() == f for f in ["y", "yes"]): 65 | print("Message confirmed.") 66 | print() 67 | # Create file to verify that the message already has been accepted. 68 | if not os.path.exists(config_directory): 69 | logging.info("Configuration directory does not exist, creating %s", config_directory) 70 | try: 71 | os.makedirs(config_directory) 72 | except Exception as exc: # pylint:disable=broad-except 73 | logging.info("Error creating configuration directory: %s", exc) 74 | with open(community_support_accepted, "w") as _accepted: 75 | _accepted.write("Yes") 76 | else: 77 | print("You need to confirm the message above to use this command. Exiting.") 78 | print() 79 | sys.exit(1) 80 | return True 81 | 82 | 83 | def read_cli_config_from_environment(): 84 | """Read configuration from environment""" 85 | logging.debug("Reading configuration from environment") 86 | settings = {} 87 | try: 88 | # API Key Current CLI ENV VAR Deprecated CLI ENV VAR(s) 89 | settings["name"] = os.environ.get("PC_NAME", "") 90 | settings["url"] = os.environ.get( 91 | "PC_URL", os.environ.get("PC_SAAS_API_ENDPOINT", os.environ.get("PC_COMPUTE_API_ENDPOINT", "")) 92 | ) 93 | settings["identity"] = os.environ.get("PC_IDENTITY", os.environ.get("PC_ACCESS_KEY", "")) 94 | settings["secret"] = os.environ.get("PC_SECRET", os.environ.get("PC_SECRET_KEY", "")) 95 | settings["verify"] = os.environ.get("PC_VERIFY", os.environ.get("PC_CA_BUNDLE", False)) 96 | # Normalize URL. 97 | settings["url"] = pc_util.normalize_api(settings["url"]) 98 | # Mask all except the first two characters of keys when debugging. 99 | masked_identity = settings["identity"][:3] + "*" * (len(settings["identity"]) - 4) 100 | masked_secret = settings["secret"][:3] + "*" * (len(settings["secret"]) - 4) 101 | logging.debug("Environment variable found: PC_URL/PC_SAAS_API_ENDPOINT/PC_COMPUTE_API_ENDPOINT: %s", settings["url"]) 102 | logging.debug("Environment variable found: PC_IDENTITY/PC_ACCESS_KEY: %s", masked_identity) 103 | logging.debug("Environment variable found: PC_SECRET/PC_SECRET_KEY: %s", masked_secret) 104 | except Exception as exc: # pylint:disable=broad-except 105 | logging.debug("Error reading configuration from environment: %s", exc) 106 | logging.debug("Configuration read from environment") 107 | return settings 108 | 109 | 110 | def get_cli_config(): 111 | """ 112 | Try to access params["configuration"]. 113 | If it is equal to env or environment, try to read the following environment variables. 114 | 115 | # Current CLI ENV VAR Deprecated CLI ENV VAR(s) 116 | PC_URL PC_SAAS_API_ENDPOINT, PC_COMPUTE_API_ENDPOINT 117 | PC_IDENTITY PC_ACCESS_KEY 118 | PC_SECRET PC_SECRET_KEY 119 | PC_VERIFY PC_CA_BUNDLE 120 | 121 | If PC_URL, PC_IDENTITY and PC_SECRET are not set, try to read the configuration file specified by params["configuration"]. 122 | The PC_VERIFY setting can be a boolean or a string path to a file, as per the 'verify' parameter of requests.request(). 123 | """ 124 | 125 | logging.info("Running prismacloud-cli version %s / prismacloud-api version %s", cli_version.version, api_version.version) 126 | 127 | community_supported() # Check if support message has been shown and accepted 128 | 129 | params = {} 130 | try: 131 | params = click.get_current_context().find_root().params 132 | except Exception as exc: # pylint:disable=broad-except 133 | logging.debug("Error getting current context to find root params: %s", exc) 134 | params["configuration"] = "credentials" 135 | 136 | # To fix calling 'pc' without a command. 137 | if "configuration" not in params: 138 | params["configuration"] = "credentials" 139 | 140 | # Try to read configuration from environment. 141 | if params["configuration"] in ["env", "environment"]: 142 | config_env_settings = read_cli_config_from_environment() 143 | if config_env_settings: 144 | return config_env_settings 145 | 146 | # Read (or write) configuration from (or to) a file. 147 | config_directory = home_directory + "/.prismacloud/" 148 | config_file_name = config_directory + params["configuration"] + ".json" 149 | if not os.path.exists(config_file_name): 150 | config_file_name = config_directory + params["configuration"] + ".conf" 151 | 152 | if os.path.exists(config_file_name): 153 | settings = read_cli_config_file(config_file_name) 154 | # API Key Current CLI Key Deprecated CLI Key(s) 155 | settings["url"] = settings.get( 156 | "url", 157 | settings.pop( 158 | "api_endpoint", settings.pop("api", settings.pop("pcc_api_endpoint", settings.pop("api_compute", ""))) 159 | ), 160 | ) 161 | settings["identity"] = settings.get("identity", settings.pop("access_key_id", settings.pop("username", ""))) 162 | settings["secret"] = settings.get("secret", settings.pop("secret_key", settings.pop("password", ""))) 163 | settings["verify"] = settings.get("verify", settings.pop("ca_bundle", False)) 164 | # Normalize URL. 165 | settings["url"] = pc_util.normalize_api(settings["url"]) 166 | else: 167 | if not os.path.exists(config_directory): 168 | logging.info("Configuration directory does not exist, creating %s", config_directory) 169 | try: 170 | os.makedirs(config_directory) 171 | except Exception as exc: # pylint:disable=broad-except 172 | logging.info("Error creating configuration directory: %s", exc) 173 | settings = { 174 | "url": input( 175 | "Prisma Cloud Tenant (or Compute Console, if PCCE) URL, eg: api.prismacloud.io or twistlock.example.com" 176 | ), 177 | "identity": input("Access Key (or Compute Username, if PCCE): "), 178 | "secret": input("Secret Key (or Compute Password, if PCCE): "), 179 | "verify": False, 180 | } 181 | # Normalize URL. 182 | settings["url"] = pc_util.normalize_api(settings["url"]) 183 | write_cli_config_file(config_file_name, settings) 184 | return settings 185 | 186 | 187 | def map_cli_config_to_api_config(): 188 | """Map keys between the Prisma Cloud API package and Prisma Cloud CLI package""" 189 | try: 190 | click.get_current_context() 191 | except Exception as exc: # pylint:disable=broad-except 192 | logging.debug("Error getting current context: %s", exc) 193 | settings = get_cli_config() 194 | 195 | return { 196 | # API Key Current CLI Key Deprecated CLI Key(s) 197 | "name": settings.get("name", ""), 198 | "url": settings.get( 199 | "url", 200 | settings.get( 201 | "api_endpoint", settings.get("api", settings.get("pcc_api_endpoint", settings.get("api_compute", ""))) 202 | ), 203 | ), 204 | "identity": settings.get("identity", settings.get("access_key_id", settings.get("username", ""))), 205 | "secret": settings.get("secret", settings.get("secret_key", settings.get("password", ""))), 206 | "verify": settings.get("verify", settings.get("ca_bundle", False)), 207 | } 208 | 209 | 210 | def read_cli_config_file(config_file_name): 211 | """Read cli configuration from a file""" 212 | logging.debug("Reading configuration from file: %s", config_file_name) 213 | config_file_settings = {} 214 | try: 215 | with open(config_file_name, "r") as config_file: 216 | config_file_settings = json.load(config_file) 217 | except Exception as exc: # pylint:disable=broad-except 218 | logging.info("Error reading configuration from file: %s", exc) 219 | logging.debug("Configuration read from file: %s", config_file_name) 220 | return config_file_settings 221 | 222 | 223 | def write_cli_config_file(config_file_name, config_file_settings): 224 | """Write cli configuration to a file""" 225 | logging.debug("Writing configuration to file: %s", config_file_name) 226 | try: 227 | json_string = json.dumps(config_file_settings, sort_keys=True, indent=4) 228 | with open(config_file_name, "w") as config_file: 229 | config_file.write(json_string) 230 | except Exception as exc: # pylint:disable=broad-except 231 | logging.info("Error writing configuration to file: %s", exc) 232 | logging.debug("Configuration written to file: %s", config_file_name) 233 | 234 | 235 | """ Prisma Cloud API Library Wrapper """ 236 | 237 | 238 | def get_endpoint(_self, endpoint, query_params=None, api="cwpp", request_type="GET"): 239 | """Make a request without using an endpoint-specific method""" 240 | pc_api.configure(map_cli_config_to_api_config()) 241 | logging.debug("Calling API Endpoint (%s): %s", request_type, endpoint) 242 | result = None 243 | if api == "cspm": 244 | try: 245 | result = pc_api.execute(request_type, endpoint, query_params) 246 | except Exception as exc: # pylint:disable=broad-except 247 | logging.error( 248 | "There was an error executing the request. Check if this API (CSPM) is available in your environment." 249 | ) # noqa: E501 250 | logging.error("Please check your config and try again. Error: %s", exc) # noqa: E501 251 | sys.exit(1) 252 | if api == "cwpp": 253 | if not endpoint.startswith("api"): 254 | endpoint = "api/v1/%s" % endpoint 255 | try: 256 | result = pc_api.execute_compute(request_type, endpoint, query_params) 257 | except Exception as exc: # pylint:disable=broad-except 258 | logging.error( 259 | "There was an error executing the request. Check if this API (CWP) is available in your environment." 260 | ) # noqa: E501 261 | logging.error("Please check your config and try again. Error: %s", exc) # noqa: E501 262 | sys.exit(1) 263 | if api == "code": 264 | try: 265 | result = pc_api.execute_code_security(request_type, endpoint, query_params) 266 | except Exception as exc: # pylint:disable=broad-except 267 | logging.error( 268 | "There was an error executing the request. Check if this API (CCS) is available in your environment." 269 | ) # noqa: E501 270 | logging.error("Please check your config and try again. Error: %s", exc) # noqa: E501 271 | sys.exit(1) 272 | return result 273 | 274 | 275 | """ Instance of the Prisma Cloud API """ 276 | 277 | pc_api.configure(map_cli_config_to_api_config()) 278 | # Add the get_endpoint method to this instance. 279 | pc_api.get_endpoint = types.MethodType(get_endpoint, pc_api) 280 | -------------------------------------------------------------------------------- /prismacloud/cli/cspm/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PaloAltoNetworks/prismacloud-cli/ad41f2754cc53b49d2dd3ae6aeca67b8b76857c2/prismacloud/cli/cspm/__init__.py -------------------------------------------------------------------------------- /prismacloud/cli/cspm/cmd_alert.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import click 3 | import datetime 4 | 5 | from prismacloud.cli import cli_output, pass_environment 6 | from prismacloud.cli.api import pc_api 7 | from urllib.parse import quote 8 | 9 | 10 | # Helper function to convert epoch (in milliseconds) to a datetime object 11 | def convert_epoch_to_datetime(epoch_ms): 12 | return datetime.datetime.fromtimestamp(int(epoch_ms) / 1000) 13 | 14 | 15 | # Helper function to convert datetime to human-readable format 16 | def datetime_to_readable(dt): 17 | return dt.strftime("%Y-%m-%d %H:%M:%S") 18 | 19 | 20 | @click.group( 21 | "alert", short_help="[CSPM] Returns a list of alerts that match the constraints specified in the query parameters." 22 | ) 23 | @pass_environment 24 | def cli(ctx): 25 | pass 26 | 27 | 28 | @click.command(name="list") 29 | @click.option("--compliance-standard", help="Compliance standard, e.g.: 'CIS v1.4.0 (AWS)'") 30 | @click.option("--policy-id", help="Policy ID, e.g.: '6c561dd0-e24b-4afe-b1fd-78808a45956d'") 31 | @click.option("--alert-rule", help="Alert rule name, e.g.: 'alertrule-1'") 32 | @click.option("--cloud-account", help="Cloud Account Name, e.g.: 'MyCloudAccount'") 33 | @click.option("--account-group", help="Account Group ID, e.g.: 'MyAccountGroup'") 34 | @click.option("--amount", default="1", help="Number of units selected with --unit") 35 | @click.option( 36 | "--unit", default="day", type=click.Choice(["minute", "hour", "day", "week", "month", "year"], case_sensitive=False) 37 | ) 38 | @click.option( 39 | "--status", default="open", type=click.Choice(["open", "resolved", "snoozed", "dismissed"], case_sensitive=False) 40 | ) 41 | @click.option("--detailed/--no-detailed", default=False) 42 | @click.option("--days-ahead", default=0, type=int, help="Filter alerts that are dismissing until the next X days.") 43 | def list_alerts( 44 | compliance_standard, cloud_account, account_group, amount, unit, status, detailed, policy_id, alert_rule, days_ahead 45 | ): 46 | """Returns a list of alerts from the Prisma Cloud platform""" 47 | data = { 48 | "alert.status": status, 49 | "alertRule.name": alert_rule, 50 | "detailed": detailed, 51 | "limit": "10000", 52 | "policy.complianceStandard": compliance_standard, 53 | "timeAmount": amount, 54 | "timeType": "relative", 55 | "timeUnit": unit, 56 | } 57 | 58 | if policy_id: 59 | data["policy.id"] = policy_id 60 | 61 | if cloud_account: 62 | data["cloud.account"] = cloud_account 63 | 64 | if account_group: 65 | data["account.group"] = account_group 66 | 67 | # Fetch the alerts 68 | alerts = pc_api.get_endpoint("alert", query_params=data, api="cspm") 69 | 70 | if days_ahead > 0 and status == "snoozed": 71 | # Calculate future date for filter only if days_ahead > 0 and status is 'snoozed' 72 | future_date = datetime.datetime.now() + datetime.timedelta(days=days_ahead) 73 | 74 | # Filter alerts where dismissalUntilTs is before the future date 75 | alerts = [ 76 | alert 77 | for alert in alerts 78 | if "dismissalUntilTs" in alert and convert_epoch_to_datetime(alert["dismissalUntilTs"]) < future_date 79 | ] 80 | 81 | # Try to add a new column with a url to the alert investigate page 82 | base_url = f"https://{pc_api.api.replace('api', 'app')}/alerts/overview?viewId=default" 83 | 84 | for alert in alerts: 85 | try: 86 | alert_id = alert["id"] 87 | 88 | for key in ["firstSeen", "lastSeen", "alertTime", "lastUpdated", "eventOccurred", "dismissalUntilTs"]: 89 | if key in alert: 90 | alert[key] = datetime_to_readable(convert_epoch_to_datetime(alert[key])) 91 | 92 | # Correctly using double braces for literal curly braces in f-string 93 | filters = ( 94 | f'{{"timeRange":{{"type":"to_now","value":"epoch"}},' 95 | f'"timeRange.type":"ALERT_OPENED","alert.status":["open"],' 96 | f'"alert.id":["{alert_id}"]}}' 97 | ) 98 | # Encoding the filters part 99 | encoded_filters = quote(filters) 100 | 101 | # Constructing the full URL 102 | alert_url = f"{base_url}&filters={encoded_filters}" 103 | alert["alert.resource.url"] = alert_url 104 | except Exception: # pylint:disable=broad-except 105 | pass 106 | 107 | # We want to get the related policy information so fetch the policies 108 | policies = pc_api.policy_list_read() 109 | 110 | # Iterate through alerts and add the policy description 111 | logging.debug("Iterating through alerts and adding policy information") 112 | for alert in alerts: 113 | for policy in policies: 114 | if policy["policyId"] == alert["policyId"]: 115 | alert["policy.name"] = policy["name"] 116 | alert["policy.severity"] = policy["severity"] 117 | alert["policy.description"] = policy["description"] 118 | logging.debug("Done iterating through alerts and adding policy information") 119 | 120 | cli_output(alerts) 121 | 122 | 123 | cli.add_command(list_alerts) 124 | -------------------------------------------------------------------------------- /prismacloud/cli/cspm/cmd_check.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.command("check", short_help="[CSPM] Check and see if the Prisma Cloud API is up and running") 8 | @pass_environment 9 | def cli(ctx): 10 | result = pc_api.get_endpoint("check", api="cspm") 11 | cli_output(result) 12 | -------------------------------------------------------------------------------- /prismacloud/cli/cspm/cmd_cloud.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.group("cloud", short_help="[CSPM] Lists all cloud accounts onboarded onto the Prisma Cloud platform") 8 | @pass_environment 9 | def cli(ctx): 10 | """List Cloud Accounts and Types""" 11 | 12 | 13 | @click.command(name="list") 14 | def list_accounts(): 15 | """Returns Cloud Accounts.""" 16 | result = pc_api.cloud_accounts_list_read() 17 | cli_output(result) 18 | 19 | 20 | @click.command() 21 | def names(): 22 | """Returns Cloud Account IDs and Names.""" 23 | result = pc_api.cloud_accounts_list_names_read() 24 | cli_output(result) 25 | 26 | 27 | @click.command(name="type") 28 | def cloud_type(): 29 | """Returns all Cloud Types.""" 30 | result = pc_api.cloud_types_list_read() 31 | cli_output(result) 32 | 33 | 34 | cli.add_command(list_accounts) 35 | cli.add_command(names) 36 | cli.add_command(cloud_type) 37 | -------------------------------------------------------------------------------- /prismacloud/cli/cspm/cmd_compliance.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import click 3 | 4 | from prismacloud.cli import cli_output, pass_environment 5 | from prismacloud.cli.api import pc_api 6 | 7 | 8 | class ComplianceHelper: 9 | def get_compliance_finding( 10 | self, 11 | standard: str, 12 | compliance_requirement: str, 13 | compliance_section: str, 14 | scan_status: str, 15 | account_group: str, 16 | limit: int = 1000, 17 | ): 18 | parameters = {} 19 | parameters["filters"] = [ 20 | {"name": "includeEventForeignEntities", "operator": "=", "value": "true"}, 21 | {"name": "account.group", "operator": "=", "value": account_group}, 22 | {"name": "policy.complianceSection", "operator": "=", "value": compliance_section}, 23 | {"name": "policy.complianceRequirement", "operator": "=", "value": compliance_requirement}, 24 | {"name": "policy.complianceStandard", "operator": "=", "value": standard}, 25 | {"name": "scan.status", "operator": "=", "value": scan_status}, 26 | {"name": "decorateWithDerivedRRN", "operator": "=", "value": True}, 27 | ] 28 | parameters["limit"] = 10 29 | parameters["timeRange"] = {"type": "to_now", "value": "epoch"} # Latest results 30 | 31 | # response = requests.request("POST", url, headers=self.headers, data=payload) 32 | return pc_api.resource_scan_info_read(body_params=parameters) 33 | 34 | def get_compliance_standard(self, standard_name: str): 35 | response = pc_api.compliance_standard_list_read() 36 | logging.info("API - GET STANDARD %s", response) 37 | for standard in response: 38 | if standard.get("name", "") == standard_name: 39 | return standard 40 | 41 | 42 | @click.group("compliance", short_help="[CSPM] Returns a list of alerts based on compliance related findings in Prisma Cloud.") 43 | @pass_environment 44 | def cli(ctx): 45 | pass 46 | 47 | 48 | @click.command(name="export") 49 | @click.option("--compliance-standard", help="Compliance standard, e.g.: 'CIS v1.4.0 (AWS)'") 50 | @click.option("--account-group", help="Account Group ID, e.g.: 'MyAccountGroup'") 51 | def compliance_exporter(compliance_standard, account_group): 52 | """Returns a list of alerts based on compliance related findings in Prisma Cloud.""" 53 | data = [] 54 | helper = ComplianceHelper() 55 | 56 | logging.info("API - Starting compliance exporter ...") 57 | 58 | # Main logic 59 | # Get compliance standard information 60 | standard = helper.get_compliance_standard(standard_name=compliance_standard) 61 | 62 | # Get all requirements from compliance standard 63 | requirements = pc_api.compliance_standard_requirement_list_read(compliance_standard_id=standard["id"]) 64 | logging.info("API - Requirements collected: %s", requirements) 65 | for requirement in requirements: 66 | # Get all sections from compliance standard 67 | sections = pc_api.compliance_standard_requirement_section_list_read(compliance_requirement_id=requirement["id"]) 68 | logging.info("API - Sections collected: %s", sections) 69 | for section in sections: 70 | 71 | def get_results(requirement, section, scan_status: str): 72 | """Helper function to get compliance findings""" 73 | func_data = [] 74 | findings = helper.get_compliance_finding( 75 | standard["name"], requirement["name"], section["sectionId"], scan_status, account_group 76 | ) 77 | for resource in findings: 78 | func_data = func_data + [ 79 | { 80 | "standard_name": standard["name"], 81 | "requirement_name": requirement["name"], 82 | "requirement_id": requirement["requirementId"], 83 | "section_id": section["sectionId"], 84 | "account_name": resource["accountName"], 85 | "account_id": resource["accountId"], 86 | "cloud_type": resource["cloudType"], 87 | "rrn": resource.get("rrn", resource["id"]), 88 | "status": scan_status, 89 | } 90 | ] 91 | return func_data 92 | 93 | # Get finding results for given section of compliance standard 94 | data = data + get_results(requirement, section, "failed") 95 | data = data + get_results(requirement, section, "passed") 96 | 97 | cli_output(data) 98 | 99 | 100 | cli.add_command(compliance_exporter) 101 | -------------------------------------------------------------------------------- /prismacloud/cli/cspm/cmd_current.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.command("check", short_help="[CSPM] Output details about the current user") 8 | @pass_environment 9 | def cli(ctx): 10 | result = pc_api.current_user() 11 | cli_output(result) 12 | -------------------------------------------------------------------------------- /prismacloud/cli/cspm/cmd_iam.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import click 4 | 5 | from prismacloud.cli import cli_output, pass_environment 6 | from prismacloud.cli.api import pc_api 7 | 8 | 9 | @click.group("iam", short_help="[IAM] Investiguate on the IAM Permissions.") 10 | @pass_environment 11 | def cli(ctx): 12 | """IAM""" 13 | 14 | 15 | @click.option("--details", is_flag=True, help="Include the Azure Services in the output") 16 | @click.option("--amount", default="1", help="Number of units selected with --unit") 17 | @click.option( 18 | "--unit", default="week", type=click.Choice(["minute", "hour", "day", "week", "month", "year"], case_sensitive=False) 19 | ) 20 | @click.command(name="azure-guest") 21 | def azure_guest(details, amount, unit): 22 | """List Azure guest accounts with wildcard permissions""" 23 | data = [] 24 | 25 | query = "config from cloud.resource where cloud.type = 'azure' AND api.name = 'azure-active-directory-user' AND json.rule = userType equals \"Guest\"" # noqa: E501 26 | search_params = {} 27 | search_params["limit"] = 1000 28 | search_params["timeRange"] = {} 29 | search_params["timeRange"]["type"] = "relative" 30 | search_params["timeRange"]["relativeTimeType"] = "BACKWARD" 31 | search_params["timeRange"]["value"] = {} 32 | search_params["timeRange"]["value"]["unit"] = unit 33 | search_params["timeRange"]["value"]["amount"] = amount 34 | search_params["withResourceJson"] = False 35 | search_params["heuristicSearch"] = True 36 | search_params["query"] = query 37 | 38 | config_result_list = pc_api.search_config_read(search_params=search_params) 39 | 40 | for result in config_result_list: 41 | asset_id = result["assetId"] 42 | query = f"config from iam where source.cloud.resource.uai = '{asset_id}'" 43 | logging.debug(f"API - IAM RQL: {query}") 44 | search_params = {} 45 | search_params["limit"] = 1000 46 | search_params["searchType"] = "iam" 47 | search_params["query"] = query 48 | user_permissions = pc_api.search_iam_granter_to_dest(search_params=search_params) 49 | for permission in user_permissions: 50 | if permission["destCloudResourceName"] == "*": 51 | data_entry = { 52 | "name": result["name"], 53 | "accountId": result["accountId"], 54 | "accountName": result["accountName"], 55 | "service": result["service"], 56 | "grantedByEntityType": permission["grantedByEntityType"], 57 | "grantedByEntityName": permission["grantedByEntityName"], 58 | "destCloudResourceName": permission["destCloudResourceName"], 59 | } 60 | if details: 61 | data_entry["destCloudServiceName"] = permission.get("destCloudServiceName") 62 | 63 | data += [data_entry] 64 | 65 | cli_output(data) 66 | 67 | 68 | cli.add_command(azure_guest) 69 | -------------------------------------------------------------------------------- /prismacloud/cli/cspm/cmd_inventory.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.group("inventory", short_help="[CSPM] Returns asset inventory pass/fail data for the specified time period.") 8 | @pass_environment 9 | def cli(ctx): 10 | """Inventory""" 11 | 12 | 13 | @click.command(name="list") 14 | def inventory(): 15 | """Returns Cloud Accounts.""" 16 | query_params = {"timeType": "relative", "timeAmount": "24", "timeUnit": "hour"} 17 | result = pc_api.asset_inventory_list_read_v3(query_params=query_params) 18 | cli_output(result) 19 | 20 | 21 | cli.add_command(inventory) 22 | -------------------------------------------------------------------------------- /prismacloud/cli/cspm/cmd_licenses.py: -------------------------------------------------------------------------------- 1 | # import logging 2 | import click 3 | 4 | from prismacloud.cli import cli_output, pass_environment 5 | from prismacloud.cli.api import pc_api 6 | 7 | 8 | @click.group("licenses", short_help="[CSPM] Retrieve licences information") 9 | @pass_environment 10 | def cli(ctx): 11 | pass 12 | 13 | 14 | @click.command("list", short_help="Get license per account") 15 | @click.option("--amount", default="1", help="Number of units selected with --unit") 16 | @click.option( 17 | "--unit", default="month", type=click.Choice(["minute", "hour", "day", "week", "month", "year"], case_sensitive=False) 18 | ) 19 | def list_license(amount, unit): 20 | data = [] 21 | accountIds = [] 22 | 23 | query_params = {"includeGroupInfo": True} 24 | cloud_accounts = pc_api.cloud_accounts_list_read(query_params=query_params) 25 | 26 | body_params = { 27 | "cloudTypes": ["aws", "azure", "oci", "alibaba_cloud", "gcp", "others"], 28 | "accountIds": [], 29 | "timeRange": {"type": "relative", "value": {"unit": unit, "amount": amount}}, 30 | } 31 | usage = pc_api.resource_usage_by_cloud_type_v2(body_params=body_params) 32 | 33 | for item in usage["items"]: 34 | for account in cloud_accounts: 35 | for group in account["groups"]: 36 | accountId = account["accountId"] 37 | if accountId == item["account"]["id"]: 38 | data = data + [ 39 | { 40 | "accountId": accountId, 41 | "account_name": account["name"], 42 | "group_name": group["name"], 43 | "cloud_type": item["cloudType"], 44 | "total": item["total"], 45 | "resource_type_count": item["resourceTypeCount"], 46 | } 47 | ] 48 | accountIds.append(accountId) 49 | 50 | body_params = { 51 | "cloudTypes": ["repositories"], 52 | "accountIds": [], 53 | "timeRange": {"type": "relative", "value": {"unit": unit, "amount": amount}}, 54 | } 55 | usage = pc_api.resource_usage_by_cloud_type_v2(body_params=body_params) 56 | for item in usage["items"]: 57 | accountId = item["account"]["id"] 58 | if accountId not in accountIds: 59 | data = data + [ 60 | { 61 | "accountId": accountId, 62 | "account_name": item["account"]["name"], 63 | "group_name": "na", 64 | "cloud_type": item["cloudType"], 65 | "total": item["total"], 66 | "resource_type_count": item["resourceTypeCount"], 67 | } 68 | ] 69 | accountIds.append(accountId) 70 | 71 | cli_output(data) 72 | 73 | 74 | cli.add_command(list_license) 75 | -------------------------------------------------------------------------------- /prismacloud/cli/cspm/cmd_policy.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import csv 4 | from colorama import Fore, Style 5 | import click 6 | 7 | 8 | from prismacloud.cli import cli_output, pass_environment 9 | from prismacloud.cli.api import pc_api 10 | 11 | 12 | def validate_csv(ctx, param, value): 13 | if value: 14 | if not value.name.endswith(".csv"): 15 | raise click.BadParameter("CSV file extension must be '.csv'") 16 | return value 17 | 18 | 19 | @click.group("policy", short_help="[CSPM] Returns available policies, both system default and custom.") 20 | @pass_environment 21 | def cli(ctx): 22 | pass 23 | 24 | 25 | @click.command("list", short_help="[CSPM] Returns available policies, both system default and custom") 26 | def list_policies(): 27 | result = pc_api.policy_list_read() 28 | cli_output(result) 29 | 30 | 31 | @click.command("set", short_help="[CSPM] Turn on and off policies") 32 | @click.option( 33 | "--csv", 34 | "csv_file", # Add this line to map the option to the parameter 35 | type=click.File("r"), 36 | callback=validate_csv, 37 | help="CSV file containing policyId and enabled columns.", 38 | ) 39 | @click.option("--dry-run", is_flag=True, help="Preview changes without actually making them.") 40 | @click.option( 41 | "--policy_severity", 42 | default="high", 43 | type=click.Choice(["low", "medium", "high"]), 44 | help="Enable or disable Policies by Policy Severity.", 45 | ) 46 | @click.option("--all_policies", is_flag=True, help="Enable or disable all Policies.") 47 | @click.option( 48 | "--cloud_type", 49 | type=click.Choice(["aws", "azure", "gcp", "oci", "alibaba_cloud"]), 50 | help="Enable or disable Policies by Cloud Type.", 51 | ) 52 | @click.option( 53 | "--policy_type", 54 | type=click.Choice(["config", "network", "audit_event", "anomaly"]), 55 | help="Enable or disable Policies by Policy Type.", 56 | ) 57 | @click.option( 58 | "--compliance_standard", 59 | default=None, 60 | help="Enable or disable Policies by Compliance Standard, e.g.: 'CIS v1.4.0 (AWS)'", 61 | ) 62 | @click.option("--status", type=click.Choice(["enable", "disable"]), help="Policy status to set (enable or disable).") 63 | def enable_or_disable_policies( 64 | policy_severity, all_policies, cloud_type, policy_type, status, compliance_standard, csv_file, dry_run 65 | ): 66 | """Enable or Disable policies""" 67 | 68 | logging.debug("API - Getting list of Policies ...") 69 | policy_list = pc_api.policy_v2_list_read() 70 | logging.debug("API - All policies have been fetched.") 71 | 72 | # Initialize counters to keep track of planned actions 73 | total_fetched_policies = len(policy_list) # Count the total number of fetched policies 74 | total_policies_csv = 0 75 | policies_to_update = 0 76 | policies_no_change = 0 77 | changes_true_to_false = 0 78 | changes_false_to_true = 0 79 | 80 | # Ensure that the --dry-run flag is only used in conjunction with the --csv option. 81 | if dry_run and not csv_file: 82 | logging.error("The --dry-run option is only valid when --csv is provided.") 83 | return 84 | 85 | if dry_run: 86 | logging.info(f"{Fore.MAGENTA}=== DRY-RUN SUMMARY ==={Style.RESET_ALL}") 87 | 88 | # When --csv is provided, the function follows a different logic path for updating policies, 89 | # and the --status option becomes optional. 90 | if csv_file: 91 | logging.debug("Processing CSV file: %s", csv_file.name) 92 | reader = csv.DictReader(csv_file) # Use csv directly as file object 93 | for row in reader: 94 | total_policies_csv += 1 # Increment total count for each row in CSV 95 | policy_id = row.get("policyId") 96 | required_status = row.get("enabled").lower() == "true" 97 | policy = next((p for p in policy_list if p["policyId"] == policy_id), None) 98 | logging.debug(f"Policy ID: {policy_id}, ({policy['enabled']}->{required_status}), Name: {policy['name']}") 99 | if policy: 100 | if policy["enabled"] != required_status: 101 | logging.debug(f"API - Updating Policy:, ({policy['enabled']}->{required_status}), Name: {policy['name']}") 102 | 103 | if dry_run: 104 | # Check if the policy's current status matches the required status 105 | if policy["enabled"] != required_status: 106 | policies_to_update += 1 107 | action = "-" if policy["enabled"] else "+" 108 | if action == "-": 109 | changes_true_to_false += 1 110 | else: 111 | changes_false_to_true += 1 112 | 113 | # Log the planned action with color coding 114 | color = Fore.LIGHTGREEN_EX if action == "+" else Fore.LIGHTRED_EX 115 | logging.info(f"{color}[{action}] Policy: {policy['name']}{Style.RESET_ALL}") 116 | else: 117 | try: 118 | pc_api.policy_status_update(policy_id, required_status.lower()) 119 | except Exception as exc: # pylint:disable=broad-except 120 | logging.error( 121 | f"Unable to update Policy ID: {policy_id}. It may have been changed in the past 4 hours." 122 | ) 123 | logging.info("Error:: %s", exc) 124 | else: 125 | if dry_run: 126 | policies_no_change += 1 # Increment if no change is needed 127 | 128 | # Log that no changes are needed for this policy 129 | action = "=" 130 | color = Fore.LIGHTYELLOW_EX if action == "=" else Fore.CYAN 131 | logging.info(f"{color}[{action}] Policy: {policy['name']}{Style.RESET_ALL}") 132 | 133 | # Show summary of actions based on CSV input 134 | if dry_run and csv_file: 135 | logging.info(f"{Fore.MAGENTA}=== DRY-RUN COMPLETE ==={Style.RESET_ALL}") 136 | logging.info(f"{Fore.CYAN}Total Policies Fetched: {total_fetched_policies}{Style.RESET_ALL}") 137 | logging.info(f"{Fore.CYAN}Policies in CSV: {total_policies_csv}{Style.RESET_ALL}") 138 | logging.info( 139 | f"{Fore.LIGHTGREEN_EX}To Update: {policies_to_update} " 140 | f"({Fore.LIGHTRED_EX}Disabling: {changes_true_to_false}, " 141 | f"{Fore.LIGHTGREEN_EX}Enabling: {changes_false_to_true})" 142 | f"{Style.RESET_ALL}" 143 | ) 144 | 145 | logging.info(f"{Fore.LIGHTYELLOW_EX}No Changes: {policies_no_change}{Style.RESET_ALL}") 146 | else: 147 | logging.info("API - All policies from CSV have been updated.") 148 | return 149 | 150 | specified_policy_status = bool(status.lower() == "enable") 151 | specified_policy_status_string = str(specified_policy_status).lower() 152 | 153 | policy_list_to_update = [] 154 | if compliance_standard is not None: 155 | logging.info("API - Getting list of Policies by Compliance Standard: %s", compliance_standard) 156 | policy_list = pc_api.compliance_standard_policy_v2_list_read(compliance_standard) 157 | logging.info("API - Done") 158 | for policy in policy_list: 159 | # Do not update a policy if it is already in the desired state. 160 | if policy["enabled"] is not specified_policy_status: 161 | policy_list_to_update.append(policy) 162 | else: 163 | if all_policies: 164 | for policy in policy_list: 165 | # Do not update a policy if it is already in the desired state. 166 | if policy["enabled"] is not specified_policy_status: 167 | policy_list_to_update.append(policy) 168 | elif cloud_type is not None: 169 | cloud_type = cloud_type.lower() 170 | for policy in policy_list: 171 | if policy["enabled"] is not specified_policy_status: 172 | if cloud_type == policy["cloudType"]: 173 | policy_list_to_update.append(policy) 174 | elif policy_severity is not None: 175 | policy_severity = policy_severity.lower() 176 | for policy in policy_list: 177 | if policy["enabled"] is not specified_policy_status: 178 | if policy_severity == policy["severity"]: 179 | policy_list_to_update.append(policy) 180 | elif policy_type is not None: 181 | policy_type = policy_type.lower() 182 | for policy in policy_list: 183 | if policy["enabled"] is not specified_policy_status: 184 | if policy_type == policy["policyType"]: 185 | policy_list_to_update.append(policy) 186 | 187 | if policy_list_to_update: 188 | logging.info("API - Updating Policies ...") 189 | for policy in policy_list_to_update: 190 | logging.info("API - Updating Policy: %s", policy["name"]) 191 | pc_api.policy_status_update(policy["policyId"], specified_policy_status_string) 192 | logging.info("API - All policies have been updated.") 193 | else: 194 | logging.info("API - No Policies match the specified parameter, or all matching Policies are already in desired status") 195 | 196 | 197 | cli.add_command(list_policies) 198 | cli.add_command(enable_or_disable_policies) 199 | -------------------------------------------------------------------------------- /prismacloud/cli/cspm/cmd_pov.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import click 3 | 4 | # from prismacloud.cli import cli_output, pass_environment 5 | from prismacloud.cli import pass_environment 6 | from prismacloud.cli.api import pc_api 7 | 8 | 9 | @click.group("pov", short_help="[CSPM] Set best practice settings for a project") 10 | @pass_environment 11 | def cli(ctx): 12 | pass 13 | 14 | 15 | @click.command(name="start") 16 | def start_pov(): 17 | """Set best practice settings for a project""" 18 | logging.info("API - Implementing best practices on your Prisma Cloud tenant.") 19 | 20 | body_params = { 21 | "sessionTimeout": 60, 22 | "userAttributionInNotification": False, 23 | "requireAlertDismissalNote": True, 24 | "autoEnableAttackPathAndModulePolicies": True, 25 | "defaultPoliciesEnabled": {"informational": True, "low": True, "medium": True, "high": True, "critical": True}, 26 | "alarmEnabled": True, 27 | "applyDefaultPoliciesEnabled": True, 28 | "accessKeyMaxValidity": -1, 29 | "auditLogSiemIntgrIds": [], 30 | "auditLogsEnabled": False, 31 | "unsubscribeChronicles": False, 32 | "namedUsersAccessKeysExpiryNotificationsEnabled": False, 33 | "serviceUsersAccessKeysExpiryNotificationsEnabled": False, 34 | "notificationThresholdAccessKeysExpiry": 0, 35 | } 36 | pc_api.enterprise_settings_config(body_params=body_params) 37 | logging.info("API - Changed enterprise settings and enabled all policies.") 38 | 39 | # Changed Account hijacking attempts policy 40 | policy_id = "e12e1b44-3018-11e7-93ae-92361f002671" 41 | body_params = {"alertDisposition": "aggressive", "trainingModelThreshold": "low"} 42 | pc_api.anomaly_settings_config(body_params=body_params, policy_id=policy_id) 43 | logging.info("API - Changed Account hijacking attempts policy.") 44 | 45 | # Changed Anomalous compute provisioning activity policy. 46 | policy_id = "e64fb48f-7d36-2309-dda2-2304c689116c" 47 | body_params = {"alertDisposition": "aggressive"} 48 | pc_api.anomaly_settings_config(body_params=body_params, policy_id=policy_id) 49 | logging.info("API - Changed Anomalous compute provisioning activity policy.") 50 | 51 | # Changed Unusual user activity policy. 52 | policy_id = "e12e1edc-3018-11e7-93ae-92361f002671" 53 | body_params = {"alertDisposition": "aggressive", "trainingModelThreshold": "low"} 54 | pc_api.anomaly_settings_config(body_params=body_params, policy_id=policy_id) 55 | logging.info("API - Changed Unusual user activity policy.") 56 | 57 | # Get the resource list 58 | resource_lists = pc_api.resource_list_read() 59 | 60 | # Initialize the flag to False 61 | found = False 62 | compute_resource_list_id = "" 63 | 64 | # Iterate through each resource in the list 65 | for resource in resource_lists: 66 | if resource["name"] == "Compute Alert Rule": 67 | found = True 68 | compute_resource_list_id = resource["id"] 69 | break 70 | 71 | # Add a resource list if flag is false 72 | if found: 73 | logging.info(f"API - Resource List 'Compute Alert Rule' exists. Compute Alert List ID is {compute_resource_list_id}") 74 | else: 75 | logging.info("API - Resource List 'Compute Alert Rule' does not exist.") 76 | body_params = { 77 | "name": "Compute Alert Rule", 78 | "resourceListType": "COMPUTE_ACCESS_GROUP", 79 | "description": "Created by Prisma Cloud CLI", 80 | "members": [ 81 | { 82 | "appIDs": ["*"], 83 | "clusters": ["*"], 84 | "codeRepos": ["*"], 85 | "containers": ["*"], 86 | "functions": ["*"], 87 | "hosts": ["*"], 88 | "images": ["*"], 89 | "labels": ["*"], 90 | "namespaces": ["*"], 91 | } 92 | ], 93 | } 94 | compute_resource_list = pc_api.resource_list_create(body_params) 95 | compute_resource_list_id = compute_resource_list["id"] 96 | logging.info(f"API - Add a resource list 'Compute Alert Rule'. Compute Alert List ID is {compute_resource_list_id}") 97 | 98 | # Get the alert rule list 99 | alert_rules = pc_api.alert_rule_list_read() 100 | 101 | # Get the account groups list 102 | account_groups = pc_api.cloud_account_group_list_read() 103 | 104 | # Initialize alert_rule_id 105 | alert_rule_id = None 106 | 107 | # Find the alert rule id for 'Prisma Default Alert Rule' 108 | for alert_rule in alert_rules: 109 | if alert_rule["name"] == "Prisma Default Alert Rule": 110 | alert_rule_id = alert_rule["policyScanConfigId"] 111 | break 112 | 113 | # Check if the alert rule was found 114 | if alert_rule_id is None: 115 | raise ValueError("Alert rule 'Prisma Default Alert Rule' not found") 116 | 117 | # Extract all account group ids 118 | account_group_ids = [group["id"] for group in account_groups] 119 | 120 | # Prepare the body parameters for the update 121 | body_params = { 122 | "policyScanConfigId": alert_rule_id, 123 | "name": "Prisma Default Alert Rule", 124 | "description": "Prisma Default alert rule to scan all policies associated with label Prisma_Cloud and all account added to 'Default Account Group' - Updated with Prisma Cloud CLI", # noqa: E501 125 | "enabled": True, 126 | "scanAll": True, 127 | "deleted": False, 128 | "alertRuleNotificationConfig": [], 129 | "allowAutoRemediate": False, 130 | "delayNotificationMs": 0, 131 | "scanConfigType": "STANDARD", 132 | "notifyOnOpen": True, 133 | "notifyOnSnoozed": False, 134 | "notifyOnDismissed": False, 135 | "notifyOnResolved": False, 136 | "readOnly": False, 137 | "policies": [], 138 | "target": { 139 | "alertRulePolicyFilter": {}, 140 | "accountGroups": account_group_ids, 141 | "excludedAccounts": [], 142 | "regions": [], 143 | "tags": [], 144 | }, 145 | } 146 | 147 | # Update the alert rule 148 | pc_api.alert_rule_update(alert_rule_id, body_params) 149 | logging.info("API - Update default alert rule") 150 | 151 | # Initialize the flag to False 152 | found = False 153 | 154 | # Iterate through each resource in the list 155 | for alert_rule in alert_rules: 156 | if alert_rule["name"] == "Compute Alert Rule": 157 | found = True 158 | alert_rule_id = alert_rule["policyScanConfigId"] 159 | break 160 | 161 | # Add a resource list if flag is false 162 | if found: 163 | logging.info("API - Alert Rule 'Compute Alert Rule' exists.") 164 | else: 165 | # Prepare the body parameters for the update 166 | body_params = { 167 | "name": "Compute Alert Rule", 168 | "description": "Compute Alert Rule created by Prisma Cloud CLI", 169 | "enabled": True, 170 | "scanAll": False, 171 | "alertRuleNotificationConfig": [], 172 | "notifyOnOpen": True, 173 | "policies": [ 174 | "2f6a1ddf-d2f2-40c8-8598-c52f3438d0dc", 175 | "30cfde92-4a3f-4f1d-966b-08c42b8b0f26", 176 | "45905e17-b6f4-485a-9bbe-6610a417a8e6", 177 | "287de5f8-f5e6-4908-bdad-50c48e71a5da", 178 | "34c1e0fd-f516-4b46-8d59-f69b6f28a504", 179 | "1a3c9450-ffa3-427b-8bc4-d6a8bfdb0f36", 180 | "0452698b-cd31-4f99-8ed4-9337b1ec6451", 181 | ], 182 | "target": { 183 | "alertRulePolicyFilter": {}, 184 | "accountGroups": [], 185 | "excludedAccounts": [], 186 | "includedResourceLists": {"computeAccessGroupIds": [compute_resource_list_id]}, 187 | "regions": [], 188 | }, 189 | } 190 | 191 | # Update the alert rule 192 | pc_api.alert_rule_create(body_params) 193 | logging.info("API - Create compute alert rule") 194 | 195 | # Add current user to SSO Bypass List 196 | current_user = pc_api.current_user() 197 | logging.info(f"API - Current user email address: {current_user['email']}") 198 | 199 | body_params = [current_user["email"]] 200 | # Update the alert rule 201 | pc_api.user_bypass_sso(body_params) 202 | logging.info("API - Current user added to SSO Bypass list") 203 | 204 | # Create Cloud Security report with a schedule 205 | users = pc_api.user_list_read() 206 | user_emails = [user["email"] for user in users] 207 | logging.info(f"API - List email addresses: {user_emails}") 208 | 209 | # Get the cloud security reports 210 | reports = pc_api.adoptionadvisor_report_read() 211 | logging.info("API - Get existing cloud security reports") 212 | 213 | # Initialize the flag to False 214 | found = False 215 | 216 | # Iterate through each resource in the list 217 | for report in reports: 218 | if report["name"] == "Scheduled Cloud Security Report": 219 | found = True 220 | break 221 | 222 | # Add a resource list if flag is false 223 | if found: 224 | logging.info("API - Cloud Security Report exists.") 225 | else: 226 | # Prepare the body parameters for the update 227 | body_params = { 228 | "emailIds": user_emails, 229 | "name": "Scheduled Cloud Security Report", 230 | "widgetDays": 30, 231 | "isRecurring": True, 232 | "target": { 233 | "scheduleEnabled": True, 234 | "schedule": "DTSTART;TZID=Europe/Brussels:20240701T000000\nINTERVAL=1;FREQ=WEEKLY;BYHOUR=3;BYMINUTE=0;BYSECOND=0;BYDAY=MO", # noqa: E501 235 | }, 236 | "ruleOptions": { 237 | "target": { 238 | "schedule": {"interval": "1", "frequency": 2, "weekday": [0], "hour": 3, "timezone": "Europe/Brussels"} 239 | } 240 | }, 241 | "schedule": "DTSTART;TZID=Europe/Brussels:20240701T000000\nINTERVAL=1;FREQ=WEEKLY;BYHOUR=3;BYMINUTE=0;BYSECOND=0;BYDAY=MO", # noqa: E501 242 | "enabled": True, 243 | } 244 | 245 | # Update the alert rule 246 | pc_api.adoptionadvisor_report_create(report_to_add=body_params) 247 | logging.info("API - Created Cloud Security Report") 248 | 249 | enforcement_rules = pc_api.enforcement_rules_read() 250 | 251 | # Extract the id where mainRule is True 252 | main_rule_id = next((rule["id"] for rule in enforcement_rules["rules"] if rule.get("mainRule")), None) 253 | 254 | logging.info(f"API - Enforcement rules - Main Rule ID: {main_rule_id}") 255 | # Prepare the body parameters for the update 256 | body_params = { 257 | "id": f"{main_rule_id}", 258 | "name": "Security default findings", 259 | "repositories": [], 260 | "labels": [], 261 | "codeCategories": { 262 | "LICENSES": {"softFailThreshold": "LOW", "hardFailThreshold": "OFF", "commentsBotThreshold": "HIGH"}, 263 | "VULNERABILITIES": {"softFailThreshold": "LOW", "hardFailThreshold": "OFF", "commentsBotThreshold": "HIGH"}, 264 | "IAC": {"softFailThreshold": "INFO", "hardFailThreshold": "OFF", "commentsBotThreshold": "INFO"}, 265 | "WEAKNESSES": {"softFailThreshold": "OFF", "hardFailThreshold": "OFF", "commentsBotThreshold": "OFF"}, 266 | "SECRETS": {"softFailThreshold": "LOW", "hardFailThreshold": "OFF", "commentsBotThreshold": "HIGH"}, 267 | "BUILD_INTEGRITY": {"softFailThreshold": "OFF", "hardFailThreshold": "OFF", "commentsBotThreshold": "OFF"}, 268 | }, 269 | } 270 | 271 | # Update the alert rule 272 | pc_api.enforcement_rules_update(rules=body_params) 273 | logging.info("API - Enforcement rules updated") 274 | 275 | logging.info("API - === END ===") 276 | 277 | 278 | cli.add_command(start_pov) 279 | -------------------------------------------------------------------------------- /prismacloud/cli/cspm/cmd_resource.py: -------------------------------------------------------------------------------- 1 | import click 2 | import json 3 | import yaml 4 | 5 | from prismacloud.cli import cli_output, pass_environment 6 | from prismacloud.cli.api import pc_api 7 | 8 | 9 | @click.group("resource", short_help="[CSPM] Returns detailed information for the resource with the given rrn.") 10 | @pass_environment 11 | def cli(ctx): 12 | pass 13 | 14 | 15 | @click.option( 16 | "-a", 17 | "--account", 18 | help="Cloud Account", 19 | multiple=True, 20 | ) 21 | @click.option( 22 | "-r", 23 | "--region", 24 | help="Cloud Region", 25 | multiple=True, 26 | ) 27 | @click.option( 28 | "-s", 29 | "--service", 30 | help="Cloud Service", 31 | multiple=True, 32 | ) 33 | @click.option( 34 | "-rt", 35 | "--resource_type", 36 | help="Resource Type", 37 | multiple=True, 38 | ) 39 | @click.option( 40 | "-t", 41 | "--type", 42 | help="Cloud Type", 43 | multiple=True, 44 | ) 45 | @click.option( 46 | "-st", 47 | "--status", 48 | help="Scan Status", 49 | multiple=True, 50 | ) 51 | @click.option( 52 | "-tg", 53 | "--tag", 54 | help="Resource Tag (in the format 'key:value')", 55 | multiple=True, 56 | ) 57 | @click.option( 58 | "-f", 59 | "--tf_file", 60 | help="Path to the Terraform output file", 61 | default="import.tf", 62 | ) 63 | @click.command("list", short_help="[CSPM] Returns detailed information for the resource with the given rrn.") 64 | def list_resource(region, service, type, status, account, resource_type, tag, tf_file): 65 | 66 | base_filters = [ 67 | {"name": "includeEventForeignEntities", "operator": "=", "value": "false"}, 68 | {"name": "decorateWithDerivedRRN", "operator": "=", "value": False}, 69 | ] 70 | 71 | region_filters = [{"name": "cloud.region", "operator": "=", "value": r} for r in region] 72 | service_filters = [{"name": "cloud.service", "operator": "=", "value": s} for s in service] 73 | type_filters = [{"name": "cloud.type", "operator": "=", "value": t} for t in type] 74 | status_filters = [{"name": "scan.status", "operator": "=", "value": st} for st in status] 75 | account_filters = [{"name": "cloud.account", "operator": "=", "value": a} for a in account] 76 | resource_type_filters = [{"name": "resource.type", "operator": "=", "value": rt} for rt in resource_type] 77 | tag_filters = [] 78 | for tg in tag: 79 | key, value = tg.split(":") 80 | tag_filters.append({"name": "resource.tagv2", "operator": "=", "value": json.dumps({"key": key, "value": value})}) 81 | 82 | payload = { 83 | "filters": base_filters 84 | + region_filters 85 | + service_filters 86 | + type_filters 87 | + status_filters 88 | + account_filters 89 | + resource_type_filters 90 | + tag_filters, # noqa: E501 91 | "limit": 100, 92 | "timeRange": {"type": "to_now", "value": "epoch"}, 93 | } 94 | result = pc_api.resource_scan_info_read(body_params=payload) 95 | 96 | if tf_file: 97 | generate_tf_file(result, tf_file, "mapping.yaml") 98 | 99 | cli_output(result) 100 | 101 | 102 | def get_terraform_mapping(yaml_file_path, prismacloud_type): 103 | with open(yaml_file_path, "r") as yaml_file: 104 | mappings = yaml.safe_load(yaml_file) 105 | 106 | for mapping in mappings: 107 | if mapping["prismacloud"] == prismacloud_type: 108 | return mapping["terraform"] 109 | return None 110 | 111 | 112 | def generate_tf_file(json_data, tf_file_path, yaml_file_path): 113 | written_ids = set() # A set to store the IDs that have been written to the file 114 | 115 | with open(tf_file_path, "w") as tf_file: 116 | for entry in json_data: 117 | entry_id = entry.get("id") 118 | 119 | # If the ID has already been written to the file, skip this entry 120 | if entry_id in written_ids: 121 | continue 122 | 123 | terraform_type = get_terraform_mapping(yaml_file_path, entry.get("assetType")) 124 | 125 | if terraform_type is not None: 126 | name_slug = entry.get("name").lower().replace(" ", "_").replace("-", "_") 127 | tf_file.write( 128 | f""" 129 | import {{ 130 | to = {terraform_type}.{name_slug} 131 | id = "{entry_id}" 132 | }} 133 | """ 134 | ) 135 | # Add the ID to the set of written IDs 136 | written_ids.add(entry_id) 137 | 138 | 139 | cli.add_command(list_resource) 140 | -------------------------------------------------------------------------------- /prismacloud/cli/cspm/cmd_rql.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import click 4 | import yaml 5 | 6 | from prismacloud.cli import cli_output, pass_environment 7 | from prismacloud.cli.api import pc_api 8 | 9 | 10 | @click.command( 11 | "rql", short_help="[CSPM] Returns a list of alerts that match the constraints specified in the query parameters." 12 | ) 13 | @click.option("--query", help="RQL Query", required=False) 14 | @click.option("--file", help="RQL Queries File (yaml format)", required=False) 15 | @click.option("--amount", default="1", help="Number of units selected with --unit") 16 | @click.option( 17 | "--unit", default="day", type=click.Choice(["minute", "hour", "day", "week", "month", "year"], case_sensitive=False) 18 | ) 19 | @click.option("--field", default="") 20 | @pass_environment 21 | def cli(ctx, query, amount, unit, field="", file=False): 22 | """ 23 | Returns the results of a RQL query from the Prisma Cloud 24 | platform Sample queries: 25 | \b 26 | Config: "config from cloud.resource where api.name = 'aws-ec2-describe-instances'" 27 | Network: "network from vpc.flow_record where bytes > 0 AND threat.source = 'AutoFocus' AND threat.tag.group = 'Cryptominer'" 28 | Event: "event from cloud.audit_logs where operation IN ( 'AddUserToGroup', 'AttachGroupPolicy', 'AttachUserPolicy' , 'AttachRolePolicy' , 'CreateAccessKey', 'CreateKeyPair', 'DeleteKeyPair', 'DeleteLogGroup' )" 29 | """ # noqa 30 | search_params = {} 31 | search_params["limit"] = 1000 32 | search_params["timeRange"] = {} 33 | search_params["timeRange"]["type"] = "relative" 34 | search_params["timeRange"]["value"] = {} 35 | search_params["timeRange"]["value"]["unit"] = unit 36 | search_params["timeRange"]["value"]["amount"] = amount 37 | 38 | search_params["withResourceJson"] = False 39 | search_params["query"] = query 40 | 41 | # Check if we have a file as input 42 | if file: 43 | logging.debug("Parsing file: " + file) 44 | 45 | # Try to open file and iterate through the items 46 | try: 47 | with open(file) as file: 48 | items = yaml.safe_load(file) 49 | 50 | for item in items: 51 | name = item["name"] 52 | query = item["query"] 53 | search_params["query"] = query 54 | click.secho("\nRQL Query name: " + name, fg="green") 55 | click.secho("RQL Query: " + query, fg="green") 56 | logging.debug("API - Getting the RQL results ...") 57 | if query.startswith("config from iam"): 58 | search_params["searchType"] = "iam" 59 | search_params["timeRange"] = {"type": "to_now", "value": "epoch"} # Latest results 60 | result_list = pc_api.search_iam_read(search_params=search_params) 61 | elif query.startswith("config from"): 62 | result_list = pc_api.search_config_read(search_params=search_params) 63 | elif query.startswith("network from"): 64 | result_list = pc_api.search_network_read(search_params=search_params) 65 | elif query.startswith("event from"): 66 | result_list = pc_api.search_event_read(search_params=search_params) 67 | else: 68 | logging.error("Unknown RQL query type (limited to: config|network|event).") 69 | 70 | if field == "": 71 | cli_output(result_list) 72 | else: 73 | # We have field as input to select a deeper level of data. 74 | # Our main result returns data on the query and the results are in one of the main field. 75 | # This option gives the ability to retrieve that data. 76 | field_path = field.split(".") 77 | for _field in field_path: 78 | result_list = result_list[_field] 79 | 80 | cli_output(result_list) 81 | except Exception as exc: # pylint:disable=broad-except 82 | logging.error("An error has occured: %s", exc) 83 | else: 84 | logging.debug("API - Getting the RQL results ...") 85 | if query.startswith("config from iam"): 86 | search_params["searchType"] = "iam" 87 | search_params["timeRange"] = {"type": "to_now", "value": "epoch"} # Latest results 88 | result_list = pc_api.search_iam_read(search_params=search_params) 89 | elif query.startswith("config from"): 90 | result_list = pc_api.search_config_read(search_params=search_params) 91 | elif query.startswith("network from"): 92 | # For a network query, focus on field data.nodes 93 | field = "data.nodes" 94 | result_list = pc_api.search_network_read(search_params=search_params) 95 | elif query.startswith("event from"): 96 | result_list = pc_api.search_event_read(search_params=search_params) 97 | else: 98 | logging.error("Unknown RQL query type (limited to: config|network|event).") 99 | 100 | if field == "": 101 | cli_output(result_list) 102 | else: 103 | # We have field as input to select a deeper level of data. 104 | # Our main result returns data on the query and the results are in one of the main field. 105 | # This option gives the ability to retrieve that data. 106 | field_path = field.split(".") 107 | for _field in field_path: 108 | result_list = result_list[_field] 109 | 110 | cli_output(result_list) 111 | -------------------------------------------------------------------------------- /prismacloud/cli/cspm/cmd_saas_version.py: -------------------------------------------------------------------------------- 1 | import click 2 | import re 3 | 4 | from prismacloud.cli import cli_output, pass_environment 5 | from prismacloud.cli.api import pc_api 6 | 7 | 8 | @click.command("saas_version", short_help="[CSPM] Shows SaaS (CSPM and CWPP) version") 9 | @pass_environment 10 | def cli(ctx): 11 | version_string = pc_api.get_endpoint("version", api="cspm") 12 | version_tag = "unknown" 13 | version_sha = "unknown" 14 | if version_string: 15 | try: 16 | results = re.search(r"Tag: (.+?), Version:\s+(.+)", version_string) 17 | if results: 18 | version_tag = results.group(1) 19 | version_sha = results.group(2) 20 | except AttributeError: 21 | pass 22 | compute_version = pc_api.get_endpoint("version", api="cwpp") 23 | version = {"cspm_tag": version_tag, "cspm_sha": version_sha, "cwpp_version": compute_version} 24 | cli_output(version) 25 | -------------------------------------------------------------------------------- /prismacloud/cli/cspm/cmd_usage.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.command("usage", short_help="[CSPM] Retrieve credits usage information") 8 | @pass_environment 9 | def cli(ctx): 10 | body_params = {"accountIds": [], "timeRange": {"type": "relative", "value": {"unit": "day", "amount": 90}}} 11 | result = pc_api.resource_usage_over_time(body_params=body_params) 12 | cli_output(result) 13 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PaloAltoNetworks/prismacloud-cli/ad41f2754cc53b49d2dd3ae6aeca67b8b76857c2/prismacloud/cli/cwpp/__init__.py -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_audits.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | 3 | import click 4 | import logging 5 | 6 | from prismacloud.cli import cli_output, pass_environment 7 | from prismacloud.cli.api import pc_api 8 | 9 | 10 | @click.group("audits", short_help="[CWPP] Retrieve audits for Prisma Cloud") 11 | @pass_environment 12 | def cli(ctx): 13 | pass 14 | 15 | 16 | @click.command() 17 | @click.option("-l", "--limit", default=5, help="Number of documents to return") 18 | def container(limit=5): 19 | """ 20 | 21 | Sample usage: 22 | 23 | pc --config local --columns os,msg,type,attackType,severity,containerName,hostname audits container 24 | 25 | """ 26 | last_hour_date_time = datetime.now() - timedelta(hours=3) 27 | from_field = last_hour_date_time.strftime("%Y-%m-%dT%H:%M:%S.%f%z")[:-3] + "Z" 28 | to_field = "2030-01-01T00:00:00.000Z" 29 | result = pc_api.get_endpoint( 30 | "audits/runtime/container", {"from": from_field, "to": to_field, "sort": "time", "reverse": "true"} 31 | ) 32 | cli_output(result) 33 | 34 | 35 | @click.command() 36 | @click.option("-l", "--limit", default=5, help="Number of documents to return") 37 | def firewall(limit=5): 38 | """ 39 | 40 | Sample usage: 41 | 42 | pc --config local --columns ruleName,msg,containerName,requestHost,subnet audits firewall 43 | 44 | """ 45 | last_hour_date_time = datetime.now() - timedelta(hours=3) 46 | from_field = last_hour_date_time.strftime("%Y-%m-%dT%H:%M:%S.%f%z")[:-3] + "Z" 47 | to_field = "2030-01-01T00:00:00.000Z" 48 | result = pc_api.get_endpoint( 49 | "audits/firewall/app/container", {"from": from_field, "to": to_field, "sort": "time", "reverse": "true"} 50 | ) 51 | cli_output(result) 52 | 53 | 54 | @click.command() 55 | @click.option("-l", "--limit", default=5, help="Number of documents to return") 56 | def incidents(limit=5): 57 | last_hour_date_time = datetime.now() - timedelta(days=7) 58 | from_field = last_hour_date_time.strftime("%Y-%m-%dT%H:%M:%S.%f%z")[:-3] + "Z" 59 | to_field = "2030-01-01T00:00:00.000Z" 60 | result = pc_api.get_endpoint("audits/incidents", {"from": from_field, "to": to_field, "sort": "time", "reverse": "true"}) 61 | cli_output(result) 62 | 63 | 64 | @click.command() 65 | @click.option("-i", "--id", help="Incident ID") 66 | @click.option("-l", "--limit", default=5, help="Number of documents to return") 67 | def snapshot(id="", limit=5): 68 | # We have an incident ID that we want to get the snapshot for 69 | # First we need to find the profile ID 70 | result = pc_api.get_endpoint("audits/incidents", {"id": id}) 71 | profileID = result[0]["profileID"] 72 | 73 | # Log profileID found for incident ID 74 | logging.debug("Profile ID found for incident ID: " + str(profileID)) 75 | 76 | # Now get the forensic snapshot 77 | result = pc_api.get_endpoint("profiles/container/" + profileID + "/forensic", {"incidentID": id}) 78 | cli_output(result) 79 | 80 | 81 | cli.add_command(container) 82 | cli.add_command(firewall) 83 | cli.add_command(incidents) 84 | cli.add_command(snapshot) 85 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_containers.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.group("containers", short_help="[CWPP] Container scan reports.") 8 | @pass_environment 9 | def cli(ctx): 10 | pass 11 | 12 | 13 | @click.command(name="list") 14 | def list_containers(): 15 | result = pc_api.get_endpoint("containers") 16 | cli_output(result) 17 | 18 | 19 | @click.command() 20 | def names(): 21 | result = pc_api.get_endpoint("containers/names") 22 | cli_output(result) 23 | 24 | 25 | @click.command() 26 | def count(): 27 | result = pc_api.get_endpoint("containers/count") 28 | cli_output(result) 29 | 30 | 31 | cli.add_command(list_containers) 32 | cli.add_command(names) 33 | cli.add_command(count) 34 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_credentials.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.command("credentials", short_help="[CWPP] Returns the credentials") 8 | @pass_environment 9 | def cli(ctx): 10 | result = pc_api.credential_list_read() 11 | cli_output(result) 12 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_defenders.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.group("defenders", short_help="[CWPP] Retrieves Defenders information.") 8 | @pass_environment 9 | def cli(ctx): 10 | pass 11 | 12 | 13 | @click.command(name="list") 14 | @click.option("--connected", is_flag=True, help="Print Summary of Connected defenders only") 15 | def list_defenders(connected): 16 | query_param = "" 17 | if connected is True: 18 | query_param = {"connected": "true"} 19 | result = pc_api.defenders_list_read(query_param) 20 | 21 | cli_output(result) 22 | 23 | 24 | @click.command() 25 | def names(): 26 | result = pc_api.defenders_names_list_read() 27 | cli_output(result) 28 | 29 | 30 | @click.command() 31 | def summary(): 32 | result = pc_api.get_endpoint("defenders/summary") 33 | cli_output(result) 34 | 35 | 36 | cli.add_command(list_defenders) 37 | cli.add_command(names) 38 | cli.add_command(summary) 39 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_discovery.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import click 3 | 4 | from prismacloud.cli import cli_output, pass_environment 5 | from prismacloud.cli.api import pc_api 6 | 7 | 8 | @click.group("discovery", short_help="[CWPP] Returns a list of all cloud discovery scan results.") 9 | @pass_environment 10 | def cli(ctx): 11 | pass 12 | 13 | 14 | @click.command(name="list") 15 | def list_discovery(): 16 | result = pc_api.cloud_discovery_read() 17 | cli_output(result) 18 | 19 | 20 | @click.command(name="entities") 21 | def list_entities(): 22 | result = pc_api.cloud_discovery_entities() 23 | cli_output(result) 24 | 25 | 26 | @click.command(name="vms") 27 | @click.option("-a", "--account", help="Cloud Account", multiple=True, is_flag=False) 28 | @click.option("-t", "--type", help="Cloud Type", multiple=True, is_flag=False) 29 | @click.option("-r", "--region", help="Cloud Region", multiple=True, is_flag=False) 30 | def vms_discovery(type, region, account): 31 | 32 | query_param = "" 33 | if region: 34 | region_string = ",".join(r for r in region) 35 | region_filters = f"®ion={region_string}" 36 | query_param += region_filters 37 | 38 | if type: 39 | provider_string = ",".join(t for t in type) 40 | type_filters = f"&provider={provider_string}" 41 | query_param += type_filters 42 | 43 | if account: 44 | account_string = ",".join(a for a in account) 45 | account_filters = f"&accountIDs={account_string}" 46 | query_param += account_filters 47 | 48 | logging.info("API - Query Params: %s", query_param) 49 | 50 | result = pc_api.cloud_discovery_vms(query_param) 51 | 52 | cli_output(result) 53 | 54 | 55 | cli.add_command(list_discovery) 56 | cli.add_command(list_entities) 57 | cli.add_command(vms_discovery) 58 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_host_auto_deploy.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import click 4 | 5 | from prismacloud.cli import cli_output, pass_environment 6 | from prismacloud.cli.api import pc_api 7 | 8 | 9 | @click.group("host_auto_deploy", short_help="[CSPM] Create host auto defend rules") 10 | @pass_environment 11 | def cli(ctx): 12 | pass 13 | 14 | 15 | @click.command("list", short_help="[CSPM] List host auto defend policies.") 16 | def host_auto_deploy_read(): 17 | result = pc_api.settings_host_auto_deploy_read() 18 | cli_output(result) 19 | 20 | 21 | @click.command("update", short_help="Update host auto defend rules based on the cloud accounts onboarded") 22 | @click.option( 23 | "--provider", 24 | default="aws", 25 | type=click.Choice(["aws", "azure", "gcp"]), 26 | help="Cloud Service Provider", 27 | ) 28 | @click.option( 29 | "--aws_region_type", 30 | default="regular", 31 | type=click.Choice(["regular", "government", "china"]), 32 | help="Scanning scope", 33 | ) 34 | @click.option( 35 | "--bucket_region", 36 | default="US-EAST1", 37 | help="[GCP ONLY] - Bucket Region", 38 | ) 39 | @click.option( 40 | "--console_hostname", 41 | default="", 42 | help="Console Hostname", 43 | ) 44 | @click.option( 45 | "--collection_name", 46 | default="All", 47 | help="Collection to use for the scope of the rule", 48 | ) 49 | def host_auto_deploy_update(provider, aws_region_type, bucket_region, console_hostname, collection_name): 50 | """Update repository""" 51 | logging.info("API - Updating host auto-defend rule") 52 | 53 | cloud_accounts = [] 54 | 55 | credentials = pc_api.get_endpoint("credentials?cloud=true") 56 | for credential in credentials: 57 | if credential["type"] == provider and credential["useAWSRole"] is False: 58 | logging.info("API - Found Credential: %s", credential["_id"]) 59 | cloud_accounts.append(credential["_id"]) 60 | 61 | body_params = [] 62 | if cloud_accounts: 63 | logging.info("API - All cloud accounts: %s", cloud_accounts) 64 | 65 | cloud_collection = "" 66 | collections = pc_api.get_endpoint("collections") 67 | for collection in collections: 68 | if collection["name"] == collection_name: 69 | del collection["system"] 70 | del collection["prisma"] 71 | del collection["modified"] 72 | cloud_collection = collection 73 | logging.info("API - Found collection: %s", cloud_collection) 74 | 75 | for cloud_account in cloud_accounts: 76 | autodefend = {} 77 | autodefend["provider"] = provider 78 | autodefend["name"] = cloud_account 79 | autodefend["credentialID"] = cloud_account 80 | autodefend["awsRegionType"] = aws_region_type 81 | autodefend["bucketRegion"] = bucket_region 82 | autodefend["consoleHostname"] = console_hostname 83 | autodefend["collections"] = [cloud_collection] 84 | body_params.append(autodefend) 85 | else: 86 | logging.error("API - ERROR No cloud account were found. ") 87 | 88 | if body_params: 89 | logging.info("API - List of rules to be updated %s", body_params) 90 | result = pc_api.settings_host_auto_deploy_write(body=body_params) 91 | logging.info("API - Host auto-defend rule have been updated: %s", result) 92 | else: 93 | logging.error("API - Something went wrong with building the object for the policies") 94 | 95 | 96 | @click.command("create", short_help="Create only one rule and erase all the other host auto-defend rules") 97 | @click.option( 98 | "--provider", 99 | default="aws", 100 | type=click.Choice(["aws", "azure", "gcp"]), 101 | help="Cloud Service Provider", 102 | ) 103 | @click.option( 104 | "--name", 105 | help="Rule name", 106 | ) 107 | @click.option( 108 | "--credential_id", 109 | help="Cloud credentials", 110 | ) 111 | @click.option( 112 | "--aws_region_type", 113 | default="regular", 114 | type=click.Choice(["regular", "government", "china"]), 115 | help="Scanning scope", 116 | ) 117 | @click.option( 118 | "--bucket_region", 119 | default="US-EAST1", 120 | help="[GCP ONLY] - Bucket Region", 121 | ) 122 | @click.option( 123 | "--console_hostname", 124 | default="europe-west3.cloud.twistlock.com", 125 | help="Console Hostname", 126 | ) 127 | @click.option( 128 | "--collection_name", 129 | default="All", 130 | help="Collection to use for the scope of the rule", 131 | ) 132 | def host_auto_deploy_create(provider, name, credential_id, aws_region_type, bucket_region, console_hostname, collection_name): 133 | """Update repository""" 134 | logging.info("API - Updating host auto-defend rule") 135 | 136 | body_params = [] 137 | cloud_collection = "" 138 | collections = pc_api.get_endpoint("collections") 139 | for collection in collections: 140 | if collection["name"] == collection_name: 141 | del collection["system"] 142 | del collection["prisma"] 143 | del collection["modified"] 144 | cloud_collection = collection 145 | logging.info("API - Found collection: %s", cloud_collection) 146 | 147 | autodefend = {} 148 | autodefend["provider"] = provider 149 | autodefend["name"] = name 150 | autodefend["credentialID"] = credential_id 151 | autodefend["awsRegionType"] = aws_region_type 152 | autodefend["bucketRegion"] = bucket_region 153 | autodefend["consoleHostname"] = console_hostname 154 | autodefend["collections"] = [cloud_collection] 155 | body_params.append(autodefend) 156 | 157 | if body_params: 158 | logging.info("API - List of rules to be updated %s", body_params) 159 | result = pc_api.settings_host_auto_deploy_write(body=body_params) 160 | logging.info("API - Host auto-defend rule have been updated: %s", result) 161 | else: 162 | logging.error("API - Something went wrong with building the object for the policies") 163 | 164 | 165 | cli.add_command(host_auto_deploy_read) 166 | cli.add_command(host_auto_deploy_update) 167 | cli.add_command(host_auto_deploy_create) 168 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_hosts.py: -------------------------------------------------------------------------------- 1 | import click 2 | import re 3 | 4 | from prismacloud.cli import cli_output, pass_environment 5 | from prismacloud.cli.api import pc_api 6 | 7 | 8 | @click.group("hosts", short_help="[CWPP] Retrieves all host scan reports.") 9 | @pass_environment 10 | def cli(ctx): 11 | pass 12 | 13 | 14 | @click.command(name="report", help="Full report for each hosts") 15 | @click.option("--complianceids", "compliance_ids", help="Filter by compliance id.") 16 | def report(compliance_ids=""): 17 | 18 | query_param = {"sort": "complianceRiskScore", "reverse": "true"} 19 | if compliance_ids: 20 | query_param = {"complianceIDs": compliance_ids, "sort": "complianceRiskScore", "reverse": "true"} 21 | result = pc_api.hosts_list_read(query_param) 22 | cli_output(result) 23 | 24 | 25 | @click.command(name="compliances", help="Get a report for compliance issues for each hosts") 26 | def compliances(): 27 | 28 | query_param = {"sort": "complianceRiskScore", "reverse": "true"} 29 | hosts = pc_api.hosts_list_read(query_param) 30 | 31 | data = [] 32 | 33 | for host in hosts: 34 | if not host["complianceIssues"]: 35 | continue 36 | for issue in host["complianceIssues"]: 37 | 38 | # Extract compliance framework from title 39 | match = re.search(r"\(([^)]+)\)", issue["title"]) 40 | if match: 41 | compliance_framework = match.group(1) 42 | else: 43 | continue # Skip this issue if no compliance framework found 44 | 45 | data.append( 46 | { 47 | "hostname": host["hostname"], 48 | "account_id": host["cloudMetadata"]["accountID"], 49 | "collections": host["collections"], 50 | "scanTime": host["scanTime"], 51 | "complianceIssuesCount": host["complianceIssuesCount"], 52 | "complianceRiskScore": host["complianceRiskScore"], 53 | "compliance_framework": compliance_framework, 54 | "id": issue["id"], 55 | "severity": issue["severity"], 56 | "cause": issue["cause"], 57 | "description": issue["description"], 58 | "title": issue["title"], 59 | } 60 | ) 61 | 62 | cli_output(data) 63 | 64 | 65 | @click.command(name="vulnerabilities", help="Get a report for vulnerability issues for each hosts") 66 | def vulnerabilities(): 67 | 68 | query_param = {"sort": "vulnerabilityRiskScore", "reverse": "true"} 69 | hosts = pc_api.hosts_list_read(query_param) 70 | 71 | data = [] 72 | 73 | for host in hosts: 74 | if not host["vulnerabilities"]: 75 | continue 76 | for issue in host["vulnerabilities"]: 77 | data.append( 78 | { 79 | "hostname": host["hostname"], 80 | "account_id": host["cloudMetadata"]["accountID"], 81 | "collections": host["collections"], 82 | "scanTime": host["scanTime"], 83 | "vulnerabilitiesCount": host["vulnerabilitiesCount"], 84 | "vulnerabilityRiskScore": host["vulnerabilityRiskScore"], 85 | "cve": issue["cve"], 86 | "severity": issue["severity"], 87 | "cvss": issue["cvss"], 88 | "packageName": issue["packageName"], 89 | "packageVersion": issue["packageVersion"], 90 | "status": issue["status"], 91 | } 92 | ) 93 | 94 | cli_output(data) 95 | 96 | 97 | cli.add_command(report) 98 | cli.add_command(compliances) 99 | cli.add_command(vulnerabilities) 100 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_images.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.group("images", short_help="Deployed images scan reports") 8 | @pass_environment 9 | def cli(ctx): 10 | pass 11 | 12 | 13 | @click.command(name="list") 14 | @click.option("-l", "--limit") 15 | def list_(limit=50): 16 | """Deployed images scan reports""" 17 | result = pc_api.images_list_read(query_params={"limit": limit}) 18 | cli_output(result) 19 | 20 | 21 | @click.command(name="packages") 22 | @click.option("-p", "--package", default=None, help="Specify a package to filter on.") 23 | @click.option("-i", "--image-name", default=None, help="Specify an image name to filter on.") 24 | @click.option("-c", "--cluster", default=None, help="Specify a cluster to filter on.") 25 | @click.option("-l", "--limit", default=50, help="Limit the number of images to process. Default limit is 50") 26 | def packages_(package, image_name, cluster, limit): 27 | """Show deployed images package information.""" 28 | # query_params = {"limit": limit} 29 | query_params = {} 30 | if image_name: 31 | query_params["image_name"] = image_name 32 | if cluster: 33 | query_params["cluster"] = cluster 34 | 35 | images = pc_api.images_list_read(query_params=query_params) 36 | 37 | package_list = [] 38 | 39 | # Go through images 40 | for image in images: 41 | # Go through packages 42 | for pkg_group in image["packages"]: 43 | # Go through list of packages 44 | for pkg in pkg_group["pkgs"]: 45 | # Check if a specific package is specified and filter on that 46 | if package is None or package.lower() in pkg["name"].lower(): 47 | 48 | image_tag = "Unknown" 49 | if image["repoTag"] is not None: # Check if repoTag is not None 50 | image_tag = ( 51 | image["repoTag"]["registry"] + "/" + image["repoTag"]["repo"] + ":" + image["repoTag"]["tag"] 52 | ) 53 | 54 | pkg_info = { 55 | "image_name": image["instances"][0]["image"] if image["instances"] else "Unknown", 56 | "image_id": image["id"], 57 | "image_tag": image_tag, 58 | "namespace": image.get("namespaces", "Unknown"), 59 | "os_distro": image.get("installedProducts", {}).get("osDistro", "Unknown"), 60 | "package_name": pkg["name"], 61 | "package_version": pkg["version"], 62 | "package_license": pkg["license"], 63 | "package_cve_count": pkg["cveCount"], 64 | } 65 | package_list.append(pkg_info) 66 | 67 | cli_output(package_list) 68 | 69 | 70 | cli.add_command(list_) 71 | cli.add_command(packages_) 72 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_incidents.py: -------------------------------------------------------------------------------- 1 | import click 2 | import logging 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.group("incidents", short_help="Retrieves a list of incidents that are not acknowledged.") 8 | @pass_environment 9 | def cli(ctx): 10 | """Main command for interacting with incidents.""" 11 | pass 12 | 13 | 14 | @click.command(name="list") 15 | @click.option("--limit", type=int, default=50, help="Number of reports to retrieve.") 16 | @click.option("--search", type=str, help="Search term for the results.") 17 | @click.option("--sort", type=str, help="Sort key for the results.") 18 | @click.option("--reverse", is_flag=True, help="Flag to sort the results in reverse order.") 19 | @click.option("--archived", is_flag=True, help="Flag to list archived incidents.") 20 | @click.option("--host", type=str, help="Host for the incidents.") 21 | @click.option("--cluster", type=str, help="Cluster for the incidents.") 22 | @click.option("--type", type=str, help="Type of the incidents.") 23 | @click.option("--category", type=str, help="Category of the incidents.") 24 | @click.option("--collection", type=str, help="Collection for the incidents.") 25 | @click.option("--provider", type=str, help="Provider for the incidents.") 26 | @click.option("--from", "from_date", type=str, help="Starting date for the incidents.") 27 | @click.option("--to", "to_date", type=str, help="Ending date for the incidents.") 28 | def list_incidents( 29 | limit, search, sort, reverse, archived, host, cluster, type, category, collection, provider, from_date, to_date 30 | ): 31 | """List incidents based on the provided filters.""" 32 | logging.debug("Preparing to retrieve incidents") 33 | query_params = { 34 | "limit": limit, 35 | "search": search, 36 | "sort": sort, 37 | "reverse": reverse, 38 | "archived": archived, 39 | "host": host, 40 | "cluster": cluster, 41 | "type": type, 42 | "category": category, 43 | "collection": collection, 44 | "provider": provider, 45 | "from": from_date, 46 | "to": to_date, 47 | } 48 | 49 | result = pc_api.execute_compute("GET", "api/v1/audits/incidents", query_params=query_params, paginated=True) 50 | 51 | logging.debug(f"Retrieved {len(result)} incidents") 52 | cli_output(result) 53 | 54 | 55 | def handle_incidents(id, category, type, operation, all_flag): 56 | """ 57 | Function to handle incidents based on the provided arguments. 58 | 59 | :param id: ID of the incident 60 | :param category: Category of the incident 61 | :param type: Type of the incident 62 | :param operation: Operation to perform on the incident 63 | :param all_flag: Flag indicating whether to perform the operation on all incidents 64 | """ 65 | logging.debug(f"{operation.capitalize()} incidents...") 66 | changed_incidents = [] 67 | if id: 68 | logging.debug(f"Handling single incident with ID: {id}") 69 | pc_api.execute_compute( 70 | "PATCH", f"api/v1/audits/incidents/acknowledge/{id}", body_params={"acknowledged": operation == "archive"} 71 | ) 72 | else: 73 | # Get all incidents 74 | logging.debug("Retrieving all incidents...") 75 | incidents = pc_api.execute_compute("GET", "api/v1/audits/incidents", paginated=True) 76 | logging.debug(f"Retrieved {len(incidents)} incidents") 77 | 78 | unchanged_incidents_count = 0 79 | 80 | for incident in incidents: 81 | logging.debug(f"Inspecting incident with ID: {incident['_id']}") 82 | if category and incident.get("category") != category: 83 | logging.debug(f"Skipping incident {incident['_id']} due to category mismatch.") 84 | unchanged_incidents_count += 1 85 | continue 86 | if type and incident.get("type") != type: 87 | logging.debug(f"Skipping incident {incident['_id']} due to type mismatch.") 88 | unchanged_incidents_count += 1 89 | continue 90 | if "archived" in incident: 91 | if (operation == "archive" and incident["archived"]) or (operation == "restore" and not incident["archived"]): 92 | logging.debug(f"Skipping incident {incident['_id']} due to archived status.") 93 | unchanged_incidents_count += 1 94 | continue 95 | logging.debug(f"{operation.capitalize()} incident: {incident['_id']}") 96 | pc_api.execute_compute( 97 | "PATCH", 98 | f"api/v1/audits/incidents/acknowledge/{incident['_id']}", 99 | body_params={"acknowledged": operation == "archive"}, 100 | ) 101 | changed_incidents.append(incident) 102 | 103 | logging.debug(f"Number of changed incidents: {len(changed_incidents)}") 104 | logging.debug(f"Number of unchanged incidents: {unchanged_incidents_count}") 105 | 106 | logging.debug(f"Finished {operation} incidents.") 107 | 108 | result = changed_incidents 109 | cli_output(result) 110 | 111 | 112 | @click.command(name="archive") 113 | @click.option("--id", type=str, help="ID of the incident to archive.") 114 | @click.option("--category", type=str, help="Category of the incidents to archive.") 115 | @click.option("--type", type=str, help="Type of the incidents to archive.") 116 | @click.option("--all", "all_flag", is_flag=True, help="Flag to archive all incidents.") 117 | def archive_incidents(id, category, type, all_flag): 118 | """Archive incidents based on the provided arguments.""" 119 | if not any([id, category, type, all_flag]): 120 | logging.error("Please provide an option or use --all to archive all incidents.") 121 | return 122 | handle_incidents(id, category, type, "archive", all_flag) 123 | 124 | 125 | @click.command(name="restore") 126 | @click.option("--id", type=str, help="ID of the incident to restore.") 127 | @click.option("--category", type=str, help="Category of the incidents to restore.") 128 | @click.option("--type", type=str, help="Type of the incidents to restore.") 129 | @click.option("--all", "all_flag", is_flag=True, help="Flag to restore all incidents.") 130 | def restore_incidents(id, category, type, all_flag): 131 | """Restore incidents based on the provided arguments.""" 132 | if not any([id, category, type, all_flag]): 133 | logging.error("Please provide an option or use --all to restore all incidents.") 134 | return 135 | handle_incidents(id, category, type, "restore", all_flag) 136 | 137 | 138 | cli.add_command(list_incidents) 139 | cli.add_command(archive_incidents) 140 | cli.add_command(restore_incidents) 141 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_intelligence.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.command("intelligence", short_help="[CWPP] Output details about the intelligence stream") 8 | @pass_environment 9 | def cli(ctx): 10 | result = pc_api.statuses_intelligence() 11 | cli_output(result) 12 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_license.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.command("license", short_help="[CWPP] Returns the license stats including the credit per defender") 8 | @pass_environment 9 | def cli(ctx): 10 | result = pc_api.get_endpoint("stats/license") 11 | cli_output(result) 12 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_logs.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import click 3 | 4 | from prismacloud.cli import cli_output, pass_environment 5 | from prismacloud.cli.api import pc_api 6 | 7 | 8 | @click.group("logs", short_help="[CWPP] Retrieve logs for Prisma Cloud") 9 | @pass_environment 10 | def cli(ctx): 11 | pass 12 | 13 | 14 | @click.command() 15 | @click.option("-l", "--limit", default=150, help="Number of documents to return") 16 | @click.option("--hostname", help="Defender hostname", required=True) 17 | def defender(limit=150, hostname=""): 18 | result = pc_api.get_endpoint("logs/defender", {"lines": limit, "hostname": hostname}) 19 | cli_output(result) 20 | 21 | 22 | @click.command() 23 | @click.option("-l", "--limit", default=150, help="Number of documents to return") 24 | def console(limit=150): 25 | result = pc_api.get_endpoint("logs/console", {"lines": limit}) 26 | cli_output(result) 27 | 28 | 29 | @click.command() 30 | @click.option( 31 | "-t", 32 | "--type", 33 | "type_", 34 | type=click.Choice(["login", "profile", "settings", "rule", "user", "group", "credential", "tag"], case_sensitive=True), 35 | help="Type of log to retrieve", 36 | required=False, 37 | ) 38 | @click.option("-h", "--hours", default=1, help="Show results for last n hours") 39 | def audit(type_="", hours=1): 40 | # Calculate utc time since x hours ago (default 1) 41 | utc_time = datetime.datetime.utcnow() - datetime.timedelta(hours=hours) 42 | from_ = utc_time 43 | 44 | # Convert from_ to isoformat and add a Z at the end 45 | from_ = from_.isoformat() + "Z" 46 | 47 | result = pc_api.get_endpoint("audits/mgmt", {"type": type_, "from": from_, "reverse": "true", "sort": "time"}) 48 | cli_output(result) 49 | 50 | 51 | @click.command() 52 | @click.option("--filename", help="Download filename (Default: /tmp/agentless_logs.tgz)", default="/tmp/agentless_logs.tgz") 53 | def agentless(filename=""): 54 | result = pc_api.agentless_logs_read() 55 | with open(filename, "wb") as download: 56 | download.write(result) 57 | cli_output({"Download File Name": filename}) 58 | 59 | 60 | cli.add_command(console) 61 | cli.add_command(defender) 62 | cli.add_command(audit) 63 | cli.add_command(agentless) 64 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_monitor.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.group("monitor", short_help="[CWPP] Retrieves monitor data") 8 | @pass_environment 9 | def cli(ctx): 10 | pass 11 | 12 | 13 | @click.command() 14 | @click.option("--complianceids", "compliance_ids", help="Filter by compliance id.") 15 | def compliance(compliance_ids=""): 16 | result_hosts = pc_api.get_endpoint( 17 | "hosts", {"complianceIDs": compliance_ids, "sort": "complianceIssuesCount", "reverse": "true"} 18 | ) 19 | result_containers = pc_api.get_endpoint( 20 | "containers", {"complianceIDs": compliance_ids, "sort": "info.complianceIssuesCount", "reverse": "true"} 21 | ) 22 | result_serverless = pc_api.get_endpoint( 23 | "serverless", {"complianceIDs": compliance_ids, "sort": "complianceIssuesCount", "reverse": "true"} 24 | ) 25 | 26 | # Concatenate results 27 | result = result_hosts + result_containers + result_serverless 28 | cli_output(result) 29 | 30 | 31 | cli.add_command(compliance) 32 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_policies.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.group("policies", short_help="[CWPP] Retrieve policies for the resources protected by Prisma Cloud") 8 | @pass_environment 9 | def cli(ctx): 10 | pass 11 | 12 | 13 | @click.command() 14 | @click.option("--field", default="") 15 | def runtimecontainer(field=""): 16 | result = pc_api.get_endpoint("policies/runtime/container") 17 | 18 | if field == "": 19 | cli_output(result) 20 | else: 21 | # We have field as input to select a deeper level of data. 22 | # Our main result returns data on the query and the results are in one of the main field. 23 | # This option gives the ability to retrieve that data. 24 | field_path = field.split(".") 25 | for _field in field_path: 26 | result = result[_field] 27 | 28 | cli_output(result) 29 | 30 | 31 | cli.add_command(runtimecontainer) 32 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_registry.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.group("registry", short_help="[CWPP] Scan reports for images in your registry.") 8 | @pass_environment 9 | def cli(ctx): 10 | pass 11 | 12 | 13 | @click.command("images", short_help="Retrieves registry image scan reports.") 14 | @click.option("--field", default="") 15 | def images(field=""): 16 | result = pc_api.registry_list_read("registry") 17 | 18 | if field == "": 19 | cli_output(result) 20 | else: 21 | # We have field as input to select a deeper level of data. 22 | # Our main result returns data on the query and the results are in one of the main field. 23 | # This option gives the ability to retrieve that data. 24 | field_path = field.split(".") 25 | for _field in field_path: 26 | result = result[_field] 27 | 28 | cli_output(result) 29 | 30 | 31 | @click.command("list", short_help="Retrieves the list of registries Prisma Cloud is configured to scan. ") 32 | @click.option( 33 | "--include", 34 | "-i", 35 | multiple=True, 36 | help="Include a registry, a repository, or both for the scan. \ 37 | Format: registry_name/repo_name, registry_name, or repo_name", 38 | default=[], 39 | ) 40 | @click.option( 41 | "--exclude", 42 | "-e", 43 | multiple=True, 44 | help="Exclude a registry, a repository, or both from the scan. \ 45 | Format: registry_name/repo_name, registry_name, or repo_name", 46 | default=[], 47 | ) 48 | def list(include, exclude): 49 | registries_to_scan = [] 50 | registries = pc_api.settings_registry_read() 51 | for registry_item in registries["specifications"]: 52 | registries_to_scan.append( 53 | {"registry": registry_item["registry"], "repo": registry_item["repository"], "tag": registry_item["tag"]} 54 | ) 55 | if exclude: 56 | # Later in your main function, when processing registries to scan: 57 | excluded_registries = [normalize_registry_name(e) for e in exclude] # Prepare the list of normalized exclusions 58 | registries_to_scan = [r for r in registries_to_scan if not is_excluded(r, excluded_registries)] 59 | 60 | # Filter registries based on inclusion criteria if any are specified 61 | if include: 62 | registries_to_scan = [r for r in registries_to_scan if is_included(r, include)] 63 | 64 | cli_output(registries_to_scan) 65 | 66 | 67 | @click.command("scan", short_help="Trigger the scan of registries.") 68 | @click.option( 69 | "--include", 70 | "-i", 71 | multiple=True, 72 | help="Include a registry, a repository, or both for the scan. \ 73 | Format: registry_name/repo_name, registry_name, or repo_name", 74 | default=[], 75 | ) 76 | @click.option( 77 | "--exclude", 78 | "-e", 79 | multiple=True, 80 | help="Exclude a registry, a repository, or both from the scan. \ 81 | Format: registry_name/repo_name, registry_name, or repo_name", 82 | default=[], 83 | ) 84 | def scan(include, exclude): 85 | registries_to_scan = [] 86 | registries = pc_api.settings_registry_read() 87 | for registry_item in registries["specifications"]: 88 | registries_to_scan.append( 89 | {"registry": registry_item["registry"], "repo": registry_item["repository"], "tag": registry_item["tag"]} 90 | ) 91 | if exclude: 92 | # Later in your main function, when processing registries to scan: 93 | excluded_registries = [normalize_registry_name(e) for e in exclude] # Prepare the list of normalized exclusions 94 | registries_to_scan = [r for r in registries_to_scan if not is_excluded(r, excluded_registries)] 95 | 96 | # Filter registries based on inclusion criteria if any are specified 97 | if include: 98 | registries_to_scan = [r for r in registries_to_scan if is_included(r, include)] 99 | 100 | # Transform each registry to the new format 101 | payload = [{"tag": {"registry": r["registry"], "repo": r["repo"], "tag": r["tag"]}} for r in registries_to_scan] 102 | 103 | pc_api.registry_scan_select(body_params=payload) 104 | cli_output(registries_to_scan) 105 | 106 | 107 | def normalize_registry_name(registry_name): 108 | """Strip 'https://' prefix and trailing slash from registry names for consistent comparison.""" 109 | normalized_name = registry_name 110 | if normalized_name.startswith("https://"): 111 | normalized_name = normalized_name[len("https://"):] 112 | if normalized_name.endswith("/"): 113 | normalized_name = normalized_name[:-1] 114 | return normalized_name.lower() # Consider lowercasing to make comparison case-insensitive 115 | 116 | 117 | def is_excluded(registry, exclusions): 118 | """Check if a given registry should be excluded based on the exclusions list.""" 119 | for exclusion in exclusions: 120 | exclusion_normalized = normalize_registry_name(exclusion) 121 | registry_normalized = normalize_registry_name(registry["registry"]) 122 | repo_normalized = registry["repo"].lower() 123 | # Check if the exclusion matches the registry or repository name 124 | if exclusion_normalized in registry_normalized or exclusion_normalized in repo_normalized: 125 | return True 126 | return False 127 | 128 | 129 | def is_included(registry, inclusions): 130 | """Check if a given registry should be included based on the inclusions list.""" 131 | # If no inclusions are specified, assume everything is included by default 132 | if not inclusions: 133 | return True 134 | 135 | for inclusion in inclusions: 136 | inclusion_normalized = normalize_registry_name(inclusion) 137 | registry_normalized = normalize_registry_name(registry["registry"]) 138 | repo_normalized = registry["repo"].lower() 139 | 140 | # Split inclusion criteria in case it specifies both registry and repo 141 | if "/" in inclusion_normalized: 142 | inclusion_parts = inclusion_normalized.split("/", 1) 143 | inclusion_registry = inclusion_parts[0] 144 | inclusion_repo = inclusion_parts[1] 145 | 146 | # Check if both registry and repo match the inclusion criteria 147 | if inclusion_registry in registry_normalized and inclusion_repo in repo_normalized: 148 | return True 149 | else: 150 | # If the inclusion criteria is only one part, check both registry and repo for a match 151 | if inclusion_normalized in registry_normalized or inclusion_normalized in repo_normalized: 152 | return True 153 | 154 | # If none of the inclusions match, return False 155 | return False 156 | 157 | 158 | cli.add_command(images) 159 | cli.add_command(list) 160 | cli.add_command(scan) 161 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_scans.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.command("scans", short_help="[CWPP] Retrieves scan reports for images scanned by the Jenkins plugin or twistcli") 8 | @click.option("-l", "--limit", help="Number of documents to return") 9 | @click.option("-s", "--search", help="Search term") 10 | @pass_environment 11 | def cli(ctx, limit, search): 12 | result = pc_api.get_endpoint("scans", {"limit": limit, "search": search, "sort": "time", "reverse": "true"}) 13 | cli_output(result) 14 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_serverless_auto_deploy.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import click 4 | 5 | from prismacloud.cli import cli_output, pass_environment 6 | from prismacloud.cli.api import pc_api 7 | 8 | 9 | @click.group("serverless_auto_deploy", short_help="[CSPM] Create serverless defend rules") 10 | @pass_environment 11 | def cli(ctx): 12 | pass 13 | 14 | 15 | @click.command("list", short_help="[CSPM] List serverless auto defend policies.") 16 | def serverless_auto_deploy_read(): 17 | result = pc_api.settings_serverless_auto_deploy_read() 18 | cli_output(result) 19 | 20 | 21 | @click.command("update", short_help="Update serverless auto defend rules based on the cloud accounts onboarded") 22 | @click.option( 23 | "--provider", 24 | default="aws", 25 | type=click.Choice(["aws", "azure", "gcp"]), 26 | help="Cloud Service Provider", 27 | ) 28 | @click.option( 29 | "--aws_region_type", 30 | default="regular", 31 | type=click.Choice(["regular", "government", "china"]), 32 | help="Scanning scope", 33 | ) 34 | @click.option( 35 | "--collection_name", 36 | default="All", 37 | help="Collection to use for the scope of the rule", 38 | ) 39 | @click.option( 40 | "--runtimes", 41 | "-r", 42 | type=click.Choice(["python3.6", "python3.7", "python3.8", "python3.9", "ruby2.7", "nodejs12.x", "nodejs14.x"]), 43 | multiple=True, 44 | help="Runtime to select", 45 | ) 46 | def serverless_auto_deploy_update(provider, aws_region_type, collection_name, runtimes): 47 | """Update repository""" 48 | logging.info("API - Updating serverless auto-defend rule") 49 | 50 | cloud_accounts = [] 51 | 52 | credentials = pc_api.get_endpoint("credentials?cloud=true") 53 | for credential in credentials: 54 | if credential["type"] == provider and credential["useAWSRole"] is False: 55 | logging.info("API - Found Credential: %s", credential["_id"]) 56 | cloud_accounts.append(credential["_id"]) 57 | 58 | body_params = [] 59 | if cloud_accounts: 60 | logging.info("API - All cloud accounts: %s", cloud_accounts) 61 | 62 | cloud_collection = "" 63 | collections = pc_api.get_endpoint("collections") 64 | for collection in collections: 65 | if collection["name"] == collection_name: 66 | del collection["system"] 67 | del collection["prisma"] 68 | del collection["modified"] 69 | cloud_collection = collection 70 | logging.info("API - Found collection: %s", cloud_collection) 71 | 72 | for cloud_account in cloud_accounts: 73 | autodefend = {} 74 | autodefend["provider"] = provider 75 | autodefend["name"] = cloud_account 76 | autodefend["credentialID"] = cloud_account 77 | autodefend["awsRegionType"] = aws_region_type 78 | autodefend["collections"] = [cloud_collection] 79 | autodefend["runtimes"] = runtimes 80 | body_params.append(autodefend) 81 | else: 82 | logging.error("API - ERROR No cloud account were found. ") 83 | 84 | if body_params: 85 | logging.info("API - List of rules to be updated %s", body_params) 86 | result = pc_api.settings_serverless_auto_deploy_write(body=body_params) 87 | logging.info("API - serverless auto-defend rule have been updated: %s", result) 88 | else: 89 | logging.error("API - Something went wrong with building the object for the policies") 90 | 91 | 92 | @click.command("create", short_help="Create only one rule and erase all the other serverless auto-defend rules") 93 | @click.option( 94 | "--provider", 95 | default="aws", 96 | type=click.Choice(["aws", "azure", "gcp"]), 97 | help="Cloud Service Provider", 98 | ) 99 | @click.option( 100 | "--name", 101 | help="Rule name", 102 | ) 103 | @click.option( 104 | "--credential_id", 105 | help="Cloud credentials", 106 | ) 107 | @click.option( 108 | "--aws_region_type", 109 | default="regular", 110 | type=click.Choice(["regular", "government", "china"]), 111 | help="Scanning scope", 112 | ) 113 | @click.option( 114 | "--collection_name", 115 | default="All", 116 | help="Collection to use for the scope of the rule", 117 | ) 118 | @click.option( 119 | "--runtimes", 120 | "-r", 121 | type=click.Choice(["python3.6", "python3.7", "python3.8", "python3.9", "ruby2.7", "nodejs12.x", "nodejs14.x"]), 122 | multiple=True, 123 | help="Runtime to select", 124 | ) 125 | def serverless_auto_deploy_create(provider, name, credential_id, aws_region_type, collection_name, runtimes): 126 | """Update repository""" 127 | logging.info("API - Updating serverless auto-defend rule") 128 | 129 | body_params = [] 130 | cloud_collection = "" 131 | collections = pc_api.get_endpoint("collections") 132 | for collection in collections: 133 | if collection["name"] == collection_name: 134 | del collection["system"] 135 | del collection["prisma"] 136 | del collection["modified"] 137 | cloud_collection = collection 138 | logging.info("API - Found collection: %s", cloud_collection) 139 | 140 | autodefend = {} 141 | autodefend["provider"] = provider 142 | autodefend["name"] = name 143 | autodefend["credentialID"] = credential_id 144 | autodefend["collections"] = [cloud_collection] 145 | autodefend["runtimes"] = runtimes 146 | body_params.append(autodefend) 147 | 148 | if body_params: 149 | logging.info("API - List of rules to be updated %s", body_params) 150 | result = pc_api.settings_serverless_auto_deploy_write(body=body_params) 151 | logging.info("API - serverless auto-defend rule have been updated: %s", result) 152 | else: 153 | logging.error("API - Something went wrong with building the object for the policies") 154 | 155 | 156 | cli.add_command(serverless_auto_deploy_read) 157 | cli.add_command(serverless_auto_deploy_update) 158 | cli.add_command(serverless_auto_deploy_create) 159 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_settings.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.command("settings", short_help="[CWPP] Shows CWPP settings.") 8 | @pass_environment 9 | def cli(ctx): 10 | result = pc_api.get_endpoint("settings/defender") 11 | cli_output(result) 12 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_stats.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import click 4 | 5 | from prismacloud.cli import cli_output, pass_environment 6 | from prismacloud.cli.api import pc_api 7 | 8 | 9 | @click.group("stats", short_help="[CWPP] Retrieve statistics for the resources protected by Prisma Cloud") 10 | @pass_environment 11 | def cli(ctx): 12 | pass 13 | 14 | 15 | @click.command() 16 | def daily(): 17 | result = pc_api.stats_daily_read() 18 | cli_output(result) 19 | 20 | 21 | @click.command() 22 | def dashboard(): 23 | result = pc_api.stats_trends_read() 24 | cli_output(result) 25 | 26 | 27 | @click.command() 28 | def events(): 29 | result = pc_api.stats_events_read("") 30 | cli_output(result) 31 | 32 | 33 | @click.command(name="license") 34 | def license_stats(): 35 | result = pc_api.stats_license_read() 36 | cli_output(result) 37 | 38 | 39 | @click.command() 40 | @click.option("-cve", "--cve") 41 | @click.option("-collection", "--collection") 42 | @click.option( 43 | "--severity", 44 | "-s", 45 | type=click.Choice( 46 | [ 47 | "low", 48 | "medium", 49 | "high", 50 | "critical", 51 | ] 52 | ), 53 | help="Retrieves a list of vulnerabilities (CVEs) that matches the specified value of the severity threshold or higher.", 54 | ) 55 | @click.option( 56 | "--cvss", 57 | help="CVSS Threshold is the minimum CVSS score.", 58 | ) 59 | @click.option( 60 | "--resource-type", 61 | "-rt", 62 | type=click.Choice( 63 | [ 64 | "images", 65 | "hosts", 66 | "registryImages", 67 | "containers", 68 | "functions", 69 | "all", 70 | ] 71 | ), 72 | multiple=True, 73 | default=["images"], 74 | help="Specify the resource types to search for vulnerabilities. Use 'all' to include all types.", 75 | ) 76 | @click.option("-l", "--limit", default=10, help="Number of top vulnerabilities to search. Max is 100.") 77 | def vulnerabilities(cve, collection, severity, cvss, resource_type, limit): 78 | if "all" in resource_type: 79 | resource_type = ["images", "hosts", "registryImages", "containers", "functions"] 80 | 81 | logging.debug(f"Searching for {resource_type} resoursce type") 82 | 83 | if not cve and not (cvss or severity): 84 | result = pc_api.stats_vulnerabilities_read({"limit": limit, "offset": 0, "collections": collection}) 85 | result = result[0] 86 | return cli_output(result) 87 | 88 | elif not cve and (cvss and severity): 89 | logging.debug("CVSS to search for: {cvss} with Severity: {severity}") 90 | results = pc_api.stats_vulnerabilities_read( 91 | {"limit": limit, "offset": 0, "severityThreshold": severity, "cvssThreshold": cvss} 92 | ) 93 | return cli_output(process_vulnerability_results(results, resource_type)) 94 | 95 | elif not cve and cvss: 96 | logging.debug("CVSS to search for: {cvss}") 97 | results = pc_api.stats_vulnerabilities_read({"limit": limit, "offset": 0, "cvssThreshold": cvss}) 98 | 99 | return cli_output(process_vulnerability_results(results, resource_type)) 100 | 101 | elif not cve and severity: 102 | logging.debug("Severity to search for: {severity}") 103 | results = pc_api.stats_vulnerabilities_read({"limit": limit, "offset": 0, "severityThreshold": severity}) 104 | return cli_output(process_vulnerability_results(results, resource_type)) 105 | 106 | elif cve: 107 | logging.debug("CVE to search for: {cve}") 108 | results = pc_api.stats_vulnerabilities_read({"limit": limit, "offset": 0, "cve": cve}) 109 | return cli_output(process_vulnerability_results(results, resource_type)) 110 | 111 | 112 | def process_vulnerability_results(results, resource_type): 113 | image_data = [] 114 | tags = pc_api.tags_list_read() 115 | for result in results: 116 | for key in resource_type: 117 | if key in result and "vulnerabilities" in result[key]: 118 | vulnerabilities = result[key]["vulnerabilities"] 119 | with click.progressbar(vulnerabilities) as vulnerabilities_bar: 120 | for vulnerability in vulnerabilities_bar: 121 | logging.debug(f"Found CVE {vulnerability['cve']} from {vulnerability['impactedResourceType']}") 122 | image_data = search_impacted_resource_per_cve(vulnerability, tags, image_data) 123 | return image_data 124 | 125 | 126 | def search_impacted_resource_per_cve(vulnerability, tags, image_data): 127 | resources = pc_api.stats_vulnerabilities_impacted_resoures_read( 128 | {"cve": vulnerability["cve"], "resourceType": vulnerability["impactedResourceType"]} 129 | ) 130 | 131 | # Function to create image_info with optional tag name 132 | def add_prisma_cloud_tags(base_info, tags): 133 | for tag in tags: 134 | if "vulns" in tag and tag["vulns"]: 135 | for tag_vuln in tag["vulns"]: 136 | if vulnerability["cve"] == tag_vuln.get("id") and "resourceType" not in tag_vuln: 137 | logging.debug( 138 | f"=================> CVE {vulnerability['cve']} has a tag named {tag['name']} for all resourceType" 139 | ) 140 | base_info["prima_cloud_tag"] = tag["name"] 141 | base_info["prima_cloud_tag_comment"] = tag_vuln.get("comment") 142 | elif vulnerability["cve"] == tag_vuln.get("id") and vulnerability["impactedResourceType"] == tag_vuln.get( 143 | "resourceType" 144 | ): 145 | logging.debug(f"=================> CVE {vulnerability['cve']} has a tag named {tag['name']}") 146 | base_info["prima_cloud_tag"] = tag["name"] 147 | base_info["prima_cloud_tag_comment"] = tag_vuln.get("comment") 148 | 149 | return base_info 150 | 151 | if "registryImages" in resources: 152 | for image in resources["registryImages"]: 153 | image_info = add_prisma_cloud_tags( 154 | { 155 | "type": "registry_image", 156 | "cve": vulnerability["cve"], 157 | "resourceID": image["resourceID"], 158 | "packages": image["packages"], 159 | "risk_score": vulnerability["riskScore"], 160 | "impacted_packages": vulnerability["impactedPkgs"], 161 | "cve_description": vulnerability["description"], 162 | }, 163 | tags, 164 | ) 165 | logging.debug(f"Image info: {image_info}") 166 | image_data.append(image_info) 167 | 168 | if "images" in resources: 169 | for image in resources["images"]: 170 | for container in image["containers"]: 171 | image_info = add_prisma_cloud_tags( 172 | { 173 | "type": "deployed_image", 174 | "cve": vulnerability["cve"], 175 | "resourceID": image["resourceID"], 176 | "image": container.get("image", "na"), 177 | "imageID": container.get("imageID", "na"), 178 | "container": container.get("container", "na"), 179 | "host": container.get("host", "na"), 180 | "namespace": container.get("namespace", "na"), 181 | "factors": container["factors"], 182 | "packages": image["packages"], 183 | "risk_score": vulnerability["riskScore"], 184 | "impacted_packages": vulnerability["impactedPkgs"], 185 | "cve_description": vulnerability["description"], 186 | }, 187 | tags, 188 | ) 189 | logging.debug(f"Image info: {image_info} -- Container: {container}") 190 | image_data.append(image_info) 191 | 192 | if "hosts" in resources: 193 | for host in resources["hosts"]: 194 | host_info = add_prisma_cloud_tags( 195 | { 196 | "type": "host", 197 | "cve": vulnerability["cve"], 198 | "resourceID": host["resourceID"], 199 | "packages": host["packages"], 200 | "risk_score": vulnerability["riskScore"], 201 | "impacted_packages": vulnerability["impactedPkgs"], 202 | "cve_description": vulnerability["description"], 203 | }, 204 | tags, 205 | ) 206 | image_data.append(host_info) 207 | 208 | if "functions" in resources: 209 | for function in resources["functions"]: 210 | function_info = add_prisma_cloud_tags( 211 | { 212 | "type": "function", 213 | "cve": vulnerability["cve"], 214 | "resourceID": function["resourceID"], 215 | "function_details": function["functionDetails"], 216 | "packages": function["packages"], 217 | "risk_score": vulnerability["riskScore"], 218 | "impacted_packages": vulnerability["impactedPkgs"], 219 | "cve_description": vulnerability["description"], 220 | }, 221 | tags, 222 | ) 223 | image_data.append(function_info) 224 | 225 | return image_data 226 | 227 | 228 | cli.add_command(daily) 229 | cli.add_command(dashboard) 230 | cli.add_command(events) 231 | cli.add_command(license_stats) 232 | cli.add_command(vulnerabilities) 233 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_tags.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.command("tags", short_help="[CWPP] Retrieves a list of tags") 8 | @pass_environment 9 | def cli(ctx): 10 | result = pc_api.get_endpoint("tags") 11 | cli_output(result) 12 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_users.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.command("users", short_help="[CWPP] Retrieves a list of all users") 8 | @pass_environment 9 | def cli(ctx): 10 | result = pc_api.get_endpoint("users") 11 | cli_output(result) 12 | -------------------------------------------------------------------------------- /prismacloud/cli/cwpp/cmd_version.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.command("version", short_help="[CWPP] Shows CWPP version.") 8 | @pass_environment 9 | def cli(ctx): 10 | compute_version = pc_api.get_endpoint("version", api="cwpp") 11 | version = {"cwpp_version": compute_version} 12 | cli_output(version) 13 | -------------------------------------------------------------------------------- /prismacloud/cli/pccs/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PaloAltoNetworks/prismacloud-cli/ad41f2754cc53b49d2dd3ae6aeca67b8b76857c2/prismacloud/cli/pccs/__init__.py -------------------------------------------------------------------------------- /prismacloud/cli/pccs/cmd_reviews.py: -------------------------------------------------------------------------------- 1 | import click 2 | 3 | from prismacloud.cli import cli_output, pass_environment 4 | from prismacloud.cli.api import pc_api 5 | 6 | 7 | @click.group("reviews", short_help="[APPSEC] Get Code review runs data") 8 | @pass_environment 9 | def cli(ctx): 10 | pass 11 | 12 | 13 | @click.command("list", short_help="List Code review runs data") 14 | def list_codereviews(): 15 | result = pc_api.get_endpoint("code/api/v1/development-pipeline/code-review/runs/data", api="code") 16 | cli_output(result) 17 | 18 | 19 | cli.add_command(list_codereviews) 20 | -------------------------------------------------------------------------------- /prismacloud/cli/pccs/cmd_suppressions.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import datetime 3 | import click 4 | 5 | from prismacloud.cli import cli_output, pass_environment 6 | from prismacloud.cli.api import pc_api 7 | 8 | 9 | @click.group("suppressions", short_help="[APPSEC] List suppression rules") 10 | @pass_environment 11 | def cli(ctx): 12 | pass 13 | 14 | 15 | @click.command("list", short_help="List suppression rules") 16 | def list_suppressions(): 17 | """List suppression rules""" 18 | suppressions = pc_api.suppressions_list_read() 19 | cli_output(suppressions) 20 | 21 | 22 | @click.command("justifications", short_help="Get suppressions justifications for all policy id and accounts") 23 | def list_justifications(): 24 | """Get suppressions justifications for all policy id and accounts""" 25 | data = [] 26 | suppressions = pc_api.suppressions_list_read() 27 | for suppression in suppressions: 28 | logging.info("Get policy ID: %s", suppression["id"]) 29 | if "resources" in suppression: 30 | accounts = [] 31 | for account in suppression["resources"]: 32 | accounts.append(account["accountId"]) 33 | 34 | query_params = { 35 | "accounts": accounts, 36 | } 37 | justifications = pc_api.suppressions_justifications_list_read(suppression["policyId"], query_params=query_params) 38 | for justification in justifications: 39 | if "resources" in justification and "origin" in justification: 40 | data = data + [ 41 | { 42 | "accounts": accounts, 43 | "resources": justification["resources"], 44 | "active": justification["active"], 45 | "comment": justification["comment"], 46 | "date": justification["date"], 47 | "suppressionType": justification["suppressionType"], 48 | "violationId": justification["violationId"], 49 | "origin": justification["origin"], 50 | "type": justification["type"], 51 | "customer": justification["customer"], 52 | "id": justification["id"], 53 | "policyId": suppression["policyId"], 54 | } 55 | ] 56 | 57 | cli_output(data) 58 | 59 | 60 | @click.command("create", short_help="Create new suppression") 61 | @click.option( 62 | "-i", 63 | "--integration-type", 64 | type=click.Choice( 65 | [ 66 | "Github", 67 | "Bitbucket", 68 | "Gitlab", 69 | "AzureRepos", 70 | "cli", 71 | "AWS", 72 | "Azure", 73 | "GCP", 74 | "Docker", 75 | "githubEnterprise", 76 | "gitlabEnterprise", 77 | "bitbucketEnterprise", 78 | "terraformCloud", 79 | "githubActions", 80 | "circleci", 81 | "codebuild", 82 | "jenkins", 83 | "tfcRunTasks", 84 | "admissionController", 85 | "terraformEnterprise", 86 | ] 87 | ), 88 | required=True, 89 | help="Type of the integration to update", 90 | ) 91 | @click.option("-r", "--repository", required=True, help="Repository Name. e.g.: 'SimOnPanw/my-terragoat'") 92 | @click.option("-f", "--files", multiple=True, help="File Name. Can specify multiple. e.g.: '-f s3.tf -f sns.tf'") 93 | def create(integration_type, repository, files): 94 | """Create new suppression""" 95 | data = [] 96 | current_date = datetime.datetime.now().strftime("%Y-%m-%d %H:%M") 97 | parameters = {} 98 | parameters["sourceTypes"] = [integration_type] 99 | parameters["repository"] = repository 100 | 101 | # Get all the files that contain errors in them 102 | all_repo_file_error_summaries = pc_api.errors_files_list(criteria=parameters)["data"] 103 | 104 | for file_summary in all_repo_file_error_summaries: 105 | # Process all files if no specific files are provided, 106 | # or just the specific ones if they are provided and match the current file path 107 | if not files or any(file_summary["filePath"].endswith(f) for f in files): 108 | if files: # This check ensures we only log for specific files, not all files 109 | logging.info(f"Parsing this specific files: {file_summary['filePath']}") 110 | 111 | parameters["filePath"] = file_summary["filePath"] 112 | parameters["types"] = ["Errors"] 113 | 114 | impacted_files = pc_api.errors_file_list(criteria=parameters) 115 | for error_in_file in impacted_files: 116 | resource_id = f"{error_in_file['errorId']}::{repository}::{error_in_file['resourceId']}" 117 | body_data = { 118 | "comment": f"{current_date} - Suppressed via Prisma Cloud CLI.", 119 | "origin": "Platform", 120 | "resources": {"id": resource_id, "accountId": repository}, 121 | "suppressionType": "Resources", 122 | } 123 | try: 124 | pc_api.suppressions_create(error_in_file["errorId"], body_data) 125 | data = data + [ 126 | { 127 | "action": "Suppresed by Policy", 128 | "policy": error_in_file["errorId"], 129 | "repository": repository, 130 | "file": error_in_file["resourceId"], 131 | "comment": f"{current_date} - Suppressed via Prisma Cloud CLI.", 132 | } 133 | ] 134 | logging.info(f"Suppression created for {error_in_file['resourceId']} in repository {repository}") 135 | except Exception as e: 136 | logging.error(f"An error occurred while creating suppression: {e}") 137 | data = data + [ 138 | { 139 | "action": "Error during suppression", 140 | "policy": error_in_file["errorId"], 141 | "repository": repository, 142 | "file": error_in_file["resourceId"], 143 | "comment": f"{current_date} - Suppressed via Prisma Cloud CLI.", 144 | } 145 | ] 146 | 147 | cli_output(data) 148 | 149 | 150 | cli.add_command(create) 151 | cli.add_command(list_suppressions) 152 | cli.add_command(list_justifications) 153 | -------------------------------------------------------------------------------- /prismacloud/cli/version.py: -------------------------------------------------------------------------------- 1 | version = "0.8.11" 2 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | markers = 3 | check_env_vars_or_credentials_file: Mark a test to check for the presence of environment variables or the credentials file. 4 | 5 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | api-client 2 | click 3 | click_completion 4 | click_help_colors 5 | colorama 6 | coloredlogs 7 | datetime 8 | jsondiff 9 | pandas 10 | prismacloud-api==5.2.24 11 | pydantic-settings 12 | pydantic 13 | requests 14 | tabulate 15 | update_checker 16 | pyyaml 17 | pytest 18 | pytest-benchmark 19 | -------------------------------------------------------------------------------- /results.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PaloAltoNetworks/prismacloud-cli/ad41f2754cc53b49d2dd3ae6aeca67b8b76857c2/results.png -------------------------------------------------------------------------------- /screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PaloAltoNetworks/prismacloud-cli/ad41f2754cc53b49d2dd3ae6aeca67b8b76857c2/screenshot.png -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = prismacloud-cli 3 | 4 | [options] 5 | packages = prismacloud_cli 6 | 7 | [flake8] 8 | max-line-length = 127 9 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | from importlib import util 3 | 4 | from setuptools import find_namespace_packages, setup 5 | 6 | # Utility function to read the README file. 7 | # Used for the long_description. It's nice, because now 1) we have a top level 8 | # README file and 2) it's easier to type in the README file than to put a raw 9 | # string in below ... 10 | 11 | 12 | def read(fname): 13 | return open(os.path.join(os.path.dirname(__file__), fname)).read() 14 | 15 | 16 | spec = util.spec_from_file_location("prismacloud.cli.version", os.path.join("prismacloud", "cli", "version.py")) 17 | 18 | # noinspection PyUnresolvedReferences 19 | mod = util.module_from_spec(spec) 20 | spec.loader.exec_module(mod) # type: ignore 21 | version = mod.version # type: ignore 22 | 23 | setup( 24 | extras_require={}, 25 | install_requires=[ 26 | "api-client", 27 | "click", 28 | "click_completion", 29 | "click_help_colors", 30 | "coloredlogs", 31 | "jsondiff", 32 | "pandas", 33 | "requests", 34 | "tabulate", 35 | "colorama", 36 | "update_checker", 37 | "pydantic-settings", 38 | "pydantic", 39 | "datetime", 40 | "pyyaml", 41 | "prismacloud-api==5.2.24", 42 | "pytest", 43 | "pytest-benchmark", 44 | ], 45 | name="prismacloud-cli", 46 | version=version, 47 | python_requires=">=3.7", 48 | author="Steven de Boer, Simon Melotte, Tom Kishel", 49 | author_email="stdeboer@paloaltonetworks.com, smelotte@paloaltonetworks.com, tkishel@paloaltonetworks.com", 50 | description=("Prisma Cloud CLI"), 51 | license="BSD", 52 | keywords="prisma cloud cli", 53 | url="https://github.com/PaloAltoNetworks/prismacloud-cli", 54 | packages=find_namespace_packages(), 55 | long_description=read("README.md"), 56 | long_description_content_type="text/markdown", 57 | classifiers=[ 58 | "Development Status :: 3 - Alpha", 59 | "Intended Audience :: Developers", 60 | "Intended Audience :: System Administrators", 61 | "License :: OSI Approved :: MIT License", 62 | "Programming Language :: Python", 63 | "Topic :: Utilities", 64 | ], 65 | entry_points=""" 66 | [console_scripts] 67 | pc=prismacloud.cli:cli 68 | """, 69 | ) 70 | -------------------------------------------------------------------------------- /tests/test_cli.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | import os 3 | import json 4 | from pathlib import Path 5 | import pytest 6 | 7 | commands = [ 8 | ["-o", "csv", "policy"], 9 | ["stats", "vulnerabilities", "--cve", "CVE-2022-0847"], 10 | ["-o", "json", "policy", "list"], 11 | ["tags"], 12 | ["stats", "dashboard"], 13 | ["-o", "json", "stats", "dashboard"], 14 | ["cloud", "names"], 15 | ["cloud", "type"], 16 | ["--columns", "defendersSummary.host", "stats", "dashboard"], 17 | ] 18 | 19 | 20 | @pytest.fixture(scope="session", autouse=True) 21 | def check_env_vars_or_credentials_file(): 22 | required_env_vars = { 23 | "PC_ACCESS_KEY": "access_key_id", 24 | "PC_SAAS_API_ENDPOINT": "api_endpoint", 25 | "PC_SECRET_KEY": "secret_key", 26 | } 27 | 28 | env_vars_set = all(os.environ.get(env_var) for env_var in required_env_vars) 29 | 30 | credentials_file = Path("~/.prismacloud/credentials.json").expanduser() 31 | 32 | if not env_vars_set: 33 | if credentials_file.is_file(): 34 | with open(credentials_file, "r") as json_file: 35 | data = json.load(json_file) 36 | for env_var, alternative_key in required_env_vars.items(): 37 | value = data.get(env_var) or data.get(alternative_key) 38 | if value is not None: 39 | os.environ[env_var] = value 40 | 41 | env_vars_set = all(os.environ.get(env_var) for env_var in required_env_vars) 42 | if not env_vars_set: 43 | r = "Environment variables are not set, and ~/.prismacloud/credentials.json does not exist. Stopping the test suite." 44 | pytest.exit(r) 45 | 46 | 47 | @pytest.mark.parametrize("command", commands, ids=[str(command) for command in commands]) 48 | def test_cli_commands(command, benchmark): 49 | """Test various CLI commands and check if they run successfully.""" 50 | 51 | def run_command(): 52 | try: 53 | result = subprocess.run( 54 | ["python3", "bin/pc", "--config", "env"] + command, capture_output=True, text=True, check=True 55 | ) 56 | assert result.returncode == 0 57 | except subprocess.CalledProcessError as test_error: 58 | pytest.fail( 59 | f"Command {' '.join(command)} failed with return code {test_error.returncode} " 60 | f"and output:\\n{test_error.output}" 61 | ) 62 | 63 | if os.environ.get("SKIP_BENCHMARK") == "1": 64 | run_command() 65 | else: 66 | benchmark(run_command) 67 | -------------------------------------------------------------------------------- /use-cases/README.md: -------------------------------------------------------------------------------- 1 | # Advanced Use-Cases for Prisma cloud CLI 2 | 3 | [Use RQL with Prisma Cloud CI](./use-rql.md) - Webinar on the 29th of June 16:00-17:00 CET (10:00-11:00 EST) 4 | 5 | [Webinar Use-Cases](./june-2022.md) - Webinar on the 29th of June 16:00-17:00 CET (10:00-11:00 EST) 6 | 7 | [Custom Query Reports](./custom-query-reports.md) - Examples of how to fetch and filter data based on an RQL query and output to various forms. 8 | -------------------------------------------------------------------------------- /use-cases/custom-query-reports.md: -------------------------------------------------------------------------------- 1 | # Use Cases for Querying IAM data and reporting 2 | 3 | As of this writing there are various ways to output your data. Run `pc --help` to see currently available options. 4 | ``` 5 | pc --help 6 | Usage: pc [OPTIONS] COMMAND [ARGS]... 7 | 8 | Prisma Cloud CLI (version: 0.6.9) 9 | 10 | Options: 11 | -v, --verbose Enables verbose mode 12 | -vv, --very_verbose Enables very verbose mode 13 | --filter TEXT Add search filter 14 | -o, --output [text|csv|json|html|clipboard|markdown|columns] 15 | -c, --config TEXT Select configuration file in 16 | ~/.prismacloud/[CONFIGURATION].json 17 | --columns TEXT Select columns for output 18 | --help Show this message and exit. 19 | ``` 20 | We will utilize several variations of the current options in the examples to provide different results. Explore more on your own. 21 | 22 | ## Getting Started - For all the below examples, we will typically want to do 2 things: 23 | 1. Determine the query you need and export as an environment variable. Suggest to run and confirm a valid query in Prisma Cloud console first. 24 | > Example 25 | ``` 26 | export RQL="config from iam where grantedby.cloud.policy.name = 'AdministratorAccess'" 27 | ``` 28 | 2. Determine what fields you would like to filter on to narrow down your output. You can fetch all the available fields with this simple command: 29 | > Example 30 | ``` 31 | pc -o columns rql --query $RQL 32 | ``` 33 | 34 | ## Examples 35 | 36 | ### Output only the Identities that are granted a specific policy 37 | Reminder, export query as a variable and determine what fields you would like to filter on to narrow down your output. 38 | ``` 39 | export RQL_S3Full="config from iam where grantedby.cloud.policy.name = 'AmazonS3FullAccess'" 40 | pc -o columns rql --query $RQL_S3Full 41 | ``` 42 | 43 | To display results via JSON 44 | ``` 45 | pc -o json rql --query $RQL_S3Full | jq 'map(.sourceResourceName) | unique' 46 | ``` 47 | > Example Output: 48 | ``` 49 | [ 50 | "AppUser1", 51 | "AppUser2", 52 | "i-022a31ebcde34fa20" 53 | ] 54 | ``` 55 | 56 | To display results via columns 57 | ``` 58 | pc --columns sourceResourceName,grantedByCloudEntityType rql --query $RQL_S3Full 59 | ``` 60 | > Example Output: 61 | ``` 62 | ╒══════════════════════╤════════════════════════════╕ 63 | │ sourceResourceName │ grantedByCloudEntityType │ 64 | ╞══════════════════════╪════════════════════════════╡ 65 | │ AppUser1 │ group │ 66 | ├──────────────────────┼────────────────────────────┤ 67 | │ AppUser2 │ group │ 68 | ├──────────────────────┼────────────────────────────┤ 69 | │ i-022a31ebcde34fa20 │ role │ 70 | ╘══════════════════════╧════════════════════════════╛ 71 | ``` 72 | 73 | 74 | ### AWS Roles with permissions to assume:role* or passrole:* to any resource without a conditional 75 | ``` 76 | export RQL_AssumePassRole="config from iam where dest.cloud.type = 'AWS' and dest.cloud.resource.name = '*'and grantedby.cloud.policy.type != 'Resource-based Policy' and source.cloud.resource.name!='*' and action.name in ( 'sts:AssumeRole', 'iam:PassRole' )" 77 | ``` 78 | To display results via columns 79 | ``` 80 | pc --columns sourceResourceName,grantedByCloudEntityType,grantedByCloudPolicyType,grantedByCloudPolicyName rql --query $RQL_AssumePassRole 81 | ``` 82 | > Example Output: 83 | ``` 84 | ╒═══════════════════════════╤════════════════════════════╤════════════════════════════╤════════════════════════════╕ 85 | │ sourceResourceName │ grantedByCloudPolicyName │ grantedByCloudPolicyType │ grantedByCloudEntityType │ 86 | ╞═══════════════════════════╪════════════════════════════╪════════════════════════════╪════════════════════════════╡ 87 | │ AppUser1 │ AdministratorAccess │ AWS Managed Policy │ group │ 88 | ├───────────────────────────┼────────────────────────────┼────────────────────────────┼────────────────────────────┤ 89 | │ DevOpsUser3 │ PowerUserAccess │ AWS Managed Policy │ role │ 90 | ├───────────────────────────┼────────────────────────────┼────────────────────────────┼────────────────────────────┤ 91 | │ privesc-sre-user │ privesc-sre-admin-policy │ Customer Managed Policy │ role │ 92 | ├───────────────────────────┼────────────────────────────┼────────────────────────────┼────────────────────────────┤ 93 | │ application12345 │ LambdaExecutionRolePolicy │ Inline Policy │ role │ 94 | ├───────────────────────────┼────────────────────────────┼────────────────────────────┼────────────────────────────┤ 95 | │ TheBoss │ AdministratorAccess │ AWS Managed Policy │ user │ 96 | ╘═══════════════════════════╧════════════════════════════╧════════════════════════════╧════════════════════════════╛ 97 | ``` 98 | To display same results via html and save to a file 99 | ``` 100 | pc -o html --columns sourceResourceName,grantedByCloudEntityType,grantedByCloudPolicyType,grantedByCloudPolicyName rql --query $RQL_AssumePassRole > file.html 101 | ``` 102 | To display same results to your clipboard 103 | ``` 104 | pc -o clipboard --columns sourceResourceName,grantedByCloudEntityType,grantedByCloudPolicyType,grantedByCloudPolicyName rql --query $RQL_AssumePassRole 105 | ``` 106 | - After above command executes, you can open up a spreadsheet tool like MS Excel or Google Sheets and simply paste the results. 107 | 108 | 109 | ### List Identities that can delete S3 buckets and if they used this permission in the last 90 days 110 | 111 | ``` 112 | export RQL_DeleteBucket="config from iam where dest.cloud.service.name = 's3' AND action.name = 's3:deletebucket' AND action.lastaccess.days > 90 " 113 | ``` 114 | ``` 115 | pc --columns sourceResourceName,lastAccessStatus,lastAccessDate rql --query $RQL_DeleteBucket 116 | ``` 117 | > Example Output: 118 | ``` 119 | ╒═══════════════════════════╤══════════════════╤═══════════════════════════╕ 120 | │ sourceResourceName │ lastAccessDate │ lastAccessStatus │ 121 | ╞═══════════════════════════╪══════════════════╪═══════════════════════════╡ 122 | │ AppUser1 │ │ NOT_ACCESSED_IN_TRACKING_ │ 123 | │ │ │ PERIOD │ 124 | ├───────────────────────────┼──────────────────┼───────────────────────────┤ 125 | │ DevOpsUser3 │ 2023-01-25 │ ACCESSED │ 126 | ├───────────────────────────┼──────────────────┼───────────────────────────┤ 127 | │ TheBoss │ │ NOT_ACCESSED_IN_TRACKING_ │ 128 | │ │ │ PERIOD │ 129 | ╘═══════════════════════════╧══════════════════╧═══════════════════════════╛ 130 | ``` 131 | 132 | 133 | ### List Azure Service Principals granted Owner Role and what level they were granted by 134 | 135 | ``` 136 | export RQL_AZ_OWNER_SPs="config from iam where dest.cloud.type = 'AZURE' AND grantedby.cloud.policy.type = 'Built-in Role' AND grantedby.cloud.policy.name = 'Owner' AND grantedby.cloud.type = 'AZURE' AND grantedby.cloud.entity.type = 'Service Principal' " 137 | ``` 138 | ``` 139 | pc --columns sourceResourceName,grantedByCloudEntityType,sourceCloudAccount,grantedByLevelType,grantedByLevelName rql --query $RQL_AZ_OWNER_SPs 140 | ``` 141 | > Example Output: 142 | ``` 143 | ╒═══════════════════════════╤═══════════════════════════╤════════════════════════════╤════════════════════════╤═══════════════════════════| 144 | │ sourceCloudAccount │ sourceResourceName │ grantedByCloudEntityType │ grantedByLevelType │ grantedByLevelId │ 145 | ╞═══════════════════════════╪═══════════════════════════╪════════════════════════════╪════════════════════════╪═══════════════════════════╡ 146 | │ Azure QA │ ombarFuntionApp │ Service Principal │ Azure Subscription │ /subscriptions/REDACTED- │ 147 | │ │ │ │ │ -XXXXXXXXXXXXXX │ 148 | │ │ │ │ │ │ 149 | ├───────────────────────────┼───────────────────────────┼────────────────────────────┼────────────────────────┼───────────────────────────| 150 | │ Azure QA │ AzureResourceDeletionApp │ Service Principal │ Azure Subscription │ /subscriptions/REDACTED- │ 151 | │ │ │ │ │ -XXXXXXXXXXXXXX │ 152 | │ │ │ │ │ │ 153 | ├───────────────────────────┼───────────────────────────┼────────────────────────────┼────────────────────────┼───────────────────────────| 154 | │ Azure QA_Static_7 │ AzureResourceDeletionApp │ Service Principal │ Azure Resource │ /subscriptions/REDACTED- │ 155 | │ │ │ │ │ -XXXXXXXXXXXXXXX/resource │ 156 | │ │ │ │ │ Groups/TESTQA-AUTOMATION/ │ 157 | │ │ │ │ │ providers/Microsoft.DataL │ 158 | │ │ │ │ │ akeAnalytics/accounts/sta │ 159 | │ │ │ │ │ ticdla │ 160 | ├───────────────────────────┼───────────────────────────┼────────────────────────────┼────────────────────────┼───────────────────────────| 161 | │ Azure QA Tenant │ Tenant onboard │ Service Principal │ Azure Management Group │ /providers/Microsoft.Mana │ 162 | │ │ │ │ │ gement/managementGroups/f │ 163 | │ │ │ │ │ REDACTED-XXXXXXXXXXXXX- │ 164 | │ │ │ │ │ YYYYYYYYYYYY │ 165 | ├───────────────────────────┼───────────────────────────┼────────────────────────────┼────────────────────────┼───────────────────────────| 166 | ``` 167 | 168 | ### List Azure Identities granted either Key Vault Administrator or Key Vault Reader Role and have unrestricted destination access 169 | This would show all identities with these High risk roles that are not scoped to specific Key Vaults and could pose a data risk. 170 | 171 | ``` 172 | export RQL_AZ_KV="config from iam where dest.cloud.type = 'AZURE' AND grantedby.cloud.policy.type = 'Built-in Role' AND grantedby.cloud.policy.name IN ('Key Vault Reader', 'Key Vault Administrator') AND dest.cloud.service.name = 'Microsoft.KeyVault' AND dest.cloud.resource.name = '*' " 173 | ``` 174 | ``` 175 | pc --columns sourceResourceName,grantedByCloudEntityType,grantedByCloudPolicyName,destResourceName rql --query $RQL_AZ_KV 176 | ``` 177 | > Example Output: 178 | ``` 179 | ╒═════════════════════════╤════════════════════╤════════════════════════════╤════════════════════════════╕ 180 | │ sourceResourceName │ destResourceName │ grantedByCloudPolicyName │ grantedByCloudEntityType │ 181 | ╞═════════════════════════╪════════════════════╪════════════════════════════╪════════════════════════════╡ 182 | │ abcdef-app-registration │ * │ Key Vault Reader │ Service Principal │ 183 | ├─────────────────────────┼────────────────────┼────────────────────────────┼────────────────────────────┤ 184 | │ Shreyas │ * │ Key Vault Administrator │ user │ 185 | ╘═════════════════════════╧════════════════════╧════════════════════════════╧════════════════════════════╛ 186 | ``` 187 | 188 | 189 | ### Find all identities that can delete MS SQL DBs 190 | ``` 191 | export RQL_AZ_SQL_DELETE="config from iam where dest.cloud.type = 'AZURE' AND dest.cloud.resource.name = 'Microsoft.Sql' AND dest.cloud.resource.type = 'servers' AND action.name = 'Microsoft.Sql/servers/delete'" 192 | ``` 193 | To display results via columns 194 | ``` 195 | pc --columns sourceResourceName,grantedByCloudEntityType,grantedByCloudPolicyName,destResourceName rql --query $RQL_AZ_SQL_DELETE 196 | ``` 197 | > Example Output: 198 | ``` 199 | ╒═══════════════════════════╤════════════════════╤════════════════════════════╤════════════════════════════╕ 200 | │ sourceResourceName │ destResourceName │ grantedByCloudPolicyName │ grantedByCloudEntityType │ 201 | ╞═══════════════════════════╪════════════════════╪════════════════════════════╪════════════════════════════╡ 202 | │ Prath │ * │ Owner │ group │ 203 | ├───────────────────────────┼────────────────────┼────────────────────────────┼────────────────────────────┤ 204 | │ Varad │ * │ Owner │ user │ 205 | ├───────────────────────────┼────────────────────┼────────────────────────────┼────────────────────────────┤ 206 | │ Varad │ * │ Contributor │ group │ 207 | ├───────────────────────────┼────────────────────┼────────────────────────────┼────────────────────────────┤ 208 | │ cohen │ * │ Contributor │ user │ 209 | ├───────────────────────────┼────────────────────┼────────────────────────────┼────────────────────────────┤ 210 | │ Eli │ * │ Contributor │ user │ 211 | ├───────────────────────────┼────────────────────┼────────────────────────────┼────────────────────────────┤ 212 | │ azureautomationaccount01_ │ * │ Contributor │ Service Principal │ 213 | │ REDACTED= │ │ │ │ 214 | ├───────────────────────────┼────────────────────┼────────────────────────────┼────────────────────────────┤ 215 | ``` 216 | To display same results via html and save to a file 217 | ``` 218 | pc -o html --columns sourceResourceName,grantedByCloudEntityType,grantedByCloudPolicyName,destResourceName rql --query $RQL_AZ_SQL_DELETE > file.html 219 | ``` 220 | To display same results to your clipboard 221 | ``` 222 | pc -o clipboard --columns sourceResourceName,grantedByCloudEntityType,grantedByCloudPolicyName,destResourceName rql --query $RQL_AZ_SQL_DELETE 223 | ``` 224 | - After above command executes, you can open up a spreadsheet tool like MS Excel or Google Sheets and simply paste the results. 225 | 226 | 227 | ### Find all GCP VM instances with permissions to impersonate a service account 228 | 229 | ``` 230 | export RQL_GCP="config from iam where dest.cloud.type = 'GCP' AND source.cloud.type = 'GCP' AND source.cloud.service.name = 'compute' and source.cloud.resource.type = 'Instances' AND action.name IN ('iam.serviceAccounts.getAccessToken', 'iam.serviceAccounts.signBlob', 'iam.serviceAccounts.signJwt', 'iam.serviceAccounts.implicitDelegation', 'iam.serviceAccounts.getOpenIdToken', 'iam.serviceAccounts.actAs') and grantedby.cloud.policy.type != 'Resource-based Policy'" 231 | ``` 232 | To display results via columns 233 | ``` 234 | pc --columns sourceResourceName,grantedByCloudEntityType rql --query $RQL_GCP 235 | ``` 236 | > Example Output: 237 | ``` 238 | ╒═══════════════════════════╤════════════════════════════╕ 239 | │ sourceResourceName │ grantedByCloudEntityType │ 240 | ╞═══════════════════════════╪════════════════════════════╡ 241 | │ user111111@examplecompany │ user │ 242 | │ .com │ │ 243 | ├───────────────────────────┼────────────────────────────┤ 244 | │ user222222@examplecompany │ user │ 245 | │ .com │ │ 246 | ├───────────────────────────┼────────────────────────────┤ 247 | │ user333333@examplecompany │ user │ 248 | │ .com │ │ 249 | ╘═══════════════════════════╧════════════════════════════╛ 250 | ``` 251 | 252 | To make the output print nicer, you can also play around with seetings such as `MAX_COLUMNS` & `MAX_WIDTH`. Using the example above, let's widen the columns 253 | ``` 254 | export MAX_WIDTH="40" 255 | ``` 256 | And re-run the cli command: 257 | ``` 258 | pc --columns sourceResourceName,grantedByCloudEntityType rql --query $RQL_GCP 259 | ``` 260 | > Example Output: 261 | ``` 262 | ╒════════════════════════════════╤════════════════════════════╕ 263 | │ sourceResourceName │ grantedByCloudEntityType │ 264 | ╞════════════════════════════════╪════════════════════════════╡ 265 | │ user111111@examplecompany.com │ user │ 266 | ├────────────────────────────────┼────────────────────────────┤ 267 | │ user222222@examplecompany.com │ user │ 268 | ├────────────────────────────────┼────────────────────────────┤ 269 | │ user333333@examplecompany.com │ user │ 270 | ╘════════════════════════════════╧════════════════════════════╛ 271 | ``` 272 | -------------------------------------------------------------------------------- /use-cases/june-2022.md: -------------------------------------------------------------------------------- 1 | # Webinar in June 2022 2 | 3 | ## Installation and configuration 4 | 5 | Installation and configurations instructions are already describe [in here](../README.md) 6 | 7 | ## Use-Cases for Prisma Cloud CLI 8 | 9 | 10 | ### Query Prisma Cloud 11 | 12 | 13 | #### Search for LOG4J vulnerability 14 | 15 | ``` 16 | pc stats vulnerabilities --cve CVE-2021-44228 17 | pc -o json stats vulnerabilities --cve CVE-2021-44228 | jq 18 | ``` 19 | 20 | #### Search for LOG4J vulnerability with --config 21 | 22 | ``` 23 | pc --config selfhosted stats vulnerabilities --cve CVE-2021-44228 24 | pc --config selfhosted -o json stats vulnerabilities --cve CVE-2021-44228 | jq 25 | ``` 26 | 27 | #### Get and watch latest logins 28 | ``` 29 | watch pc --columns username,sourceIP,time,type,status logs audit -h 1 -t login 30 | ``` 31 | 32 | #### Get a list of discovery scan results 33 | ``` 34 | pc --columns credentialId,provider,serviceType,defended,total,err discovery 35 | ``` 36 | 37 | 38 | #### Get latest alerts 39 | ``` 40 | pc --columns ^id$,alertTime,policy.policyType,resource.name,resource.data.tagSets,policy.name,policy.description alert list --unit minute --amount 30 41 | ``` 42 | 43 | #### Execute RQL and fetch results 44 | ``` 45 | export RQL="config from cloud.resource where cloud.type = 'aws' AND cloud.region NOT IN ( 'AWS Ireland' , 'AWS Frankfurt' , 'AWS London' ) AND cloud.service = 'Amazon EC2' AND resource.status = Active AND api.name = 'aws-ec2-describe-instances' AND json.rule = state.name equals running" 46 | pc -o json rql --query $RQL --amount 1 --unit day|jq 47 | ``` 48 | 49 | ### Filtering 50 | 51 | ``` 52 | pc --config local -o html --columns entityInfo.repoTag.registry,entityInfo.repoTag.repo,entityInfo.repoTag.tag,entityInfo.vulnerabilitiesCount,entityInfo.vulnerabilityDistribution.critical,entityInfo.vulnerabilityDistribution.high,entityInfo.vulnerabilityDistribution.medium scans -l 20 -s nginx > /tmp/results.html 53 | ``` 54 | 55 | ### Bulk edit policies 56 | 57 | #### See the command options 58 | 59 | ``` 60 | pc policy set --help 61 | ``` 62 | 63 | #### Enable CSPM policies with Prisma Cloud CLI 64 | 65 | ``` 66 | pc -vv policy set --status enable --compliance_standard 'CIS v1.4.0 (AWS)' 67 | ``` 68 | 69 | #### Disable CSPM policies with Prisma Cloud CLI 70 | 71 | ``` 72 | pc -vv policy set --status disable --compliance_standard 'CIS v1.4.0 (AWS)' 73 | ``` 74 | 75 | #### Disable all policies and enable only certain compliance framework 76 | 77 | ``` 78 | pc -vv policy set --status disable --all_policies 79 | pc -vv policy set --status enable --compliance_standard 'CIS v1.4.0 (AWS)' 80 | pc -vv policy set --status enable --compliance_standard 'ISO 27001:2013' 81 | pc -vv policy set --status enable --compliance_standard 'GDPR' 82 | pc -vv policy set --status enable --compliance_standard 'SOC 2' 83 | pc -vv policy set --status enable --compliance_standard 'AWS Well-Architected Framework' 84 | ``` 85 | -------------------------------------------------------------------------------- /use-cases/use-rql.md: -------------------------------------------------------------------------------- 1 | 2 | # RQL With Prisma Cloud CLI 3 | 4 | The new --field parameter of the rql command can be used to parse a file with 5 | RQL queries. This file needs to be in yaml format as the example below: 6 | 7 | ``` 8 | - name: Find all permissions granted to Users 9 | query: config from iam where grantedby.cloud.entity.type = 'user' 10 | 11 | - name: Find all permissions granted to Roles 12 | query: config from iam where grantedby.cloud.entity.type = 'role' 13 | 14 | - name: IAM identities that can delete DynamoDB tables 15 | query: config from iam where action.name = 'dynamodb:DeleteTable' 16 | ``` 17 | 18 | Examples to use this: 19 | 20 | `pc -o count rql --file ~/.prismacloud/my-important-queries.yaml` 21 | `pc rql --file ~/.prismacloud/my-important-queries.yaml` 22 | 23 | ### Sample output 24 | 25 | Command 26 | 27 | `pc -o markdown rql --file ~/.prismacloud/my-important-queries.yaml ` 28 | 29 | RQL Query name: Find all permissions granted to Roles 30 | RQL Query: config from iam where grantedby.cloud.entity.type = 'role' 31 | | id | sourcePublic | sourceCloudType | sourceCloudAccount | sourceCloudRegion | 32 | |:-------------------------------|:---------------|:------------------|:---------------------|:--------------------| 33 | | 7984fc7e5041b7439272897da5c948 | False | AWS | Pedro AWS Account | AWS Oregon | 34 | | 538adb5f6ccea83434be64b9e3b882 | | | | | 35 | | 2c47 | | | | | 36 | | 3206a93cd56dc0d983f67a994a648a | False | AWS | Pedro AWS Account | AWS Oregon | 37 | | 9a7e2f47f8a1d8851f37433312c1bc | | | | | 38 | | a3d5 | | | | | 39 | 40 | RQL Query name: Find all permissions granted to Groups 41 | RQL Query: config from iam where grantedby.cloud.entity.type = 'group' 42 | | id | sourcePublic | sourceCloudType | sourceCloudAccount | sourceCloudRegion | 43 | |:-------------------------------|:---------------|:------------------|:---------------------|:--------------------| 44 | | 177bef83192f13a4e11f439fa8f7bb | False | AWS | pete-aws | AWS Global | 45 | | dc50ce83185092er44re4431d3cad0 | | | | | 46 | | 19ce | | | | | 47 | | 177bef83192f13aer11f4erfa8f7bb | False | AWS | pete-aws | AWS Global | 48 | | dc50ce831850928ddfeb9461d3cad0 | | | | | 49 | | 19ce | | | | | 50 | 51 | RQL Query name: Show all INACTIVE identities and their allowed actions over the last specified number of days 52 | RQL Query: config from iam where action.lastaccess.days > 90 53 | 54 | 55 | --------------------------------------------------------------------------------