├── .github
│   └── workflows
│       ├── install_test.yml
│       ├── install_test_coverage.yml
│       ├── install_test_setup_py.yml
│       ├── pylint_and_mypy.yml
│       └── pypi_push.yml
├── .gitignore
├── .mypy.ini
├── .pylintrc
├── .style.yapf
├── CODEOWNERS
├── CONTRIBUTING.md
├── LICENSE
├── MANIFEST.in
├── README.md
├── config
│   └── linux
│       └── jenkins_libcloudforensics_install.sh
├── docs
│   ├── Makefile
│   ├── conf.py
│   ├── contributing.md
│   ├── gettingstarted.md
│   ├── index.md
│   ├── make.bat
│   ├── requirements.txt
│   ├── source
│   │   ├── images
│   │   │   ├── ebs.png
│   │   │   └── sas.png
│   │   ├── libcloudforensics.providers.aws.internal.rst
│   │   ├── libcloudforensics.providers.aws.rst
│   │   ├── libcloudforensics.providers.azure.internal.rst
│   │   ├── libcloudforensics.providers.azure.rst
│   │   ├── libcloudforensics.providers.gcp.internal.rst
│   │   ├── libcloudforensics.providers.gcp.rst
│   │   └── libcloudforensics.scripts.rst
│   └── usermanual
│       └── index.md
├── libcloudforensics
│   ├── __init__.py
│   ├── errors.py
│   ├── logging_utils.py
│   ├── prompts.py
│   ├── providers
│   │   ├── __init__.py
│   │   ├── aws
│   │   │   ├── __init__.py
│   │   │   ├── forensics.py
│   │   │   └── internal
│   │   │       ├── __init__.py
│   │   │       ├── account.py
│   │   │       ├── common.py
│   │   │       ├── ebs.py
│   │   │       ├── ec2.py
│   │   │       ├── iam.py
│   │   │       ├── iampolicies
│   │   │       │   ├── ebs_copy_to_s3_policy.json
│   │   │       │   ├── ec2_assume_role_policy.json
│   │   │       │   └── revoke_old_sessions.json
│   │   │       ├── kms.py
│   │   │       ├── log.py
│   │   │       └── s3.py
│   │   ├── azure
│   │   │   ├── __init__.py
│   │   │   ├── forensics.py
│   │   │   └── internal
│   │   │       ├── __init__.py
│   │   │       ├── account.py
│   │   │       ├── common.py
│   │   │       ├── compute.py
│   │   │       ├── compute_base_resource.py
│   │   │       ├── monitoring.py
│   │   │       ├── network.py
│   │   │       ├── resource.py
│   │   │       └── storage.py
│   │   ├── gcp
│   │   │   ├── __init__.py
│   │   │   ├── forensics.py
│   │   │   └── internal
│   │   │       ├── __init__.py
│   │   │       ├── bigquery.py
│   │   │       ├── build.py
│   │   │       ├── cloudresourcemanager.py
│   │   │       ├── cloudsql.py
│   │   │       ├── common.py
│   │   │       ├── compute.py
│   │   │       ├── compute_base_resource.py
│   │   │       ├── function.py
│   │   │       ├── gke.py
│   │   │       ├── log.py
│   │   │       ├── monitoring.py
│   │   │       ├── project.py
│   │   │       ├── serviceusage.py
│   │   │       ├── storage.py
│   │   │       └── storagetransfer.py
│   │   ├── kubernetes
│   │   │   ├── __init__.py
│   │   │   ├── base.py
│   │   │   ├── cluster.py
│   │   │   ├── container.py
│   │   │   ├── enumerations
│   │   │   │   ├── __init__.py
│   │   │   │   ├── base.py
│   │   │   │   └── gcp.py
│   │   │   ├── mitigation.py
│   │   │   ├── netpol.py
│   │   │   ├── selector.py
│   │   │   ├── services.py
│   │   │   ├── volume.py
│   │   │   └── workloads.py
│   │   └── utils
│   │       ├── __init__.py
│   │       └── storage_utils.py
│   └── scripts
│       ├── __init__.py
│       ├── ebs_snapshot_copy_aws.sh
│       ├── forensics_packages_startup.sh
│       ├── forensics_packages_startup_aws.sh
│       └── utils.py
├── poetry.lock
├── poetry.toml
├── pyproject.toml
├── setup.py
├── tests
│   ├── __init__.py
│   ├── providers
│   │   ├── __init__.py
│   │   ├── aws
│   │   │   ├── __init__.py
│   │   │   ├── aws_cli.py
│   │   │   ├── aws_mocks.py
│   │   │   ├── e2e.py
│   │   │   ├── e2e_cli.py
│   │   │   ├── internal
│   │   │   │   ├── __init__.py
│   │   │   │   ├── test_common.py
│   │   │   │   ├── test_ebs.py
│   │   │   │   ├── test_ec2.py
│   │   │   │   ├── test_log.py
│   │   │   │   └── test_s3.py
│   │   │   └── test_forensics.py
│   │   ├── azure
│   │   │   ├── __init__.py
│   │   │   ├── azure_cli.py
│   │   │   ├── azure_mocks.py
│   │   │   ├── e2e.py
│   │   │   ├── e2e_cli.py
│   │   │   ├── internal
│   │   │   │   ├── __init__.py
│   │   │   │   ├── test_common.py
│   │   │   │   ├── test_compute.py
│   │   │   │   ├── test_monitoring.py
│   │   │   │   ├── test_resource.py
│   │   │   │   └── test_storage.py
│   │   │   └── test_forensics.py
│   │   ├── gcp
│   │   │   ├── __init__.py
│   │   │   ├── e2e.py
│   │   │   ├── e2e_cli.py
│   │   │   ├── gcp_cli.py
│   │   │   ├── gcp_mocks.py
│   │   │   ├── internal
│   │   │   │   ├── __init__.py
│   │   │   │   ├── test_bigquery.py
│   │   │   │   ├── test_build.py
│   │   │   │   ├── test_cloudresourcemanager.py
│   │   │   │   ├── test_cloudsql.py
│   │   │   │   ├── test_common.py
│   │   │   │   ├── test_compute.py
│   │   │   │   ├── test_compute_base_resource.py
│   │   │   │   ├── test_gke.py
│   │   │   │   ├── test_log.py
│   │   │   │   ├── test_monitoring.py
│   │   │   │   ├── test_serviceusage.py
│   │   │   │   ├── test_storage.py
│   │   │   │   └── test_storagetransfer.py
│   │   │   └── test_forensics.py
│   │   └── kubernetes
│   │       ├── __init__.py
│   │       ├── k8s_mocks.py
│   │       ├── test_base.py
│   │       ├── test_netpol.py
│   │       ├── test_services.py
│   │       └── test_workloads.py
│   ├── run_tests.py
│   └── scripts
│       ├── __init__.py
│       ├── startup.sh
│       ├── test_azure_config_dir
│       │   ├── accessTokens.json
│       │   └── azureProfile.json
│       ├── test_credentials.json
│       └── utils.py
└── tools
    ├── __init__.py
    ├── aws_cli.py
    ├── az_cli.py
    ├── cli.py
    └── gcp_cli.py
/.github/workflows/install_test.yml:
--------------------------------------------------------------------------------
1 | name: Install and test
2 |
3 | on:
4 | push:
5 | pull_request:
6 | types: [opened, synchronize, reopened]
7 |
8 | jobs:
9 | build:
10 |
11 | runs-on: ${{ matrix.os }}
12 | strategy:
13 | matrix:
14 | os: [ubuntu-latest, macos-latest]
15 | python-version: ['3.9', '3.10', '3.11', '3.12']
16 |
17 | steps:
18 | - uses: actions/checkout@v2
19 | - name: Set up Python ${{ matrix.python-version }}
20 | uses: actions/setup-python@v1
21 | with:
22 | python-version: ${{ matrix.python-version }}
23 | - name: Install dependencies
24 | run: |
25 | python -m pip install --upgrade pip
26 | python -m pip install --upgrade setuptools
27 | - name: Install poetry
28 | run: |
29 | python -m pip install poetry
30 | - name: Cache the virtualenv
31 | uses: actions/cache@v4
32 | with:
33 | path: ./.venv
34 | key: ${{ runner.os }}-venv-${{ hashFiles('**/poetry.lock') }}
35 | - name: Install dependencies through Poetry
36 | run: |
37 | python -m poetry install --with dev
38 | - name: Install through setup.py
39 | run: python setup.py install
40 | - name: Test with unittest
41 | run: |
42 | python -m poetry run python -m coverage run -m unittest
43 |
--------------------------------------------------------------------------------
/.github/workflows/install_test_coverage.yml:
--------------------------------------------------------------------------------
1 | name: Install, test and get code coverage
2 |
3 | on:
4 | pull_request:
5 | types: [opened, synchronize, reopened]
6 |
7 | jobs:
8 | build:
9 |
10 | runs-on: ${{ matrix.os }}
11 | strategy:
12 | matrix:
13 | os: [ubuntu-latest, macos-latest]
14 | python-version: ['3.9', '3.10', '3.11', '3.12']
15 |
16 | steps:
17 | - uses: actions/checkout@v2
18 | - name: Set up Python ${{ matrix.python-version }}
19 | uses: actions/setup-python@v1
20 | with:
21 | python-version: ${{ matrix.python-version }}
22 | - name: Install dependencies
23 | run: |
24 | python -m pip install --upgrade pip
25 | python -m pip install --upgrade setuptools
26 | - name: Install poetry
27 | run: |
28 | python -m pip install poetry
29 | - name: Cache the virtualenv
30 | uses: actions/cache@v4
31 | with:
32 | path: ./.venv
33 | key: ${{ runner.os }}-venv-${{ hashFiles('**/poetry.lock') }}
34 | - name: Install dependencies through Poetry
35 | run: |
36 | python -m poetry install --with dev
37 | - name: Install through setup.py
38 | run: python setup.py install
39 | - name: Test
40 | run: |
41 | python -m poetry run python -m coverage run -m unittest
42 | - name: Upload coverage to Codecov
43 | uses: codecov/codecov-action@v1
44 | with:
45 | file: ./coverage.xml
46 | flags: unittests
47 | name: codecov-umbrella
48 |
--------------------------------------------------------------------------------
/.github/workflows/install_test_setup_py.yml:
--------------------------------------------------------------------------------
1 | name: Install and test (Poetry)
2 |
3 | on:
4 | pull_request:
5 | types: [opened, synchronize, reopened]
6 |
7 | jobs:
8 | build:
9 |
10 | runs-on: ${{ matrix.os }}
11 | strategy:
12 | matrix:
13 | os: [ubuntu-latest, macos-latest]
14 | python-version: ['3.9', '3.10', '3.11', '3.12']
15 |
16 | steps:
17 | - uses: actions/checkout@v2
18 | - name: Set up Python ${{ matrix.python-version }}
19 | uses: actions/setup-python@v1
20 | with:
21 | python-version: ${{ matrix.python-version }}
22 | - name: Update PIP and install crypto and setuptools
23 | run: python -m pip install -U pip && pip install cryptography && pip install setuptools
24 | - name: Install poetry
25 | run: |
26 | python -m pip install poetry
27 | - name: Cache the virtualenv
28 | uses: actions/cache@v4
29 | with:
30 | path: ./.venv
31 | key: ${{ runner.os }}-venv-${{ hashFiles('**/poetry.lock') }}
32 | - name: Install dependencies through Poetry
33 | run: |
34 | python -m poetry install --with dev
35 | - name: Install through setup.py
36 | run: python setup.py install
37 | - name: Test
38 | run: |
39 | python -m poetry run python -m unittest
40 |
--------------------------------------------------------------------------------
/.github/workflows/pylint_and_mypy.yml:
--------------------------------------------------------------------------------
1 | name: Run pylint and mypy on the codebase
2 |
3 | on:
4 | push:
5 | pull_request:
6 | types: [opened, synchronize, reopened]
7 |
8 | jobs:
9 | build:
10 |
11 | runs-on: ubuntu-latest
12 |
13 | steps:
14 | - uses: actions/checkout@v2
15 | - name: Set up Python 3.12
16 | uses: actions/setup-python@v1
17 | with:
18 | python-version: 3.12
19 | - name: Install poetry
20 | run: |
21 | python -m pip install poetry
22 | - name: Cache the virtualenv
23 | uses: actions/cache@v4
24 | with:
25 | path: ./.venv
26 | key: ${{ runner.os }}-venv-${{ hashFiles('**/poetry.lock') }}
27 | - name: Install dependencies through Poetry
28 | run: |
29 | python -m poetry install --with dev
30 | - name: Run pylint on all *.py files
31 | run: |
32 | python -m poetry run pylint tests libcloudforensics tools
33 | - name: Run mypy on all *.py files
34 | run: |
35 | python -m poetry run mypy --ignore-missing-imports --strict --no-warn-unused-ignores -p tests -p libcloudforensics -p tools
36 |
--------------------------------------------------------------------------------
/.github/workflows/pypi_push.yml:
--------------------------------------------------------------------------------
1 | name: Publish Python 🐍 distributions 📦 to PyPI and TestPyPI
2 |
3 | on: push
4 |
5 | jobs:
6 | build-n-publish:
7 | name: Build and publish Python 🐍 distributions 📦 to PyPI and TestPyPI
8 | runs-on: ubuntu-latest
9 | steps:
10 | - uses: actions/checkout@master
11 | - name: Set up Python 3.12 🐍🐍🐍
12 | uses: actions/setup-python@v1
13 | with:
14 | python-version: 3.12
15 | - name: Install poetry
16 | run: |
17 | python -m pip install poetry
18 | - name: Install dependencies through Poetry
19 | run: |
20 | python -m poetry install --with dev
21 | - name: Publish distribution 📦 to PyPI
22 | if: startsWith(github.event.ref, 'refs/tags')
23 | env:
24 | PYPI_TOKEN: ${{ secrets.pypi_deploy }}
25 | run: |
26 | poetry config pypi-token.pypi $PYPI_TOKEN
27 | poetry publish --build
28 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | .coverage
3 | *.egg-info
4 | __pycache__
5 | .noseids
6 |
7 | # Python packaging
8 | build/
9 | dist/
10 |
11 | # virtualenv
12 | .venv/
13 | venv/
14 | ENV/
15 |
16 | # VSCode
17 | .vscode/
--------------------------------------------------------------------------------
/.mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 |
3 | [mypy-azure.*]
4 | ignore_errors = True
5 |
--------------------------------------------------------------------------------
/.style.yapf:
--------------------------------------------------------------------------------
1 | #
2 | # To run yapf for this project, invoke as such from the base directory:
3 | # yapf -i -r --style .style.yapf ./libcloudforensics/
4 | #
5 | [style]
6 | based_on_style = yapf
7 | COALESCE_BRACKETS = True
8 | SPLIT_ALL_TOP_LEVEL_COMMA_SEPARATED_VALUES = True
9 | SPLIT_BEFORE_FIRST_ARGUMENT = True
10 | SPLIT_BEFORE_NAMED_ASSIGNS = False
11 | SPLIT_PENALTY_AFTER_OPENING_BRACKET = 0
12 | SPLIT_PENALTY_FOR_ADDED_LINE_SPLIT = 30
13 |
--------------------------------------------------------------------------------
/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @google/libcloudforensics
2 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Want to contribute?
2 | Great! First read this [page](https://github.com/google/cloud-forensics-utils/blob/master/docs/contributing.md).
3 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include requirements*.txt
2 | include libcloudforensics/scripts/*
3 | include libcloudforensics/providers/aws/internal/iampolicies/*
4 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Cloud Forensics Utils
2 |
3 |
4 |
5 |
6 |
7 | This repository contains tools for forensics teams to collect evidence from
8 | cloud platforms. Currently, Google Cloud Platform, Microsoft Azure, and
9 | Amazon Web Services are supported.
10 |
11 | It consists of one module called `libcloudforensics`, which implements
12 | functions that are useful in the context of incident response in a cloud
13 | environment, as well as a CLI wrapper tool for these functions.
14 |
15 | Documentation can be found on the [ReadTheDocs page](https://libcloudforensics.readthedocs.io/en/latest/).
16 |
17 | Quick access:
18 |
19 | * [Installation](https://libcloudforensics.readthedocs.io/en/latest/gettingstarted.html#installing-from-pypi)
20 | * [User Manual](https://libcloudforensics.readthedocs.io/en/latest/usermanual/index.html#)
21 | * [How to contribute](https://libcloudforensics.readthedocs.io/en/latest/contributing.html)
22 |
23 |
24 |
--------------------------------------------------------------------------------
/config/linux/jenkins_libcloudforensics_install.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Exit on error
4 | set -e
5 |
6 | sudo apt-get update -q
7 | sudo apt-get install -y python3-pip
8 | sudo pip3 install poetry
9 |
10 | # Install libcloudforensics pinned requirements
11 | sudo python3 -m poetry install --with dev
12 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | import os
14 | import sys
15 | sys.path.insert(0, os.path.abspath('../'))
16 |
17 |
18 | # -- Project information -----------------------------------------------------
19 |
20 | project = 'libcloudforensics'
21 | copyright = '2020, Google'
22 | author = 'Google'
23 |
24 |
25 | # -- General configuration ---------------------------------------------------
26 |
27 | # Add any Sphinx extension module names here, as strings. They can be
28 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
29 | # ones.
30 | extensions = [
31 | 'recommonmark',
32 | 'sphinx.ext.autodoc',
33 | 'sphinx.ext.napoleon',
34 | 'sphinx_autodoc_typehints',
35 | ]
36 |
37 | # set_type_checking_flag = True
38 |
39 | master_doc = 'index'
40 |
41 | # Add any paths that contain templates here, relative to this directory.
42 | templates_path = ['_templates']
43 |
44 | # List of patterns, relative to source directory, that match files and
45 | # directories to ignore when looking for source files.
46 | # This pattern also affects html_static_path and html_extra_path.
47 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
48 |
49 |
50 | # -- Options for HTML output -------------------------------------------------
51 |
52 | # The theme to use for HTML and HTML Help pages. See the documentation for
53 | # a list of builtin themes.
54 | #
55 | html_theme = 'sphinx_rtd_theme'
56 |
57 | # Add any paths that contain custom static files (such as style sheets) here,
58 | # relative to this directory. They are copied after the builtin static files,
59 | # so a file named "default.css" will overwrite the builtin "default.css".
60 | html_static_path = ['_static']
61 |
62 | source_suffix = {
63 | '.rst': 'restructuredtext',
64 | '.txt': 'markdown',
65 | '.md': 'markdown',
66 | }
67 |
68 | from recommonmark.transform import AutoStructify
69 |
70 | github_doc_root = 'https://github.com/google/libcloudforensics/tree/master/doc/'
71 |
72 | def setup(app):
73 | app.add_config_value('recommonmark_config', {
74 | 'enable_auto_doc_ref': False,
75 | }, True)
76 | app.add_transform(AutoStructify)
77 |
--------------------------------------------------------------------------------
/docs/contributing.md:
--------------------------------------------------------------------------------
1 | ### Contributing
2 |
3 | #### Before you contribute
4 |
5 | We love contributions! Read this page (including the small print at the end).
6 |
7 | Before we can use your code, you must sign the
8 | [Google Individual Contributor License Agreement](https://developers.google.com/open-source/cla/individual?csw=1)
9 | (CLA), which you can do online. The CLA is necessary mainly because you own the
10 | copyright to your changes, even after your contribution becomes part of our
11 | codebase, so we need your permission to use and distribute your code. We also
12 | need to be sure of various other things—for instance that you'll tell us if you
13 | know that your code infringes on other people's patents. You don't have to sign
14 | the CLA until after you've submitted your code for review and a member has
15 | approved it, but you must do it before we can put your code into our codebase.
16 | Before you start working on a larger contribution, you should get in touch with
17 | us first through the issue tracker with your idea so that we can help out and
18 | possibly guide you. Coordinating up front makes it much easier to avoid
19 | frustration later on.
20 |
21 | We use the github
22 | [fork and pull review process](https://help.github.com/articles/using-pull-requests)
23 | to review all contributions. First, fork the cloud-forensics-utils repository by
24 | following the [github instructions](https://help.github.com/articles/fork-a-repo).
25 | Then check out your personal fork:
26 |
27 | $ git clone https://github.com/<username>/cloud-forensics-utils.git
28 |
29 | Add an upstream remote so you can easily keep up to date with the main
30 | repository:
31 |
32 | $ git remote add upstream https://github.com/google/cloud-forensics-utils.git
33 |
34 | To update your local repo from the main:
35 |
36 | $ git pull upstream master
37 |
38 | Please follow the Style Guide when making your changes, and also make sure to
39 | use the project's
40 | [pylintrc](https://github.com/google/cloud-forensics-utils/blob/master/.pylintrc)
41 | and
42 | [yapf config file](https://github.com/google/cloud-forensics-utils/blob/master/.style.yapf).
43 | Once you're ready for review make sure the tests pass:
44 |
45 | $ nosetests -v tests
46 |
47 | Commit your changes to your personal fork and then use the GitHub Web UI to
48 | create and send the pull request. We'll review and merge the change.
49 |
50 | #### Code review
51 |
52 | All submissions, including submissions by project members, require review. To
53 | keep the code base maintainable and readable, all code is developed using a
54 | similar coding style. This ensures:
55 |
56 | The code should be easy to maintain and understand. As a developer you'll
57 | sometimes find yourself thinking "hmm, what is the code supposed to do here?"
58 | You should be able to come back to the code five months later and still
59 | quickly understand what it is supposed to do, and other people who want to
60 | contribute need to be able to understand it just as quickly. That being said,
61 | quick-and-dirty solutions might work in the short term, but we'll keep them
62 | out of the code base to preserve long-term quality. The code review process
63 | ensures that at least two pairs of eyes look over the code, in the hope of
64 | finding potential bugs or errors before they ship. It also improves the
65 | overall code quality and makes sure that every developer can (largely) expect
66 | the same coding style.
67 |
68 | #### Style guide
69 |
70 | We primarily follow the
71 | [Log2Timeline Python Style Guide](https://github.com/log2timeline/l2tdocs/blob/master/process/Style-guide.md).
72 |
73 | #### The small print
74 |
75 | Contributions made by corporations are covered by a different agreement than the
76 | one above, the Software Grant and Corporate Contributor License Agreement.
77 |
--------------------------------------------------------------------------------
/docs/gettingstarted.md:
--------------------------------------------------------------------------------
1 | # Getting started
2 |
3 | ## Installing from pypi
4 |
5 | As easy as:
6 |
7 | ```
8 | $ pip install libcloudforensics
9 | ```
10 |
11 | and you're done!
12 |
13 | ## Using the CLI
14 |
15 | A standalone tool called `cloudforensics` is created during installation.
16 |
17 | ```
18 | $ cloudforensics --help
19 | usage: cloudforensics [-h] {aws,az,gcp} ...
20 |
21 | CLI tool for AWS, Azure and GCP.
22 |
23 | positional arguments:
24 | {aws,az,gcp}
25 | aws Tools for AWS
26 | az Tools for Azure
27 | gcp Tools for GCP
28 |
29 | optional arguments:
30 | -h, --help show this help message and exit
31 | ```
32 |
33 | The implemented functions for each platform can be listed. For example:
34 |
35 | ```
36 | $ cloudforensics gcp -h
37 | usage: cloudforensics gcp [-h] project {listinstances,listdisks,copydisk,startvm,querylogs,listlogs,listservices,creatediskgcs,bucketacls,objectmetadata,listobjects} ...
38 |
39 | positional arguments:
40 | project GCP project ID.
41 | {listinstances,listdisks,copydisk,startvm,querylogs,listlogs,listservices,creatediskgcs,bucketacls,objectmetadata,listobjects}
42 | listinstances List GCE instances in GCP project.
43 | listdisks List GCE disks in GCP project.
44 | copydisk Create a GCP disk copy.
45 | startvm Start a forensic analysis VM.
46 | querylogs Query GCP logs.
47 | listlogs List GCP logs for a project.
48 | listservices List active services for a project.
49 | creatediskgcs Creates GCE persistent disk from image in GCS.
50 | bucketacls List ACLs of a GCS bucket.
51 | objectmetadata List the details of an object in a GCS bucket.
52 | listobjects List the objects in a GCS bucket.
53 |
54 | optional arguments:
55 | -h, --help show this help message and exit
56 | ```
57 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | # Welcome to libcloudforensics's documentation!
2 |
3 | ## Table of contents
4 |
5 | * [Getting started](gettingstarted.md)
6 | * [User manual](usermanual/index.md)
7 | * [How to contribute](contributing.md)
8 |
9 | ## API documentation
10 |
11 | Documentation for the library's functions and classes can be found below:
12 |
13 | ```eval_rst
14 | .. toctree::
15 | :maxdepth: 2
16 | :caption: Google Cloud Platform
17 |
18 | source/libcloudforensics.providers.gcp
19 | source/libcloudforensics.providers.gcp.internal
20 |
21 | .. toctree::
22 | :maxdepth: 2
23 | :caption: Amazon Web Services
24 |
25 | source/libcloudforensics.providers.aws
26 | source/libcloudforensics.providers.aws.internal
27 |
28 | .. toctree::
29 | :maxdepth: 2
30 | :caption: Microsoft Azure
31 |
32 | source/libcloudforensics.providers.azure
33 | source/libcloudforensics.providers.azure.internal
34 |
35 | .. toctree::
36 | :maxdepth: 2
37 | :caption: Scripts
38 |
39 | source/libcloudforensics.scripts
40 | source/libcloudforensics.scripts.utils
41 | ```
42 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx==3.1.1
2 | sphinx-rtd-theme
3 | sphinx-autodoc-typehints
4 | recommonmark
5 |
--------------------------------------------------------------------------------
/docs/source/images/ebs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/google/cloud-forensics-utils/975a6a5b71bb6a941dd783a5acd5647963e781a6/docs/source/images/ebs.png
--------------------------------------------------------------------------------
/docs/source/images/sas.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/google/cloud-forensics-utils/975a6a5b71bb6a941dd783a5acd5647963e781a6/docs/source/images/sas.png
--------------------------------------------------------------------------------
/docs/source/libcloudforensics.providers.aws.internal.rst:
--------------------------------------------------------------------------------
1 | AWS internal provider functions
2 | ===============================
3 |
4 |
5 | libcloudforensics.providers.aws.internal.account module
6 | -------------------------------------------------------
7 |
8 | .. automodule:: libcloudforensics.providers.aws.internal.account
9 | :members:
10 | :undoc-members:
11 | :show-inheritance:
12 |
13 | libcloudforensics.providers.aws.internal.common module
14 | ------------------------------------------------------
15 |
16 | .. automodule:: libcloudforensics.providers.aws.internal.common
17 | :members:
18 | :undoc-members:
19 | :show-inheritance:
20 |
21 | libcloudforensics.providers.aws.internal.ebs module
22 | ---------------------------------------------------
23 |
24 | .. automodule:: libcloudforensics.providers.aws.internal.ebs
25 | :members:
26 | :undoc-members:
27 | :show-inheritance:
28 |
29 | libcloudforensics.providers.aws.internal.ec2 module
30 | ---------------------------------------------------
31 |
32 | .. automodule:: libcloudforensics.providers.aws.internal.ec2
33 | :members:
34 | :undoc-members:
35 | :show-inheritance:
36 |
37 | libcloudforensics.providers.aws.internal.kms module
38 | ---------------------------------------------------
39 |
40 | .. automodule:: libcloudforensics.providers.aws.internal.kms
41 | :members:
42 | :undoc-members:
43 | :show-inheritance:
44 |
45 | libcloudforensics.providers.aws.internal.log module
46 | ---------------------------------------------------
47 |
48 | .. automodule:: libcloudforensics.providers.aws.internal.log
49 | :members:
50 | :undoc-members:
51 | :show-inheritance:
52 |
53 | libcloudforensics.providers.aws.internal.s3 module
54 | ---------------------------------------------------
55 |
56 | .. automodule:: libcloudforensics.providers.aws.internal.s3
57 | :members:
58 | :undoc-members:
59 | :show-inheritance:
--------------------------------------------------------------------------------
/docs/source/libcloudforensics.providers.aws.rst:
--------------------------------------------------------------------------------
1 | AWS forensics package
2 | =====================
3 |
4 | Internal provider functions
5 | ---------------------------
6 |
7 | .. toctree::
8 | :maxdepth: 4
9 |
10 | libcloudforensics.providers.aws.internal
11 |
12 |
13 | libcloudforensics.providers.aws.forensics module
14 | ------------------------------------------------
15 |
16 | .. automodule:: libcloudforensics.providers.aws.forensics
17 | :members:
18 | :undoc-members:
19 | :show-inheritance:
20 |
--------------------------------------------------------------------------------
/docs/source/libcloudforensics.providers.azure.internal.rst:
--------------------------------------------------------------------------------
1 | Azure internal provider functions
2 | =================================
3 |
4 |
5 | libcloudforensics.providers.azure.internal.account module
6 | ---------------------------------------------------------
7 |
8 | .. automodule:: libcloudforensics.providers.azure.internal.account
9 | :members:
10 | :undoc-members:
11 | :show-inheritance:
12 |
13 | libcloudforensics.providers.azure.internal.common module
14 | --------------------------------------------------------
15 |
16 | .. automodule:: libcloudforensics.providers.azure.internal.common
17 | :members:
18 | :undoc-members:
19 | :show-inheritance:
20 |
21 | libcloudforensics.providers.azure.internal.compute module
22 | ---------------------------------------------------------
23 |
24 | .. automodule:: libcloudforensics.providers.azure.internal.compute
25 | :members:
26 | :undoc-members:
27 | :show-inheritance:
28 |
29 | libcloudforensics.providers.azure.internal.compute\_base\_resource module
30 | -------------------------------------------------------------------------
31 |
32 | .. automodule:: libcloudforensics.providers.azure.internal.compute_base_resource
33 | :members:
34 | :undoc-members:
35 | :show-inheritance:
36 |
37 | libcloudforensics.providers.azure.internal.monitoring module
38 | ------------------------------------------------------------
39 |
40 | .. automodule:: libcloudforensics.providers.azure.internal.monitoring
41 | :members:
42 | :undoc-members:
43 | :show-inheritance:
44 |
45 | libcloudforensics.providers.azure.internal.network module
46 | ---------------------------------------------------------
47 |
48 | .. automodule:: libcloudforensics.providers.azure.internal.network
49 | :members:
50 | :undoc-members:
51 | :show-inheritance:
52 |
53 | libcloudforensics.providers.azure.internal.resource module
54 | ----------------------------------------------------------
55 |
56 | .. automodule:: libcloudforensics.providers.azure.internal.resource
57 | :members:
58 | :undoc-members:
59 | :show-inheritance:
60 |
61 | libcloudforensics.providers.azure.internal.storage module
62 | ---------------------------------------------------------
63 |
64 | .. automodule:: libcloudforensics.providers.azure.internal.storage
65 | :members:
66 | :undoc-members:
67 | :show-inheritance:
--------------------------------------------------------------------------------
/docs/source/libcloudforensics.providers.azure.rst:
--------------------------------------------------------------------------------
1 | Azure forensics package
2 | =======================
3 |
4 | Internal provider functions
5 | ---------------------------
6 |
7 | .. toctree::
8 | :maxdepth: 4
9 |
10 | libcloudforensics.providers.azure.internal
11 |
12 |
13 | libcloudforensics.providers.azure.forensics module
14 | --------------------------------------------------
15 |
16 | .. automodule:: libcloudforensics.providers.azure.forensics
17 | :members:
18 | :undoc-members:
19 | :show-inheritance:
20 |
--------------------------------------------------------------------------------
/docs/source/libcloudforensics.providers.gcp.internal.rst:
--------------------------------------------------------------------------------
1 | GCP internal provider functions
2 | ===============================
3 |
4 |
5 | libcloudforensics.providers.gcp.internal.build module
6 | -----------------------------------------------------
7 |
8 | .. automodule:: libcloudforensics.providers.gcp.internal.build
9 | :members:
10 | :undoc-members:
11 | :show-inheritance:
12 |
13 | libcloudforensics.providers.gcp.internal.cloudsql module
14 | --------------------------------------------------------
15 |
16 | .. automodule:: libcloudforensics.providers.gcp.internal.cloudsql
17 | :members:
18 | :undoc-members:
19 | :show-inheritance:
20 |
21 | libcloudforensics.providers.gcp.internal.common module
22 | ------------------------------------------------------
23 |
24 | .. automodule:: libcloudforensics.providers.gcp.internal.common
25 | :members:
26 | :undoc-members:
27 | :show-inheritance:
28 |
29 | libcloudforensics.providers.gcp.internal.compute module
30 | -------------------------------------------------------
31 |
32 | .. automodule:: libcloudforensics.providers.gcp.internal.compute
33 | :members:
34 | :undoc-members:
35 | :show-inheritance:
36 |
37 | libcloudforensics.providers.gcp.internal.compute\_base\_resource module
38 | -----------------------------------------------------------------------
39 |
40 | .. automodule:: libcloudforensics.providers.gcp.internal.compute_base_resource
41 | :members:
42 | :undoc-members:
43 | :show-inheritance:
44 |
45 | libcloudforensics.providers.gcp.internal.function module
46 | --------------------------------------------------------
47 |
48 | .. automodule:: libcloudforensics.providers.gcp.internal.function
49 | :members:
50 | :undoc-members:
51 | :show-inheritance:
52 |
53 | libcloudforensics.providers.gcp.internal.gke module
54 | --------------------------------------------------------
55 |
56 | .. automodule:: libcloudforensics.providers.gcp.internal.gke
57 | :members:
58 | :undoc-members:
59 | :show-inheritance:
60 |
61 | libcloudforensics.providers.gcp.internal.log module
62 | ---------------------------------------------------
63 |
64 | .. automodule:: libcloudforensics.providers.gcp.internal.log
65 | :members:
66 | :undoc-members:
67 | :show-inheritance:
68 |
69 | libcloudforensics.providers.gcp.internal.monitoring module
70 | ----------------------------------------------------------
71 |
72 | .. automodule:: libcloudforensics.providers.gcp.internal.monitoring
73 | :members:
74 | :undoc-members:
75 | :show-inheritance:
76 |
77 | libcloudforensics.providers.gcp.internal.project module
78 | -------------------------------------------------------
79 |
80 | .. automodule:: libcloudforensics.providers.gcp.internal.project
81 | :members:
82 | :undoc-members:
83 | :show-inheritance:
84 |
85 | libcloudforensics.providers.gcp.internal.storage module
86 | -------------------------------------------------------
87 |
88 | .. automodule:: libcloudforensics.providers.gcp.internal.storage
89 | :members:
90 | :undoc-members:
91 | :show-inheritance:
92 |
--------------------------------------------------------------------------------
/docs/source/libcloudforensics.providers.gcp.rst:
--------------------------------------------------------------------------------
1 | GCP forensics package
2 | =====================
3 |
4 | Internal provider functions
5 | ---------------------------
6 |
7 | .. toctree::
8 | :maxdepth: 4
9 |
10 | libcloudforensics.providers.gcp.internal
11 |
12 |
13 | libcloudforensics.providers.gcp.forensics module
14 | ------------------------------------------------
15 |
16 | .. automodule:: libcloudforensics.providers.gcp.forensics
17 | :members:
18 | :undoc-members:
19 | :show-inheritance:
20 |
--------------------------------------------------------------------------------
/docs/source/libcloudforensics.scripts.rst:
--------------------------------------------------------------------------------
1 | Helper classes
2 | =================================
3 |
4 | libcloudforensics.scripts.utils module
5 | --------------------------------------
6 |
7 | .. automodule:: libcloudforensics.scripts.utils
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 |
12 | libcloudforensics.errors module
13 | --------------------------------------
14 |
15 | .. automodule:: libcloudforensics.errors
16 | :members:
17 | :undoc-members:
18 | :show-inheritance:
19 |
20 | libcloudforensics.logging_utils module
21 | --------------------------------------
22 |
23 | .. automodule:: libcloudforensics.logging_utils
24 | :members:
25 | :undoc-members:
26 | :show-inheritance:
27 |
--------------------------------------------------------------------------------
/libcloudforensics/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """libcloud forensics module."""
16 |
17 | # Since moving to poetry, ensure the version number tracked in pyproject.toml is
18 | # also updated
19 | __version__ = '20250331'
20 |
--------------------------------------------------------------------------------
/libcloudforensics/errors.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Generic error wrapper"""
16 |
17 | from libcloudforensics import logging_utils
18 |
19 |
20 | class LCFError(Exception):
21 | """Class to represent a cloud-forensics-utils (CFU) error.
22 |
23 | Attributes:
24 | message (str): The error message.
25 | name (str): Name of the module that generated the error.
26 | """
27 |
28 | def __init__(self,
29 | message: str,
30 | name: str) -> None:
31 | """Initializes the CFUError with provided message.
32 |
33 | Args:
34 | message (str): The error message.
35 | name (str): The name of the module that generated the error.
36 | """
37 | super().__init__(message)
38 | self.message = message
39 | self.name = name
40 | logging_utils.SetUpLogger(self.name)
41 | logger = logging_utils.GetLogger(self.name)
42 | logger.error(self.message)
43 |
44 |
45 | class CredentialsConfigurationError(LCFError):
46 | """Error when an issue with the credentials configuration is encountered."""
47 |
48 |
49 | class InvalidFileFormatError(LCFError):
50 | """Error when an issue with file format is encountered."""
51 |
52 |
53 | class InvalidNameError(LCFError):
54 | """Error when an issue with resource name is encountered."""
55 |
56 |
57 | class ResourceNotFoundError(LCFError):
58 | """Error when an issue with non-existent resource is encountered."""
59 |
60 |
61 | class ResourceCreationError(LCFError):
62 | """Error when an issue with creating a new resource is encountered."""
63 |
64 |
65 | class ResourceDeletionError(LCFError):
66 | """Error when an issue with deleting a resource is encountered."""
67 |
68 |
69 | class InstanceStateChangeError(LCFError):
70 | """Error when an issue with changing an instance state is encountered."""
71 |
72 |
73 | class ServiceAccountRemovalError(LCFError):
74 | """Error when an issue with removing a service account is encountered."""
75 |
76 |
77 | class InstanceProfileCreationError(LCFError):
78 | """Error when there is an issue creating an instance profile."""
79 |
80 |
81 | class OperationFailedError(LCFError):
82 | """Error when an operation did not succeed."""
83 |
84 |
85 | class TransferCreationError(LCFError):
86 | """Error when an issue with creating a new transfer job is encountered."""
87 |
88 |
89 | class TransferExecutionError(LCFError):
90 | """Error when an issue with running a transfer job is encountered."""
91 |
92 |
93 | class ResourceAlreadyExistsError(LCFError):
94 | """Error when trying to create a resource with existing name."""
95 |
96 |
97 | class AmbiguousIdentifierError(LCFError):
98 | """Error when an identifier could refer to more than one resource."""
99 |
--------------------------------------------------------------------------------
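The error classes above are raised throughout the providers with an error message and the raising module's name, and the message is logged automatically in `LCFError.__init__`. A minimal usage sketch (not part of the repository; the disk lookup helper is hypothetical):

```python
from libcloudforensics import errors


def FindDisk(disk_name: str) -> None:
  """Hypothetical helper that fails to find a disk."""
  raise errors.ResourceNotFoundError(
      'Could not find disk {0:s}'.format(disk_name), __name__)


try:
  FindDisk('evidence-disk')
except errors.LCFError as exception:
  # Every specific error derives from LCFError, so one except clause suffices.
  # The message was already logged by the error's constructor.
  print(exception.message)
```

Because the constructor wires up `SetUpLogger`/`GetLogger`, callers get consistent, colorized error logging without any extra setup.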
/libcloudforensics/logging_utils.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Module providing custom logging formatters and colorization for ANSI
16 | compatible terminals."""
17 |
18 | import logging
19 | import random
20 | import sys
21 | from typing import List
22 |
23 |
24 | def _GenerateColorSequences() -> List[str]:
25 | """Generates ANSI codes for 256 colors.
26 | Works on Linux and macOS, Windows (WSL) to be confirmed.
27 |
28 | Returns:
29 | List[str]: A list of ANSI codes.
30 | """
31 | sequences = []
32 | for i in range(0, 16):
33 | for j in range(0, 16):
34 | code = str(i * 16 + j)
35 | seq = '\u001b[38;5;' + code + 'm'
36 | sequences.append(seq)
37 | return sequences
38 |
39 |
40 | COLOR_SEQS = _GenerateColorSequences()
41 | RESET_SEQ = '\u001b[0m'
42 |
43 | # Cherrypick a few interesting values. We still want the whole list of colors
44 | # so that modules have a good amount of colors to choose from.
45 | # pylint: disable=unbalanced-tuple-unpacking
46 | BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = COLOR_SEQS[8:16]
47 | BG_RED = '\u001b[41m' # Red background
48 | BOLD = '\u001b[1m' # Bold / bright modifier
49 |
50 | # We'll get something like this:
51 | # [2020-07-09 18:06:05,187] [libcloudforensics] INFO Disk successfully copied
52 | LOG_FORMAT = ('[%(asctime)s] [{0:s}{color:s}%(name)-20s{1:s}] %(levelname)-8s'
53 | ' %(message)s')
54 |
55 | LEVEL_COLOR_MAP = {
56 | 'WARNING': YELLOW,
57 | 'INFO': WHITE,
58 | 'DEBUG': BLUE,
59 | 'CRITICAL': BOLD + BG_RED + WHITE,
60 | 'ERROR': RED
61 | }
62 |
63 |
64 | class Formatter(logging.Formatter):
65 | """Helper class used to add color to log messages depending on their level."""
66 |
67 | def __init__(self,
68 | colorize: bool = True,
69 | random_color: bool = False,
70 | **kwargs: str) -> None:
71 | """Initializes the Formatter object.
72 |
73 | Args:
74 | colorize (bool): If True, output will be colorized.
75 | random_color (bool): If True, will colorize the module name with a random
76 | color picked from COLOR_SEQS.
77 | """
78 | self.colorize = colorize
79 | kwargs['fmt'] = LOG_FORMAT.format('', '', color='')
80 | if self.colorize:
81 | color = ''
82 | if random_color:
83 | color = random.choice(COLOR_SEQS)
84 | kwargs['fmt'] = LOG_FORMAT.format(BOLD, RESET_SEQ, color=color)
85 | super().__init__(**kwargs) # type: ignore
86 |
87 | def format(self, record: logging.LogRecord) -> str:
88 | """Hooks the native format method and colorizes messages if needed.
89 |
90 | Args:
91 | record (logging.LogRecord): Native log record.
92 |
93 | Returns:
94 | str: The formatted message string.
95 | """
96 | if self.colorize:
97 | message = record.getMessage()
98 | loglevel_color = LEVEL_COLOR_MAP.get(record.levelname)
99 | if loglevel_color:
100 | message = loglevel_color + message + RESET_SEQ
101 | record.msg = message
102 | return super().format(record)
103 |
104 |
105 | def SetUpLogger(name: str, no_newline: bool = False) -> None:
106 | """Setup a logger.
107 |
108 | Args:
109 | name (str): The name for the logger.
110 | no_newline (bool): Optional. Whether or not to disable new lines in the
111 | logger's output. Defaults to False.
112 | """
113 | # We can ignore the mypy warning below since the manager is created at runtime
114 | #pylint: disable=no-member
115 | add_handler = name not in logging.root.manager.loggerDict # type: ignore
116 | # pylint: enable=no-member
117 | logger = logging.getLogger(name)
118 | logger.setLevel(logging.INFO)
119 | if add_handler:
120 | console_handler = logging.StreamHandler(sys.stdout)
121 | if no_newline:
122 | console_handler.terminator = ''
123 | formatter = Formatter(random_color=True)
124 | console_handler.setFormatter(formatter)
125 | logger.addHandler(console_handler)
126 |
127 |
128 | def GetLogger(name: str) -> logging.Logger:
129 | """Return a logger.
130 |
131 | This is a wrapper around logging.getLogger that is intended to be used by
132 | the other modules so that they don't have to import the logging module +
133 | this module.
134 |
135 | Args:
136 |     name (str): The name for the logger.
137 |
138 | Returns:
139 | logging.Logger: The logger.
140 | """
141 | return logging.getLogger(name)
142 |
--------------------------------------------------------------------------------
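As errors.py above illustrates, the intended pattern is to set up a named logger once and then fetch it wherever it is needed. A minimal sketch (not part of the repository):

```python
from libcloudforensics import logging_utils

# Create (once) a colorized stdout handler for this module's logger.
logging_utils.SetUpLogger(__name__)
logger = logging_utils.GetLogger(__name__)

logger.info('Disk successfully copied')   # Rendered with LOG_FORMAT above.
logger.error('Could not create KMS key')  # Colorized per LEVEL_COLOR_MAP.
```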
/libcloudforensics/providers/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/libcloudforensics/providers/aws/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/libcloudforensics/providers/aws/internal/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/libcloudforensics/providers/aws/internal/common.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Common utilities."""
16 | from typing import Dict, List, TYPE_CHECKING, Any
17 |
18 | if TYPE_CHECKING:
19 | import botocore
20 |
21 | EC2_SERVICE = 'ec2'
22 | ACCOUNT_SERVICE = 'sts'
23 | KMS_SERVICE = 'kms'
24 | CLOUDTRAIL_SERVICE = 'cloudtrail'
25 | S3_SERVICE = 's3'
26 | IAM_SERVICE = 'iam'
27 |
28 | # Resource types constant
29 | INSTANCE = 'instance'
30 | VOLUME = 'volume'
31 | SNAPSHOT = 'snapshot'
32 |
33 | # Default Amazon Machine Images to use for bootstrapping instances
34 | UBUNTU_2204_FILTER = 'ubuntu/images/hvm-ssd/ubuntu-jammy-22.04-amd64-server-20230728' # pylint: disable=line-too-long
35 | ALINUX2_BASE_FILTER = 'amzn2-ami-hvm-2*-x86_64-gp2'
36 |
37 |
38 | def CreateTags(resource: str, tags: Dict[str, str]) -> Dict[str, Any]:
39 | """Create AWS Tag Specifications.
40 |
41 | Args:
42 | resource (str): The type of AWS resource.
43 | tags (Dict[str, str]): A dictionary of tags to add to the resource.
44 |
45 | Returns:
46 | Dict[str, Any]: A dictionary for AWS Tag Specifications.
47 | """
48 |
49 | tag_specifications = {
50 | 'ResourceType': resource,
51 | 'Tags': []
52 | } # type: Dict[str, Any]
53 | for tag in tags:
54 | tag_specifications['Tags'].append({
55 | 'Key': tag,
56 | 'Value': tags[tag]
57 | })
58 | return tag_specifications
59 |
60 |
61 | def GetInstanceTypeByCPU(cpu_cores: int) -> str:
62 | """Return the instance type for the requested number of CPU cores.
63 |
64 | Args:
65 | cpu_cores (int): The number of requested cores.
66 |
67 | Returns:
68 | str: The type of instance that matches the number of cores.
69 |
70 | Raises:
71 | ValueError: If the requested amount of cores is unavailable.
72 | """
73 |
74 | cpu_cores_to_instance_type = {
75 | 1: 't2.small',
76 | 2: 'm4.large',
77 | 4: 'm4.xlarge',
78 | 8: 'm4.2xlarge',
79 | 16: 'm4.4xlarge',
80 | 32: 'm5.8xlarge',
81 | 40: 'm4.10xlarge',
82 | 48: 'm5.12xlarge',
83 | 64: 'm4.16xlarge',
84 | 96: 'm5.24xlarge',
85 | 128: 'x1.32xlarge'
86 | }
87 | if cpu_cores not in cpu_cores_to_instance_type:
88 | raise ValueError(
89 | 'Cannot start a machine with {0:d} CPU cores. CPU cores should be one'
90 | ' of: {1:s}'.format(
91 | cpu_cores, ', '.join(map(str, cpu_cores_to_instance_type.keys()))
92 | ))
93 | return cpu_cores_to_instance_type[cpu_cores]
94 |
95 |
96 | def ExecuteRequest(client: 'botocore.client.EC2',
97 | func: str,
98 | kwargs: Dict[str, Any]) -> List[Dict[str, Any]]:
99 | """Execute a request to the boto3 API.
100 |
101 | Args:
102 |     client (botocore.client.EC2): A boto3 client object.
103 | func (str): A boto3 function to query from the client.
104 | kwargs (Dict): A dictionary of parameters for the function func. Expected
105 | keys are strings, values can be of multiple types. E.g.:
106 | {'InstanceIds': ['instance_id'], 'MaxResults': 12}.
107 |
108 | Returns:
109 | List[Dict]: A list of dictionaries (responses from the
110 | request), e.g. [{'Groups': [{...}], 'Instances': [{...}]}, {...}]
111 |
112 | Raises:
113 | RuntimeError: If the request to the boto3 API could not complete.
114 | """
115 | responses = []
116 | next_token = None
117 | while True:
118 | if next_token:
119 | kwargs['NextToken'] = next_token
120 | request = getattr(client, func)
121 | try:
122 | response = request(**kwargs)
123 | except client.exceptions.ClientError as exception:
124 | raise RuntimeError('Could not process request: {0:s}'.format(
125 | str(exception))) from exception
126 | responses.append(response)
127 | next_token = response.get('NextToken')
128 | if not next_token:
129 | return responses
130 |
--------------------------------------------------------------------------------
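A short sketch (not part of the repository) exercising the helpers above; the boto3 client construction and the region are assumptions for illustration only:

```python
import boto3

from libcloudforensics.providers.aws.internal import common

# Build the TagSpecifications structure expected by EC2 create_* calls.
tags = common.CreateTags(common.VOLUME, {'Name': 'evidence-copy'})
# tags == {'ResourceType': 'volume',
#          'Tags': [{'Key': 'Name', 'Value': 'evidence-copy'}]}

# Map a requested core count to a known instance type.
instance_type = common.GetInstanceTypeByCPU(4)  # 'm4.xlarge'

# Page through a boto3 call; ExecuteRequest follows NextToken automatically.
ec2 = boto3.client(common.EC2_SERVICE, region_name='us-east-1')
for response in common.ExecuteRequest(
    ec2, 'describe_instances', {'MaxResults': 50}):
  for reservation in response.get('Reservations', []):
    for instance in reservation.get('Instances', []):
      print(instance['InstanceId'])
```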
/libcloudforensics/providers/aws/internal/iampolicies/ebs_copy_to_s3_policy.json:
--------------------------------------------------------------------------------
1 | {
2 | "Version": "2012-10-17",
3 | "Statement": [
4 | {
5 | "Effect": "Allow",
6 | "Action": "s3:PutObject",
7 | "Resource": "arn:aws:s3:::*"
8 | },
9 | {
10 | "Effect": "Allow",
11 | "Action": [
12 | "ec2:AttachVolume",
13 | "ec2:CreateVolume",
14 | "ec2:DeleteVolume",
15 | "ec2:DescribeVolumes",
16 | "ec2:DetachVolume"
17 | ],
18 | "Resource": "*"
19 | },
20 | {
21 | "Effect": "Allow",
22 | "Action": [
23 | "ec2:CreateTags"
24 | ],
25 | "Resource": "arn:*:ec2:*:*:volume/*"
26 | }
27 | ]
28 | }
29 |
--------------------------------------------------------------------------------
/libcloudforensics/providers/aws/internal/iampolicies/ec2_assume_role_policy.json:
--------------------------------------------------------------------------------
1 | {
2 | "Version": "2012-10-17",
3 | "Statement": [
4 | {
5 | "Effect": "Allow",
6 | "Principal": {
7 | "Service": "ec2.amazonaws.com"
8 | },
9 | "Action": "sts:AssumeRole"
10 | }
11 | ]
12 | }
13 |
--------------------------------------------------------------------------------
/libcloudforensics/providers/aws/internal/iampolicies/revoke_old_sessions.json:
--------------------------------------------------------------------------------
1 | {
2 | "Version": "2012-10-17",
3 | "Statement": [
4 | {
5 | "Effect": "Deny",
6 | "Action": "*",
7 | "Resource": "*",
8 | "Condition": {
9 | "DateLessThan": {
10 | "aws:TokenIssueTime": "DATE"
11 | }
12 | }
13 | }
14 | ]
15 | }
16 |
--------------------------------------------------------------------------------
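The "DATE" value above is a placeholder; presumably a real timestamp is substituted before the policy is attached. A hypothetical sketch of what such a substitution could look like (the file path and timestamp format are assumptions, not the library's actual behaviour):

```python
import datetime
import json

with open('revoke_old_sessions.json', 'r') as policy_file:
  policy_text = policy_file.read()

# Deny any action performed with session tokens issued before "now".
now = datetime.datetime.now(datetime.timezone.utc)
policy_text = policy_text.replace('DATE', now.strftime('%Y-%m-%dT%H:%M:%SZ'))
policy = json.loads(policy_text)  # Valid JSON once the placeholder is filled.

print(policy['Statement'][0]['Condition']['DateLessThan']['aws:TokenIssueTime'])
```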
/libcloudforensics/providers/aws/internal/kms.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """KMS functionality."""
16 |
17 | import json
18 | from typing import Optional, TYPE_CHECKING
19 |
20 | from libcloudforensics import errors
21 | from libcloudforensics.providers.aws.internal import common
22 |
23 | if TYPE_CHECKING:
24 | # TYPE_CHECKING is always False at runtime, therefore it is safe to ignore
25 |   # the following cyclic import, as it is only used for type hints
26 | from libcloudforensics.providers.aws.internal import account # pylint: disable=cyclic-import
27 |
28 |
29 | class KMS:
30 | """Class that represents AWS KMS services."""
31 |
32 | def __init__(self,
33 | aws_account: 'account.AWSAccount') -> None:
34 | """Initialize the AWS KMS client object.
35 |
36 | Args:
37 | aws_account (AWSAccount): An AWS account object.
38 | """
39 | self.aws_account = aws_account
40 |
41 | def CreateKMSKey(self) -> str:
42 | """Create a KMS key.
43 |
44 | Returns:
45 | str: The KMS key ID for the key that was created.
46 |
47 | Raises:
48 | ResourceCreationError: If the key could not be created.
49 | """
50 |
51 | client = self.aws_account.ClientApi(common.KMS_SERVICE)
52 | try:
53 | kms_key = client.create_key()
54 | except client.exceptions.ClientError as exception:
55 | raise errors.ResourceCreationError(
56 | 'Could not create KMS key: {0!s}'.format(
57 | exception), __name__) from exception
58 |
59 | # The response contains the key ID
60 | key_id = kms_key['KeyMetadata']['KeyId'] # type: str
61 | return key_id
62 |
63 | def ShareKMSKeyWithAWSAccount(self,
64 | kms_key_id: str,
65 | aws_account_id: str) -> None:
66 | """Share a KMS key.
67 |
68 | Args:
69 | kms_key_id (str): The KMS key ID of the key to share.
70 | aws_account_id (str): The AWS Account ID to share the KMS key with.
71 |
72 | Raises:
73 | RuntimeError: If the key could not be shared.
74 | """
75 |
76 | share_policy = {
77 | 'Sid': 'Allow use of the key',
78 | 'Effect': 'Allow',
79 | 'Principal': {
80 | 'AWS': 'arn:aws:iam::{0:s}:root'.format(aws_account_id)
81 | },
82 | 'Action': [
83 | # kms:*crypt and kms:ReEncrypt* are necessary to transfer
84 | # encrypted EBS resources across accounts.
85 | 'kms:Encrypt',
86 | 'kms:Decrypt',
87 | 'kms:ReEncrypt*',
88 | # kms:CreateGrant is necessary to transfer encrypted EBS
89 | # resources across regions.
90 | 'kms:CreateGrant'
91 | ],
92 | 'Resource': '*'
93 | }
94 | client = self.aws_account.ClientApi(common.KMS_SERVICE)
95 | try:
96 | policy = json.loads(client.get_key_policy(
97 | KeyId=kms_key_id, PolicyName='default')['Policy'])
98 | policy['Statement'].append(share_policy)
99 | # Update the key policy so that it is shared with the AWS account.
100 | client.put_key_policy(
101 | KeyId=kms_key_id, PolicyName='default', Policy=json.dumps(policy))
102 | except client.exceptions.ClientError as exception:
103 | raise RuntimeError('Could not share KMS key {0:s}: {1:s}'.format(
104 | kms_key_id, str(exception))) from exception
105 |
106 | def DeleteKMSKey(self, kms_key_id: Optional[str] = None) -> None:
107 | """Delete a KMS key.
108 |
109 |     Schedule the KMS key for deletion. By default, users have a 30-day
110 | window before the key gets deleted.
111 |
112 | Args:
113 | kms_key_id (str): The ID of the KMS key to delete.
114 |
115 | Raises:
116 | ResourceDeletionError: If the key could not be scheduled for deletion.
117 | """
118 |
119 | if not kms_key_id:
120 | return
121 |
122 | client = self.aws_account.ClientApi(common.KMS_SERVICE)
123 | try:
124 | client.schedule_key_deletion(KeyId=kms_key_id)
125 | except client.exceptions.ClientError as exception:
126 | raise errors.ResourceDeletionError(
127 |           'Could not schedule the KMS key {0:s} for deletion: {1!s}'.format(
128 |               kms_key_id, exception), __name__) from exception
129 |
--------------------------------------------------------------------------------
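
A minimal usage sketch for the KMS class above. It assumes an AWSAccount is constructed with a default availability zone (as in this provider's account module); the zone and target account ID below are placeholders.

from libcloudforensics.providers.aws.internal import account, kms

# Placeholder zone and account ID; AWS credentials are read from the environment.
aws_account = account.AWSAccount('us-east-2b')
kms_client = kms.KMS(aws_account)
key_id = kms_client.CreateKMSKey()
kms_client.ShareKMSKeyWithAWSAccount(key_id, '123456789012')
kms_client.DeleteKMSKey(key_id)  # Schedules deletion; 30-day window by default.
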
/libcloudforensics/providers/aws/internal/log.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Log functionality."""
16 | from typing import TYPE_CHECKING, Dict, List, Optional, Any
17 |
18 | from libcloudforensics.providers.aws.internal import common
19 |
20 | if TYPE_CHECKING:
21 | # TYPE_CHECKING is always False at runtime, therefore it is safe to ignore
22 |   # the following cyclic import, as it is only used for type hints
23 | from libcloudforensics.providers.aws.internal import account # pylint: disable=cyclic-import
24 | from datetime import datetime
25 |
26 |
27 | class AWSCloudTrail:
28 | """Class representing an AWS CloudTrail service.
29 |
30 | Attributes:
31 | aws_account (AWSAccount): The AWS account to use.
32 | """
33 |
34 | def __init__(self, aws_account: 'account.AWSAccount') -> None:
35 | """Initialize an AWS CloudTrail client.
36 |
37 | Args:
38 | aws_account (AWSAccount): The AWS account to use.
39 | """
40 |
41 | self.aws_account = aws_account
42 |
43 | def LookupEvents(
44 | self,
45 | qfilter: Optional[str] = None,
46 | starttime: Optional['datetime'] = None,
47 | endtime: Optional['datetime'] = None) -> List[Dict[str, Any]]:
48 | """Lookup events in the CloudTrail logs of this account.
49 |
50 | Example usage:
51 | # pylint: disable=line-too-long
52 | # qfilter = 'key,value'
53 | # starttime = datetime(2020,5,5,17,33,00)
54 | # LookupEvents(qfilter=qfilter, starttime=starttime)
55 | # Check documentation for qfilter details
56 | # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/cloudtrail.html#CloudTrail.Client.lookup_events
57 |
58 | Args:
59 |       qfilter (str): Optional. Filter for the query, in the form 'key,value'.
60 | starttime (datetime): Optional. Start datetime to add to query filter.
61 | endtime (datetime): Optional. End datetime to add to query filter.
62 |
63 | Returns:
64 | List[Dict]: A list of events. E.g. [{'EventId': 'id', ...},
65 | {'EventId': ...}]
66 | """
67 |
68 | events = []
69 |
70 | client = self.aws_account.ClientApi(common.CLOUDTRAIL_SERVICE)
71 |
72 | params = {} # type: Dict[str, Any]
73 | if qfilter:
74 | k, v = qfilter.split(',')
75 | filters = [{'AttributeKey': k, 'AttributeValue': v}]
76 | params = {'LookupAttributes': filters}
77 | if starttime:
78 | params['StartTime'] = starttime
79 | if endtime:
80 | params['EndTime'] = endtime
81 |
82 | responses = common.ExecuteRequest(client, 'lookup_events', params)
83 | for response in responses:
84 | for entry in response['Events']:
85 | events.append(entry)
86 | return events
87 |
--------------------------------------------------------------------------------
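
A short sketch of querying CloudTrail with the AWSCloudTrail class above; the attribute key/value pair and the time window are placeholders, and the AWSAccount constructor is again assumed to take a default availability zone.

from datetime import datetime

from libcloudforensics.providers.aws.internal import account, log

aws_account = account.AWSAccount('us-east-2b')  # Placeholder zone.
trail = log.AWSCloudTrail(aws_account)
events = trail.LookupEvents(
    qfilter='Username,alice',  # Placeholder CloudTrail attribute key/value.
    starttime=datetime(2020, 5, 5, 17, 33),
    endtime=datetime(2020, 5, 6))
for event in events:
  print(event['EventId'])
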
/libcloudforensics/providers/azure/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/libcloudforensics/providers/azure/internal/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/libcloudforensics/providers/azure/internal/account.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Represents an Azure account."""
16 |
17 | from typing import Optional
18 |
19 | # pylint: disable=line-too-long
20 | from libcloudforensics.providers.azure.internal import common
21 | from libcloudforensics.providers.azure.internal import compute as compute_module
22 | from libcloudforensics.providers.azure.internal import monitoring as monitoring_module
23 | from libcloudforensics.providers.azure.internal import network as network_module
24 | from libcloudforensics.providers.azure.internal import resource as resource_module
25 | from libcloudforensics.providers.azure.internal import storage as storage_module
26 | from libcloudforensics import logging_utils
27 | # pylint: enable=line-too-long
28 |
29 | logging_utils.SetUpLogger(__name__)
30 | logger = logging_utils.GetLogger(__name__)
31 |
32 |
33 | class AZAccount:
34 | """Class that represents an Azure Account.
35 |
36 | Attributes:
37 | subscription_id (str): The Azure subscription ID to use.
38 | credentials (ServicePrincipalCredentials): An Azure credentials object.
39 | default_region (str): The default region to create new resources in.
40 | default_resource_group_name (str): The default resource group in which to
41 |         create new resources.
42 | """
43 |
44 | def __init__(self,
45 | default_resource_group_name: str,
46 | default_region: str = 'eastus',
47 | profile_name: Optional[str] = None) -> None:
48 | """Initialize the AZAccount class.
49 |
50 | Args:
51 | default_resource_group_name (str): The default resource group in which to
52 |           create new resources. If the resource group does not exist,
53 | it will be automatically created.
54 | default_region (str): Optional. The default region to create new
55 | resources in. Default is eastus.
56 | profile_name (str): Optional. The name of the profile to use for Azure
57 | operations. For more information on profiles, see GetCredentials()
58 | in libcloudforensics.providers.azure.internal.common.py. Default
59 | does not use profiles and will authenticate to Azure using
60 | environment variables.
61 | """
62 | self.subscription_id, self.credentials = common.GetCredentials(profile_name)
63 | self.default_region = default_region
64 | self._compute = None # type: Optional[compute_module.AZCompute]
65 | self._monitoring = None # type: Optional[monitoring_module.AZMonitoring]
66 | self._network = None # type: Optional[network_module.AZNetwork]
67 | self._resource = None # type: Optional[resource_module.AZResource]
68 | self._storage = None # type: Optional[storage_module.AZStorage]
69 | self.default_resource_group_name = self.resource.GetOrCreateResourceGroup(
70 | default_resource_group_name)
71 |
72 | @property
73 | def compute(self) -> compute_module.AZCompute:
74 | """Get an Azure compute object for the account.
75 |
76 | Returns:
77 | AZCompute: An Azure compute object.
78 | """
79 | if self._compute:
80 | return self._compute
81 | self._compute = compute_module.AZCompute(self)
82 | return self._compute
83 |
84 | @property
85 | def monitoring(self) -> monitoring_module.AZMonitoring:
86 | """Get an Azure monitoring object for the account.
87 |
88 | Returns:
89 | AZMonitoring: An Azure monitoring object.
90 | """
91 | if self._monitoring:
92 | return self._monitoring
93 | self._monitoring = monitoring_module.AZMonitoring(self)
94 | return self._monitoring
95 |
96 | @property
97 | def network(self) -> network_module.AZNetwork:
98 | """Get an Azure network object for the account.
99 |
100 | Returns:
101 | AZNetwork: An Azure network object.
102 | """
103 | if self._network:
104 | return self._network
105 | self._network = network_module.AZNetwork(self)
106 | return self._network
107 |
108 | @property
109 | def resource(self) -> resource_module.AZResource:
110 | """Get an Azure resource object for the account.
111 |
112 | Returns:
113 | AZResource: An Azure resource object.
114 | """
115 | if self._resource:
116 | return self._resource
117 | self._resource = resource_module.AZResource(self)
118 | return self._resource
119 |
120 | @property
121 | def storage(self) -> storage_module.AZStorage:
122 | """Get an Azure storage object for the account.
123 |
124 | Returns:
125 | AZStorage: An Azure storage object.
126 | """
127 | if self._storage:
128 | return self._storage
129 | self._storage = storage_module.AZStorage(self)
130 | return self._storage
131 |
--------------------------------------------------------------------------------
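
A small sketch of how the AZAccount service properties above are used; the resource group name is a placeholder, and authentication is assumed to come from environment variables since no profile is passed.

from libcloudforensics.providers.azure.internal import account

az = account.AZAccount('forensics-rg')  # Placeholder resource group name.
# Each property builds its client on first access and caches it afterwards.
print(az.resource.ListSubscriptionIDs())
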
/libcloudforensics/providers/azure/internal/compute_base_resource.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Azure Compute Base Resource."""
16 |
17 | from typing import Optional, List, TYPE_CHECKING
18 |
19 | # pylint: disable=import-error
20 | from azure.mgmt import compute as compute_sdk # type: ignore
21 | # pylint: enable=import-error
22 |
23 | from libcloudforensics import errors
24 | from libcloudforensics.providers.azure.internal import common
25 |
26 | if TYPE_CHECKING:
27 | # TYPE_CHECKING is always False at runtime, therefore it is safe to ignore
28 |   # the following cyclic import, as it is only used for type hints
29 | from libcloudforensics.providers.azure.internal import account # pylint: disable=cyclic-import, ungrouped-imports
30 |
31 |
32 | class AZComputeResource:
33 |   """Class that represents an Azure compute resource.
34 |
35 | Attributes:
36 | az_account (AZAccount): An Azure account object.
37 | resource_group_name (str): The Azure resource group name for the resource.
38 | resource_id (str): The Azure resource ID.
39 | name (str): The resource's name.
40 | region (str): The region in which the resource is located.
41 | zones (List[str]): Optional. Availability zones within the region where
42 | the resource is located.
43 | """
44 |
45 | def __init__(self,
46 | az_account: 'account.AZAccount',
47 | resource_id: str,
48 | name: str,
49 | region: str,
50 | zones: Optional[List[str]] = None) -> None:
51 | """Initialize the AZComputeResource class.
52 |
53 | Args:
54 | az_account (AZAccount): An Azure account object.
55 | resource_id (str): The Azure resource ID.
56 | name (str): The resource's name.
57 | region (str): The region in which the resource is located.
58 | zones (List[str]): Optional. Availability zones within the region where
59 | the resource is located.
60 |
61 | Raises:
62 | InvalidNameError: If the resource ID is malformed.
63 | """
64 |
65 | if not common.REGEX_COMPUTE_RESOURCE_ID.match(resource_id):
66 | raise errors.InvalidNameError(
67 | 'Malformed resource ID: expected {0:s}, got {1:s}'.format(
68 | common.REGEX_COMPUTE_RESOURCE_ID.pattern, resource_id), __name__)
69 |
70 | self.az_account = az_account
71 | # Format of resource_id: /subscriptions/{id}/resourceGroups/{
72 | # resource_group_name}/providers/Microsoft.Compute/{resourceType}/{resource}
73 | self.resource_group_name = resource_id.split('/')[4]
74 | self.resource_id = resource_id
75 | self.name = name
76 | self.region = region
77 | self.zones = zones
78 |
79 | @property
80 | def compute_client(self) -> compute_sdk.ComputeManagementClient:
81 |     """Return the Azure compute client object associated with the Azure
82 | account.
83 |
84 | Returns:
85 | ComputeManagementClient: An Azure compute client object.
86 | """
87 | return self.az_account.compute.compute_client
88 |
--------------------------------------------------------------------------------
/libcloudforensics/providers/azure/internal/monitoring.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Azure Monitoring functionality."""
16 |
17 | from typing import List, Optional, Dict, TYPE_CHECKING
18 |
19 | from azure.mgmt.monitor import MonitorManagementClient
20 | from azure.core.exceptions import HttpResponseError
21 |
22 | if TYPE_CHECKING:
23 | # TYPE_CHECKING is always False at runtime, therefore it is safe to ignore
24 |   # the following cyclic import, as it is only used for type hints
25 | from libcloudforensics.providers.azure.internal import account # pylint: disable=cyclic-import
26 | from datetime import datetime
27 |
28 |
29 | class AZMonitoring:
30 | """Azure Monitoring.
31 |
32 | Attributes:
33 | monitoring_client (MonitorManagementClient): An Azure monitoring client
34 | object.
35 | """
36 |
37 | def __init__(self,
38 | az_account: 'account.AZAccount') -> None:
39 | """Initialize the Azure monitoring class.
40 |
41 | Args:
42 | az_account (AZAccount): An Azure account object.
43 | """
44 | self.monitoring_client = MonitorManagementClient(
45 | az_account.credentials, az_account.subscription_id)
46 |
47 | def ListAvailableMetricsForResource(self, resource_id: str) -> List[str]:
48 | """List the available metrics for a given resource.
49 |
50 | Args:
51 | resource_id (str): The resource ID from which to list available
52 | metrics.
53 |
54 | Returns:
55 | List[str]: A list of metrics that can be queried for the resource ID.
56 |
57 | Raises:
58 | RuntimeError: If the resource could not be found.
59 | """
60 | try:
61 | return [metric.name.value for metric
62 | in self.monitoring_client.metric_definitions.list(resource_id)]
63 | except HttpResponseError as exception:
64 | raise RuntimeError(
65 | 'Could not fetch metrics for resource {0:s}. Please make sure you '
66 | 'specified the full resource ID url, i.e. /subscriptions/<>/'
67 | 'resourceGroups/<>/providers/<>/<>/yourResourceName'.format(
68 | resource_id)) from exception
69 |
70 | def GetMetricsForResource(
71 | self,
72 | resource_id: str,
73 | metrics: str,
74 | from_date: Optional['datetime'] = None,
75 | to_date: Optional['datetime'] = None,
76 | interval: Optional[str] = None,
77 | aggregation: str = 'Total',
78 | qfilter: Optional[str] = None) -> Dict[str, Dict[str, str]]:
79 | """Retrieve metrics for a given resource.
80 |
81 | Args:
82 | resource_id (str): The resource ID for which to lookup the metric.
83 | metrics (str): A comma separated list of metrics to retrieve. E.g.
84 | 'Percentage CPU,Network In'.
85 | from_date (datetime.datetime): Optional. A start date from which to get
86 | the metric. If passed, to_date is also required.
87 | to_date (datetime.datetime): Optional. An end date until which to get the
88 | metric. If passed, from_date is also required.
89 |       interval (str): Optional. An interval for the metrics, e.g. 'PT1H' will
90 |         output the metric's values with one-hour granularity.
91 | aggregation (str): Optional. The type of aggregation for the metric's
92 | values. Default is 'Total'. Possible values: 'Total', 'Average'.
93 | Both can be retrieved if passed as a single string, separated by a
94 | comma.
95 | qfilter (str): Optional. A filter for the query. See
96 | https://docs.microsoft.com/en-us/rest/api/monitor/metrics/list for
97 | details about filtering.
98 |
99 | Returns:
100 | Dict[str, Dict[str, str]]: A dictionary mapping the metric to a dict of
101 | the metric's values, per timestamp.
102 |
103 | Raises:
104 | RuntimeError: If the resource could not be found.
105 | """
106 | kwargs = {'metricnames': metrics, 'aggregation': aggregation}
107 | if from_date and to_date:
108 | timespan = '{0:s}/{1:s}'.format(from_date.strftime('%Y-%m-%dT%H:%M:%SZ'),
109 | to_date.strftime('%Y-%m-%dT%H:%M:%SZ'))
110 | kwargs['timespan'] = timespan
111 | if interval:
112 | kwargs['interval'] = interval
113 | try:
114 | metrics_data = self.monitoring_client.metrics.list(
115 | resource_id, filter=qfilter, **kwargs)
116 | except HttpResponseError as exception:
117 | raise RuntimeError(
118 | 'Could not fetch metrics {0:s} for resource {1:s}. Please make '
119 | 'sure you specified the full resource ID url, i.e. /subscriptions/'
120 | '<>/resourceGroups/<>/providers/<>/<>/yourResourceName'.format(
121 | metrics, resource_id)) from exception
122 | results = {} # type: Dict[str, Dict[str, str]]
123 | for metric in metrics_data.value:
124 | values = {}
125 | for timeserie in metric.timeseries:
126 | for data in timeserie.data:
127 | if data.time_stamp and data.total:
128 | values[str(data.time_stamp)] = str(data.total)
129 | results[metric.name.value] = values
130 | return results
131 |
--------------------------------------------------------------------------------
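
A sketch of pulling one day of 'Percentage CPU' values for a VM through the AZMonitoring class above; the resource group, resource ID and dates are placeholders.

from datetime import datetime

from libcloudforensics.providers.azure.internal import account

az = account.AZAccount('forensics-rg')  # Placeholder resource group name.
resource_id = (  # Placeholder resource ID.
    '/subscriptions/<sub>/resourceGroups/<rg>/providers/'
    'Microsoft.Compute/virtualMachines/my-vm')
print(az.monitoring.ListAvailableMetricsForResource(resource_id))
metrics = az.monitoring.GetMetricsForResource(
    resource_id,
    metrics='Percentage CPU',
    from_date=datetime(2021, 1, 1),
    to_date=datetime(2021, 1, 2),
    interval='PT1H')
for name, values in metrics.items():
  print(name, values)
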
/libcloudforensics/providers/azure/internal/resource.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Azure Resource functionality."""
16 |
17 | from typing import TYPE_CHECKING, List
18 |
19 | # pylint: disable=import-error
20 | from azure.mgmt import resource # type: ignore
21 | from msrestazure import azure_exceptions
22 | # pylint: enable=import-error
23 |
24 | from libcloudforensics import logging_utils
25 |
26 | if TYPE_CHECKING:
27 | # TYPE_CHECKING is always False at runtime, therefore it is safe to ignore
28 |   # the following cyclic import, as it is only used for type hints
29 | from libcloudforensics.providers.azure.internal import account # pylint: disable=cyclic-import
30 |
31 |
32 | logging_utils.SetUpLogger(__name__)
33 | logger = logging_utils.GetLogger(__name__)
34 |
35 |
36 | class AZResource:
37 | """Azure resource functionality.
38 |
39 | Attributes:
40 | az_account (AZAccount): An Azure account object.
41 | resource_client (ResourceManagementClient): An Azure resource client object.
42 | subscription_client (SubscriptionClient): An Azure subscription client
43 | object.
44 | """
45 |
46 | def __init__(self,
47 | az_account: 'account.AZAccount') -> None:
48 | """Initialize the Azure resource class.
49 |
50 | Args:
51 | az_account (AZAccount): An Azure account object.
52 | """
53 | self.az_account = az_account
54 | self.resource_client = resource.ResourceManagementClient(
55 | self.az_account.credentials, self.az_account.subscription_id)
56 | self.subscription_client = resource.SubscriptionClient(
57 | self.az_account.credentials)
58 |
59 | def GetOrCreateResourceGroup(self, resource_group_name: str) -> str:
60 | """Check if a resource group exists, and create it otherwise.
61 |
62 | Args:
63 |       resource_group_name (str): The name of the resource group to check
64 | existence for. If it does not exist, create it.
65 |
66 | Returns:
67 | str: The resource group name.
68 | """
69 | try:
70 | self.resource_client.resource_groups.get(resource_group_name)
71 | except azure_exceptions.CloudError:
72 | # Group doesn't exist, creating it
73 | logger.info('Resource group {0:s} not found, creating it.'.format(
74 | resource_group_name))
75 | creation_data = {
76 | 'location': self.az_account.default_region
77 | }
78 | self.resource_client.resource_groups.create_or_update(
79 | resource_group_name, creation_data)
80 | logger.info('Resource group {0:s} successfully created.'.format(
81 | resource_group_name))
82 | return resource_group_name
83 |
84 | def ListSubscriptionIDs(self) -> List[str]:
85 |     """List subscription IDs from an Azure account.
86 |
87 | Returns:
88 | List[str]: A list of all subscription IDs from the Azure account.
89 | """
90 | subscription_ids = self.subscription_client.subscriptions.list()
91 | return [sub.subscription_id for sub in subscription_ids]
92 |
--------------------------------------------------------------------------------
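
A brief sketch of the AZResource helper above; GetOrCreateResourceGroup is idempotent, so the placeholder group below is only created if it does not already exist.

from libcloudforensics.providers.azure.internal import account

az = account.AZAccount('forensics-rg')  # Placeholder resource group name.
group = az.resource.GetOrCreateResourceGroup('evidence-rg')  # Placeholder name.
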
/libcloudforensics/providers/azure/internal/storage.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Azure Storage functionality."""
16 |
17 | from typing import TYPE_CHECKING, Optional, Tuple
18 |
19 | # pylint: disable=import-error
20 | from azure.mgmt import storage
21 | from msrestazure import azure_exceptions
22 | # pylint: enable=import-error
23 |
24 | from libcloudforensics import logging_utils
25 | from libcloudforensics import errors
26 | from libcloudforensics.providers.azure.internal import common
27 |
28 | if TYPE_CHECKING:
29 | # TYPE_CHECKING is always False at runtime, therefore it is safe to ignore
30 |   # the following cyclic import, as it is only used for type hints
31 | from libcloudforensics.providers.azure.internal import account # pylint: disable=cyclic-import
32 |
33 |
34 | logging_utils.SetUpLogger(__name__)
35 | logger = logging_utils.GetLogger(__name__)
36 |
37 |
38 | class AZStorage:
39 | """Azure Storage functionality.
40 |
41 | Attributes:
42 | az_account (AZAccount): An Azure account object.
43 | storage_client (StorageManagementClient): An Azure storage client object.
44 | """
45 |
46 | def __init__(self,
47 | az_account: 'account.AZAccount') -> None:
48 | """Initialize the Azure storage class.
49 |
50 | Args:
51 | az_account (AZAccount): An Azure account object.
52 | """
53 | self.az_account = az_account
54 | self.storage_client = storage.StorageManagementClient(
55 | self.az_account.credentials, self.az_account.subscription_id)
56 |
57 | def CreateStorageAccount(self,
58 | storage_account_name: str,
59 | region: Optional[str] = None) -> Tuple[str, str]:
60 |     """Create a storage account and return its ID and access key.
61 |
62 | Args:
63 | storage_account_name (str): The name for the storage account.
64 | region (str): Optional. The region in which to create the storage
65 | account. If not provided, it will be created in the default_region
66 | associated to the AZAccount object.
67 |
68 | Returns:
69 | Tuple[str, str]: The storage account ID and its access key.
70 |
71 | Raises:
72 | InvalidNameError: If the storage account name is invalid.
73 | """
74 |
75 | if not common.REGEX_ACCOUNT_STORAGE_NAME.match(storage_account_name):
76 | raise errors.InvalidNameError(
77 | 'Storage account name {0:s} does not comply with {1:s}'.format(
78 | storage_account_name, common.REGEX_ACCOUNT_STORAGE_NAME.pattern),
79 | __name__)
80 |
81 | if not region:
82 | region = self.az_account.default_region
83 |
84 | # https://docs.microsoft.com/en-us/rest/api/storagerp/srp_sku_types
85 | creation_data = {
86 | 'location': region,
87 | 'sku': {
88 | 'name': 'Standard_RAGRS'
89 | },
90 | 'kind': 'Storage'
91 | }
92 |
93 | # pylint: disable=line-too-long
94 | # https://docs.microsoft.com/en-us/samples/azure-samples/storage-python-manage/storage-python-manage/
95 | # https://docs.microsoft.com/en-us/azure/storage/blobs/storage-quickstart-blobs-python
96 | # pylint: enable=line-too-long
97 | logger.info('Creating storage account: {0:s}'.format(storage_account_name))
98 | request = self.storage_client.storage_accounts.begin_create(
99 | self.az_account.default_resource_group_name,
100 | storage_account_name,
101 | creation_data
102 | )
103 |     storage_account = request.result()
104 |     logger.info('Storage account {0:s} successfully created'.format(
105 |         storage_account_name))
106 | storage_account_keys = self.storage_client.storage_accounts.list_keys(
107 | self.az_account.default_resource_group_name, storage_account_name)
108 | storage_account_keys = {key.key_name: key.value
109 | for key in storage_account_keys.keys}
110 | storage_account_id = storage_account.id # type: str
111 | storage_account_key = storage_account_keys['key1'] # type: str
112 | return storage_account_id, storage_account_key
113 |
114 | def DeleteStorageAccount(self, storage_account_name: str) -> None:
115 |     """Delete a storage account.
116 |
117 | Raises:
118 |       ResourceDeletionError: If the storage account could not be deleted.
119 | """
120 | try:
121 | logger.info('Deleting storage account: {0:s}'.format(
122 | storage_account_name))
123 | self.storage_client.storage_accounts.delete(
124 | self.az_account.default_resource_group_name, storage_account_name)
125 | logger.info('Storage account {0:s} successfully deleted'.format(
126 | storage_account_name))
127 | except azure_exceptions.CloudError as exception:
128 | raise errors.ResourceDeletionError(
129 |           'Could not delete storage account {0:s}: {1:s}'.format(
130 | storage_account_name, str(exception)), __name__) from exception
131 |
--------------------------------------------------------------------------------
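
A sketch of creating and tearing down a storage account with the AZStorage class above; the account name is a placeholder and must satisfy REGEX_ACCOUNT_STORAGE_NAME (Azure storage account names are 3-24 lowercase letters and digits, and globally unique).

from libcloudforensics.providers.azure.internal import account

az = account.AZAccount('forensics-rg')  # Placeholder resource group name.
account_id, access_key = az.storage.CreateStorageAccount(
    'forensicsevidence01', region='eastus')  # Placeholder account name.
# ... copy evidence into the account, then clean up:
az.storage.DeleteStorageAccount('forensicsevidence01')
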
/libcloudforensics/providers/gcp/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/libcloudforensics/providers/gcp/internal/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/libcloudforensics/providers/gcp/internal/bigquery.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Google BigQuery functionalities."""
16 |
17 | from typing import TYPE_CHECKING, List, Dict, Any, Optional
18 | from libcloudforensics.providers.gcp.internal import common
19 |
20 | if TYPE_CHECKING:
21 | import googleapiclient.discovery
22 |
23 | _BIGQUERY_API_VERSION = 'v2'
24 |
25 | class GoogleBigQuery:
26 | """Class to call Google BigQuery APIs.
27 |
28 | Attributes:
29 | project_id: Google Cloud project ID.
30 | """
31 |
32 | def __init__(self, project_id: Optional[str] = None) -> None:
33 | """Initialize the GoogleBigQuery object.
34 |
35 | Args:
36 | project_id: Optional. Google Cloud project ID.
37 | """
38 |
39 | self.project_id = project_id
40 |
41 | def GoogleBigQueryApi(self) -> 'googleapiclient.discovery.Resource':
42 | """Get a Google BigQuery service object.
43 |
44 | Returns:
45 | A Google BigQuery service object.
46 | """
47 |
48 | return common.CreateService('bigquery', _BIGQUERY_API_VERSION)
49 |
50 | def ListBigQueryJobs(self) -> List[Dict[str, Any]]:
51 | """List jobs of Google BigQuery within a project.
52 |
53 | Returns:
54 | List of jobs.
55 | """
56 | bq_jobs = self.GoogleBigQueryApi().jobs() # pylint: disable=no-member
57 | request = bq_jobs.list(projectId=self.project_id, projection='full')
58 | jobs: List[Dict[str, Any]] = request.execute().get('jobs', [])
59 | return jobs
60 |
--------------------------------------------------------------------------------
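
A minimal sketch of the GoogleBigQuery wrapper above; the project ID is a placeholder and application-default credentials are assumed.

from libcloudforensics.providers.gcp.internal import bigquery

bq = bigquery.GoogleBigQuery(project_id='my-forensics-project')  # Placeholder.
for job in bq.ListBigQueryJobs():
  print(job.get('id'))
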
/libcloudforensics/providers/gcp/internal/build.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Google Cloud Build functionalities."""
16 |
17 | import logging
18 | import time
19 | from typing import Dict, Any
20 | import googleapiclient
21 |
22 |
23 | from libcloudforensics.providers.gcp.internal import common
24 |
25 |
26 | class GoogleCloudBuild:
27 | """Class to call Google Cloud Build APIs.
28 |
29 |   The content of the dictionary objects is described at
30 | https://cloud.google.com/cloud-build/docs/api/reference/rest/v1/projects.builds
31 | """
32 | CLOUD_BUILD_API_VERSION = 'v1'
33 |
34 | def __init__(self, project_id: str) -> None:
35 | """Initialize the GoogleCloudBuild object.
36 |
37 | Args:
38 | project_id (str): Google Cloud project ID.
39 | """
40 |
41 | self.project_id = project_id
42 |
43 | def GcbApi(self) -> 'googleapiclient.discovery.Resource':
44 | """Get a Google Cloud Build service object.
45 |
46 | Returns:
47 | googleapiclient.discovery.Resource: A Google Cloud Build service object.
48 | """
49 |
50 | return common.CreateService(
51 | 'cloudbuild', self.CLOUD_BUILD_API_VERSION)
52 |
53 | def CreateBuild(self, build_body: Dict[str, Any]) -> Dict[str, Any]:
54 | """Create a cloud build.
55 |
56 | Args:
57 | build_body (Dict): A dictionary that describes how to find the source
58 | code and how to build it.
59 |
60 | Returns:
61 |       Dict: Represents a long-running operation that is the result of a
62 |           network API call.
63 | """
64 | cloud_build_client = self.GcbApi().projects().builds() # pylint: disable=no-member
65 | build_info = cloud_build_client.create(
66 | projectId=self.project_id,
67 | body=build_body).execute() # type: Dict[str, Any]
68 | build_metadata = build_info['metadata']['build']
69 | common.logger.info(
70 | 'Build started, logs bucket: {0:s}, logs URL: {1:s}'.format(
71 | build_metadata['logsBucket'], build_metadata['logUrl']))
72 | return build_info
73 |
74 | def _RetryExecuteRequest(self, operation_name: str) -> Dict[str, Any]:
75 | """Execute GCB operation.get request and retry if error.
76 |
77 | Args:
78 |       operation_name (str): The name of the operation resource.
79 |
80 | Returns:
81 |       Dict: Represents a long-running operation that is the result of a
82 |           network API call.
83 |
84 | Raises:
85 | RuntimeError: If getting the Cloud Build API operation object failed.
86 | """
87 | block_retry_max = 10
88 | service = self.GcbApi()
89 | request = service.operations().get(name=operation_name) # pylint: disable=no-member
90 | get_success = False
91 | for block_retry in range(block_retry_max):
92 | try:
93 | response = request.execute() # type: Dict[str, Any]
94 | get_success = True
95 | except googleapiclient.errors.HttpError as error:
96 | logging.info(
97 | 'build.BlockOperation: Get request to cloudbuild.googleapis.com '
98 |             'failed.\nTry {0:d} of {1:d}. Error: {2!s}'.format(
99 |                 block_retry + 1, block_retry_max, error))
100 | if get_success:
101 | break
102 | if block_retry == block_retry_max - 1:
103 | raise RuntimeError(
104 | 'Failure blocking Cloud Build operation: {0:s}'.format(
105 | operation_name))
106 | return response
107 |
108 | def BlockOperation(self, response: Dict[str, Any]) -> Dict[str, Any]:
109 | """Block execution until API operation is finished.
110 |
111 | Args:
112 | response (Dict): Google Cloud Build API response.
113 |
114 | Returns:
115 | Dict: Holding the response of a get operation on an API object of type
116 | operations.
117 |
118 | Raises:
119 | RuntimeError: If the Cloud Build failed or if getting the Cloud Build
120 | API operation object failed.
121 | """
122 | while True:
123 | response = self._RetryExecuteRequest(response['name'])
124 | if response.get('done') and response.get('error'):
125 | build_metadata = response['metadata']['build']
126 | raise RuntimeError(
127 |           'Build failed: {0:s}, logs bucket: {1:s}, logs URL: {2:s}'.format(
128 | response['error']['message'],
129 | build_metadata['logsBucket'],
130 | build_metadata['logUrl']))
131 |
132 | if response.get('done') and response.get('response'):
133 | return response
134 | time.sleep(5) # Seconds between requests
135 |
--------------------------------------------------------------------------------
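
A sketch of submitting a one-step build and blocking on it with the GoogleCloudBuild class above; the project ID and the builder step are placeholders (see the REST reference linked in the class docstring for the full build_body schema).

from libcloudforensics.providers.gcp.internal import build

gcb = build.GoogleCloudBuild('my-forensics-project')  # Placeholder project ID.
build_body = {
    'steps': [{
        'name': 'gcr.io/cloud-builders/gcloud',  # Placeholder builder image.
        'args': ['compute', 'images', 'list']
    }]
}
operation = gcb.CreateBuild(build_body)
result = gcb.BlockOperation(operation)  # Polls every 5 seconds until done.
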
/libcloudforensics/providers/gcp/internal/cloudsql.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Google Cloud SQL functionalities."""
16 |
17 | from typing import TYPE_CHECKING, List, Dict, Any, Optional
18 | from libcloudforensics.providers.gcp.internal import common
19 |
20 | if TYPE_CHECKING:
21 | import googleapiclient
22 |
23 |
24 | class GoogleCloudSQL:
25 | """Class to call Google CloudSQL APIs.
26 |
27 | Attributes:
28 | project_id: Google Cloud project ID.
29 | """
30 | SQLADMIN_API_VERSION = 'v1beta4'
31 |
32 | def __init__(self, project_id: Optional[str] = None) -> None:
33 | """Initialize the GoogleCloudSQL object.
34 |
35 | Args:
36 | project_id (str): Optional. Google Cloud project ID.
37 | """
38 |
39 | self.project_id = project_id
40 |
41 | def GoogleCloudSQLApi(self) -> 'googleapiclient.discovery.Resource':
42 | """Get a Google CloudSQL service object.
43 |
44 | Returns:
45 | googleapiclient.discovery.Resource: A Google CloudSQL service object.
46 | """
47 |
48 | return common.CreateService(
49 | 'sqladmin', self.SQLADMIN_API_VERSION)
50 |
51 | def ListCloudSQLInstances(self) -> List[Dict[str, Any]]:
52 | """List instances of Google CloudSQL within a project.
53 |
54 | Returns:
55 | List[Dict[str, Any]]: List of instances.
56 | """
57 | gcsql_instances = self.GoogleCloudSQLApi().instances() # pylint: disable=no-member
58 | request = gcsql_instances.list(project=self.project_id)
59 | instances: List[Dict[str, Any]] = request.execute().get('items', [])
60 | return instances
61 |
--------------------------------------------------------------------------------
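
A minimal sketch of listing Cloud SQL instances with the GoogleCloudSQL class above; the project ID is a placeholder.

from libcloudforensics.providers.gcp.internal import cloudsql

sql = cloudsql.GoogleCloudSQL('my-forensics-project')  # Placeholder project ID.
for instance in sql.ListCloudSQLInstances():
  print(instance.get('name'), instance.get('databaseVersion'))
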
/libcloudforensics/providers/gcp/internal/function.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Google Cloud Functions functionalities."""
16 |
17 | import json
18 | import ssl
19 | from typing import TYPE_CHECKING, Dict, Any
20 | from googleapiclient.errors import HttpError
21 | from libcloudforensics.providers.gcp.internal import common
22 | from libcloudforensics import logging_utils
23 |
24 | logging_utils.SetUpLogger(__name__)
25 | logger = logging_utils.GetLogger(__name__)
26 |
27 | if TYPE_CHECKING:
28 | import googleapiclient
29 |
30 |
31 | class GoogleCloudFunction:
32 | """Class to call Google Cloud Functions.
33 |
34 | Attributes:
35 | project_id: Google Cloud project ID.
36 | """
37 |
38 | CLOUD_FUNCTIONS_API_VERSION = 'v1'
39 |
40 | def __init__(self, project_id: str) -> None:
41 | """Initialize the GoogleCloudFunction object.
42 |
43 | Args:
44 | project_id (str): The name of the project.
45 | """
46 |
47 | self.project_id = project_id
48 |
49 | def GcfApi(self) -> 'googleapiclient.discovery.Resource':
50 | """Get a Google Cloud Function service object.
51 |
52 | Returns:
53 | googleapiclient.discovery.Resource: A Google Cloud Function service
54 | object.
55 | """
56 |
57 | return common.CreateService(
58 | 'cloudfunctions', self.CLOUD_FUNCTIONS_API_VERSION)
59 |
60 | def ExecuteFunction(self,
61 | function_name: str,
62 | region: str,
63 | args: Dict[str, Any]) -> Dict[str, Any]:
64 | """Executes a Google Cloud Function.
65 |
66 | Args:
67 | function_name (str): The name of the function to call.
68 | region (str): Region to execute functions in.
69 | args (Dict): Arguments to pass to the function. Dictionary content
70 | details can be found in
71 | https://cloud.google.com/functions/docs/reference/rest/v1/projects.locations.functions # pylint: disable=line-too-long
72 |
73 | Returns:
74 |       Dict[str, Any]: Return value from function call.
75 |
76 | Raises:
77 | RuntimeError: When cloud function arguments cannot be serialized or
78 | when an HttpError is encountered.
79 | """
80 |
81 | service = self.GcfApi()
82 | cloud_function = service.projects().locations().functions() # pylint: disable=no-member
83 |
84 | try:
85 | json_args = json.dumps(args)
86 | except TypeError as exception:
87 | error_msg = (
88 | 'Cloud function args [{0:s}] could not be serialized:'
89 | ' {1!s}').format(str(args), exception)
90 | raise RuntimeError(error_msg) from exception
91 |
92 | function_path = 'projects/{0:s}/locations/{1:s}/functions/{2:s}'.format(
93 | self.project_id, region, function_name)
94 |
95 | logger.debug(
96 | 'Calling Cloud Function [{0:s}] with args [{1!s}]'.format(
97 | function_name, args))
98 | try:
99 | function_return = cloud_function.call(
100 | name=function_path, body={
101 | 'data': json_args
102 | }).execute() # type: Dict[str, Any]
103 | except (HttpError, ssl.SSLError) as exception:
104 | error_msg = 'Cloud function [{0:s}] call failed: {1!s}'.format(
105 | function_name, exception)
106 | raise RuntimeError(error_msg) from exception
107 |
108 | return function_return
109 |
--------------------------------------------------------------------------------
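
A sketch of invoking a Cloud Function with the GoogleCloudFunction class above; the function name, region and payload are placeholders, and the callee is assumed to accept the JSON payload passed under the 'data' key.

from libcloudforensics.providers.gcp.internal import function

gcf = function.GoogleCloudFunction('my-forensics-project')  # Placeholder.
response = gcf.ExecuteFunction(
    'collect-evidence',          # Placeholder function name.
    'us-central1',               # Placeholder region.
    {'instance': 'instance-1'})  # Placeholder arguments.
print(response)
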
/libcloudforensics/providers/gcp/internal/log.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Google Cloud Logging functionalities."""
16 | from typing import Optional
17 | from typing import TYPE_CHECKING, List, Dict, Any
18 |
19 | from libcloudforensics.providers.gcp.internal import common
20 |
21 | if TYPE_CHECKING:
22 | import googleapiclient
23 |
24 |
25 | class GoogleCloudLog:
26 | """Class representing a Google Cloud Logs interface.
27 |
28 | Attributes:
29 | project_ids: List of Google Cloud project IDs.
30 |
31 | Example use:
32 | # pylint: disable=line-too-long
33 |     gcp = GoogleCloudLog(project_ids=['your_project_name'])
34 |     gcp.ListLogs()
35 |     gcp.ExecuteQuery(qfilter=['resource.type="gce_instance" labels."compute.googleapis.com/resource_name"="instance-1"'])
36 | See https://cloud.google.com/logging/docs/view/advanced-queries for filter details.
37 | """
38 |
39 | LOGGING_API_VERSION = 'v2'
40 |
41 | def __init__(self, project_ids: List[str]) -> None:
42 |     """Initialize the GoogleCloudLog object.
43 |
44 | Args:
45 | project_ids (List[str]): List of project IDs.
46 | """
47 | self.project_ids = project_ids
48 |
49 | def GclApi(self) -> 'googleapiclient.discovery.Resource':
50 |     """Get a Google Cloud Logging service object.
51 |
52 | Returns:
53 |       googleapiclient.discovery.Resource: A Google Cloud Logging service
54 | object.
55 | """
56 |
57 | return common.CreateService(
58 | 'logging', self.LOGGING_API_VERSION)
59 |
60 | def ListLogs(self) -> List[str]:
61 | """List logs in project.
62 |
63 | Returns:
64 | List[str]: The project logs available.
65 |
66 | Raises:
67 | RuntimeError: If API call failed.
68 | """
69 |
70 | logs = []
71 | gcl_instance_client = self.GclApi().logs() # pylint: disable=no-member
72 | for project_id in self.project_ids:
73 | responses = common.ExecuteRequest(
74 | gcl_instance_client,
75 | 'list',
76 | {'parent': 'projects/' + project_id})
77 | for response in responses:
78 | for logtypes in response.get('logNames', []):
79 | logs.append(logtypes)
80 |
81 | return logs
82 |
83 | def ExecuteQuery(
84 | self, qfilter: Optional[List[str]] = None) -> List[Dict[str, Any]]:
85 | """Query logs in GCP project.
86 |
87 | Args:
88 | qfilter (List[str]): Optional. A list of query filters to use.
89 |
90 | Returns:
91 | List[Dict]: Log entries returned by the query, e.g. [{'projectIds':
92 | [...], 'resourceNames': [...]}, {...}]
93 |
94 | Raises:
95 | RuntimeError: If API call failed.
96 | ValueError: If the number of project IDs being queried doesn't match
97 | the number of provided filters.
98 | """
99 |
100 | entries = []
101 | gcl_instance_client = self.GclApi().entries() # pylint: disable=no-member
102 |
103 | if qfilter and len(self.project_ids) != len(qfilter):
104 | raise ValueError(
105 | 'Several project IDs detected ({0:d}) but only {1:d} query filters '
106 | 'provided.'.format(len(self.project_ids), len(qfilter)))
107 |
108 | for idx, project_id in enumerate(self.project_ids):
109 | body = {
110 | 'resourceNames': 'projects/' + project_id,
111 | 'filter': qfilter[idx] if qfilter else '',
112 | 'orderBy': 'timestamp desc',
113 | }
114 | responses = common.ExecuteRequest(
115 | gcl_instance_client, 'list', {'body': body}, throttle=True)
116 | for response in responses:
117 | for entry in response.get('entries', []):
118 | entries.append(entry)
119 | return entries
120 |
--------------------------------------------------------------------------------
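
A short sketch of the GoogleCloudLog class above; the project ID and query filter are placeholders, and when filters are supplied there must be one per project ID.

from libcloudforensics.providers.gcp.internal import log

gcl = log.GoogleCloudLog(project_ids=['my-forensics-project'])  # Placeholder.
print(gcl.ListLogs())
entries = gcl.ExecuteQuery(
    qfilter=['resource.type="gce_instance" timestamp>="2021-01-01T00:00:00Z"'])
print(len(entries))
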
/libcloudforensics/providers/gcp/internal/serviceusage.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Google Service Usage functionality."""
16 |
17 | import time
18 | from typing import TYPE_CHECKING, Dict, List, Any
19 | from libcloudforensics.providers.gcp.internal import common
20 |
21 | if TYPE_CHECKING:
22 | import googleapiclient
23 |
24 |
25 | class GoogleServiceUsage:
26 | """Class to call the Google Cloud Service Usage API.
27 |
28 | Attributes:
29 | project_id: Google Cloud project ID.
30 | """
31 |
32 | SERVICE_USAGE_API_VERSION = 'v1'
33 | NOOP_API_RESPONSE = 'operations/noop.DONE_OPERATION'
34 |
35 | def __init__(self, project_id: str) -> None:
36 | """Initialize the GoogleServiceUsage object.
37 |
38 | Args:
39 | project_id (str): Google Cloud project ID.
40 | """
41 |
42 | self.project_id = project_id
43 |
44 | def GsuApi(self) -> 'googleapiclient.discovery.Resource':
45 | """Get a Service Usage service object.
46 |
47 | Returns:
48 | googleapiclient.discovery.Resource: A Service Usage service object.
49 | """
50 |
51 | return common.CreateService(
52 | 'serviceusage', self.SERVICE_USAGE_API_VERSION)
53 |
54 | def GetEnabled(self) -> List[Any]:
55 | """Get enabled services/APIs for a project.
56 |
57 | Returns:
58 | List[Any]: A list of enabled services/APIs.
59 | """
60 |
61 | services_client = self.GsuApi().services() # pylint: disable=no-member
62 | parent = 'projects/' + self.project_id
63 | request = {'parent': parent, 'filter': 'state:ENABLED'}
64 | responses = common.ExecuteRequest(services_client, 'list', request)
65 |
66 | services = []
67 | for response in responses:
68 | for service in response.get('services', []):
69 | services.append(service['config']['name'])
70 |
71 | return services
72 |
73 | def _BlockOperation(self, response: Dict[str, Any]) -> Dict[str, Any]:
74 | """Block until API operation is finished.
75 |
76 | Args:
77 | response (Dict): Service Usage API response.
78 |
79 | Returns:
80 | Dict: Holding the response of a get operation on an API object of type
81 | Operation.
82 | """
83 |
84 | operations_api = self.GsuApi().operations() # pylint: disable=no-member
85 |
86 | if response['name'] == self.NOOP_API_RESPONSE:
87 | return response
88 |
89 | while True:
90 | request = {'name': response['name']}
91 | result = common.ExecuteRequest(operations_api, 'get', request)[0]
92 | if 'done' in result:
93 | return result
94 | time.sleep(5) # Seconds between requests
95 |
96 | def EnableService(self, service_name: str) -> None:
97 | """Enable a service/API for a project.
98 |
99 | Args:
100 | service_name (str): The service to enable.
101 | """
102 |
103 | services_client = self.GsuApi().services() # pylint: disable=no-member
104 | name = 'projects/' + self.project_id + '/services/' + service_name
105 | request = {'name': name}
106 | response = common.ExecuteRequest(services_client, 'enable', request)[0]
107 | self._BlockOperation(response)
108 |
109 | def DisableService(self, service_name: str) -> None:
110 | """Disable a service/API for a project.
111 |
112 | Args:
113 | service_name (str): The service to disable.
114 | """
115 |
116 | services_client = self.GsuApi().services() # pylint: disable=no-member
117 | name = 'projects/' + self.project_id + '/services/' + service_name
118 | request = {'name': name}
119 | response = common.ExecuteRequest(services_client, 'disable', request)[0]
120 | self._BlockOperation(response)
121 |
--------------------------------------------------------------------------------
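
A minimal sketch of toggling an API with the GoogleServiceUsage class above; the project ID and service name are placeholders, and both calls block until the underlying operation completes.

from libcloudforensics.providers.gcp.internal import serviceusage

gsu = serviceusage.GoogleServiceUsage('my-forensics-project')  # Placeholder.
print(gsu.GetEnabled())
gsu.EnableService('cloudbuild.googleapis.com')
gsu.DisableService('cloudbuild.googleapis.com')
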
/libcloudforensics/providers/kubernetes/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/libcloudforensics/providers/kubernetes/container.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Kubernetes container class."""
16 | from typing import List
17 |
18 | from kubernetes import client
19 |
20 |
21 | class K8sContainer:
22 |   """Class wrapping a Kubernetes container response."""
23 |
24 | def __init__(self, response: client.V1Container):
25 | """Builds a K8sContainer object.
26 |
27 | Args:
28 | response (client.V1Container): The Kubernetes Container response object
29 | to be wrapped.
30 | """
31 | self._response = response
32 |
33 | def IsPrivileged(self) -> bool:
34 | """Returns True if this container is privileged, False otherwise.
35 |
36 | Returns:
37 | bool: True if this container is privileged, False otherwise.
38 | """
39 | security_context = self._response.security_context
40 | # Conversion to bool for mypy
41 | return bool(security_context and security_context.privileged)
42 |
43 | def Name(self) -> str:
44 | """Returns the name of this container.
45 |
46 | Returns:
47 |       str: The name of this container.
48 | """
49 | name = self._response.name # type: str
50 | return name
51 |
52 | def Image(self) -> str:
53 | """Returns the image of this container.
54 |
55 | Returns:
56 | str: The image of this container.
57 | """
58 | image = self._response.image # type: str
59 | return image
60 |
61 | def ContainerPorts(self) -> List[int]:
62 | """Returns the ports listed for this container.
63 |
64 | Returns:
65 | List[int]: The ports listed for this container.
66 | """
67 | ports = [
68 | port.container_port for port in (self._response.ports or [])
69 | ] # type: List[int]
70 | return ports
71 |
72 | def VolumeMounts(self) -> List[str]:
73 | """Returns the volumes mounted in this container.
74 |
75 | Returns:
76 | List[str]: The volumes mounted in this container.
77 | """
78 | volumes = [
79 | volume.name for volume in (self._response.volume_mounts or [])
80 | ] # type: List[str]
81 | return volumes
82 |
--------------------------------------------------------------------------------
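
A sketch of wrapping live container specs with the K8sContainer class above; the pod and namespace names are placeholders, and a local kubeconfig with cluster access is assumed.

from kubernetes import client, config

from libcloudforensics.providers.kubernetes import container

config.load_kube_config()  # Assumes a reachable cluster in the kubeconfig.
pod = client.CoreV1Api().read_namespaced_pod('my-pod', 'default')  # Placeholders.
for spec in pod.spec.containers:
  wrapped = container.K8sContainer(spec)
  print(wrapped.Name(), wrapped.Image(), wrapped.IsPrivileged())
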
/libcloudforensics/providers/kubernetes/enumerations/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/libcloudforensics/providers/kubernetes/enumerations/gcp.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """GCP Enumeration classes."""
16 | from typing import Any, Dict, Iterable, Optional
17 |
18 | from libcloudforensics.providers.gcp.internal import gke
19 | from libcloudforensics.providers.kubernetes.enumerations import base
20 |
21 |
22 | class GkeClusterEnumeration(base.Enumeration[gke.GkeCluster]):
23 | """Enumeration class for a GKE cluster."""
24 |
25 | @property
26 | def keyword(self) -> str:
27 | """Override of abstract property."""
28 | return 'GkeCluster'
29 |
30 | def _Children(
31 | self, namespace: Optional[str] = None) -> Iterable[base.Enumeration[Any]]:
32 | """Method override."""
33 | yield base.ClusterEnumeration(self._object)
34 |
35 | def _Populate(self, info: Dict[str, Any], warnings: Dict[str, Any]) -> None:
36 | """Method override."""
37 | info['Name'] = self._object.cluster_id
38 | info['NetworkPolicy'] = (
39 | 'Enabled' if self._object.IsNetworkPolicyEnabled() else 'Disabled')
40 | if self._object.IsWorkloadIdentityEnabled():
41 | info['WorkloadIdentity'] = 'Enabled'
42 | else:
43 | warnings['WorkloadIdentity'] = 'Disabled'
44 | if self._object.IsLegacyEndpointsDisabled():
45 | info['LegacyEndpoints'] = 'Disabled'
46 | else:
47 | warnings['LegacyEndpoints'] = 'Enabled'
48 |
--------------------------------------------------------------------------------
/libcloudforensics/providers/kubernetes/mitigation.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Mitigation functions to be used in end-to-end functionality."""
16 | from typing import List, Optional
17 |
18 | from libcloudforensics import errors
19 | from libcloudforensics import logging_utils
20 | from libcloudforensics import prompts
21 | from libcloudforensics.providers.kubernetes import base
22 | from libcloudforensics.providers.kubernetes import cluster as k8s
23 | from libcloudforensics.providers.kubernetes import netpol
24 |
25 | logging_utils.SetUpLogger(__name__)
26 | logger = logging_utils.GetLogger(__name__)
27 |
28 |
29 | def DrainWorkloadNodesFromOtherPods(
30 | workload: base.K8sWorkload, cordon: bool = True) -> None:
31 | """Drains a workload's nodes from non-workload pods.
32 |
33 | Args:
34 | workload (base.K8sWorkload): The workload for which nodes
35 | must be drained from pods that are not covered by the workload.
36 | cordon (bool): Optional. Whether or not to cordon the nodes before draining,
37 | to prevent pods from reappearing on the nodes, as they will be marked
38 | as unschedulable. Defaults to True.
39 | """
40 | nodes = workload.GetCoveredNodes()
41 | if cordon:
42 | for node in nodes:
43 | node.Cordon()
44 | for node in nodes:
45 | node.Drain(lambda pod: not workload.IsCoveringPod(pod))
46 |
47 |
48 | def IsolatePodsWithNetworkPolicy(
49 | cluster: k8s.K8sCluster,
50 | pods: List[base.K8sPod],
51 | existing_policies_prompt: bool = False
52 | ) -> Optional[netpol.K8sTargetedDenyAllNetworkPolicy]:
53 | """Isolates pods via a deny-all NetworkPolicy.
54 |
55 | Args:
56 | cluster (k8s.K8sCluster): The cluster in which to create the deny-all
57 | policy.
58 | pods (List[base.K8sPod]): The pods to patch with the labels of the created
59 | deny-all NetworkPolicy.
60 | existing_policies_prompt (bool): Optional. If True, the user will be
61 | prompted with options to patch, delete or leave the existing network
62 | policies. Defaults to False.
63 |
64 | Returns:
65 | netpol.K8sTargetedDenyAllNetworkPolicy: Optional. The deny-all network
66 | policy that was created to isolate the pods. If no pods were supplied,
67 | None is returned.
68 |
69 | Raises:
70 | ValueError: If the pods are not in the same namespace.
71 | errors.OperationFailedError: If NetworkPolicy is not enabled in the cluster.
72 | """
73 | if not pods:
74 | return None
75 |
76 | if not cluster.IsNetworkPolicyEnabled():
77 | raise errors.OperationFailedError(
78 | 'NetworkPolicy is not enabled for the cluster. Creating the deny-all '
79 | 'NetworkPolicy will have no effect.',
80 | __name__)
81 |
82 | namespace = pods[0].namespace
83 | if any(pod.namespace != namespace for pod in pods):
84 | raise ValueError('Supplied pods are not in the same namespace.')
85 |
86 | # Keep in mind that this does not create the network policy in the cluster,
87 | # it only creates the K8sNetworkPolicy object.
88 | deny_all_policy = cluster.TargetedDenyAllNetworkPolicy(namespace)
89 |
90 | # If other network policies exist, they need to be handled, otherwise the
91 | # deny-all NetworkPolicy may have no effect. There are two options to do
92 | # this: either patch the network policies or delete them.
93 | existing_policies = cluster.ListNetworkPolicies(namespace=namespace)
94 |
95 | def PatchExistingNetworkPolicies() -> None:
96 | for policy in existing_policies:
97 | policy.Patch(not_match_labels=deny_all_policy.labels)
98 |
99 | def DeleteExistingNetworkPolicies() -> None:
100 | for policy in existing_policies:
101 | policy.Delete()
102 |
103 | if existing_policies and existing_policies_prompt:
104 | logger.warning('There are existing NetworkPolicy objects.')
105 | prompt_sequence = prompts.PromptSequence(
106 | prompts.MultiPrompt(
107 | options=[
108 | prompts.PromptOption(
109 | 'Delete existing NetworkPolicy objects in same namespace',
110 | DeleteExistingNetworkPolicies),
111 | prompts.PromptOption(
112 | 'Patch existing NetworkPolicy objects in same namespace',
113 | PatchExistingNetworkPolicies),
114 | prompts.PromptOption('Leave existing NetworkPolicy objects')
115 | ]))
116 | prompt_sequence.Run()
117 |
118 | # Tag the supplied pods with the selector labels of the deny-all
119 | # NetworkPolicy.
120 | for pod in pods:
121 | pod.AddLabels(deny_all_policy.labels)
122 |
123 | deny_all_policy.Create()
124 |
125 | return deny_all_policy
126 |
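A minimal usage sketch for the two helpers above (not part of the file). It assumes `cluster` is a k8s.K8sCluster, `pods` a list of base.K8sPod (for example obtained from a service's GetCoveredPods(), see services.py), and `workload` a base.K8sWorkload, all created elsewhere; only the calls to the mitigation functions themselves come from this module.

from libcloudforensics.providers.kubernetes import mitigation

# Assumptions: `cluster`, `pods` and `workload` are pre-existing objects of the
# types documented above; they are not constructed in this sketch.
policy = mitigation.IsolatePodsWithNetworkPolicy(
    cluster, pods, existing_policies_prompt=True)
if policy:
  print('Pods isolated with labels: {0!s}'.format(policy.labels))

# Evict every pod the compromised workload does not cover from its nodes.
mitigation.DrainWorkloadNodesFromOtherPods(workload, cordon=True)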
--------------------------------------------------------------------------------
/libcloudforensics/providers/kubernetes/netpol.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Kubernetes classes for wrapping NetworkPolicy APIs."""
16 | import abc
17 | import random
18 | import string
19 | from typing import Dict, Optional
20 |
21 | from kubernetes import client
22 |
23 | from libcloudforensics.providers.kubernetes import base
24 |
25 |
26 | class K8sNetworkPolicy(base.K8sNamespacedResource):
27 | """Class representing a Kubernetes NetworkPolicy, enabling API calls."""
28 |
29 | def Delete(self, cascade: bool = True) -> None:
30 | """Override of abstract method. The cascade parameter is ignored."""
31 | api = self._Api(client.NetworkingV1Api)
32 | api.delete_namespaced_network_policy(self.name, self.namespace)
33 |
34 | def Read(self) -> client.V1NetworkPolicy:
35 | """Override of abstract method."""
36 | api = self._Api(client.NetworkingV1Api)
37 | return api.read_namespaced_network_policy(self.name, self.namespace)
38 |
39 | def Patch(
40 | self,
41 | match_labels: Optional[Dict[str, str]] = None,
42 | not_match_labels: Optional[Dict[str, str]] = None) -> None:
43 | """Patches a Kubernetes NetworkPolicy to (not) match specified labels.
44 |
45 | The patched NetworkPolicy will have new fields in the podSelector's
46 | matchLabels and matchExpressions, so that it now has to match the labels
47 | given in match_labels, and not match the labels in not_match_labels.
48 |
49 | e.g. calling this method on a policy with an empty podSelector with args:
50 |
51 | ```
52 | match_labels={'app': 'nginx'}
53 | not_match_labels={'quarantine': 'true'}
54 | ```
55 |
56 | will result in a NetworkPolicy with the following spec YAML:
57 |
58 | ```
59 | spec:
60 | podSelector:
61 | matchExpressions:
62 | - key: quarantine
63 | operator: NotIn
64 | values:
65 | - "true"
66 | matchLabels:
67 | app: nginx
68 | ```
69 |
70 | Args:
71 | match_labels: The matchLabels to be added to the NetworkPolicy spec.
72 | not_match_labels: The labels to be excluded from the NetworkPolicy. Each
73 | of these key-value pairs will result in a line in matchExpressions
74 | with the format {key: KEY, operator: NotIn, values: [VALUE]}.
75 | """
76 | api = self._Api(client.NetworkingV1Api)
77 |
78 | match_expressions = self.Read().spec.pod_selector.match_expressions or []
79 | if not_match_labels:
80 | match_expressions.extend(
81 | client.V1LabelSelectorRequirement(
82 | key=key, operator='NotIn', values=[value]) for key,
83 | value in not_match_labels.items())
84 |
85 | api.patch_namespaced_network_policy(
86 | self.name,
87 | self.namespace,
88 | {
89 | 'spec':
90 | client.V1NetworkPolicySpec(
91 | pod_selector=client.V1LabelSelector(
92 | match_labels=match_labels,
93 | match_expressions=match_expressions,
94 | ))
95 | })
96 |
97 |
98 | class K8sNetworkPolicyWithSpec(K8sNetworkPolicy, metaclass=abc.ABCMeta):
99 | """Class representing a Kubernetes NetworkPolicy with an underlying spec.
100 |
101 | This class additionally exposes creation API calls, as specification
102 | arguments can now be provided.
103 | """
104 |
105 | @property
106 | @abc.abstractmethod
107 | def _spec(self) -> client.V1NetworkPolicySpec:
108 | """The specification of this network policy to be used on creation."""
109 |
110 | @property
111 | def _metadata(self) -> client.V1ObjectMeta:
112 | """The metadata of this network policy to be used on creation."""
113 | return client.V1ObjectMeta(namespace=self.namespace, name=self.name)
114 |
115 | @property
116 | def _policy(self) -> client.V1NetworkPolicy:
117 | """The policy object of this network policy to be used on creation."""
118 | return client.V1NetworkPolicy(spec=self._spec, metadata=self._metadata)
119 |
120 | def Create(self) -> None:
121 | """Creates this network policy via the Kubernetes API."""
122 | api = self._Api(client.NetworkingV1Api)
123 | api.create_namespaced_network_policy(self.namespace, self._policy)
124 |
125 |
126 | class K8sTargetedDenyAllNetworkPolicy(K8sNetworkPolicyWithSpec):
127 | """Class representing a deny-all NetworkPolicy.
128 |
129 | https://kubernetes.io/docs/concepts/services-networking/network-policies/#default-deny-all-ingress-and-all-egress-traffic # pylint: disable=line-too-long
130 |
131 | Attributes:
132 | labels (Dict[str, str]): The matchLabels used by this NetworkPolicy.
133 | """
134 |
135 | def __init__(self, api_client: client.ApiClient, namespace: str) -> None:
136 | """Returns a deny-all Kubernetes NetworkPolicy.
137 |
138 | Args:
139 | api_client (ApiClient): The Kubernetes API client to the cluster.
140 | namespace (str): The namespace for this NetworkPolicy.
141 | """
142 | self._GenerateTag()
143 | name = 'cfu-netpol-{0:s}'.format(self._tag)
144 | super().__init__(api_client, name, namespace)
145 |
146 | def _GenerateTag(self) -> None:
147 | """Generates a random tag for this deny-all NetworkPolicy."""
148 | chars = random.choices(string.ascii_lowercase + string.digits, k=16)
149 | self._tag = ''.join(chars)
150 |
151 | @property
152 | def labels(self) -> Dict[str, str]:
153 | """The pod selector labels (matchLabels) of this policy."""
154 | return {'quarantineId': self._tag}
155 |
156 | @property
157 | def _spec(self) -> client.V1NetworkPolicySpec:
158 | """Override of abstract property."""
159 | return client.V1NetworkPolicySpec(
160 | pod_selector=client.V1LabelSelector(match_labels=self.labels),
161 | policy_types=[
162 | 'Ingress',
163 | 'Egress',
164 | ])
165 |
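A minimal sketch of how these classes could be combined (not part of the file). The ApiClient here is an assumption: in practice an authenticated client for the target cluster is obtained from the cluster wrapper classes, not from a bare default configuration. The 'allow-frontend' policy name and 'default' namespace are illustrative, and the (api_client, name, namespace) construction mirrors how K8sTargetedDenyAllNetworkPolicy calls its base class above.

from kubernetes import client

from libcloudforensics.providers.kubernetes import netpol

# Assumption: an authenticated ApiClient for the target cluster.
api_client = client.ApiClient()

# Create a deny-all policy whose podSelector matches a random quarantine tag.
deny_all = netpol.K8sTargetedDenyAllNetworkPolicy(api_client, 'default')
deny_all.Create()  # creates 'cfu-netpol-<random tag>' in the 'default' namespace

# Exclude the quarantine labels from an existing (hypothetical) policy so that
# a broader allow rule does not override the deny-all policy.
existing = netpol.K8sNetworkPolicy(api_client, 'allow-frontend', 'default')
existing.Patch(not_match_labels=deny_all.labels)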
--------------------------------------------------------------------------------
/libcloudforensics/providers/kubernetes/selector.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Kubernetes selector class structure."""
16 |
17 | import abc
18 | from collections import defaultdict
19 | from typing import Dict
20 |
21 |
22 | class K8sSelector:
23 | """Class to build K8s API selectors."""
24 |
25 | class Component(abc.ABC):
26 | """Component of the selector."""
27 |
28 | @abc.abstractmethod
29 | def ToString(self) -> str:
30 | """Builds the string of this selector component.
31 |
32 | Returns:
33 | str: The string of this selector component.
34 | """
35 |
36 | @property
37 | @abc.abstractmethod
38 | def keyword(self) -> str:
39 | """The keyword argument to which this selector component belongs."""
40 |
41 | class LabelComponent(Component, metaclass=abc.ABCMeta):
42 | """Selector component on labels."""
43 |
44 | @property
45 | def keyword(self) -> str:
46 | return 'label_selector'
47 |
48 | class FieldComponent(Component, metaclass=abc.ABCMeta):
49 | """Selector component on fields."""
50 |
51 | @property
52 | def keyword(self) -> str:
53 | return 'field_selector'
54 |
55 | class Name(FieldComponent):
56 | """Selector component having a particular name."""
57 |
58 | def __init__(self, name: str) -> None:
59 | self._name = name
60 |
61 | def ToString(self) -> str:
62 | return 'metadata.name={0:s}'.format(self._name)
63 |
64 | class Node(FieldComponent):
65 | """Selector component for being on a particular node."""
66 |
67 | def __init__(self, node: str) -> None:
68 | self._node = node
69 |
70 | def ToString(self) -> str:
71 | return 'spec.nodeName={0:s}'.format(self._node)
72 |
73 | class Running(FieldComponent):
74 | """Selector component for a running pod."""
75 |
76 | def ToString(self) -> str:
77 | return 'status.phase!=Failed,status.phase!=Succeeded'
78 |
79 | class Label(LabelComponent):
80 | """Selector component for a label's key-value pair."""
81 |
82 | def __init__(self, key: str, value: str) -> None:
83 | self._key = key
84 | self._value = value
85 |
86 | def ToString(self) -> str:
87 | return '{0:s}={1:s}'.format(self._key, self._value)
88 |
89 | def __init__(self, *selectors: Component) -> None:
90 | self._selectors = selectors
91 |
92 | def ToKeywords(self) -> Dict[str, str]:
93 | """Builds the keyword arguments to be passed to the K8s API.
94 |
95 | Returns:
96 | Dict[str, str]: The keyword arguments to be passed to a Kubernetes
97 | API call.
98 | """
99 | keywords = defaultdict(list)
100 | for selector in self._selectors:
101 | keywords[selector.keyword].append(selector.ToString())
102 | return {k: ','.join(vs) for k, vs in keywords.items()}
103 |
104 | @classmethod
105 | def FromLabelsDict(cls, labels: Dict[str, str]) -> 'K8sSelector':
106 | """Builds a selector from the given label key-value pairs.
107 |
108 | Args:
109 | labels (Dict[str, str]): The label key-value pairs.
110 |
111 | Returns:
112 | K8sSelector: The resulting selector object.
113 | """
114 | args = map(lambda k: K8sSelector.Label(k, labels[k]), labels)
115 | return cls(*args)
116 |
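A short sketch of building selector keyword arguments (not part of the file). The node name and labels are made up; the dictionary shown in the comment follows from ToKeywords() grouping components by keyword and joining them with commas.

from libcloudforensics.providers.kubernetes import selector

sel = selector.K8sSelector(
    selector.K8sSelector.Node('worker-node-1'),
    selector.K8sSelector.Running(),
    selector.K8sSelector.Label('app', 'nginx'))
keywords = sel.ToKeywords()
# keywords == {
#     'field_selector':
#         'spec.nodeName=worker-node-1,'
#         'status.phase!=Failed,status.phase!=Succeeded',
#     'label_selector': 'app=nginx',
# }
# These can then be expanded into a Kubernetes API call, e.g.
# api.list_namespaced_pod('default', **keywords).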
--------------------------------------------------------------------------------
/libcloudforensics/providers/kubernetes/services.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Kubernetes service classes extending the base hierarchy."""
16 | from typing import Dict, List, Optional
17 |
18 | from kubernetes import client
19 |
20 | from libcloudforensics.providers.kubernetes import base
21 | from libcloudforensics.providers.kubernetes import selector
22 |
23 |
24 | class K8sService(base.K8sNamespacedResource):
25 | """Class representing a Kubernetes service."""
26 |
27 | def Delete(self, cascade: bool = True) -> None:
28 | """Override of abstract method."""
29 | api = self._Api(client.CoreV1Api)
30 | api.delete_namespaced_service(self.name, self.namespace)
31 |
32 | def Read(self) -> client.V1Service:
33 | """Override of abstract method."""
34 | api = self._Api(client.CoreV1Api)
35 | return api.read_namespaced_service(self.name, self.namespace)
36 |
37 | def Type(self) -> str:
38 | """Returns the type of this service.
39 |
40 | Returns:
41 | str: The type of this service.
42 | """
43 | return str(self.Read().spec.type)
44 |
45 | def Labels(self) -> Dict[str, str]:
46 | """Returns the selector labels for this service.
47 |
48 | Returns:
49 | Dict[str, str]: The selector labels for this service.
50 | """
51 | labels = self.Read().spec.selector # type: Dict[str, str]
52 | return labels
53 |
54 | def GetCoveredPods(self) -> List[base.K8sPod]:
55 | """Returns the pods covered by this service.
56 |
57 | Returns:
58 | List[base.K8sPod]: The pods covered by this service.
59 | """
60 | api = self._Api(client.CoreV1Api)
61 | pods = api.list_namespaced_pod(
62 | self.namespace,
63 | **selector.K8sSelector.FromLabelsDict(self.Labels()).ToKeywords())
64 | return [
65 | base.K8sPod(
66 | self._api_client, pod.metadata.name, pod.metadata.namespace)
67 | for pod in pods.items
68 | ]
69 |
70 | def ClusterIp(self) -> Optional[str]:
71 | """Returns the Cluster IP of this service.
72 |
73 | The return type is optional to correspond to the API return type.
74 |
75 | Returns:
76 | str: Optional. The Cluster IP of this service.
77 | """
78 | cluster_ip = self.Read().spec.cluster_ip # type: Optional[str]
79 | return cluster_ip
80 |
81 | def ExternalIps(self) -> Optional[List[str]]:
82 | """Returns the external IPs of this service.
83 |
84 | The return type is optional to correspond to the API return type.
85 |
86 | Returns:
87 | List[str]: Optional. The external IPs of this service.
88 | """
89 | external_ips = self.Read().spec.external_i_ps # type: Optional[List[str]]
90 | return external_ips
91 |
--------------------------------------------------------------------------------
/libcloudforensics/providers/kubernetes/volume.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Kubernetes volume class."""
16 | from typing import Any
17 | from typing import Dict
18 | from typing import Optional
19 |
20 | from kubernetes import client
21 |
22 |
23 | class K8sVolume:
24 | """Class wrapping a Kubernetes volume response."""
25 |
26 | def __init__(self, response: client.V1Volume):
27 | """Builds a K8sVolume object.
28 |
29 | Args:
30 | response (client.V1Volume): The Kubernetes Volume response object to wrap.
31 | """
32 | self._response = response
33 |
34 | def Name(self) -> str:
35 | """Returns the name of this volume.
36 |
37 | Returns:
38 | str: The name of this volume.
39 | """
40 | name = self._response.name # type: str
41 | return name
42 |
43 | def Type(self) -> str:
44 | """Returns the type of this volume.
45 |
46 | Returns:
47 | str: The type of this volume.
48 |
49 | Raises:
50 | RuntimeError: If the type of this volume is not found.
51 | """
52 | # There is no attribute for a type, but rather the corresponding type
53 | # attribute is non-null.
54 | # https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1Volume.md # pylint: disable=line-too-long
55 | response_dict = self._response.to_dict() # type: Dict[str, Any]
56 | for k, v in response_dict.items():
57 | if k != 'name' and v:
58 | return k
59 | raise RuntimeError('Volume type not found.')
60 |
61 | def HostPath(self) -> Optional[str]:
62 | """Returns the host path of this volume.
63 |
64 | Will return None if this volume is not of hostPath type.
65 |
66 | Returns:
67 | Optional[str]: The path if this is a hostPath volume, None
68 | otherwise.
69 | """
70 | host_path = self._response.host_path
71 | return host_path.path if host_path else None
72 |
73 | def IsHostRootFilesystem(self) -> bool:
74 | """Returns True if this volume is the host's root filesystem.
75 |
76 | Returns:
77 | bool: True if this volume is the host's root filesystem, False otherwise.
78 | """
79 | return self.HostPath() == '/'
80 |
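A small illustration of Type() and IsHostRootFilesystem() (not part of the file), building a V1Volume response by hand; the volume name is arbitrary.

from kubernetes import client

from libcloudforensics.providers.kubernetes import volume

response = client.V1Volume(
    name='rootfs',
    host_path=client.V1HostPathVolumeSource(path='/'))
vol = volume.K8sVolume(response)
assert vol.Type() == 'host_path'   # the only non-null, non-name attribute
assert vol.IsHostRootFilesystem()  # HostPath() == '/'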
--------------------------------------------------------------------------------
/libcloudforensics/providers/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/google/cloud-forensics-utils/975a6a5b71bb6a941dd783a5acd5647963e781a6/libcloudforensics/providers/utils/__init__.py
--------------------------------------------------------------------------------
/libcloudforensics/providers/utils/storage_utils.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Cross-provider functionalities."""
16 |
17 |
18 | from typing import Tuple
19 |
20 |
21 | def SplitStoragePath(path: str) -> Tuple[str, str]:
22 | """Split a path to bucket name and object URI.
23 |
24 | Args:
25 | path (str): File path to a resource in GCS.
26 | Ex: gs://bucket/folder/obj
27 |
28 | Returns:
29 | Tuple[str, str]: The bucket name and the object URI.
30 | """
31 |
32 | _, _, full_path = path.partition('//')
33 | bucket, _, object_uri = full_path.partition('/')
34 | return bucket, object_uri
35 |
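A quick example of the split performed above (not part of the file); the bucket and object names are arbitrary.

from libcloudforensics.providers.utils import storage_utils

bucket, object_uri = storage_utils.SplitStoragePath('gs://bucket/folder/obj')
# bucket == 'bucket', object_uri == 'folder/obj'
# The same split also works for other '<scheme>://' style paths, e.g.
# SplitStoragePath('s3://evidence/image.bin') == ('evidence', 'image.bin')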
--------------------------------------------------------------------------------
/libcloudforensics/scripts/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/libcloudforensics/scripts/ebs_snapshot_copy_aws.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -x
2 |
3 | set -o pipefail
4 |
5 | snapshot={0:s}
6 | bucket={1:s}
7 |
8 | # This script is formatted with Python's str.format(), so the following curly braces need to be doubled.
9 |
10 | function ebsCopy {{
11 | # params
12 | snapshot=$1
13 | bucket=$2
14 |
15 | echo snapshot: "$snapshot"
16 | echo bucket: "$bucket"
17 |
18 | # Install utilities
19 | amazon-linux-extras install epel -y
20 | yum install jq dc3dd -y
21 |
22 | # Get details about self
23 | region=$(curl -s http://169.254.169.254/latest/meta-data/placement/region)
24 | az=$(curl -s http://169.254.169.254/latest/meta-data/placement/availability-zone)
25 | instance=$(curl -s http://169.254.169.254/latest/meta-data/instance-id)
26 |
27 | echo region: "$region"
28 | echo az: "$az"
29 | echo instance: "$instance"
30 |
31 | # create the new volume
32 | volume=$(aws ec2 --region $region create-volume --availability-zone $az --snapshot-id $snapshot --tag-specification 'ResourceType=volume,Tags=[{{Key=Name,Value=volumeToCopy}}]' | jq -r .VolumeId)
33 |
34 | # wait for create to complete
35 | aws ec2 --region $region wait volume-available --volume-ids $volume
36 |
37 | # attach the new volume to self
38 | aws ec2 --region $region attach-volume --device xvdh --instance-id $instance --volume-id $volume
39 |
40 | # wait for the attachment
41 | aws ec2 --region $region wait volume-in-use --volume-ids $volume
42 | sleep 5 # let the kernel catch up
43 |
44 | # perform the dd to s3
45 | dc3dd if=/dev/xvdh hash=sha512 hash=sha256 hash=md5 log=/tmp/log.txt hlog=/tmp/hlog.txt mlog=/tmp/mlog.txt | aws s3 cp - $bucket/$snapshot/image.bin
46 | aws s3 cp /tmp/log.txt $bucket/$snapshot/
47 | aws s3 cp /tmp/hlog.txt $bucket/$snapshot/
48 | aws s3 cp /tmp/mlog.txt $bucket/$snapshot/
49 |
50 | # detach the volume
51 | aws ec2 --region $region detach-volume --volume-id $volume
52 | aws ec2 --region $region wait volume-available --volume-ids $volume
53 |
54 | # delete the volume
55 | aws ec2 --region $region delete-volume --volume-id $volume
56 | }}
57 |
58 | ebsCopy $snapshot $bucket 2> /tmp/err > /tmp/out
59 |
60 | aws s3 cp /tmp/out $bucket/$snapshot/instance_copy_stdout.txt
61 | aws s3 cp /tmp/err $bucket/$snapshot/instance_copy_stderr.txt
62 |
63 | sleep 5
64 |
65 | poweroff
66 |
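A hedged sketch of how this template is likely rendered before being passed to an instance as user data. ReadStartupScript and EBS_SNAPSHOT_COPY_SCRIPT_AWS come from libcloudforensics/scripts/utils.py below; the snapshot ID, bucket name, and the exact call site are illustrative, not the library's actual invocation.

from libcloudforensics.scripts import utils

template = utils.ReadStartupScript(utils.EBS_SNAPSHOT_COPY_SCRIPT_AWS)
# {0:s} and {1:s} are substituted here; the doubled braces ({{ and }}) in the
# shell function survive str.format() as single literal braces.
user_data = template.format('snap-0123456789abcdef0', 's3://evidence-bucket')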
--------------------------------------------------------------------------------
/libcloudforensics/scripts/forensics_packages_startup.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #
3 | # Startup script to execute when bootstrapping a new forensic VM.
4 | # The script will install forensics packages to perform analysis.
5 |
6 | max_retry=100
7 |
8 | gift_ppa_track='stable'
9 |
10 | # Default packages to install
11 | # This can be overwritten in GetOrCreateAnalysisVm(
12 | # packages=['package1', 'package2', ...])
13 | packages=(
14 | binutils
15 | docker-explorer-tools
16 | htop
17 | jq
18 | libbde-tools
19 | libfsapfs-tools
20 | libfvde-tools
21 | ncdu
22 | plaso-tools
23 | sleuthkit
24 | upx-ucl
25 | )
26 |
27 | err() {
28 | echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')]: $*" >&2
29 | }
30 |
31 | install_packages() {
32 | add-apt-repository -y -u ppa:gift/${gift_ppa_track}
33 | apt -y install ${packages[@]}
34 | }
35 |
36 | # Try to install the packages
37 | for try in $(seq 1 ${max_retry}); do
38 | [[ ${try} -gt 1 ]] && sleep 5
39 | install_packages && exit_code=0 && break || exit_code=$?
40 | err "Failed to install forensics packages, retrying in 5 seconds."
41 | done;
42 |
43 | # Install container-explorer
44 | wget -P /tmp/ https://raw.githubusercontent.com/google/container-explorer/main/script/setup.sh
45 | bash /tmp/setup.sh install
46 |
47 | (exit ${exit_code})
48 |
--------------------------------------------------------------------------------
/libcloudforensics/scripts/forensics_packages_startup_aws.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #
3 | # Startup script to execute when bootstrapping a new forensic VM.
4 | # The script will install forensics packages to perform analysis.
5 |
6 | max_retry=100
7 |
8 | gift_ppa_track='stable'
9 |
10 | packages=(
11 | binutils
12 | docker-explorer-tools
13 | ec2-instance-connect
14 | htop
15 | jq
16 | libbde-tools
17 | libfsapfs-tools
18 | libfvde-tools
19 | ncdu
20 | plaso-tools
21 | sleuthkit
22 | upx-ucl
23 | )
24 |
25 | err() {
26 | echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')]: $*" >&2
27 | }
28 |
29 | install_packages() {
30 | add-apt-repository -y -u ppa:gift/${gift_ppa_track}
31 | apt -y install ${packages[@]}
32 | }
33 |
34 | # Try to install the packages
35 | for try in $(seq 1 ${max_retry}); do
36 | [[ ${try} -gt 1 ]] && sleep 5
37 | install_packages && exit_code=0 && break || exit_code=$?
38 | err "Failed to install forensics packages, retrying in 5 seconds."
39 | done;
40 |
41 | # Install container-explorer
42 | wget -P /tmp/ https://raw.githubusercontent.com/google/container-explorer/main/script/setup.sh
43 | bash /tmp/setup.sh install
44 |
45 | (exit ${exit_code})
46 |
--------------------------------------------------------------------------------
/libcloudforensics/scripts/utils.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Utils method for cloud providers"""
16 |
17 | import os
18 | from typing import Optional
19 |
20 | FORENSICS_STARTUP_SCRIPT = 'forensics_packages_startup.sh'
21 | FORENSICS_STARTUP_SCRIPT_AWS = 'forensics_packages_startup_aws.sh'
22 | FORENSICS_STARTUP_SCRIPT_GCP = FORENSICS_STARTUP_SCRIPT
23 | FORENSICS_STARTUP_SCRIPT_AZ = FORENSICS_STARTUP_SCRIPT
24 | EBS_SNAPSHOT_COPY_SCRIPT_AWS = 'ebs_snapshot_copy_aws.sh'
25 |
26 | def ReadStartupScript(filename: Optional[str] = '') -> str:
27 | """Read and return the startup script that is to be run on the forensics VM.
28 |
29 | Users can either write their own script to install custom packages,
30 | or use one of the provided ones. To use your own script, export a
31 | STARTUP_SCRIPT environment variable with the absolute path to it:
32 | "user@terminal:~$ export STARTUP_SCRIPT='absolute/path/script.sh'"
33 |
34 | Args:
35 | filename (str): The name of the script in the scripts directory to read.
36 | Defaults to 'forensics_packages_startup.sh' if none specified.
37 | Returns:
38 | str: The script to run.
39 |
40 | Raises:
41 | OSError: If the script cannot be opened, read or closed.
42 | """
43 |
44 | try:
45 | script_path = None
46 | if not filename:
47 | script_path = os.environ.get('STARTUP_SCRIPT')
48 | if not script_path:
49 | # Use the provided script
50 | script_path = os.path.join(
51 | os.path.dirname(os.path.realpath(__file__)),
52 | filename or FORENSICS_STARTUP_SCRIPT)
53 | with open(script_path, encoding='utf-8') as startup_script:
54 | return startup_script.read()
55 | except OSError as exception:
56 | raise OSError(
57 | 'Could not open/read/close the startup script {0:s}: {1:s}'.format(
58 | script_path, str(exception))) from exception
59 |
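A minimal usage sketch of ReadStartupScript (not part of the file).

from libcloudforensics.scripts import utils

# Reads libcloudforensics/scripts/forensics_packages_startup_aws.sh.
aws_script = utils.ReadStartupScript(utils.FORENSICS_STARTUP_SCRIPT_AWS)

# With no filename, the STARTUP_SCRIPT environment variable (if exported)
# takes precedence over the default forensics_packages_startup.sh:
#   export STARTUP_SCRIPT='/absolute/path/script.sh'
default_or_custom = utils.ReadStartupScript()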
--------------------------------------------------------------------------------
/poetry.toml:
--------------------------------------------------------------------------------
1 | [virtualenvs]
2 | create = true
3 | in-project = true
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "libcloudforensics"
3 | version = "20250331"
4 | description = "libcloudforensics is a set of tools to help acquire forensic evidence from Cloud platforms."
5 | authors = ["cloud-forensics-utils development team "]
6 | license = "Apache-2.0"
7 | readme = "README.md"
8 |
9 | [tool.poetry.scripts]
10 | cloudforensics = "tools.cli:Main"
11 |
12 | [tool.poetry.dependencies]
13 | python = "^3.9"
14 | google-api-core = "*"
15 | azure-common = "^1.1.28"
16 | azure-core = "^1.29.4"
17 | azure-identity = "^1.13.0"
18 | azure-mgmt-compute = "^30.1.0"
19 | azure-mgmt-monitor = "^6.0.1"
20 | azure-mgmt-network = "^24.0.0"
21 | azure-mgmt-reservations = "^2.3.0"
22 | azure-mgmt-resource = "^23.0.1"
23 | azure-mgmt-storage = "^21.0.0"
24 | azure-storage-blob = "^12.18.1"
25 | boto3 = "^1.28.20"
26 | botocore = ">=1.29.135"
27 | google-api-python-client = "^2.95.0"
28 | httplib2 = "^0.22.0"
29 | msrest = "^0.7.1"
30 | msrestazure = "^0.6.4"
31 | netaddr = "^0.8.0"
32 | pycryptodome = "^3.19.1"
33 | pyjwt = "^2.8.0"
34 | sshpubkeys = "^3.3.1"
35 | requests = "^2.31.0"
36 | kubernetes = "^27.2.0"
37 | pyopenssl = "^23.2.0"
38 | urllib3 = [
39 | {version = ">=1.25.4,<1.27", python = "<3.10"},
40 | {version = ">=1.25.4,<2.1", python = ">=3.10"}
41 | ]
42 | google-auth = "^2.22.0"
43 | setuptools = "^75.8.0"
44 |
45 | [tool.poetry.group.dev.dependencies]
46 | coverage = "^7.2.7"
47 | mock = "^5.1.0"
48 | mypy = "^1.4.1"
49 | pylint = "^3.3.6"
50 | sphinx = "^7.1.2"
51 | sphinx-autodoc-typehints = "^1.24.0"
52 | yapf = "^0.40.1"
53 | types-six = "^1.16.21.9"
54 | types-mock = "^5.1.0.1"
55 | sphinx-rtd-theme = "^1.2.2"
56 | wheel = "^0.41.1"
57 | setuptools = "^75.8.0"
58 |
59 | [build-system]
60 | requires = ["poetry-core"]
61 | build-backend = "poetry.core.masonry.api"
62 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # Copyright 2017 Google Inc.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # http://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | """This is the setup file for the project."""
18 |
19 | # yapf: disable
20 | import sys
21 |
22 | from setuptools import find_packages
23 | from setuptools import setup
24 |
25 | # make sure libcloudforensics is in path
26 | sys.path.insert(0, '.')
27 |
28 | import libcloudforensics # pylint: disable=wrong-import-position
29 |
30 | description = (
31 | 'libcloudforensics is a set of tools to help acquire forensic evidence from'
32 | ' cloud platforms.'
33 | )
34 |
35 |
36 | setup(
37 | name='libcloudforensics',
38 | version=libcloudforensics.__version__,
39 | description=description,
40 | long_description=description,
41 | license='Apache License, Version 2.0',
42 | url='http://github.com/google/cloud-forensics-utils/',
43 | maintainer='Cloud-forensics-utils development team',
44 | maintainer_email='cloud-forensics-utils-dev@googlegroups.com',
45 | classifiers=[
46 | 'Development Status :: 4 - Beta',
47 | 'Environment :: Console',
48 | 'Operating System :: OS Independent',
49 | 'Programming Language :: Python',
50 | ],
51 | packages=find_packages(),
52 | include_package_data=True,
53 | package_data={
54 | 'libcloudforensics': ['libcloudforensics/scripts/*']
55 | },
56 | entry_points={'console_scripts': [
57 | 'cloudforensics = tools.cli:Main']},
58 | zip_safe=False,
59 | )
60 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/tests/providers/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/tests/providers/aws/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/tests/providers/aws/aws_cli.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Prepares calls to the CLI tool for AWS operations."""
16 | from typing import List, Optional
17 |
18 | from libcloudforensics import logging_utils
19 |
20 | logging_utils.SetUpLogger(__name__)
21 | logger = logging_utils.GetLogger(__name__)
22 |
23 |
24 | class AWSCLIHelper:
25 | """AWSCLIHelper prepares calls to the CLI tool for AWS operations."""
26 |
27 | @staticmethod
28 | def PrepareStartAnalysisVmCmd(
29 | vm_name: str,
30 | zone: str,
31 | attach_volumes: Optional[List[str]] = None) -> str:
32 | """Wrapper around the CLI tool to start an analysis VM.
33 |
34 | Args:
35 | vm_name (str): The name of the instance to start.
36 | zone (str): The zone in which to start the instance.
37 | attach_volumes (List[str]): Optional. List of volume names to attach to
38 | the VM.
39 |
40 | Returns:
41 | str: The CLI command to run.
42 | """
43 | cmd = 'cloudforensics aws {0:s} startvm {1:s}'.format( # pylint: disable=line-too-long
44 | zone, vm_name)
45 | if attach_volumes:
46 | cmd += ' --attach_volumes={0:s}'.format(','.join(attach_volumes))
47 | logger.info('CLI command: {0:s}'.format(cmd))
48 | return cmd
49 |
50 | @staticmethod
51 | def PrepareCreateVolumeCopyCmd(
52 | zone: str,
53 | dst_zone: Optional[str] = None,
54 | instance_id: Optional[str] = None,
55 | volume_id: Optional[str] = None) -> str:
56 | """Wrapper around the CLI tool to create a volume copy.
57 |
58 | Args:
59 | zone (str): The AWS zone in which the volume is located, e.g.
60 | 'us-east-2b'.
61 | dst_zone (str): Optional. The AWS zone in which to create the volume
62 | copy. By default, this is the same as 'zone'.
63 | instance_id (str): Optional. Instance ID of the instance using the volume
64 | to be copied. If specified, the boot volume of the instance will be
65 | copied. If volume_id is also specified, then the volume pointed by
66 | that volume_id will be copied.
67 | volume_id (str): Optional. ID of the volume to copy. If not set,
68 | then instance_id needs to be set and the boot volume will be copied.
69 |
70 | Returns:
71 | str: The CLI command to run.
72 | """
73 | cmd = 'cloudforensics aws {0:s} copydisk'.format(zone)
74 | if instance_id:
75 | cmd += ' --instance_id={0:s}'.format(instance_id)
76 | elif volume_id:
77 | cmd += ' --volume_id={0:s}'.format(volume_id)
78 | if dst_zone:
79 | cmd += ' --dst_zone={0:s}'.format(dst_zone)
80 | logger.info('CLI command: {0:s}'.format(cmd))
81 | return cmd
82 |
83 | @staticmethod
84 | def PrepareListImagesCmd(
85 | zone: str,
86 | qfilter: Optional[str] = None) -> str:
87 | """Wrapper around the CLI tool to list AMI images.
88 |
89 | Args:
90 | zone (str): The AWS zone in which to list the images, e.g. 'us-east-2b'.
91 | qfilter (str): Optional. The filter to apply.
92 | See https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html#EC2.Client.describe_images # pylint: disable=line-too-long
93 |
94 | Returns:
95 | str: The CLI command to run.
96 | """
97 | cmd = 'cloudforensics aws {0:s} listimages'.format(zone)
98 | if qfilter:
99 | cmd += ' --filter={0:s}'.format(qfilter)
100 | logger.info('CLI command: {0:s}'.format(cmd))
101 | return cmd
102 |
--------------------------------------------------------------------------------
/tests/providers/aws/internal/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/tests/providers/aws/internal/test_common.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for aws module - common.py."""
16 |
17 | import typing
18 | import unittest
19 |
20 | from libcloudforensics.providers.aws.internal import common
21 |
22 |
23 | class AWSCommonTest(unittest.TestCase):
24 | """Test the common.py public methods"""
25 |
26 | @typing.no_type_check
27 | def testCreateTags(self):
28 | """Test that tag specifications are correctly created"""
29 | tag_specifications = common.CreateTags(common.VOLUME, {'Name': 'fake-name'})
30 | self.assertEqual('volume', tag_specifications['ResourceType'])
31 | self.assertEqual(1, len(tag_specifications['Tags']))
32 | self.assertEqual('Name', tag_specifications['Tags'][0]['Key'])
33 | self.assertEqual('fake-name', tag_specifications['Tags'][0]['Value'])
34 |
35 | tag_specifications = common.CreateTags(
36 | common.VOLUME, {'Name': 'fake-name', 'FakeTag': 'fake-tag'})
37 | self.assertEqual(2, len(tag_specifications['Tags']))
38 | self.assertEqual('FakeTag', tag_specifications['Tags'][1]['Key'])
39 | self.assertEqual('fake-tag', tag_specifications['Tags'][1]['Value'])
40 |
41 | @typing.no_type_check
42 | def testGetInstanceTypeByCPU(self):
43 | """Test that the instance type matches the requested amount of CPU cores."""
44 | self.assertEqual('m4.large', common.GetInstanceTypeByCPU(2))
45 | self.assertEqual('m4.16xlarge', common.GetInstanceTypeByCPU(64))
46 | with self.assertRaises(ValueError):
47 | common.GetInstanceTypeByCPU(0)
48 | with self.assertRaises(ValueError):
49 | common.GetInstanceTypeByCPU(256)
50 |
--------------------------------------------------------------------------------
/tests/providers/aws/internal/test_log.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for aws module - log.py."""
16 |
17 | import typing
18 | import unittest
19 | import mock
20 |
21 | from tests.providers.aws import aws_mocks
22 |
23 |
24 | class AWSCloudTrailTest(unittest.TestCase):
25 | """Test AWS CloudTrail class."""
26 | # pylint: disable=line-too-long
27 |
28 | @typing.no_type_check
29 | @mock.patch('libcloudforensics.providers.aws.internal.account.AWSAccount.ClientApi')
30 | def testLookupEvents(self, mock_ec2_api):
31 | """Test that the CloudTrail event are looked up."""
32 | events = mock_ec2_api.return_value.lookup_events
33 | events.return_value = aws_mocks.MOCK_EVENT_LIST
34 | lookup_events = aws_mocks.FAKE_CLOUDTRAIL.LookupEvents()
35 |
36 | self.assertEqual(2, len(lookup_events))
37 | self.assertEqual(aws_mocks.FAKE_EVENT_LIST[0], lookup_events[0])
38 |
--------------------------------------------------------------------------------
/tests/providers/aws/internal/test_s3.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for AWS module - s3.py."""
16 |
17 | import typing
18 | import unittest
19 | import mock
20 |
21 | from tests.providers.aws import aws_mocks
22 |
23 |
24 | class AWSS3Test(unittest.TestCase):
25 | """Test AWS S3 class."""
26 | # pylint: disable=line-too-long
27 |
28 | @typing.no_type_check
29 | @mock.patch('libcloudforensics.providers.aws.internal.account.AWSAccount.ClientApi')
30 | def testCreateBucket(self, mock_s3_api):
31 | """Test that the Bucket is created."""
32 | storage = mock_s3_api.return_value.create_bucket
33 | storage.return_value = aws_mocks.MOCK_CREATE_BUCKET
34 | create_bucket = aws_mocks.FAKE_STORAGE.CreateBucket('test-bucket')
35 |
36 | storage.assert_called_with(
37 | Bucket='test-bucket',
38 | ACL='private',
39 | CreateBucketConfiguration={
40 | 'LocationConstraint': aws_mocks.FAKE_AWS_ACCOUNT.default_region
41 | })
42 | self.assertEqual(200, create_bucket['ResponseMetadata']['HTTPStatusCode'])
43 | self.assertEqual('http://test-bucket.s3.amazonaws.com/', create_bucket['Location'])
44 |
45 | create_bucket = aws_mocks.FAKE_STORAGE.CreateBucket('test-bucket', region='us-east-1')
46 | storage.assert_called_with(
47 | Bucket='test-bucket',
48 | ACL='private')
49 |
--------------------------------------------------------------------------------
/tests/providers/aws/test_forensics.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for aws module - forensics.py."""
16 |
17 | import typing
18 | import unittest
19 | import mock
20 |
21 | from libcloudforensics import errors
22 | from libcloudforensics.providers.aws.internal import ebs
23 |
24 | from libcloudforensics.providers.aws import forensics
25 | from tests.providers.aws import aws_mocks
26 |
27 |
28 | class AWSForensicsTest(unittest.TestCase):
29 | """Test the forensics.py public methods."""
30 | # pylint: disable=line-too-long
31 |
32 | @typing.no_type_check
33 | @mock.patch('boto3.session.Session._setup_loader')
34 | @mock.patch('libcloudforensics.providers.aws.internal.ebs.AWSVolume.GetVolumeType')
35 | @mock.patch('libcloudforensics.providers.aws.internal.ebs.AWSVolume.Snapshot')
36 | @mock.patch('libcloudforensics.providers.aws.internal.ebs.EBS.GetVolumeById')
37 | @mock.patch('libcloudforensics.providers.aws.internal.ebs.EBS.GetAccountInformation')
38 | @mock.patch('libcloudforensics.providers.aws.internal.account.AWSAccount.ClientApi')
39 | def testCreateVolumeCopy1(self,
40 | mock_ec2_api,
41 | mock_account,
42 | mock_get_volume,
43 | mock_snapshot,
44 | mock_volume_type,
45 | mock_loader):
46 | """Test that a volume is correctly cloned."""
47 | aws_mocks.FAKE_SNAPSHOT.name = aws_mocks.FAKE_VOLUME.volume_id
48 | mock_ec2_api.return_value.create_volume.return_value = aws_mocks.MOCK_CREATE_VOLUME
49 | mock_account.return_value = aws_mocks.MOCK_CALLER_IDENTITY
50 | mock_get_volume.return_value = aws_mocks.FAKE_VOLUME
51 | mock_snapshot.return_value = aws_mocks.FAKE_SNAPSHOT
52 | mock_volume_type.return_value = 'standard'
53 | mock_loader.return_value = None
54 |
55 | # CreateVolumeCopy(zone, volume_id='fake-volume-id'). This should grab
56 | # the volume 'fake-volume-id'.
57 | new_volume = forensics.CreateVolumeCopy(
58 | aws_mocks.FAKE_INSTANCE.availability_zone, volume_id=aws_mocks.FAKE_VOLUME.volume_id)
59 | mock_get_volume.assert_called_with('fake-volume-id')
60 | self.assertIsInstance(new_volume, ebs.AWSVolume)
61 | self.assertTrue(new_volume.name.startswith('evidence-'))
62 | self.assertIn('fake-volume-id', new_volume.name)
63 | self.assertTrue(new_volume.name.endswith('-copy'))
64 |
65 | @typing.no_type_check
66 | @mock.patch('boto3.session.Session._setup_loader')
67 | @mock.patch('libcloudforensics.providers.aws.internal.ebs.AWSVolume.GetVolumeType')
68 | @mock.patch('libcloudforensics.providers.aws.internal.ebs.AWSVolume.Snapshot')
69 | @mock.patch('libcloudforensics.providers.aws.internal.ec2.AWSInstance.GetBootVolume')
70 | @mock.patch('libcloudforensics.providers.aws.internal.ec2.EC2.GetInstanceById')
71 | @mock.patch('libcloudforensics.providers.aws.internal.ebs.EBS.GetAccountInformation')
72 | @mock.patch('libcloudforensics.providers.aws.internal.account.AWSAccount.ClientApi')
73 | def testCreateVolumeCopy2(self,
74 | mock_ec2_api,
75 | mock_account,
76 | mock_get_instance,
77 | mock_get_volume,
78 | mock_snapshot,
79 | mock_volume_type,
80 | mock_loader):
81 | """Test that a volume is correctly cloned."""
82 | aws_mocks.FAKE_SNAPSHOT.name = aws_mocks.FAKE_BOOT_VOLUME.volume_id
83 | mock_ec2_api.return_value.create_volume.return_value = aws_mocks.MOCK_CREATE_VOLUME
84 | mock_account.return_value = aws_mocks.MOCK_CALLER_IDENTITY
85 | mock_get_instance.return_value = aws_mocks.FAKE_INSTANCE
86 | mock_get_volume.return_value = aws_mocks.FAKE_BOOT_VOLUME
87 | mock_snapshot.return_value = aws_mocks.FAKE_SNAPSHOT
88 | mock_volume_type.return_value = 'standard'
89 | mock_loader.return_value = None
90 |
91 | # CreateVolumeCopy(zone, instance='fake-instance-id'). This should grab
92 | # the boot volume of the instance.
93 | new_volume = forensics.CreateVolumeCopy(
94 | aws_mocks.FAKE_INSTANCE.availability_zone, instance_id=aws_mocks.FAKE_INSTANCE.instance_id)
95 | mock_get_instance.assert_called_with('fake-instance-id')
96 | self.assertIsInstance(new_volume, ebs.AWSVolume)
97 | self.assertTrue(new_volume.name.startswith('evidence-'))
98 | self.assertIn('fake-boot-volume-id', new_volume.name)
99 | self.assertTrue(new_volume.name.endswith('-copy'))
100 |
101 | @typing.no_type_check
102 | @mock.patch('boto3.session.Session._setup_loader')
103 | @mock.patch('libcloudforensics.providers.aws.internal.ebs.EBS.ListVolumes')
104 | @mock.patch('libcloudforensics.providers.aws.internal.ec2.EC2.ListInstances')
105 | def testCreateVolumeCopy3(self,
106 | mock_list_instances,
107 | mock_list_volumes,
108 | mock_loader):
109 | """Test that a volume is correctly cloned."""
110 | mock_loader.return_value = None
111 | # Should raise a ValueError exception as no volume_id or instance_id is
112 | # specified.
113 | with self.assertRaises(ValueError):
114 | forensics.CreateVolumeCopy(aws_mocks.FAKE_INSTANCE.availability_zone)
115 |
116 | # Should raise a ResourceCreationError as we are querying a non-existent
117 | # instance.
118 | mock_list_instances.return_value = {}
119 | with self.assertRaises(errors.ResourceCreationError):
120 | forensics.CreateVolumeCopy(
121 | aws_mocks.FAKE_INSTANCE.availability_zone,
122 | instance_id='non-existent-instance-id')
123 |
124 | # Should raise a ResourceCreationError as we are querying a non-existent
125 | # volume.
126 | mock_list_volumes.return_value = {}
127 | with self.assertRaises(errors.ResourceCreationError):
128 | forensics.CreateVolumeCopy(
129 | aws_mocks.FAKE_INSTANCE.availability_zone,
130 | volume_id='non-existent-volume-id')
131 |
--------------------------------------------------------------------------------
/tests/providers/azure/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/tests/providers/azure/azure_cli.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Prepares calls to the CLI tool for Azure operations."""
16 | from typing import List, Optional
17 |
18 | from libcloudforensics import logging_utils
19 | from libcloudforensics.providers.azure.internal import account
20 |
21 | logging_utils.SetUpLogger(__name__)
22 | logger = logging_utils.GetLogger(__name__)
23 |
24 |
25 | class AzureCLIHelper:
26 | """AzureCLIHelper prepares calls the CLI tool for Azure operations."""
27 |
28 | def __init__(self, az: account.AZAccount) -> None:
29 | """Initialize the CLI class.
30 |
31 | Args:
32 | az (AZAccount): The Azure account to work with.
33 | """
34 | self.az = az
35 |
36 | def PrepareStartAnalysisVmCmd(
37 | self,
38 | instance_name: str,
39 | attach_disks: Optional[List[str]] = None) -> str:
40 | """Start an analysis VM.
41 |
42 | Args:
43 | instance_name (str): The name of the instance to start.
44 | attach_disks (List[str]): Optional. List of volume names to attach to
45 | the VM.
46 |
47 | Returns:
48 | str: The CLI command to run.
49 | """
50 | cmd = 'cloudforensics az {0:s} startvm {1:s} --region {2:s}'.format(
51 | self.az.default_resource_group_name,
52 | instance_name,
53 | self.az.default_region)
54 | if attach_disks:
55 | cmd += ' --attach_disks={0:s}'.format(','.join(attach_disks))
56 | logger.info('CLI command: {0:s}'.format(cmd))
57 | return cmd
58 |
59 | def PrepareCreateDiskCopyCmd(
60 | self,
61 | instance_name: Optional[str] = None,
62 | disk_name: Optional[str] = None,
63 | region: Optional[str] = None) -> str:
64 | """Create a disk copy.
65 |
66 | Args:
67 | instance_name (str): Optional. Instance name of the instance using the
68 | disk to be copied. If specified, the boot disk of the instance will be
69 | copied. If disk_name is also specified, then the disk pointed to by
70 | disk_name will be copied.
71 | disk_name (str): Optional. Name of the disk to copy. If not set,
72 | then instance_name needs to be set and the boot disk will be copied.
73 | region (str): Optional. The region in which to create the disk copy.
74 | Default is eastus.
75 |
76 | Returns:
77 | str: The CLI command to run.
78 | """
79 | cmd = 'cloudforensics az {0:s} copydisk'.format(
80 | self.az.default_resource_group_name)
81 | if instance_name:
82 | cmd += ' --instance_name={0:s}'.format(instance_name)
83 | elif disk_name:
84 | cmd += ' --disk_name={0:s}'.format(disk_name)
85 | cmd += ' --region={0:s}'.format(region or self.az.default_region)
86 | cmd += ' --disk_type=Standard_LRS'
87 | logger.info('CLI command: {0:s}'.format(cmd))
88 | return cmd
89 |
--------------------------------------------------------------------------------
/tests/providers/azure/azure_mocks.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Azure mocks used across tests."""
16 |
17 | import mock
18 |
19 | from libcloudforensics.providers.azure.internal import account
20 | from libcloudforensics.providers.azure.internal import compute
21 | from libcloudforensics.providers.azure.internal import monitoring
22 |
23 | RESOURCE_ID_PREFIX = ('/subscriptions/sub/resourceGroups/fake-resource-group'
24 | '/providers/Microsoft.Compute/type/')
25 |
26 | # pylint: disable=line-too-long
27 | with mock.patch('libcloudforensics.providers.azure.internal.common.GetCredentials') as mock_creds:
28 | mock_creds.return_value = ('fake-subscription-id', mock.Mock())
29 | with mock.patch('libcloudforensics.providers.azure.internal.resource.AZResource.GetOrCreateResourceGroup') as mock_resource:
30 | # pylint: enable=line-too-long
31 | mock_resource.return_value = 'fake-resource-group'
32 | FAKE_ACCOUNT = account.AZAccount(
33 | 'fake-resource-group',
34 | default_region='fake-region'
35 | )
36 |
37 | FAKE_INSTANCE = compute.AZComputeVirtualMachine(
38 | FAKE_ACCOUNT,
39 | RESOURCE_ID_PREFIX + 'fake-vm-name',
40 | 'fake-vm-name',
41 | 'fake-region',
42 | ['fake-zone']
43 | )
44 |
45 | FAKE_DISK = compute.AZComputeDisk(
46 | FAKE_ACCOUNT,
47 | RESOURCE_ID_PREFIX + 'fake-disk-name',
48 | 'fake-disk-name',
49 | 'fake-region',
50 | ['fake-zone']
51 | )
52 |
53 | FAKE_BOOT_DISK = compute.AZComputeDisk(
54 | FAKE_ACCOUNT,
55 | RESOURCE_ID_PREFIX + 'fake-boot-disk-name',
56 | 'fake-boot-disk-name',
57 | 'fake-region',
58 | ['fake-zone']
59 | )
60 |
61 | FAKE_SNAPSHOT = compute.AZComputeSnapshot(
62 | FAKE_ACCOUNT,
63 | RESOURCE_ID_PREFIX + 'fake_snapshot_name',
64 | 'fake_snapshot_name',
65 | 'fake-region',
66 | FAKE_DISK
67 | )
68 |
69 | FAKE_MONITORING = monitoring.AZMonitoring(FAKE_ACCOUNT)
70 |
71 | MOCK_INSTANCE = mock.Mock(
72 | id=RESOURCE_ID_PREFIX + 'fake-vm-name',
73 | location='fake-region',
74 | zones=['fake-zone']
75 | )
76 | # Name attributes have to be set on Mock objects in a separate statement:
77 | # name= in the Mock() constructor sets the repr name, not a .name attribute.
78 | MOCK_INSTANCE.name = 'fake-vm-name'
79 | MOCK_REQUEST_INSTANCES = [[MOCK_INSTANCE]]
80 | MOCK_LIST_INSTANCES = {
81 | 'fake-vm-name': FAKE_INSTANCE
82 | }
83 | MOCK_CAPACITY_PROVIDER = mock.Mock(registration_state='Registered')
84 | MOCK_QUOTA = mock.Mock(properties=mock.Mock(limit=1, current_value=0))
85 |
86 | MOCK_DISK = mock.Mock(
87 | id=RESOURCE_ID_PREFIX + 'fake-disk-name',
88 | location='fake-region',
89 | zones=['fake-zone']
90 | )
91 | MOCK_DISK.name = 'fake-disk-name'
92 |
93 | MOCK_BOOT_DISK = mock.Mock(
94 | id=RESOURCE_ID_PREFIX + 'fake-boot-disk-name',
95 | location='fake-region',
96 | zones=['fake-zone']
97 | )
98 | MOCK_BOOT_DISK.name = 'fake-boot-disk-name'
99 |
100 | MOCK_DISK_COPY = mock.Mock(
101 | id=RESOURCE_ID_PREFIX + 'fake_snapshot_name_f4c186ac_copy',
102 | location='fake-region',
103 | zones=['fake-zone']
104 | )
105 | MOCK_DISK_COPY.name = 'fake_snapshot_name_f4c186ac_copy'
106 |
107 | MOCK_REQUEST_DISKS = [[MOCK_DISK, MOCK_BOOT_DISK]]
108 | MOCK_LIST_DISKS = {
109 | 'fake-disk-name': FAKE_DISK,
110 | 'fake-boot-disk-name': FAKE_BOOT_DISK
111 | }
112 |
113 | MOCK_VM_SIZE = mock.Mock()
114 | MOCK_VM_VCPU_CAPABILITY = mock.Mock()
115 | MOCK_VM_VCPU_CAPABILITY.name = 'vCPUs'
116 | MOCK_VM_VCPU_CAPABILITY.value = '4'
117 | MOCK_VM_MEMORYGB_CAPABILITY = mock.Mock()
118 | MOCK_VM_MEMORYGB_CAPABILITY.name = 'MemoryGB'
119 | MOCK_VM_MEMORYGB_CAPABILITY.value = '8'
120 | MOCK_VM_PREMIUMIO_CAPABILITY = mock.Mock()
121 | MOCK_VM_PREMIUMIO_CAPABILITY.name = 'PremiumIO'
122 | MOCK_VM_PREMIUMIO_CAPABILITY.value = 'True'
123 | MOCK_VM_SIZE.capabilities = [
124 | MOCK_VM_VCPU_CAPABILITY,
125 | MOCK_VM_MEMORYGB_CAPABILITY,
126 | MOCK_VM_PREMIUMIO_CAPABILITY
127 | ]
128 | MOCK_VM_SIZE.name = 'fake-vm-type'
129 | MOCK_REQUEST_VM_SIZE = [MOCK_VM_SIZE]
130 | MOCK_LIST_VM_SIZES = [{
131 | 'Name': 'fake-vm-type',
132 | 'CPU': 4,
133 | 'Memory': 8192,
134 | 'Family': 'standardB1lsFamily'
135 | }]
136 |
137 | MOCK_ANALYSIS_INSTANCE = mock.Mock(
138 | id=RESOURCE_ID_PREFIX + 'fake-analysis-vm-name',
139 | location='fake-region',
140 | zones=['fake-zone']
141 | )
142 | MOCK_ANALYSIS_INSTANCE.name = 'fake-analysis-vm-name'
143 |
144 | MOCK_LIST_IDS = [
145 | mock.Mock(subscription_id='fake-subscription-id-1'),
146 | mock.Mock(subscription_id='fake-subscription-id-2')
147 | ]
148 |
149 | MOCK_STORAGE_ACCOUNT = mock.Mock(id='fakestorageid')
150 |
151 | MOCK_LIST_KEYS = mock.Mock(
152 | keys=[mock.Mock(key_name='key1', value='fake-key-value')])
153 |
154 | JSON_FILE = 'scripts/test_credentials.json'
155 | STARTUP_SCRIPT = 'scripts/startup.sh'
156 |
157 | MOCK_BLOB_PROPERTIES = mock.Mock()
158 | MOCK_BLOB_PROPERTIES.copy = mock.Mock()
159 | MOCK_BLOB_PROPERTIES.copy.status = 'success'
160 |
161 | MOCK_METRICS = mock.Mock()
162 | MOCK_METRICS.name = mock.Mock()
163 | MOCK_METRICS.name.value = 'fake-metric'
164 | MOCK_LIST_METRICS = [MOCK_METRICS]
165 |
166 | MOCK_METRIC_OPERATION_VALUE = mock.Mock(timeseries=[mock.Mock(
167 | data=[mock.Mock(time_stamp='fake-time-stamp', total='fake-value')])])
168 | MOCK_METRIC_OPERATION_VALUE.name = mock.Mock(value='fake-metric')
169 | MOCK_METRIC_OPERATION = mock.Mock(value=[MOCK_METRIC_OPERATION_VALUE])
170 |
171 | AZURE_CONFIG_DIR = 'scripts/test_azure_config_dir/'
172 | EMPTY_AZURE_CONFIG_DIR = 'scripts/test_empty_azure_config_dir/'
173 |
--------------------------------------------------------------------------------
/tests/providers/azure/internal/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/tests/providers/azure/internal/test_monitoring.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the azure module - monitoring.py"""
16 |
17 | import typing
18 | import unittest
19 | import mock
20 |
21 | from tests.providers.azure import azure_mocks
22 |
23 |
24 | class AZMonitoringTest(unittest.TestCase):
25 | """Test Azure monitoring class."""
26 | # pylint: disable=line-too-long
27 |
28 | @typing.no_type_check
29 | @mock.patch('azure.mgmt.monitor.v2021_05_01.operations._metric_definitions_operations.MetricDefinitionsOperations.list')
30 | def testListAvailableMetricsForResource(self, mock_list_metrics):
31 | """Test that metrics are correctly listed."""
32 | mock_list_metrics.return_value = azure_mocks.MOCK_LIST_METRICS
33 | metrics = azure_mocks.FAKE_MONITORING.ListAvailableMetricsForResource(
34 | 'fake-resource-id')
35 | self.assertEqual(1, len(metrics))
36 | self.assertIn('fake-metric', metrics)
37 |
38 | @typing.no_type_check
39 | @mock.patch('azure.mgmt.monitor.v2021_05_01.operations._metrics_operations.MetricsOperations.list')
40 | def testGetMetricsForResource(self, mock_list_metrics_operations):
41 | """Test that metrics values are correctly retrieved."""
42 | mock_list_metrics_operations.return_value = azure_mocks.MOCK_METRIC_OPERATION
43 | metrics = azure_mocks.FAKE_MONITORING.GetMetricsForResource(
44 | 'fake-resource-id', 'fake-metric')
45 | self.assertIn('fake-metric', metrics)
46 | self.assertEqual(1, len(metrics['fake-metric']))
47 | self.assertEqual('fake-value', metrics['fake-metric']['fake-time-stamp'])
48 |
--------------------------------------------------------------------------------
/tests/providers/azure/internal/test_resource.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the azure module - resource.py"""
16 |
17 | import typing
18 | import unittest
19 | import mock
20 |
21 | from tests.providers.azure import azure_mocks
22 |
23 |
24 | class AZResourceTest(unittest.TestCase):
25 |   """Test Azure resource class."""
26 |
27 | # pylint: disable=line-too-long
28 |
29 | @mock.patch(
30 | "azure.mgmt.resource.subscriptions.v2022_12_01.operations._operations.SubscriptionsOperations.list"
31 | )
32 | @typing.no_type_check
33 | def testListSubscriptionIDs(self, mock_list):
34 |     """Test that subscription IDs are correctly listed."""
35 | mock_list.return_value = azure_mocks.MOCK_LIST_IDS
36 | subscription_ids = azure_mocks.FAKE_ACCOUNT.resource.ListSubscriptionIDs()
37 | self.assertEqual(2, len(subscription_ids))
38 | self.assertEqual("fake-subscription-id-1", subscription_ids[0])
39 |
--------------------------------------------------------------------------------
/tests/providers/azure/internal/test_storage.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the azure module - storage.py"""
16 |
17 | import typing
18 | import unittest
19 | import mock
20 |
21 | from libcloudforensics import errors
22 | from tests.providers.azure import azure_mocks
23 |
24 |
25 | class AZStorageTest(unittest.TestCase):
26 | """Test Azure storage class."""
27 | # pylint: disable=line-too-long
28 |
29 | @mock.patch('azure.mgmt.storage.v2023_05_01.operations._storage_accounts_operations.StorageAccountsOperations.list_keys')
30 | @mock.patch('azure.mgmt.storage.v2023_05_01.operations._storage_accounts_operations.StorageAccountsOperations.begin_create')
31 | @typing.no_type_check
32 | def testCreateStorageAccount(self, mock_create, mock_list_keys):
33 | """Test that a storage account is created and its information retrieved"""
34 | # pylint: disable=protected-access
35 | mock_create.return_value.result.return_value = azure_mocks.MOCK_STORAGE_ACCOUNT
36 | mock_list_keys.return_value = azure_mocks.MOCK_LIST_KEYS
37 | account_id, account_key = azure_mocks.FAKE_ACCOUNT.storage.CreateStorageAccount(
38 | 'fakename')
39 | self.assertEqual('fakestorageid', account_id)
40 | self.assertEqual('fake-key-value', account_key)
41 |
42 | with self.assertRaises(errors.InvalidNameError) as error:
43 | _, _ = azure_mocks.FAKE_ACCOUNT.storage.CreateStorageAccount(
44 | 'fake-non-conform-name')
45 | # pylint: enable=protected-access
46 | self.assertEqual('Storage account name fake-non-conform-name does not '
47 | 'comply with ^[a-z0-9]{1,24}$', str(error.exception))
48 |
--------------------------------------------------------------------------------
/tests/providers/gcp/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/tests/providers/gcp/gcp_cli.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Prepares calls to the CLI tool for GCP operations."""
16 | from typing import List
17 | from typing import Optional
18 |
19 | from libcloudforensics import logging_utils
20 | from libcloudforensics.providers.gcp.internal.project import GoogleCloudProject
21 |
22 | logging_utils.SetUpLogger(__name__)
23 | logger = logging_utils.GetLogger(__name__)
24 |
25 |
26 | class GCPCLIHelper:
27 | """GCPCLIHelper prepares calls to the CLI tool for GCP operations."""
28 | def __init__(self, gcp: GoogleCloudProject) -> None:
29 | """Initialize the CLI class.
30 |
31 | Attributes:
32 | gcp (GoogleCloudProject): The GCP project to work with.
33 | """
34 | self.gcp = gcp
35 |
36 | def PrepareStartAnalysisVmCmd(
37 | self,
38 | instance_name: str,
39 | zone: str,
40 | attach_disks: Optional[List[str]] = None) -> str:
41 | """Wrapper around the CLI tool to start an analysis VM.
42 |
43 | Args:
44 | instance_name (str): The name of the instance to start.
45 | zone (str): The zone in which to start the instance.
46 | attach_disks (List[str]): Optional. List of disk names to attach.
47 |
48 | Returns:
49 | str: The CLI command to run.
50 | """
51 | cmd = 'cloudforensics gcp --project={0:s} startvm {1:s} {2:s}'.format(
52 | self.gcp.project_id, instance_name, zone)
53 | if attach_disks:
54 | cmd += ' --attach_disks={0:s}'.format(','.join(attach_disks))
55 | logger.info('CLI command: {0:s}'.format(cmd))
56 | return cmd
57 |
58 | def PrepareCreateDiskCopyCmd(
59 | self,
60 | instance_name: Optional[str] = None,
61 | disk_name: Optional[str] = None) -> str:
62 |     """Prepare the CLI command to create a disk copy.
63 |
64 | Args:
65 |       instance_name (str): Optional. Name of the instance to copy disk from.
66 |       disk_name (str): Optional. Name of the disk to copy.
67 |
68 | Returns:
69 | str: The CLI command to run.
70 | """
71 | cmd = 'cloudforensics gcp --project={0:s} copydisk {0:s} {1:s}'.format(
72 | self.gcp.project_id, self.gcp.default_zone)
73 | if instance_name:
74 | cmd += ' --instance_name={0:s}'.format(instance_name)
75 | elif disk_name:
76 | cmd += ' --disk_name={0:s}'.format(disk_name)
77 | logger.info('CLI command: {0:s}'.format(cmd))
78 | return cmd
79 |
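80 | # Illustrative usage (a minimal sketch, not part of the helper above),
81 | # assuming `project` is an existing GoogleCloudProject instance:
82 | #
83 | #   cli = GCPCLIHelper(project)
84 | #   start_cmd = cli.PrepareStartAnalysisVmCmd(
85 | #       'analysis-vm', 'us-central1-a', attach_disks=['evidence-disk-copy'])
86 | #   copy_cmd = cli.PrepareCreateDiskCopyCmd(instance_name='suspect-instance')
87 | #   # The returned strings can then be executed by an e2e test harness.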
--------------------------------------------------------------------------------
/tests/providers/gcp/internal/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/tests/providers/gcp/internal/test_bigquery.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the gcp module - bigquery.py"""
16 |
17 | import typing
18 | import unittest
19 | import mock
20 |
21 | from tests.providers.gcp import gcp_mocks
22 |
23 |
24 | class GoogleBigQueryTest(unittest.TestCase):
25 | """Test Google BigQuery class."""
26 | # pylint: disable=line-too-long
27 |
28 | @typing.no_type_check
29 | @mock.patch(
30 | 'libcloudforensics.providers.gcp.internal.bigquery.GoogleBigQuery.GoogleBigQueryApi'
31 | )
32 | def testListBigQueryJobs(self, mock_bigquery_api):
33 | """Test BigQuery Jobs List operation."""
34 | api_list_jobs = mock_bigquery_api.return_value.jobs.return_value.list
35 | api_list_jobs.return_value.execute.return_value = gcp_mocks.MOCK_BIGQUERY_JOBS
36 | list_results = gcp_mocks.FAKE_BIGQUERY.ListBigQueryJobs()
37 | self.assertEqual(1, len(list_results))
38 | self.assertEqual(
39 | 'bquxjob_12345678_abcdefghij1k',
40 | list_results[0]['jobReference']['jobId'])
41 | self.assertEqual(
42 | 'SELECT * FROM `fake-target-project.fake-target-project-dataset.fake-target-project-table`',
43 | list_results[0]['configuration']['query']['query'])
44 |
--------------------------------------------------------------------------------
/tests/providers/gcp/internal/test_build.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the gcp module - build.py"""
16 |
17 | import typing
18 | import unittest
19 | import mock
20 |
21 | from tests.providers.gcp import gcp_mocks
22 |
23 |
24 | class GoogleCloudBuildTest(unittest.TestCase):
25 | """Test Google Cloud Build class."""
26 | # pylint: disable=line-too-long
27 |
28 | @typing.no_type_check
29 | @mock.patch('libcloudforensics.providers.gcp.internal.build.GoogleCloudBuild.GcbApi')
30 | def testCreateBuild(self, mock_gcb_api):
31 | """Test that Cloud Builds are correctly created."""
32 | build_create_object = mock_gcb_api.return_value.projects.return_value.builds.return_value.create
33 | build_create_object.return_value.execute.return_value = gcp_mocks.MOCK_GCB_BUILDS_CREATE
34 | build_response = gcp_mocks.FAKE_GCB.CreateBuild({'Fake-Build_body': None})
35 | self.assertEqual(gcp_mocks.MOCK_GCB_BUILDS_CREATE, build_response)
36 |
37 | @typing.no_type_check
38 | @mock.patch('libcloudforensics.providers.gcp.internal.build.GoogleCloudBuild.GcbApi')
39 | def testBlockOperation(self, mock_gcb_api):
40 | """Test that Cloud Builds are correctly blocked until done."""
41 | build_operation_object = mock_gcb_api.return_value.operations.return_value.get
42 | build_operation_object.return_value.execute.return_value = gcp_mocks.MOCK_GCB_BUILDS_SUCCESS
43 | block_build_success = gcp_mocks.FAKE_GCB.BlockOperation(
44 | gcp_mocks.MOCK_GCB_BUILDS_CREATE)
45 | self.assertEqual(gcp_mocks.MOCK_GCB_BUILDS_SUCCESS, block_build_success)
46 | build_operation_object.return_value.execute.return_value = gcp_mocks.MOCK_GCB_BUILDS_FAIL
47 | with self.assertRaises(RuntimeError):
48 | gcp_mocks.FAKE_GCB.BlockOperation(gcp_mocks.MOCK_GCB_BUILDS_CREATE)
49 |
--------------------------------------------------------------------------------
/tests/providers/gcp/internal/test_cloudsql.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the gcp module - cloudsql.py"""
16 |
17 | import typing
18 | import unittest
19 | import mock
20 |
21 | from tests.providers.gcp import gcp_mocks
22 |
23 | class GoogleCloudSqlTest(unittest.TestCase):
24 | """Test Google CloudSql class."""
25 | # pylint: disable=line-too-long
26 |
27 | @typing.no_type_check
28 | @mock.patch('libcloudforensics.providers.gcp.internal.cloudsql.GoogleCloudSQL.GoogleCloudSQLApi')
29 | def testListCloudSqlInstances(self, mock_gcsql_api):
30 | """Test GCSql instance List operation."""
31 | api_list_instances = mock_gcsql_api.return_value.instances.return_value.list
32 | api_list_instances.return_value.execute.return_value = gcp_mocks.MOCK_GCSQL_INSTANCES
33 | list_results = gcp_mocks.FAKE_CLOUDSQLINSTANCE.ListCloudSQLInstances()
34 | self.assertEqual(1, len(list_results))
35 | self.assertEqual('FAKE_INSTANCE', list_results[0]['instanceType'])
36 | self.assertEqual('fake', list_results[0]['name'])
37 |
--------------------------------------------------------------------------------
/tests/providers/gcp/internal/test_common.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the gcp module - common.py"""
16 |
17 | import typing
18 | import unittest
19 |
20 | from libcloudforensics import errors
21 | from libcloudforensics.providers.gcp.internal import common
22 |
23 | from tests.providers.gcp import gcp_mocks
24 |
25 |
26 | class GCPCommonTest(unittest.TestCase):
27 | """Test forensics.py methods and common.py helper methods."""
28 | # pylint: disable=line-too-long
29 |
30 | @typing.no_type_check
31 | def testGenerateDiskName(self):
32 | """Test that the generated disk name is always within GCP boundaries.
33 |
34 | The disk name must comply with the following RegEx:
35 | - ^(?=.{1,63}$)[a-z]([-a-z0-9]*[a-z0-9])?$
36 |
37 | i.e., it must be between 1 and 63 chars, the first character must be a
38 | lowercase letter, and all following characters must be a dash, lowercase
39 | letter, or digit, except the last character, which cannot be a dash.
40 | """
41 |
42 | disk_name = common.GenerateDiskName(gcp_mocks.FAKE_SNAPSHOT)
43 | self.assertEqual('fake-snapshot-268f2df3-copy', disk_name)
44 | self.assertTrue(gcp_mocks.REGEX_DISK_NAME.match(disk_name))
45 |
46 | disk_name = common.GenerateDiskName(gcp_mocks.FAKE_SNAPSHOT_LONG_NAME)
47 | self.assertEqual(
48 | 'this-is-a-kind-of-long-fake-snapshot-name-and-is--bc618512-copy',
49 | disk_name)
50 | self.assertTrue(gcp_mocks.REGEX_DISK_NAME.match(disk_name))
51 |
52 | disk_name = common.GenerateDiskName(
53 | gcp_mocks.FAKE_SNAPSHOT, disk_name_prefix='some-not-so-long-disk-name-prefix')
54 | self.assertEqual(
55 | 'some-not-so-long-disk-name-prefix-fake-snapshot-268f2df3-copy',
56 | disk_name)
57 | self.assertTrue(gcp_mocks.REGEX_DISK_NAME.match(disk_name))
58 |
59 | disk_name = common.GenerateDiskName(
60 | gcp_mocks.FAKE_SNAPSHOT_LONG_NAME,
61 | disk_name_prefix='some-not-so-long-disk-name-prefix')
62 | self.assertEqual(
63 | 'some-not-so-long-disk-name-prefix-this-is-a-kind--bc618512-copy',
64 | disk_name)
65 | self.assertTrue(gcp_mocks.REGEX_DISK_NAME.match(disk_name))
66 |
67 | disk_name = common.GenerateDiskName(
68 | gcp_mocks.FAKE_SNAPSHOT,
69 | disk_name_prefix='some-really-really-really-really-really-really-long'
70 | '-disk-name-prefix')
71 | self.assertEqual(
72 | 'some-really-really-really-really-really-really-lo-268f2df3-copy',
73 | disk_name)
74 | self.assertTrue(gcp_mocks.REGEX_DISK_NAME.match(disk_name))
75 |
76 | disk_name = common.GenerateDiskName(
77 | gcp_mocks.FAKE_SNAPSHOT_LONG_NAME,
78 | disk_name_prefix='some-really-really-really-really-really-really-long'
79 | '-disk-name-prefix')
80 | self.assertEqual(
81 | 'some-really-really-really-really-really-really-lo-bc618512-copy',
82 | disk_name)
83 | self.assertTrue(gcp_mocks.REGEX_DISK_NAME.match(disk_name))
84 |
85 | # Disk prefix cannot start with a capital letter
86 | with self.assertRaises(errors.InvalidNameError):
87 | common.GenerateDiskName(
88 | gcp_mocks.FAKE_SNAPSHOT, 'Some-prefix-that-starts-with-a-capital-letter')
89 |
--------------------------------------------------------------------------------
/tests/providers/gcp/internal/test_compute_base_resource.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the gcp module - compute_base_resource.py"""
16 |
17 | import typing
18 | import unittest
19 | import mock
20 |
21 | from tests.providers.gcp import gcp_mocks
22 |
23 |
24 | class GoogleComputeBaseResourceTest(unittest.TestCase):
25 | """Test Google Cloud Compute Base Resource class."""
26 | # pylint: disable=line-too-long
27 |
28 | @typing.no_type_check
29 | @mock.patch('libcloudforensics.providers.gcp.internal.compute.GoogleComputeInstance.GetOperation')
30 | def testGetValue(self, mock_get_operation):
31 | """Test that the correct value is retrieved for the given key."""
32 | mock_get_operation.return_value = {
33 | # https://cloud.google.com/compute/docs/reference/rest/v1/instances/get
34 | 'name': gcp_mocks.FAKE_INSTANCE.name
35 | }
36 | self.assertEqual('fake-instance', gcp_mocks.FAKE_INSTANCE.GetValue('name'))
37 | self.assertIsNone(gcp_mocks.FAKE_INSTANCE.GetValue('key'))
38 |
--------------------------------------------------------------------------------
/tests/providers/gcp/internal/test_gke.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the gcp module - gke.py"""
16 |
17 | import typing
18 | import unittest
19 | import mock
20 |
21 | from googleapiclient.errors import HttpError
22 |
23 | from libcloudforensics import errors
24 | from libcloudforensics.providers.gcp.internal import gke
25 | import libcloudforensics.providers.kubernetes.cluster as k8s
26 |
27 |
28 | class GoogleKubernetesEngineTest(unittest.TestCase):
29 | """Test Google Kubernetes Engine class."""
30 |
31 |
32 | @typing.no_type_check
33 | # The next two decorators enable GkeCluster to be instantiated
34 | @mock.patch.object(k8s.K8sCluster, '_AuthorizationCheck', mock.Mock)
35 | @mock.patch.object(gke.GkeCluster, '_GetK8sApiClient', mock.Mock)
36 | @mock.patch.object(gke.GoogleKubernetesEngine, 'GkeApi')
37 | def testGetCluster(self, mock_gke_api):
38 | """Test GkeCluster calls the API correctly and returns its response."""
39 | clusters_api = mock_gke_api().projects().locations().clusters()
40 | clusters_api.get.return_value.execute.return_value = {
41 | 'key': 'ddbjnaxz'
42 | }
43 |
44 | cluster = gke.GkeCluster('fake-project-id', 'fake-zone', 'fake-cluster-id')
45 | get_operation_result = cluster.GetOperation()
46 |
47 | clusters_api.get.assert_called_once_with(name=cluster.name)
48 | self.assertEqual({'key': 'ddbjnaxz'}, get_operation_result)
49 |
50 | # Test error case
51 | clusters_api.get.return_value.execute.side_effect = HttpError(
52 | resp=mock.Mock(status=404), content=b'Cluster not found')
53 | with self.assertRaises(errors.ResourceNotFoundError):
54 | cluster.GetOperation()
55 |
--------------------------------------------------------------------------------
/tests/providers/gcp/internal/test_log.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the gcp module - log.py"""
16 |
17 | import typing
18 | import unittest
19 | import mock
20 |
21 | from tests.providers.gcp import gcp_mocks
22 |
23 |
24 | class GoogleCloudLogTest(unittest.TestCase):
25 | """Test Google Cloud Log class."""
26 | # pylint: disable=line-too-long
27 |
28 | @typing.no_type_check
29 | @mock.patch('libcloudforensics.providers.gcp.internal.log.GoogleCloudLog.GclApi')
30 | def testListLogs(self, mock_gcl_api):
31 | """Test that logs of project are correctly listed."""
32 | logs = mock_gcl_api.return_value.logs.return_value.list
33 | logs.return_value.execute.return_value = gcp_mocks.MOCK_LOGS_LIST
34 | list_logs = gcp_mocks.FAKE_LOGS.ListLogs()
35 | self.assertEqual(2, len(list_logs))
36 | self.assertEqual(gcp_mocks.FAKE_LOG_LIST[0], list_logs[0])
37 |
38 | @typing.no_type_check
39 | @mock.patch('libcloudforensics.providers.gcp.internal.log.GoogleCloudLog.GclApi')
40 | def testExecuteQuery(self, mock_gcl_api):
41 | """Test that logs of project are correctly queried."""
42 | query = mock_gcl_api.return_value.entries.return_value.list
43 | query.return_value.execute.return_value = gcp_mocks.MOCK_LOG_ENTRIES
44 | qfilter = ['*']
45 | query_logs = gcp_mocks.FAKE_LOGS.ExecuteQuery(qfilter)
46 | self.assertEqual(2, len(query_logs))
47 | self.assertEqual(gcp_mocks.FAKE_LOG_ENTRIES[0], query_logs[0])
48 |
--------------------------------------------------------------------------------
/tests/providers/gcp/internal/test_serviceusage.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the gcp module - serviceusage.py"""
16 | import typing
17 | import unittest
18 |
19 | import mock
20 |
21 | from tests.providers.gcp import gcp_mocks
22 |
23 |
24 | class GoogleServiceUsageTest(unittest.TestCase):
25 | """Test Google Service Usage class."""
26 | # pylint: disable=line-too-long
27 |
28 | @typing.no_type_check
29 | @mock.patch('libcloudforensics.providers.gcp.internal.common.ExecuteRequest')
30 | @mock.patch('libcloudforensics.providers.gcp.internal.serviceusage.GoogleServiceUsage.GsuApi')
31 | def testGetEnabled(self, mock_gsu_api, mock_execute_request):
32 | """Validates the GetEnabled function"""
33 | mock_execute_request.return_value = gcp_mocks.MOCK_ENABLED_SERVICES
34 | mock_service_usage = mock_gsu_api.return_value.services.return_value
35 | response = gcp_mocks.FAKE_SERVICE_USAGE.GetEnabled()
36 |
37 | mock_execute_request.assert_called_with(mock_service_usage,
38 | 'list', {'parent': 'projects/fake-project', 'filter': 'state:ENABLED'})
39 |
40 | self.assertListEqual(response, [
41 | 'bigquery.googleapis.com',
42 | 'cloudapis.googleapis.com',
43 | 'compute.googleapis.com'
44 | ])
45 |
46 | @typing.no_type_check
47 | @mock.patch('libcloudforensics.providers.gcp.internal.common.ExecuteRequest')
48 | @mock.patch('libcloudforensics.providers.gcp.internal.serviceusage.GoogleServiceUsage.GsuApi')
49 | def testEnableService(self, mock_gsu_api, mock_execute_request):
50 | """Validates that EnableService calls ExecuteRequest with the correct
51 | arguments."""
52 | mock_service_usage = mock_gsu_api.return_value.services.return_value
53 | mock_execute_request.return_value = [{'name': 'operations/noop.DONE_OPERATION'}]
54 | gcp_mocks.FAKE_SERVICE_USAGE.EnableService('container.googleapis.com')
55 |
56 | mock_execute_request.assert_called_with(mock_service_usage, 'enable',
57 | {'name': 'projects/fake-project/services/container.googleapis.com'})
58 |
59 | @typing.no_type_check
60 | @mock.patch('libcloudforensics.providers.gcp.internal.common.ExecuteRequest')
61 | @mock.patch('libcloudforensics.providers.gcp.internal.serviceusage.GoogleServiceUsage.GsuApi')
62 | def testDisableService(self, mock_gsu_api, mock_execute_request):
63 | """Validates that DisableService calls ExecuteRequest with the correct
64 | arguments."""
65 | mock_service_usage = mock_gsu_api.return_value.services.return_value
66 | mock_execute_request.return_value = [{'name': 'operations/noop.DONE_OPERATION'}]
67 | gcp_mocks.FAKE_SERVICE_USAGE.DisableService('container.googleapis.com')
68 |
69 | mock_execute_request.assert_called_with(mock_service_usage, 'disable',
70 | {'name': 'projects/fake-project/services/container.googleapis.com'})
71 |
--------------------------------------------------------------------------------
/tests/providers/gcp/internal/test_storage.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the gcp module - storage.py"""
16 |
17 | import typing
18 | import unittest
19 | import mock
20 |
21 | from tests.providers.gcp import gcp_mocks
22 | from libcloudforensics.providers.utils.storage_utils import SplitStoragePath
23 |
24 |
25 | class GoogleCloudStorageTest(unittest.TestCase):
26 | """Test Google Cloud Storage class."""
27 | # pylint: disable=line-too-long
28 |
29 | @typing.no_type_check
30 | def testSplitGcsPath(self):
31 | """Tests that GCS path split is correctly done."""
32 | bucket, object_uri = SplitStoragePath('gs://fake-bucket/fake-folder/fake-object')
33 | self.assertEqual('fake-folder/fake-object', object_uri)
34 | self.assertEqual('fake-bucket', bucket)
35 |
36 | @typing.no_type_check
37 | @mock.patch('libcloudforensics.providers.gcp.internal.storage.GoogleCloudStorage.GcsApi')
38 | def testGetObjectMetadata(self, mock_gcs_api):
39 | """Test GCS object Get operation."""
40 | api_get_object = mock_gcs_api.return_value.objects.return_value.get
41 | api_get_object.return_value.execute.return_value = gcp_mocks.MOCK_GCS_OBJECT_METADATA
42 | get_results = gcp_mocks.FAKE_GCS.GetObjectMetadata(
43 | 'gs://fake-bucket/foo/fake.img')
44 | self.assertEqual(gcp_mocks.MOCK_GCS_OBJECT_METADATA, get_results)
45 | self.assertEqual('5555555555', get_results['size'])
46 | self.assertEqual('MzFiYWIzY2M0MTJjNGMzNjUyZDMyNWFkYWMwODA5YTEgIGNvdW50MQo=', get_results['md5Hash'])
47 |
48 | @typing.no_type_check
49 | @mock.patch('libcloudforensics.providers.gcp.internal.storage.GoogleCloudStorage.GcsApi')
50 | def testListBuckets(self, mock_gcs_api):
51 | """Test GCS bucket List operation."""
52 | api_list_bucket = mock_gcs_api.return_value.buckets.return_value.list
53 | api_list_bucket.return_value.execute.return_value = gcp_mocks.MOCK_GCS_BUCKETS
54 | list_results = gcp_mocks.FAKE_GCS.ListBuckets()
55 | self.assertEqual(1, len(list_results))
56 | self.assertEqual('fake-bucket', list_results[0]['name'])
57 | self.assertEqual('123456789', list_results[0]['projectNumber'])
58 |
59 | @typing.no_type_check
60 | @mock.patch('libcloudforensics.providers.gcp.internal.storage.GoogleCloudStorage.GcsApi')
61 | def testListBucketObjects(self, mock_gcs_api):
62 | """Test GCS object List operation."""
63 | api_list_object = mock_gcs_api.return_value.objects.return_value.list
64 | api_list_object.return_value.execute.return_value = gcp_mocks.MOCK_GCS_BUCKET_OBJECTS
65 | list_results = gcp_mocks.FAKE_GCS.ListBucketObjects('gs://fake-bucket')
66 | self.assertEqual(1, len(list_results))
67 | self.assertEqual('5555555555', list_results[0]['size'])
68 | self.assertEqual('MzFiYWIzY2M0MTJjNGMzNjUyZDMyNWFkYWMwODA5YTEgIGNvdW50MQo=', list_results[0]['md5Hash'])
69 |
70 | @typing.no_type_check
71 | @mock.patch('libcloudforensics.providers.gcp.internal.storage.GoogleCloudStorage.GcsApi')
72 | def testGetBucketACLs(self, mock_gcs_api):
73 | """Test GCS ACL List operation."""
74 | api_acl_object = mock_gcs_api.return_value.bucketAccessControls.return_value.list
75 | api_acl_object.return_value.execute.return_value = gcp_mocks.MOCK_GCS_BUCKET_ACLS
76 | api_iam_object = mock_gcs_api.return_value.buckets.return_value.getIamPolicy
77 | api_iam_object.return_value.execute.return_value = gcp_mocks.MOCK_GCS_BUCKET_IAM
78 | acl_results = gcp_mocks.FAKE_GCS.GetBucketACLs('gs://fake-bucket')
79 | self.assertEqual(2, len(acl_results))
80 | self.assertEqual(2, len(acl_results['OWNER']))
81 | self.assertEqual(2, len(acl_results['roles/storage.legacyBucketOwner']))
82 |
83 | @typing.no_type_check
84 | @mock.patch('libcloudforensics.providers.gcp.internal.monitoring.GoogleCloudMonitoring.GcmApi')
85 | def testGetBucketSize(self, mock_gcm_api):
86 | """Test GCS Bucket Size operation."""
87 | services = mock_gcm_api.return_value.projects.return_value.timeSeries.return_value.list
88 | services.return_value.execute.return_value = gcp_mocks.MOCK_GCM_METRICS_BUCKETSIZE
89 | size_results = gcp_mocks.FAKE_GCS.GetBucketSize('gs://test_bucket_1')
90 | self.assertEqual(1, len(size_results))
91 | self.assertEqual(60, size_results['test_bucket_1'])
92 |
93 | @typing.no_type_check
94 | @mock.patch('libcloudforensics.providers.gcp.internal.storage.GoogleCloudStorage.GcsApi')
95 | def testCreateBucket(self, mock_gcs_api):
96 | """Test GCS bucket Create operation."""
97 | api_create_bucket = mock_gcs_api.return_value.buckets.return_value.insert
98 | api_create_bucket.return_value.execute.return_value = gcp_mocks.MOCK_GCS_BUCKETS['items'][0]
99 | create_result = gcp_mocks.FAKE_GCS.CreateBucket('fake-bucket')
100 |
101 | api_create_bucket.assert_called_with(
102 | project='fake-target-project',
103 | predefinedAcl='private',
104 | predefinedDefaultObjectAcl='private',
105 | body={
106 | 'name': 'fake-bucket', 'labels': None
107 | })
108 | self.assertEqual('fake-bucket', create_result['name'])
109 | self.assertEqual('123456789', create_result['projectNumber'])
110 |
--------------------------------------------------------------------------------
/tests/providers/gcp/internal/test_storagetransfer.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the gcp module - storagetransfer.py"""
16 |
17 |
18 | import typing
19 | import unittest
20 | import mock
21 |
22 | from libcloudforensics import errors
23 | from tests.providers.gcp import gcp_mocks
24 |
25 |
26 | class GoogleCloudStorageTransferTest(unittest.TestCase):
27 | """Test Google Cloud Storage Transfer class."""
28 | # pylint: disable=line-too-long
29 |
30 | @typing.no_type_check
31 | @mock.patch('boto3.session.Session.get_credentials')
32 | @mock.patch('boto3.session.Session._setup_loader')
33 | @mock.patch('libcloudforensics.providers.gcp.internal.storagetransfer.GoogleCloudStorageTransfer.GcstApi')
34 | def testS3ToGCS(self, mock_gcst_api, mock_loader, mock_creds):
35 | """Test S3ToGCS operation."""
36 | api_job_create = mock_gcst_api.return_value.transferJobs.return_value.create
37 | api_job_create.return_value.execute.return_value = gcp_mocks.MOCK_STORAGE_TRANSFER_JOB
38 | api_job_get = mock_gcst_api.return_value.transferOperations.return_value.list
39 | api_job_get.return_value.execute.return_value = gcp_mocks.MOCK_STORAGE_TRANSFER_OPERATION
40 | mock_loader.return_value = None
41 | creds = mock.MagicMock()
42 | creds.access_key = 'ABC'
43 | creds.secret_key = 'DEF'
44 | mock_creds.return_value = creds
45 |
46 | transfer_results = gcp_mocks.FAKE_GCST.S3ToGCS(
47 | 's3://s3_source_bucket/file.name',
48 | 'fake-zone-2b',
49 | 'gs://gcs_sink_bucket/test_path')
50 | self.assertEqual(1, len(transfer_results['operations']))
51 | self.assertEqual('s3_source_bucket', transfer_results['operations'][0]['metadata']['transferSpec']['awsS3DataSource']['bucketName'])
52 | self.assertEqual('30', transfer_results['operations'][0]['metadata']['counters']['bytesCopiedToSink'])
53 |
54 | @typing.no_type_check
55 | @mock.patch('boto3.session.Session.get_credentials')
56 | def testS3ToGCSNoCreds(self, mock_creds):
57 |     """Test S3ToGCS operation when no AWS credentials exist."""
58 | with self.assertRaises(errors.TransferCreationError):
59 | mock_creds.return_value = mock.MagicMock()
60 | gcp_mocks.FAKE_GCST.S3ToGCS(
61 | 's3://s3_source_bucket/file.name',
62 | 'fake-zone-2b',
63 | 'gs://gcs_sink_bucket/test_path')
64 |
65 | @typing.no_type_check
66 | @mock.patch('boto3.session.Session.get_credentials')
67 | def testS3ToGCSTempCreds(self, mock_creds):
68 |     """Test S3ToGCS operation when temporary AWS credentials exist."""
69 | creds = mock.MagicMock()
70 |     creds.access_key = 'ASIA'  # Temporary AWS key IDs start with 'ASIA'.
71 | creds.secret_key = 'DEF'
72 | mock_creds.return_value = creds
73 | with self.assertRaises(errors.TransferCreationError):
74 | gcp_mocks.FAKE_GCST.S3ToGCS(
75 | 's3://s3_source_bucket/file.name',
76 | 'fake-zone-2b',
77 | 'gs://gcs_sink_bucket/test_path')
78 |
--------------------------------------------------------------------------------
/tests/providers/kubernetes/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/tests/providers/kubernetes/k8s_mocks.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Kubernetes mock response objects, used for testing."""
16 | from typing import Dict, Optional
17 | from unittest import mock
18 |
19 | from kubernetes import client
20 |
21 | MOCK_API_CLIENT = mock.Mock()
22 |
23 | Labels = Dict[str, str]
24 |
25 | # pylint: disable=line-too-long
26 |
27 | def V1ObjectMeta(
28 | name: Optional[str] = None,
29 | namespace: Optional[str] = None,
30 | labels: Optional[Labels] = None) -> client.V1ObjectMeta:
31 | """Make Kubernetes API response metadata, see V1ObjectMeta.
32 |
33 | https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1ObjectMeta.md
34 | """
35 | return client.V1ObjectMeta(name=name, namespace=namespace, labels=labels)
36 |
37 |
38 | def V1NodeList(amount: int) -> client.V1NodeList:
39 | """Make Kubernetes API Node list response, see V1NodeList.
40 |
41 | https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1NodeList.md
42 | """
43 | items = [V1Node('node-{0:d}'.format(i)) for i in range(amount)]
44 | return client.V1NodeList(items=items)
45 |
46 |
47 | def V1PodList(amount: int) -> client.V1PodList:
48 | """Make Kubernetes API Pod list response, see V1PodList.
49 |
50 | https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1PodList.md
51 | """
52 | items = [V1Pod(name='pod-{0:d}'.format(i)) for i in range(amount)]
53 | return client.V1PodList(items=items)
54 |
55 | def V1NetworkPolicyList(
56 | amount: int, namespace: str) -> client.V1NetworkPolicyList:
57 | """Make Kubernetes API NetworkPolicy list, see V1NetworkPolicyList.
58 |
59 | https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1NetworkPolicyList.md
60 | """
61 | items = [
62 | V1NetworkPolicy('netpol-{0:d}'.format(i), namespace)
63 | for i in range(amount)
64 | ]
65 | return client.V1NetworkPolicyList(items=items)
66 |
67 | def V1NetworkPolicy(name: str, namespace: str) -> client.V1NetworkPolicy:
68 | """Make Kubernetes API NetworkPolicy response, see V1NetworkPolicy.
69 |
70 | https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1NetworkPolicy.md
71 | """
72 | return client.V1NetworkPolicy(
73 | metadata=V1ObjectMeta(name=name, namespace=namespace))
74 |
75 | def V1Service(selector_labels: Labels) -> client.V1Service:
76 | """Make Kubernetes API service response, see V1Service.
77 |
78 | https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1Service.md
79 | """
80 | return client.V1Service(spec=client.V1ServiceSpec(selector=selector_labels))
81 |
82 | def V1Node(name: str) -> client.V1Node:
83 | """Make Kubernetes API Node response, see V1Node.
84 |
85 | https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1Node.md
86 | """
87 | return client.V1Node(metadata=V1ObjectMeta(name=name))
88 |
89 |
90 | def V1Pod(
91 | name: Optional[str] = None,
92 | namespace: Optional[str] = None,
93 | node_name: Optional[str] = None,
94 | labels: Optional[Labels] = None) -> client.V1Pod:
95 | """Make Kubernetes API Pod response, see V1Pod.
96 |
97 | https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1Pod.md
98 | """
99 | return client.V1Pod(
100 | metadata=V1ObjectMeta(name=name, namespace=namespace, labels=labels),
101 | spec=client.V1PodSpec(node_name=node_name, containers=[]))
102 |
103 |
104 | def V1PodTemplateSpec(labels: Labels) -> client.V1PodTemplateSpec:
105 | """Make Kubernetes API template spec response, see V1PodTemplateSpec.
106 |
107 | https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1PodTemplateSpec.md
108 | """
109 | return client.V1PodTemplateSpec(metadata=V1ObjectMeta(labels=labels))
110 |
111 |
112 | def V1ReplicaSet(
113 | name: Optional[str] = None,
114 | namespace: Optional[str] = None,
115 | template_spec_labels: Optional[Labels] = None) -> client.V1ReplicaSet:
116 |   """Make Kubernetes API ReplicaSet response, see V1ReplicaSet.
117 |
118 | https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1ReplicaSet.md
119 | """
120 | return client.V1ReplicaSet(
121 | metadata=V1ObjectMeta(name=name, namespace=namespace),
122 | spec=client.V1ReplicaSetSpec(
123 | selector=client.V1LabelSelector(),
124 | template=V1PodTemplateSpec(template_spec_labels or {})))
125 |
126 |
127 | def V1Deployment(
128 | name: Optional[str] = None,
129 | namespace: Optional[str] = None,
130 | template_spec_labels: Optional[Labels] = None,
131 | match_labels: Optional[Labels] = None) -> client.V1Deployment:
132 | """Make Kubernetes API response deployment, see V1Deployment.
133 |
134 | https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1Deployment.md
135 | """
136 | return client.V1Deployment(
137 | metadata=V1ObjectMeta(name=name, namespace=namespace),
138 | spec=client.V1DeploymentSpec(
139 | selector=client.V1LabelSelector(match_labels=match_labels),
140 | template=V1PodTemplateSpec(template_spec_labels or {})))
141 |
--------------------------------------------------------------------------------
/tests/providers/kubernetes/test_netpol.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Test on netpol Kubernetes objects."""
16 |
17 | import typing
18 | import unittest
19 |
20 | import mock
21 | from kubernetes import client
22 |
23 | from libcloudforensics.providers.kubernetes import netpol
24 | from tests.providers.kubernetes import k8s_mocks
25 |
26 |
27 | @mock.patch.object(netpol.K8sNetworkPolicyWithSpec, '__abstractmethods__', ())
28 | @mock.patch.object(client.NetworkingV1Api, 'create_namespaced_network_policy')
29 | class K8sNetworkPolicyCreationTest(unittest.TestCase):
30 | """Test the K8sNetworkPolicyWithSpec's creation API call."""
31 |
32 | # pylint: disable=abstract-class-instantiated
33 |
34 | mock_spec = mock.Mock()
35 |
36 | @typing.no_type_check
37 | @mock.patch.object(netpol.K8sNetworkPolicyWithSpec, '_spec', mock_spec)
38 | def testNetworkPolicyCreationNamespace(self, mock_create_func):
39 | """Test that creating a network policy provides the correct namespace."""
40 | network_policy = netpol.K8sNetworkPolicyWithSpec(
41 | k8s_mocks.MOCK_API_CLIENT, 'name', 'namespace-iwlgvtpb')
42 | network_policy.Create()
43 | self.assertEqual('namespace-iwlgvtpb', mock_create_func.call_args.args[0])
44 |
45 | @typing.no_type_check
46 | @mock.patch.object(netpol.K8sNetworkPolicyWithSpec, '_spec', mock_spec)
47 | def testNetworkPolicyCreationSpec(self, mock_create_func):
48 | """Test that creating a network policy provides the correct spec."""
49 | network_policy = netpol.K8sNetworkPolicyWithSpec(
50 | k8s_mocks.MOCK_API_CLIENT, 'name', 'namespace')
51 | network_policy.Create()
52 | self.assertEqual(self.mock_spec, mock_create_func.call_args.args[1].spec)
53 |
54 | @typing.no_type_check
55 | @mock.patch.object(netpol.K8sNetworkPolicyWithSpec, '_spec', mock_spec)
56 | def testNetworkPolicyCreationMetadata(self, mock_create_func):
57 | """Test that creating a network policy provides the correct metadata."""
58 | network_policy = netpol.K8sNetworkPolicyWithSpec(
59 | k8s_mocks.MOCK_API_CLIENT, 'name-jsdukbvx', 'namespace-jsdukbvx')
60 | network_policy.Create()
61 | self.assertEqual(
62 | client.V1ObjectMeta(
63 | name='name-jsdukbvx', namespace='namespace-jsdukbvx'),
64 | mock_create_func.call_args.args[1].metadata)
65 |
66 |
67 | @mock.patch.object(client.NetworkingV1Api, 'create_namespaced_network_policy')
68 | class K8sDenyAllNetworkPolicyCreationTest(unittest.TestCase):
69 | """Test K8sDenyAllNetworkPolicy creation API call."""
70 |
71 | @typing.no_type_check
72 | def testIsDenyAllNetworkPolicyCreationSpec(self, mock_create_func):
73 | """Test that a deny-all network policy creation has deny-all spec."""
74 | network_policy = netpol.K8sTargetedDenyAllNetworkPolicy(
75 | k8s_mocks.MOCK_API_CLIENT, 'default')
76 | network_policy.Create()
77 | # Check that given network policy is a deny-all policy
78 | provided_spec = mock_create_func.call_args.args[1].spec
79 | self.assertEqual(['Ingress', 'Egress'], provided_spec.policy_types)
80 | self.assertIsNone(provided_spec.ingress)
81 | self.assertIsNone(provided_spec.egress)
82 |
83 |
84 | class K8sNetworkPolicyTest(unittest.TestCase):
85 | """Test that K8sNetworkPolicy calls Kubernetes API correctly."""
86 |
87 | @typing.no_type_check
88 | @mock.patch.object(client.NetworkingV1Api, 'read_namespaced_network_policy')
89 | def testNetworkPolicyReadArgs(self, mock_read_func):
90 | """Test that a NetworkPolicy read is called with the correct args."""
91 | network_policy = netpol.K8sNetworkPolicy(
92 | k8s_mocks.MOCK_API_CLIENT, 'name-arvvbdxl', 'namespace-arvvbdxl')
93 | network_policy.Read()
94 | mock_read_func.assert_called_once_with(
95 | 'name-arvvbdxl', 'namespace-arvvbdxl')
96 |
97 | @typing.no_type_check
98 | @mock.patch.object(client.NetworkingV1Api, 'delete_namespaced_network_policy')
99 | def testNetworkPolicyDeleteArgs(self, mock_delete_func):
100 | """Test that a NetworkPolicy deletion is called with the correct args."""
101 | network_policy = netpol.K8sNetworkPolicy(
102 | k8s_mocks.MOCK_API_CLIENT, 'name-iyykyqbc', 'namespace-iyykyqbc')
103 | network_policy.Delete()
104 | mock_delete_func.assert_called_once_with(
105 | 'name-iyykyqbc', 'namespace-iyykyqbc')
106 |
--------------------------------------------------------------------------------
/tests/providers/kubernetes/test_services.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2021 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests on Kubernetes service objects."""
16 | import typing
17 | import unittest
18 |
19 | import mock
20 | from kubernetes import client
21 |
22 | from libcloudforensics.providers.kubernetes import services
23 | from tests.providers.kubernetes import k8s_mocks
24 |
25 |
26 | class K8sServiceTest(unittest.TestCase):
27 | """Test the K8sService methods."""
28 |
29 | @typing.no_type_check
30 | @mock.patch.object(client.CoreV1Api, 'list_namespaced_pod')
31 | def testListCoveredPods(self, list_namespaced_pod):
32 | """Test that GetCoveredPods calls API correctly and returns correctly."""
33 | mock_pods = k8s_mocks.V1PodList(4)
34 | list_namespaced_pod.return_value = mock_pods
35 | service = services.K8sService(
36 | k8s_mocks.MOCK_API_CLIENT, 'service-name', 'service-namespace')
37 | with mock.patch.object(service, 'Read') as read:
38 | read.return_value = k8s_mocks.V1Service(selector_labels={'app': 'nginx'})
39 | self.assertEqual(
40 | {(pod.metadata.name, pod.metadata.namespace)
41 | for pod in mock_pods.items},
42 | {(pod.name, pod.namespace)
43 | for pod in service.GetCoveredPods()})
44 | list_namespaced_pod.assert_called_once_with(
45 | 'service-namespace', label_selector='app=nginx')
46 |
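# --- Illustrative sketch (not repository code) ---
# The test above implies that the service's selector labels are flattened into
# a Kubernetes label selector string ('app=nginx') before list_namespaced_pod
# is called. A stand-alone illustration of that mapping (the helper name is
# hypothetical):
#
#   def SelectorLabelsToString(selector_labels):
#     """Turn {'app': 'nginx', 'tier': 'web'} into 'app=nginx,tier=web'."""
#     return ','.join(
#         '{0:s}={1:s}'.format(key, value)
#         for key, value in selector_labels.items())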
--------------------------------------------------------------------------------
/tests/run_tests.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Run all tests."""
16 |
17 | import os
18 | import unittest
19 |
20 | if __name__ == '__main__':
21 | loader = unittest.TestLoader()
22 | tests_dir = os.path.dirname(os.path.abspath(__file__))
23 | suite = loader.discover(tests_dir, pattern='test_*.py')
24 | runner = unittest.TextTestRunner()
25 | runner.run(suite)
26 |
--------------------------------------------------------------------------------
/tests/scripts/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/tests/scripts/startup.sh:
--------------------------------------------------------------------------------
1 | # THIS IS A CUSTOM BASH SCRIPT
--------------------------------------------------------------------------------
/tests/scripts/test_azure_config_dir/accessTokens.json:
--------------------------------------------------------------------------------
1 | [{"tokenType":"Bearer","expiresIn":3599,"expiresOn":"2022-01-0100:00:00.000000","resource":"https://management.azure.com","accessToken":"NOTATOKEN","refreshToken":"NOTATOKEN","oid":"12345678-1234-5678-1234-567812345678","userId":"fakename@fakedomain.com","isMRRT":true,"_clientId":"12345678-1234-5678-1234-567812345678","_authority":"https://login.microsoftonline.com/12345678-1234-5678-1234-567812345678"}]
--------------------------------------------------------------------------------
/tests/scripts/test_azure_config_dir/azureProfile.json:
--------------------------------------------------------------------------------
1 | {"installationId": "12345678-1234-5678-1234-567812345678", "subscriptions": [{"id": "12345678-1234-5678-1234-567812345678", "name": "Fake Subscription 1", "state": "Enabled", "user": {"name": "fakename@fakedomain.com", "type": "user"}, "isDefault": true, "tenantId": "12345678-1234-5678-1234-567812345678", "environmentName": "AzureCloud", "homeTenantId": "12345678-1234-5678-1234-567812345678", "managedByTenants": []}]}
--------------------------------------------------------------------------------
/tests/scripts/test_credentials.json:
--------------------------------------------------------------------------------
1 | {
2 | "test_profile_name": {
3 | "subscriptionId": "fake-subscription-id-from-credential-file",
4 | "tenantId": "fake-tenant-id-from-credential-file",
5 | "clientId": "fake-client-id-from-credential-file",
6 | "clientSecret": "fake-client-secret-from-credential-file"
7 | },
8 | "incomplete_profile_name": {
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/tests/scripts/utils.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2020 Google Inc.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Utils test methods"""
16 | import json
17 | import os
18 | from typing import List, Dict
19 |
20 |
21 | def ReadProjectInfo(keys: List[str]) -> Dict[str, str]:
22 | """Read project information to run e2e test.
23 |
24 | Args:
25 | keys (List[str]): A list of mandatory dictionary keys that are expected
26 | to be present in the project_info file.
27 |
28 | Returns:
29 | dict: A dict with the project information.
30 |
31 | Raises:
32 | OSError: If the file cannot be found, opened or closed.
33 | RuntimeError: If the json file cannot be parsed.
34 | ValueError: If the json file does not have the required properties.
35 | """
36 | project_info_path = os.environ.get('PROJECT_INFO')
37 | if project_info_path is None:
38 | raise OSError(
39 | 'Please make sure that you defined the '
40 | '"PROJECT_INFO" environment variable pointing '
41 | 'to your project settings.')
42 | try:
43 | with open(project_info_path, encoding='utf-8') as json_file:
44 | try:
45 | project_info = json.load(json_file) # type: Dict[str, str]
46 | except ValueError as exception:
47 | raise RuntimeError(
48 | 'Cannot parse JSON file. {0:s}'.format(
49 | str(exception))) from exception
50 | except OSError as exception:
51 | raise OSError(
52 | 'Could not open/close file {0:s}: {1:s}'.format(
53 | project_info_path, str(exception))) from exception
54 |
55 | if not all(key in project_info for key in keys):
56 | raise ValueError(
57 | 'Please make sure that your JSON file '
58 | 'has the required entries. The file should '
59 | 'contain at least the following: {0:s}'.format(', '.join(keys)))
60 | return project_info
61 |
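# --- Illustrative usage sketch (not repository code) ---
# ReadProjectInfo expects the PROJECT_INFO environment variable to point at a
# JSON file containing at least the requested keys. A minimal example, where
# the file path and the 'project_id'/'instance' keys are hypothetical
# placeholders rather than values required by the library:
#
#   import os
#   from tests.scripts import utils
#
#   os.environ['PROJECT_INFO'] = '/path/to/project_info.json'
#   project_info = utils.ReadProjectInfo(['project_id', 'instance'])
#   print(project_info['project_id'])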
--------------------------------------------------------------------------------
/tools/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------