├── .flake8
├── .github
└── workflows
│ └── ci.yml
├── .gitignore
├── .pre-commit-config.yaml
├── LICENSE
├── Makefile
├── README.md
├── caizen
├── app
│ ├── common
│ ├── main.py
│ ├── poetry.lock
│ ├── pyproject.toml
│ ├── src
│ │ └── v1
│ │ │ ├── __init__.py
│ │ │ ├── providers
│ │ │ ├── __init__.py
│ │ │ └── gcp
│ │ │ │ ├── GCP_CLOUDRESOURCEMANAGER_FOLDER.py
│ │ │ │ ├── GCP_CLOUDRESOURCEMANAGER_ORGANIZATION.py
│ │ │ │ ├── GCP_CLOUDRESOURCEMANAGER_PROJECT.py
│ │ │ │ ├── GCP_DEFAULT.py
│ │ │ │ ├── GCP_PUBSUB_TOPIC.py
│ │ │ │ ├── GCP_STORAGE_BUCKET.py
│ │ │ │ └── __init__.py
│ │ │ ├── router.py
│ │ │ └── utils
│ │ │ ├── __init__.py
│ │ │ └── asset_helpers.py
│ └── tests
│ │ └── __init__.py
├── common
│ ├── __init__.py
│ └── v1
│ │ ├── __init__.py
│ │ ├── providers
│ │ ├── __init__.py
│ │ └── gcp
│ │ │ ├── GCP_CLOUDRESOURCEMANAGER_FOLDER.py
│ │ │ ├── GCP_CLOUDRESOURCEMANAGER_ORGANIZATION.py
│ │ │ ├── GCP_CLOUDRESOURCEMANAGER_PROJECT.py
│ │ │ ├── GCP_DEFAULT.py
│ │ │ ├── GCP_PUBSUB_TOPIC.py
│ │ │ ├── GCP_STORAGE_BUCKET.py
│ │ │ └── __init__.py
│ │ └── schemas.py
└── gcp_cai_func
│ ├── Makefile
│ ├── common
│ ├── main.py
│ ├── poetry.lock
│ ├── pyproject.toml
│ ├── requirements.txt
│ ├── src
│ ├── __init__.py
│ ├── message.py
│ ├── processing.py
│ ├── schemas.py
│ ├── transform.py
│ └── v1
│ │ └── providers
│ │ ├── __init__.py
│ │ └── gcp
│ │ ├── GCP_CLOUDRESOURCEMANAGER_FOLDER.py
│ │ ├── GCP_CLOUDRESOURCEMANAGER_ORGANIZATION.py
│ │ ├── GCP_CLOUDRESOURCEMANAGER_PROJECT.py
│ │ ├── GCP_PUBSUB_TOPIC.py
│ │ ├── GCP_STORAGE_BUCKET.py
│ │ └── __init__.py
│ └── tests
│ ├── badmsg.json
│ ├── buckets.json
│ ├── emptymsg.json
│ ├── folders.json
│ ├── orgs.json
│ ├── projects.json
│ ├── routes.json
│ └── topics.json
├── docker
└── docker-compose.yaml
├── docs
├── CNAME
├── img
│ ├── attackpaths.png
│ ├── caizen-logo-dark.png
│ ├── mono-light-caizen-logo.svg
│ ├── pathscores.png
│ └── resources.png
└── index.md
├── mkdocs.yml
├── pyproject.toml
└── terraform
├── main.tf
└── modules
├── caizen_core
├── LICENSE
├── README.md
├── iam.tf
├── main.tf
├── network.tf
├── outputs.tf
├── storage.tf
└── variables.tf
└── gcp_cai
├── LICENSE
├── README.md
├── function.tf
├── iam.tf
├── pubsub.tf
├── scheduler.tf
├── variables.tf
└── workflow.tf
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | ignore = E203, E266, E501, W503, F403, F401
3 | max-line-length = 89
4 | max-complexity = 18
5 | select = B,C,E,F,W,T4,B9
6 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: ci
2 | on:
3 | push:
4 | branches:
5 | - main
6 | permissions:
7 | contents: write
8 | jobs:
9 | deploy:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: actions/checkout@v4
13 | - uses: actions/setup-python@v4
14 | with:
15 | python-version: 3.x
16 | - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV
17 | - uses: actions/cache@v3
18 | with:
19 | key: mkdocs-material-${{ env.cache_id }}
20 | path: .cache
21 | restore-keys: |
22 | mkdocs-material-
23 | - run: pip install mkdocs-material
24 | - run: mkdocs gh-deploy --force
25 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | __pycache__
3 | .terraform
4 | .terraform.lock.hcl
5 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | # See https://pre-commit.com for more information
2 | # See https://pre-commit.com/hooks.html for more hooks
3 | repos:
4 | - repo: https://github.com/ambv/black
5 | rev: 24.8.0
6 | hooks:
7 | - id: black
8 | language_version: python3.11
9 | - repo: https://github.com/pre-commit/pre-commit-hooks
10 | rev: v2.0.0
11 | hooks:
12 | - id: flake8
13 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
.PHONY: help
help:
	@echo "# Options"
	@echo "serve - runs the mkdocs serve command"
	@echo "view - opens in browser to view/print to PDF"
	@echo "up - docker compose up"
	@echo "down - docker compose down"
	@echo "tail - tail the container logs"
	@echo "ui - open the memgraph UI"
	@echo "--"
	@echo "local - Run caizen locally"

.PHONY: serve
serve:
	@docker run --rm -it -v ${PWD}:/docs -p8000:8000 squidfunk/mkdocs-material

.PHONY: view
view:
	@open http://localhost:8000

# NOTE: the compose file lives under docker/ and is named docker-compose.yaml,
# and the global -f flag must come BEFORE the subcommand in compose v2
# (`docker compose -f FILE up`, not `docker compose up -f FILE`).
.PHONY: up
up:
	@docker compose -f docker/docker-compose.yaml up -d

.PHONY: down
down:
	@docker compose -f docker/docker-compose.yaml down

.PHONY: ui
ui:
	@open -a "Google Chrome" "http://localhost:3000"

.PHONY: tail
tail:
	@docker compose -f docker/docker-compose.yaml logs -f

.PHONY: local
local:
	@docker compose -f docker/docker-compose.yaml up -d
	@poetry run uvicorn main:app --app-dir app/ --reload
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
22 |
23 |
24 |
25 |
26 |
27 | Table of Contents
28 |
29 | -
30 | About The Project
31 |
34 |
35 | - Getting Started
36 | - Roadmap
37 | - License
38 | - Contributing
39 | - Contact
40 | - Acknowledgments
41 |
42 |
43 |
44 |
45 |
46 |
47 | ## About The Project
48 |
49 | If we map our cloud resources in full like this:
50 |
51 |
52 |
53 | Then we can derive Attack Paths like this:
54 |
55 |
56 |
57 | And score them like this:
58 |
59 |
60 |
61 | Which enables us to measure risky combinations in our cloud configurations by modeling attacker behavior to resource goals/sub-goals using [MITRE ATT&CK® Framework](https://attack.mitre.org/) [Tactics, Techniques, and Procedures](https://attack.mitre.org/resources/) or "TTPs".
62 |
63 | As a proof-of-concept, a companion tool was created named [Psychiac](https://github.com/caizencloud/psychiac) to demonstrate the value of evaluating attack paths on proposed changes by Terraform _before apply_ in a CI pipeline.
64 |
65 | (back to top)
66 |
67 |
68 |
69 | ### Built With
70 |
71 | * [Memgraph](https://memgraph.com/) - A Bolt/Neo4j compatible Graph DB running in memory
72 | * [FastAPI](https://fastapi.tiangolo.com/) - a modern, fast (high-performance), web framework for building APIs with Python based on standard Python type hints.
73 |
74 | (back to top)
75 |
76 |
77 |
78 |
79 | ## Getting Started
80 |
81 | The project is undergoing active development to go from PoC to real-time resource graph. Stay tuned.
82 |
83 |
84 |
85 | ## Roadmap
86 |
87 | - [x] Proof of value
88 | - [ ] Periodic ingest and parsing of all GCP CAI resources
89 | - [ ] Real-time ingest and parsing of CAI resource updates
90 |
91 | (back to top)
92 |
93 |
94 |
95 |
96 |
97 | ## License
98 |
99 | Distributed under the Apache 2.0 License. See `LICENSE` for more information.
100 |
101 | (back to top)
102 |
103 |
104 | ## Contributing
105 |
106 | This project is not quite ready to accept external contributions. In the meantime, feel free to contact me about your specific needs.
107 |
108 | (back to top)
109 |
110 |
111 |
112 | ## Contact
113 |
114 | Brad Geesaman - [@bradgeesaman](https://twitter.com/bradgeesaman)
115 |
116 | Project Link: [https://github.com/caizencloud/caizen](https://github.com/caizencloud/caizen)
117 |
118 | (back to top)
119 |
120 |
121 |
122 |
123 | ## Acknowledgments
124 |
125 | Here are a list of related resources:
126 |
127 | * [Psychiac](https://github.com/caizencloud/psychiac) - A proof-of-concept CI companion tool for CAIZEN to perform attack path analysis before apply.
128 | * [Google Cloud Asset Inventory](https://cloud.google.com/asset-inventory/docs/overview) - A full cloud resource inventory service.
129 | * [MITRE ATT&CK® Framework](https://attack.mitre.org/) - A security framework for modeling attacker behaviors.
130 | * [OpenCSPM](https://github.com/OpenCSPM/opencspm) - Prior work in this space using Ruby and RedisGraph with my coworker [joshlarsen](https://github.com/joshlarsen)
131 | * [Cartography](https://github.com/lyft/cartography) - Original inspiration for OpenCSPM and now CAIZEN came from Cartography by Lyft. Cartography consolidates infrastructure assets and the relationships between them in an intuitive graph view.
132 |
133 | (back to top)
134 |
--------------------------------------------------------------------------------
/caizen/app/common:
--------------------------------------------------------------------------------
1 | ../common
--------------------------------------------------------------------------------
/caizen/app/main.py:
--------------------------------------------------------------------------------
from contextlib import asynccontextmanager

from common.v1.schemas import HealthStatus  # noqa
from fastapi import FastAPI, Request
from neo4j import GraphDatabase
from src.v1.router import v1_router as v1_router  # noqa


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Makes a single conn to the db on startup. Autocloses on shutdown.

    Fix: FastAPI's ``lifespan=`` argument requires an async context
    manager factory; the previous plain sync generator function is not
    a valid lifespan and would fail at startup.
    """
    URI = "bolt://localhost:7687"
    AUTH = ("", "")
    with GraphDatabase.driver(URI, auth=AUTH) as driver:
        # Stash the driver on the app so request handlers can reach it
        # via request.app.db; the driver closes when the `with` exits.
        app.db = driver
        yield


app = FastAPI(
    title="CAIZEN API",
    description="A real-time cloud resource and attack graph platform.",
    version="0.0.1",
    lifespan=lifespan,
)
app.include_router(v1_router, prefix="/v1")


@app.get("/status", response_model=HealthStatus)
def health_status(request: Request) -> HealthStatus:
    """Health endpoint for the API -- Tests the graph db connection"""
    db = request.app.db
    try:
        # Fix: neo4j Driver objects have no .run() method, so the old
        # code always raised and reported the DB as unavailable.
        # execute_query() runs a one-shot query over a pooled session
        # (available in neo4j driver >= 5.5; pyproject pins ^5.20).
        db.execute_query("MATCH (n) RETURN count(n) as count limit 1")
        return HealthStatus(status="ok", msg="Graph DB alive")
    except Exception:
        return HealthStatus(status="error", msg="Graph DB unavailable")
--------------------------------------------------------------------------------
/caizen/app/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "caizen"
3 | version = "0.1.0"
4 | description = ""
5 | authors = ["Brad Geesaman <3769609+bgeesaman@users.noreply.github.com>"]
 6 | license = "Apache-2.0"
7 | readme = "README.md"
8 | package-mode = false
9 |
10 | [tool.poetry.dependencies]
11 | python = "^3.11"
12 | fastapi = {extras = ["all"], version = "^0.111.0"}
13 | neo4j = "^5.20.0"
14 | flake8 = "^7.0.0"
15 | uvicorn = {extras = ["standard"], version = "^0.30.1"}
16 | black = "^24.8.0"
17 | flake8-pyproject = "^1.2.3"
18 | isort = "^5.13.2"
19 |
20 |
21 | [build-system]
22 | requires = ["poetry-core"]
23 | build-backend = "poetry.core.masonry.api"
24 |
--------------------------------------------------------------------------------
/caizen/app/src/v1/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/caizencloud/caizen/7cf35b79205e45db37de75839815e0a4ba9272eb/caizen/app/src/v1/__init__.py
--------------------------------------------------------------------------------
/caizen/app/src/v1/providers/__init__.py:
--------------------------------------------------------------------------------
import importlib
import os

# Import each provider package (e.g. "gcp") living alongside this file and
# re-export its public names, mirroring the effect of
# `from src.v1.providers.<folder> import *` without building code via exec().
_providers_dir = os.path.dirname(os.path.realpath(__file__))
for _entry in os.listdir(_providers_dir):
    _path = os.path.join(_providers_dir, _entry)
    # Skip dunder entries (__init__.py, __pycache__) and plain files --
    # the old code tried to import any non-dunder entry, including files,
    # as a package.
    if _entry.startswith("__") or not os.path.isdir(_path):
        continue
    _module = importlib.import_module(f"src.v1.providers.{_entry}")
    # Honor __all__ when the package defines it, else take public names,
    # matching the semantics of a star-import.
    for _name in getattr(
        _module, "__all__", [n for n in dir(_module) if not n.startswith("_")]
    ):
        globals()[_name] = getattr(_module, _name)
--------------------------------------------------------------------------------
/caizen/app/src/v1/providers/gcp/GCP_CLOUDRESOURCEMANAGER_FOLDER.py:
--------------------------------------------------------------------------------
from common.v1.schemas import CaizenAssetV1
from neo4j import GraphDatabase
from src.v1.providers.gcp.GCP_DEFAULT import GCP_DEFAULT_ASSET_V1_MANAGER


class GCP_CLOUDRESOURCEMANAGER_FOLDER_ASSET_V1_MANAGER(GCP_DEFAULT_ASSET_V1_MANAGER):
    """Graph manager for GCP Cloud Resource Manager folder assets."""

    def __init__(self, asset_model: CaizenAssetV1, db: GraphDatabase) -> None:
        super().__init__(asset_model, db)

    def delete(self) -> str:
        """Delete the folder node, then sweep any now-unattached nodes.

        Returns:
            str: status message from the node deletion.
            (Fix: was annotated ``-> None`` although a message is returned.)
        """
        print(
            f"CLOUDRESOURCEMANAGER_FOLDER Deleting {self.asset.name} of type {self.asset.type}"
        )
        message = self._write_with_retries(self._delete_node, asset=self.asset)
        # Deleting a folder can orphan descendants; clean them up.
        self._write_with_retries(self._delete_unattached_nodes)

        return message
--------------------------------------------------------------------------------
/caizen/app/src/v1/providers/gcp/GCP_CLOUDRESOURCEMANAGER_ORGANIZATION.py:
--------------------------------------------------------------------------------
from common.v1.schemas import CaizenAssetV1
from neo4j import GraphDatabase
from src.v1.providers.gcp.GCP_DEFAULT import GCP_DEFAULT_ASSET_V1_MANAGER


class GCP_CLOUDRESOURCEMANAGER_ORGANIZATION_ASSET_V1_MANAGER(
    GCP_DEFAULT_ASSET_V1_MANAGER
):
    """Graph manager for GCP Cloud Resource Manager organization assets."""

    def __init__(self, asset_model: CaizenAssetV1, db: GraphDatabase) -> None:
        super().__init__(asset_model, db)

    def delete(self) -> str:
        """Delete the organization node, then sweep any now-unattached nodes.

        Returns:
            str: status message from the node deletion.
            (Fix: was annotated ``-> None`` although a message is returned.)
        """
        print(
            f"CLOUDRESOURCEMANAGER_ORGANIZATION Deleting {self.asset.name} of type {self.asset.type}"
        )
        message = self._write_with_retries(self._delete_node, asset=self.asset)
        # Deleting an org can orphan descendants; clean them up.
        self._write_with_retries(self._delete_unattached_nodes)

        return message
--------------------------------------------------------------------------------
/caizen/app/src/v1/providers/gcp/GCP_CLOUDRESOURCEMANAGER_PROJECT.py:
--------------------------------------------------------------------------------
from common.v1.schemas import CaizenAssetV1
from neo4j import GraphDatabase
from src.v1.providers.gcp.GCP_DEFAULT import GCP_DEFAULT_ASSET_V1_MANAGER


class GCP_CLOUDRESOURCEMANAGER_PROJECT_ASSET_V1_MANAGER(GCP_DEFAULT_ASSET_V1_MANAGER):
    """Graph manager for GCP Cloud Resource Manager project assets."""

    def __init__(self, asset_model: CaizenAssetV1, db: GraphDatabase) -> None:
        super().__init__(asset_model, db)

    def delete(self) -> str:
        """Delete the project node, then sweep any now-unattached nodes.

        Returns:
            str: status message from the node deletion.
            (Fix: was annotated ``-> None`` although a message is returned.)
        """
        print(
            f"CLOUDRESOURCEMANAGER_PROJECT Deleting {self.asset.name} of type {self.asset.type}"
        )
        message = self._write_with_retries(self._delete_node, asset=self.asset)
        # Deleting a project can orphan child resources; clean them up.
        self._write_with_retries(self._delete_unattached_nodes)

        return message
--------------------------------------------------------------------------------
/caizen/app/src/v1/providers/gcp/GCP_DEFAULT.py:
--------------------------------------------------------------------------------
import random
import time

from common.v1.schemas import CaizenAssetV1
from neo4j import GraphDatabase
from neo4j.exceptions import TransientError


class GCP_DEFAULT_ASSET_V1_MANAGER:
    """Default graph-persistence manager for GCP assets.

    Provider-specific asset managers subclass this and override
    `upsert`/`delete` where the asset type needs extra handling.
    """

    def __init__(self, asset_model: CaizenAssetV1, db: GraphDatabase) -> None:
        self.asset = asset_model
        self.db = db

    def upsert(self) -> None:
        """Create or update this asset's node (and parent edge) in the graph."""
        return self._write_with_retries(self._upsert_node, asset=self.asset)

    def delete(self) -> str:
        """Delete this asset's node from the graph.

        Returns:
            str: status message from `_delete_node`.
            (Fix: was annotated ``-> None`` although a message is returned.)
        """
        print(f"DEFAULT Deleting {self.asset.name} of type {self.asset.type}")
        return self._write_with_retries(self._delete_node, asset=self.asset)

    def _write_with_retries(self, db_action_func, **kwargs):
        """
        Run `db_action_func(tx, **kwargs)` inside a write transaction,
        retrying on TransientError with exponential backoff plus jitter.

        Args:
            db_action_func: callable accepting (tx, **kwargs)
            **kwargs: forwarded to db_action_func

        Returns:
            Whatever db_action_func returns.

        Raises:
            Exception: when all retries are exhausted, or on any
                non-transient error raised by the action.
        """
        max_retries = 50
        initial_wait_time = 0.200  # seconds
        backoff_factor = 1.1
        jitter_fraction = 0.1  # add up to 10% of the base wait as jitter
        with self.db.session(database="") as session:
            for attempt in range(max_retries):
                try:
                    with session.begin_transaction() as tx:
                        result = db_action_func(tx, **kwargs)
                        tx.commit()
                    return result
                except TransientError:
                    # Fix: draw jitter from a constant fraction each attempt.
                    # The old code reassigned the jitter bound with its own
                    # previous draw, shrinking it toward zero across retries.
                    jitter = random.uniform(0, jitter_fraction) * initial_wait_time
                    wait_time = initial_wait_time * (backoff_factor**attempt) + jitter
                    print(
                        f"Commit failed on attempt {attempt + 1}. Retrying in {wait_time} seconds..."
                    )
                    time.sleep(wait_time)
        # Fix: previously fell off the loop and returned None silently
        # after exhausting retries; fail loudly instead.
        raise Exception(f"Transaction failed after {max_retries} attempts")

    def _delete_unattached_nodes(self, tx) -> None:
        """
        Delete all nodes that have no relationship to any other node.

        Args:
            tx (neo4j.Transaction): Neo4j transaction object

        Returns:
            None
        """
        tx.run("MATCH (n) WHERE NOT EXISTS((n)--()) DELETE n;")
        print("Unattached nodes deleted")

        return None

    def _delete_node(self, tx, asset) -> str:
        """
        Delete the asset node.

        Args:
            tx (neo4j.Transaction): Neo4j transaction object
            asset (CaizenAssetV1): Asset object

        Returns:
            str: status message
        """
        node_name = str(asset.name)
        node_label = str(asset.type)
        # Labels cannot be Cypher query parameters, hence the f-string.
        # NOTE(review): assumes asset.type is a schema-validated type name,
        # not raw external input -- confirm in CaizenAssetV1.
        query = f"""MATCH (n:{node_label} {{ name: $node_name }}) DETACH DELETE n RETURN 1"""
        try:
            # Fix: dropped the unused $node_label query parameter.
            result = tx.run(query, node_name=node_name)
            if len(result.data()) > 0:
                print(f"Asset: {node_name} type: {node_label} deleted")
                return "Asset deleted"
            else:
                print(f"Asset: {node_name} type: {node_label} not found. Skipping")
                return "Asset not found"
        except Exception as e:
            raise Exception(f"Failed to delete node: {e}")

    def _upsert_node(self, tx, asset) -> None:
        """
        Create or update the asset node, stamping created/updated metadata
        and, when a parent is set, merging the parent HAS_CHILD edge.
        Updates only apply when the incoming timestamp is newer.

        Args:
            tx (neo4j.Transaction): Neo4j transaction object
            asset (CaizenAssetV1): Asset object

        Returns:
            None
        """
        parent, parent_rel = self._get_parent_details(asset)
        display_name = self._get_display_name(asset)
        node_label = str(asset.type)
        node_attrs = self._get_tidy_attrs(asset)

        q = f"""
            MERGE (n:{node_label} {{ name: $node_name }})
            ON CREATE SET
                n.created_ts = $created,
                n.created = $created_display,
                n.updated_ts = $updated,
                n.updated = $updated_display,
                n.display_name = $display_name,
                n.attrs = $node_attrs
            ON MATCH SET
                n.updated_ts = CASE WHEN n.updated_ts < $updated THEN $updated ELSE n.updated_ts END,
                n.updated = CASE WHEN n.updated_ts < $updated THEN $updated_display ELSE n.updated END,
                n.display_name = CASE WHEN n.updated_ts < $updated THEN $display_name ELSE n.display_name END,
                n.attrs = CASE WHEN n.updated_ts < $updated THEN $node_attrs ELSE n.attrs END
            {parent_rel}
            RETURN n
        """
        tx.run(
            q,
            node_name=str(asset.name),
            created=asset.created.timestamp(),
            created_display=self._format_3339(asset.created),
            updated=asset.updated.timestamp(),
            updated_display=self._format_3339(asset.updated),
            node_attrs=node_attrs,
            display_name=display_name,
            parent_name=parent,
        )
        print(f"Node {asset.name} of type {asset.type} upserted")

        return None

    def _format_3339(self, dt) -> str:
        """
        Format the datetime object to RFC3339 format

        Args:
            dt: datetime object

        Returns:
            str: datetime in RFC3339 format
            # NOTE(review): appends "Z" unconditionally -- assumes dt is UTC.
        """
        return dt.replace(microsecond=0).isoformat() + "Z"

    def _get_parent_label(self, parent):
        """
        Get the parent label of the asset

        Args:
            parent (str): parent name

        Returns:
            str: parent label
        """
        parent_label = None
        if parent.startswith("cloudresourcemanager.googleapis.com/projects/"):
            parent_label = "GCP_CLOUDRESOURCEMANAGER_PROJECT"
        elif parent.startswith("cloudresourcemanager.googleapis.com/organizations/"):
            parent_label = "GCP_CLOUDRESOURCEMANAGER_ORGANIZATION"
        elif parent.startswith("cloudresourcemanager.googleapis.com/folders/"):
            parent_label = "GCP_CLOUDRESOURCEMANAGER_FOLDER"
        else:
            parent_label = "GCP_CLOUDRESOURCEMANAGER_UNKNOWN"

        return parent_label

    def _get_parent_details(self, asset):
        """
        Get the parent details of the asset

        Args:
            asset (CaizenAssetV1): Asset object

        Returns:
            str: parent name (None when the asset has no parent set)
            str: parent relationship query fragment ("" when no parent)
        """
        parent = None
        parent_rel = ""
        if "parent" in asset.attrs.__fields_set__:
            parent = asset.attrs.parent
            parent_label = self._get_parent_label(parent)
            parent_rel = f"""
            MERGE (p:{parent_label} {{ name: $parent_name }})
            MERGE (p)-[r:HAS_CHILD]->(n)
            ON CREATE SET r.created_ts = $updated, r.created = $updated_display
            ON MATCH SET
                r.updated_ts = CASE WHEN r.updated_ts < $updated THEN $updated ELSE r.updated_ts END,
                r.updated = CASE WHEN r.updated_ts < $updated THEN $updated_display ELSE r.updated END
            """

        return parent, parent_rel

    def _get_display_name(self, asset):
        """
        Get the display name of the asset

        Args:
            asset (CaizenAssetV1): Asset object

        Returns:
            str: friendly display name of the asset
                 (attrs.name wins over attrs.display_name, which wins
                 over the asset's full name)
        """
        display_name = asset.name
        if "display_name" in asset.attrs.__fields_set__:
            display_name = asset.attrs.display_name
        if "name" in asset.attrs.__fields_set__:
            display_name = asset.attrs.name

        return display_name

    def _get_tidy_attrs(self, asset):
        """
        Remove parent, display_name and name from the attrs dict
        (they are stored as dedicated node properties instead).

        Args:
            asset (CaizenAssetV1): Asset object

        Returns:
            dict: with several keys removed
        """
        node_attrs = asset.attrs.dict()
        node_attrs.pop("parent", None)
        node_attrs.pop("display_name", None)
        node_attrs.pop("name", None)

        return node_attrs
--------------------------------------------------------------------------------
/caizen/app/src/v1/providers/gcp/GCP_PUBSUB_TOPIC.py:
--------------------------------------------------------------------------------
1 | from common.v1.schemas import CaizenAssetV1
2 | from neo4j import GraphDatabase
3 | from src.v1.providers.gcp.GCP_DEFAULT import GCP_DEFAULT_ASSET_V1_MANAGER
4 |
5 |
class GCP_PUBSUB_TOPIC_ASSET_V1_MANAGER(GCP_DEFAULT_ASSET_V1_MANAGER):
    """
    Graph manager for GCP Pub/Sub topic assets.

    Currently inherits all upsert/delete behavior unchanged from
    GCP_DEFAULT_ASSET_V1_MANAGER.
    """

    def __init__(self, asset_model: CaizenAssetV1, db: GraphDatabase) -> None:
        # Forwarding-only constructor; kept as an explicit hook for future
        # topic-specific initialization.
        super().__init__(asset_model, db)

    # Example override for type-specific delete handling:
    # def delete(self) -> None:
    #     print(f"PUBSUB_TOPIC Deleting {self.asset.name} of type {self.asset.type}")
12 |
--------------------------------------------------------------------------------
/caizen/app/src/v1/providers/gcp/GCP_STORAGE_BUCKET.py:
--------------------------------------------------------------------------------
1 | from common.v1.schemas import CaizenAssetV1
2 | from neo4j import GraphDatabase
3 | from src.v1.providers.gcp.GCP_DEFAULT import GCP_DEFAULT_ASSET_V1_MANAGER
4 |
5 |
class GCP_STORAGE_BUCKET_ASSET_V1_MANAGER(GCP_DEFAULT_ASSET_V1_MANAGER):
    """
    Graph manager for GCP Cloud Storage bucket assets.

    Currently inherits all upsert/delete behavior unchanged from
    GCP_DEFAULT_ASSET_V1_MANAGER.
    """

    def __init__(self, asset_model: CaizenAssetV1, db: GraphDatabase) -> None:
        # Forwarding-only constructor; kept as an explicit hook for future
        # bucket-specific initialization.
        super().__init__(asset_model, db)

    # Example override for type-specific delete handling:
    # def delete(self) -> None:
    #     print(f"BUCKET Deleting {self.asset.name} of type {self.asset.type}")
12 |
--------------------------------------------------------------------------------
/caizen/app/src/v1/providers/gcp/__init__.py:
--------------------------------------------------------------------------------
import importlib
import os

# Import every GCP_* asset module in this directory so its public names
# (asset models and managers) are re-exported from this package, emulating
# `from module import *` without building import statements via exec().
_provider_dir = os.path.dirname(os.path.realpath(__file__))
for _file in sorted(os.listdir(_provider_dir)):
    if _file.startswith("GCP_") and _file.endswith(".py"):
        _module = importlib.import_module(f"src.v1.providers.gcp.{_file[:-3]}")
        # Honor __all__ when defined; otherwise take non-underscore names.
        _public = getattr(_module, "__all__", None)
        if _public is None:
            _public = [n for n in dir(_module) if not n.startswith("_")]
        globals().update({n: getattr(_module, n) for n in _public})
7 |
--------------------------------------------------------------------------------
/caizen/app/src/v1/router.py:
--------------------------------------------------------------------------------
1 | from common.v1.providers import * # noqa
2 | from common.v1.schemas import CaizenAssetDeleteV1, CaizenAssetV1, ProcessedAsset
3 | from fastapi import APIRouter, HTTPException, Request, status
4 | from src.v1.providers import * # noqa
5 | from src.v1.utils.asset_helpers import find_asset_processor
6 |
7 | v1_router = APIRouter()
8 |
9 |
# POST /v1/asset
@v1_router.post(
    "/asset",
    status_code=status.HTTP_201_CREATED,
    response_model=ProcessedAsset,
)
def process_asset_upsert(req: Request, input: CaizenAssetV1) -> ProcessedAsset:
    """
    Find an asset processor and call the upsert method on the asset
    model to upsert the asset into the database.

    Args:
        req: The incoming request; req.app.db holds the graph db handle.
        input: The asset model to process.

    Returns:
        The processed asset JSON response.

    Raises:
        HTTPException: 400 when no processor/manager exists or upsert fails.
    """
    try:
        # Find the asset processor (pydantic model class) to use
        asset_processor = find_asset_processor(input)
        # Validate the raw asset payload into that model
        asset_model = asset_processor(**input.asset.model_dump())
        # Get the db manager class for the asset model by naming convention
        manager_cls = globals().get(f"{type(asset_model).__name__}_MANAGER")
        if manager_cls is None:
            # Fail with a clear message instead of "NoneType is not callable"
            raise ValueError(f"No manager found for {type(asset_model).__name__}")
        # and call upsert()
        manager_cls(db=req.app.db, asset_model=asset_model).upsert()
    except Exception as e:
        print(f"Failed to upsert asset: {e}")
        # Chain the original exception for easier debugging
        raise HTTPException(
            status_code=400, detail=f"Failed to upsert asset: {e}"
        ) from e

    return ProcessedAsset(name=asset_model.name, action="upsert")
41 |
42 |
# DELETE /v1/asset
@v1_router.delete(
    "/asset",
    status_code=status.HTTP_200_OK,
    response_model=ProcessedAsset,
)
def process_asset_delete(req: Request, input: CaizenAssetDeleteV1) -> ProcessedAsset:
    """
    Find an asset processor and call the delete method on the asset
    model to delete the asset from the database.

    Args:
        req: The incoming request; req.app.db holds the graph db handle.
        input: The asset model to process.

    Returns:
        The processed asset JSON response.

    Raises:
        HTTPException: 400 when no processor/manager exists or delete fails.
    """
    try:
        # Find the asset processor to use
        asset_processor = find_asset_processor(input)
        asset = input.asset
        # Get the db manager class for the asset model by naming convention
        manager_cls = globals().get(f"{asset_processor.__name__}_MANAGER")
        if manager_cls is None:
            # Fail with a clear message instead of "NoneType is not callable"
            raise ValueError(f"No manager found for {asset_processor.__name__}")
        # and call delete()
        msg = manager_cls(db=req.app.db, asset_model=asset).delete()
    except Exception as e:
        print(f"Failed to delete asset: {e}")
        # Chain the original exception for easier debugging
        raise HTTPException(
            status_code=400, detail=f"Failed to delete asset: {e}"
        ) from e

    return ProcessedAsset(name=asset.name, action="delete", message=msg)
73 |
--------------------------------------------------------------------------------
/caizen/app/src/v1/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/caizencloud/caizen/7cf35b79205e45db37de75839815e0a4ba9272eb/caizen/app/src/v1/utils/__init__.py
--------------------------------------------------------------------------------
/caizen/app/src/v1/utils/asset_helpers.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from common.v1.providers import * # noqa
4 | from fastapi import HTTPException
5 | from src.v1.providers import * # noqa
6 |
7 |
def find_asset_processor(asset_model) -> type:
    """
    Find the asset processor class for the asset model via its asset
    type and version, falling back to the provider-wide default
    processor (e.g. GCP_DEFAULT_ASSET_V1) when no exact match exists.

    Args:
        asset_model: The asset model to process.

    Returns:
        The asset processor class.

    Raises:
        HTTPException: 400 when the lookup fails or no processor is found.
    """
    try:
        asset_type = asset_model.asset.type
        asset_version = asset_model.version

        # Exact match by naming convention, e.g. GCP_STORAGE_BUCKET_ASSET_V1
        processor = globals().get(f"{asset_type}_ASSET_V{asset_version}")
        if not processor:
            # Fall back to the default asset processor for the provider
            # (avoid shadowing the dir() builtin)
            for subdir in list_of_provider_subdirs():
                if asset_type.lower().startswith(f"{subdir.lower()}_"):
                    processor = globals().get(
                        f"{subdir.upper()}_DEFAULT_ASSET_V{asset_version}"
                    )
                    break
    except Exception as e:
        raise HTTPException(
            status_code=400, detail=f"Failed to find asset processor: {e}"
        )

    if not processor:
        # Fixed double-negative error message ("No ... not found")
        raise HTTPException(status_code=400, detail="No asset processor found")

    return processor
44 |
45 |
def list_of_provider_subdirs() -> list:
    """
    Get list of providers via the names of the subdirectories,
    skipping dunder entries such as __init__.py and __pycache__.

    Returns:
        List of upper-cased provider subdirectory names.
    """
    providers_dir = os.path.join(os.path.dirname(__file__), "../providers")
    entries = os.listdir(providers_dir)
    return [entry.upper() for entry in entries if not entry.startswith("_")]
58 |
--------------------------------------------------------------------------------
/caizen/app/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/caizencloud/caizen/7cf35b79205e45db37de75839815e0a4ba9272eb/caizen/app/tests/__init__.py
--------------------------------------------------------------------------------
/caizen/common/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/caizencloud/caizen/7cf35b79205e45db37de75839815e0a4ba9272eb/caizen/common/__init__.py
--------------------------------------------------------------------------------
/caizen/common/v1/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/caizencloud/caizen/7cf35b79205e45db37de75839815e0a4ba9272eb/caizen/common/v1/__init__.py
--------------------------------------------------------------------------------
/caizen/common/v1/providers/__init__.py:
--------------------------------------------------------------------------------
import importlib
import os

# Import every provider package (subdirectory) in this directory so its
# public names are re-exported from common.v1.providers, emulating
# `from package import *` without building import statements via exec().
_providers_dir = os.path.dirname(os.path.realpath(__file__))
for _folder in sorted(os.listdir(_providers_dir)):
    if not _folder.startswith("__"):
        _module = importlib.import_module(f"common.v1.providers.{_folder}")
        # Honor __all__ when defined; otherwise take non-underscore names.
        _public = getattr(_module, "__all__", None)
        if _public is None:
            _public = [n for n in dir(_module) if not n.startswith("_")]
        globals().update({n: getattr(_module, n) for n in _public})
7 |
--------------------------------------------------------------------------------
/caizen/common/v1/providers/gcp/GCP_CLOUDRESOURCEMANAGER_FOLDER.py:
--------------------------------------------------------------------------------
1 | from typing import List, Optional
2 |
3 | from common.v1.schemas import CaizenAssetFormatV1
4 | from pydantic import BaseModel
5 |
6 |
class GCP_CLOUDRESOURCEMANAGER_FOLDER_ASSET_ATTRS_V1(BaseModel):
    """Typed attrs payload for a GCP Cloud Resource Manager folder asset."""

    # Resource-name ancestry chain of the folder
    ancestors: List[str]
    # Parent resource name (used downstream to build the HAS_CHILD edge)
    parent: str
    # Folders have no regional location; defaults to "global"
    location: Optional[str] = "global"
    display_name: str
    lifecycle_state: str


class GCP_CLOUDRESOURCEMANAGER_FOLDER_ASSET_V1(CaizenAssetFormatV1):
    """v1 asset envelope for folders, narrowing attrs to the typed model."""

    attrs: GCP_CLOUDRESOURCEMANAGER_FOLDER_ASSET_ATTRS_V1
17 |
--------------------------------------------------------------------------------
/caizen/common/v1/providers/gcp/GCP_CLOUDRESOURCEMANAGER_ORGANIZATION.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | from common.v1.schemas import CaizenAssetFormatV1
4 | from pydantic import BaseModel
5 |
6 |
class GCP_CLOUDRESOURCEMANAGER_ORGANIZATION_ASSET_ATTRS_V1(BaseModel):
    """Typed attrs payload for a GCP organization asset (no parent field --
    organizations are the root of the resource hierarchy)."""

    # Organizations have no regional location; defaults to "global"
    location: Optional[str] = "global"
    display_name: str
    lifecycle_state: str


class GCP_CLOUDRESOURCEMANAGER_ORGANIZATION_ASSET_V1(CaizenAssetFormatV1):
    """v1 asset envelope for organizations, narrowing attrs to the typed model."""

    attrs: GCP_CLOUDRESOURCEMANAGER_ORGANIZATION_ASSET_ATTRS_V1
15 |
--------------------------------------------------------------------------------
/caizen/common/v1/providers/gcp/GCP_CLOUDRESOURCEMANAGER_PROJECT.py:
--------------------------------------------------------------------------------
1 | from typing import List, Optional
2 |
3 | from common.v1.schemas import CaizenAssetFormatV1
4 | from pydantic import BaseModel
5 |
6 |
class GCP_CLOUDRESOURCEMANAGER_PROJECT_ASSET_ATTRS_V1(BaseModel):
    """Typed attrs payload for a GCP project asset."""

    # Resource-name ancestry chain of the project
    ancestors: List[str]
    # Parent resource name (used downstream to build the HAS_CHILD edge)
    parent: str
    # Projects have no regional location; defaults to "global"
    location: Optional[str] = "global"
    display_name: str
    lifecycle_state: str
    # Numeric project identifier (distinct from the string project_id)
    project_number: int
    project_id: str


class GCP_CLOUDRESOURCEMANAGER_PROJECT_ASSET_V1(CaizenAssetFormatV1):
    """v1 asset envelope for projects, narrowing attrs to the typed model."""

    attrs: GCP_CLOUDRESOURCEMANAGER_PROJECT_ASSET_ATTRS_V1
19 |
--------------------------------------------------------------------------------
/caizen/common/v1/providers/gcp/GCP_DEFAULT.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | from common.v1.schemas import CaizenAssetFormatV1
4 |
5 |
class GCP_DEFAULT_ASSET_V1(CaizenAssetFormatV1):
    """
    Fallback asset model for GCP asset types without a dedicated schema;
    attrs stays free-form since the concrete type is unknown.
    """

    # Default to None so attrs may be omitted, matching the parent class.
    # In pydantic v2, re-declaring a field WITHOUT a default makes it
    # required even when the parent's field was optional.
    attrs: Optional[dict] = None
8 |
--------------------------------------------------------------------------------
/caizen/common/v1/providers/gcp/GCP_PUBSUB_TOPIC.py:
--------------------------------------------------------------------------------
1 | from typing import List, Optional
2 |
3 | from common.v1.schemas import CaizenAssetFormatV1
4 | from pydantic import BaseModel
5 |
6 |
class GCP_PUBSUB_TOPIC_ASSET_ATTRS_V1(BaseModel):
    """Typed attrs payload for a GCP Pub/Sub topic asset."""

    # Resource-name ancestry chain of the topic
    ancestors: List[str]
    # Parent resource name (used downstream to build the HAS_CHILD edge)
    parent: str
    location: str
    # Optional friendly name; falls back to the asset name downstream
    display_name: Optional[str] = None


class GCP_PUBSUB_TOPIC_ASSET_V1(CaizenAssetFormatV1):
    """v1 asset envelope for Pub/Sub topics, narrowing attrs to the typed model."""

    attrs: GCP_PUBSUB_TOPIC_ASSET_ATTRS_V1
16 |
--------------------------------------------------------------------------------
/caizen/common/v1/providers/gcp/GCP_STORAGE_BUCKET.py:
--------------------------------------------------------------------------------
1 | from typing import List, Optional
2 |
3 | from common.v1.schemas import CaizenAssetFormatV1
4 | from pydantic import BaseModel
5 |
6 |
class GCP_STORAGE_BUCKET_ASSET_ATTRS_IAM_V1(BaseModel):
    """IAM-related configuration attributes of a GCS bucket."""

    bucket_policy_only: bool
    uniform_bucket_level_access: bool
    # NOTE(review): a string, not a bool -- presumably an enforcement mode
    # value; confirm against the CAI exporter's output.
    block_public_access: str


class GCP_STORAGE_BUCKET_ASSET_ATTRS_V1(BaseModel):
    """Typed attrs payload for a GCP Cloud Storage bucket asset."""

    # Optional friendly name; falls back to the asset name downstream
    display_name: Optional[str] = None
    # Resource-name ancestry chain of the bucket
    ancestors: List[str]
    # Parent resource name (used downstream to build the HAS_CHILD edge)
    parent: str
    location: str
    storage_class: str
    cors: List
    labels: dict
    versioning: bool
    # Nested IAM configuration block
    iam: GCP_STORAGE_BUCKET_ASSET_ATTRS_IAM_V1


class GCP_STORAGE_BUCKET_ASSET_V1(CaizenAssetFormatV1):
    """v1 asset envelope for GCS buckets, narrowing attrs to the typed model."""

    attrs: GCP_STORAGE_BUCKET_ASSET_ATTRS_V1
27 |
--------------------------------------------------------------------------------
/caizen/common/v1/providers/gcp/__init__.py:
--------------------------------------------------------------------------------
import importlib
import os

# Import every GCP_* schema module in this directory so its public names
# (pydantic asset models) are re-exported from this package, emulating
# `from module import *` without building import statements via exec().
_schema_dir = os.path.dirname(os.path.realpath(__file__))
for _file in sorted(os.listdir(_schema_dir)):
    if _file.startswith("GCP_") and _file.endswith(".py"):
        _module = importlib.import_module(f"common.v1.providers.gcp.{_file[:-3]}")
        # Honor __all__ when defined; otherwise take non-underscore names.
        _public = getattr(_module, "__all__", None)
        if _public is None:
            _public = [n for n in dir(_module) if not n.startswith("_")]
        globals().update({n: getattr(_module, n) for n in _public})
7 |
--------------------------------------------------------------------------------
/caizen/common/v1/schemas.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from typing import Literal, Optional
3 |
4 | from pydantic import BaseModel, field_validator
5 |
6 |
class CaizenAssetFormatV1(BaseModel):
    """Model for all v1 'asset' payloads."""

    # Unique asset name (used as the node identity by the graph managers)
    name: str
    # Asset type string (e.g. "GCP_STORAGE_BUCKET"); drives processor lookup
    type: str
    created: datetime
    updated: datetime
    # Free-form attributes; provider subclasses narrow this to typed models
    attrs: Optional[dict] = None

    def upsert(self):
        # Concrete asset types / managers supply the real implementation
        raise NotImplementedError

    def delete(self):
        # Concrete asset types / managers supply the real implementation
        raise NotImplementedError
21 |
22 |
class CaizenAssetDeleteFormatV1(BaseModel):
    """Model for v1 asset DELETE payloads (name and type only)."""

    name: str
    type: str
28 |
29 |
class CaizenAssetDeleteV1(BaseModel):
    """Outermost envelope of the versioned asset delete format."""

    version: int
    asset: CaizenAssetDeleteFormatV1

    @field_validator("version")
    def ensure_version_is_1(cls, v):
        # Only version 1 of the envelope is supported
        if v == 1:
            return v
        raise ValueError("version must be equal to 1")
41 |
42 |
class CaizenAssetV1(BaseModel):
    """Outermost envelope of the versioned asset format."""

    version: int
    asset: CaizenAssetFormatV1

    @field_validator("version")
    def ensure_version_is_1(cls, v):
        # Only version 1 of the envelope is supported
        if v == 1:
            return v
        raise ValueError("version must be equal to 1")
54 |
55 |
class ProcessedAsset(BaseModel):
    """Model for the response of the /v1/asset endpoint."""

    name: str
    # Which operation was performed on the asset
    action: Literal["upsert", "delete"]
    # Optional extra detail (e.g. the message returned by a delete)
    message: Optional[str] = None
62 |
63 |
class HealthStatus(BaseModel):
    """Model for the response of the /status endpoint."""

    # Defaults to "error" so a handler must explicitly report health
    status: str = "error"
    # Optional human-readable detail accompanying the status
    msg: Optional[str] = None
69 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/Makefile:
--------------------------------------------------------------------------------
.DEFAULT_GOAL := help

# These targets produce no files of the same name; mark them phony so they
# always run even if a file called "help" or "export-requirements" exists.
.PHONY: help export-requirements

help:
	@echo "Usage:"
	@echo " make export-requirements - Export dependencies to requirements.txt"
	@echo " make help - Show this help message"

export-requirements:
	@echo "Export requirements.txt using Poetry"
	@poetry export --without-hashes --format=requirements.txt > requirements.txt
11 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/common:
--------------------------------------------------------------------------------
1 | ../common
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/main.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 |
4 | import functions_framework
5 | import google.cloud.logging
6 | from flask import Flask
7 | from flask.wrappers import Request, Response
8 | from src.message import extract_bucket_and_object_id, validate_request
9 | from src.processing import process_gcs_file # noqa
10 | from src.schemas import NotificationResponse
11 |
app = Flask(__name__)

# If on GCP (K_SERVICE is set on Cloud Run / Cloud Functions), attach the
# Cloud Logging handler so stdlib logging flows to Cloud Logging.
if os.getenv("K_SERVICE"):
    client = google.cloud.logging.Client()
    client.setup_logging()

# Root logger level is configurable via the LOG_LEVEL env var (default INFO)
logger = logging.getLogger()
log_level = os.getenv("LOG_LEVEL", "INFO")
logger.setLevel(log_level)
22 |
23 |
# Function Entrypoint: main
@functions_framework.http
def main(request: Request) -> Response:
    """
    HTTP entrypoint: validate the pubsub push notification, process the
    referenced GCS file, and report success (200) or failure (500).
    """
    body = validate_request(request)
    bucket_id, object_id = extract_bucket_and_object_id(body)
    gcs_uri = f"gs://{bucket_id}/{object_id}"

    try:
        process_gcs_file(bucket_id, object_id)
    except Exception as e:
        logging.error(f"Error processing GCS file: {gcs_uri}: {e}")
        resp = NotificationResponse(
            detail=f"Error processing GCS file: {gcs_uri}: {e}"
        ).model_dump_json(exclude_none=True)
        return Response(resp, status=500)

    # Log the file processed and respond 200 to the pubsub http trigger
    resp = NotificationResponse(detail=gcs_uri).model_dump_json(exclude_none=True)
    logging.info(resp)

    return Response(resp, status=200)
45 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "gcp-cai-func"
3 | version = "0.1.0"
4 | description = ""
5 | authors = ["Brad Geesaman <3769609+bgeesaman@users.noreply.github.com>"]
6 | license = "APACHE2.0"
7 | readme = "README.md"
8 | package-mode = false
9 |
10 | [tool.poetry.dependencies]
11 | python = "^3.11"
12 | functions-framework = "^3.8.1"
13 | requests = "^2.32.3"
14 | flask-pydantic = "^0.12.0"
15 | google-cloud-logging = "^3.11.2"
16 | poetry-plugin-export = "^1.8.0"
17 | uvicorn = "^0.30.6"
18 | fastapi = "^0.114.1"
19 | a2wsgi = "^1.10.7"
20 | google-cloud-storage = "^2.18.2"
21 |
22 |
23 | [build-system]
24 | requires = ["poetry-core"]
25 | build-backend = "poetry.core.masonry.api"
26 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/requirements.txt:
--------------------------------------------------------------------------------
1 | a2wsgi==1.10.7 ; python_version >= "3.11" and python_version < "4.0"
2 | annotated-types==0.7.0 ; python_version >= "3.11" and python_version < "4.0"
3 | anyio==4.4.0 ; python_version >= "3.11" and python_version < "4.0"
4 | blinker==1.8.2 ; python_version >= "3.11" and python_version < "4"
5 | build==1.2.2 ; python_version >= "3.11" and python_version < "4.0"
6 | cachecontrol[filecache]==0.14.0 ; python_version >= "3.11" and python_version < "4.0"
7 | cachetools==5.5.0 ; python_version >= "3.11" and python_version < "4.0"
8 | certifi==2024.8.30 ; python_version >= "3.11" and python_version < "4.0"
9 | cffi==1.17.1 ; python_version >= "3.11" and python_version < "4.0" and (sys_platform == "darwin" or sys_platform == "linux") and (sys_platform == "darwin" or platform_python_implementation != "PyPy")
10 | charset-normalizer==3.3.2 ; python_version >= "3.11" and python_version < "4.0"
11 | cleo==2.1.0 ; python_version >= "3.11" and python_version < "4.0"
12 | click==8.1.7 ; python_version >= "3.11" and python_version < "4"
13 | cloudevents==1.11.0 ; python_version >= "3.11" and python_version < "4"
14 | colorama==0.4.6 ; python_version >= "3.11" and python_version < "4" and platform_system == "Windows" or python_version >= "3.11" and python_version < "4" and os_name == "nt"
15 | crashtest==0.4.1 ; python_version >= "3.11" and python_version < "4.0"
16 | cryptography==43.0.1 ; python_version >= "3.11" and python_version < "4.0" and sys_platform == "linux"
17 | deprecated==1.2.14 ; python_version >= "3.11" and python_version < "4.0"
18 | deprecation==2.1.0 ; python_version >= "3.11" and python_version < "4"
19 | distlib==0.3.8 ; python_version >= "3.11" and python_version < "4.0"
20 | dulwich==0.21.7 ; python_version >= "3.11" and python_version < "4.0"
21 | fastapi==0.114.1 ; python_version >= "3.11" and python_version < "4.0"
22 | fastjsonschema==2.20.0 ; python_version >= "3.11" and python_version < "4.0"
23 | filelock==3.16.0 ; python_version >= "3.11" and python_version < "4.0"
24 | flask-pydantic==0.12.0 ; python_version >= "3.11" and python_version < "4.0"
25 | flask==3.0.3 ; python_version >= "3.11" and python_version < "4"
26 | functions-framework==3.8.1 ; python_version >= "3.11" and python_version < "4"
27 | google-api-core==2.19.2 ; python_version >= "3.11" and python_version < "4.0"
28 | google-api-core[grpc]==2.19.2 ; python_version >= "3.11" and python_version < "4.0"
29 | google-auth==2.34.0 ; python_version >= "3.11" and python_version < "4.0"
30 | google-cloud-appengine-logging==1.4.5 ; python_version >= "3.11" and python_version < "4.0"
31 | google-cloud-audit-log==0.3.0 ; python_version >= "3.11" and python_version < "4.0"
32 | google-cloud-core==2.4.1 ; python_version >= "3.11" and python_version < "4.0"
33 | google-cloud-logging==3.11.2 ; python_version >= "3.11" and python_version < "4.0"
34 | google-cloud-storage==2.18.2 ; python_version >= "3.11" and python_version < "4.0"
35 | google-crc32c==1.6.0 ; python_version >= "3.11" and python_version < "4.0"
36 | google-resumable-media==2.7.2 ; python_version >= "3.11" and python_version < "4.0"
37 | googleapis-common-protos==1.65.0 ; python_version >= "3.11" and python_version < "4.0"
38 | googleapis-common-protos[grpc]==1.65.0 ; python_version >= "3.11" and python_version < "4.0"
39 | grpc-google-iam-v1==0.13.1 ; python_version >= "3.11" and python_version < "4.0"
40 | grpcio-status==1.66.1 ; python_version >= "3.11" and python_version < "4.0"
41 | grpcio==1.66.1 ; python_version >= "3.11" and python_version < "4.0"
42 | gunicorn==23.0.0 ; python_version >= "3.11" and python_version < "4" and platform_system != "Windows"
43 | h11==0.14.0 ; python_version >= "3.11" and python_version < "4.0"
44 | idna==3.8 ; python_version >= "3.11" and python_version < "4.0"
45 | importlib-metadata==8.4.0 ; python_version >= "3.11" and python_version < "4.0"
46 | installer==0.7.0 ; python_version >= "3.11" and python_version < "4.0"
47 | itsdangerous==2.2.0 ; python_version >= "3.11" and python_version < "4"
48 | jaraco-classes==3.4.0 ; python_version >= "3.11" and python_version < "4.0"
49 | jeepney==0.8.0 ; python_version >= "3.11" and python_version < "4.0" and sys_platform == "linux"
50 | jinja2==3.1.4 ; python_version >= "3.11" and python_version < "4"
51 | keyring==24.3.1 ; python_version >= "3.11" and python_version < "4.0"
52 | markupsafe==2.1.5 ; python_version >= "3.11" and python_version < "4"
53 | more-itertools==10.5.0 ; python_version >= "3.11" and python_version < "4.0"
54 | msgpack==1.1.0 ; python_version >= "3.11" and python_version < "4.0"
55 | opentelemetry-api==1.27.0 ; python_version >= "3.11" and python_version < "4.0"
56 | packaging==24.1 ; python_version >= "3.11" and python_version < "4"
57 | pexpect==4.9.0 ; python_version >= "3.11" and python_version < "4.0"
58 | pkginfo==1.11.1 ; python_version >= "3.11" and python_version < "4.0"
59 | platformdirs==4.3.2 ; python_version >= "3.11" and python_version < "4.0"
60 | poetry-core==1.9.0 ; python_version >= "3.11" and python_version < "4.0"
61 | poetry-plugin-export==1.8.0 ; python_version >= "3.11" and python_version < "4.0"
62 | poetry==1.8.3 ; python_version >= "3.11" and python_version < "4.0"
63 | proto-plus==1.24.0 ; python_version >= "3.11" and python_version < "4.0"
64 | protobuf==5.28.1 ; python_version >= "3.11" and python_version < "4.0"
65 | ptyprocess==0.7.0 ; python_version >= "3.11" and python_version < "4.0"
66 | pyasn1-modules==0.4.1 ; python_version >= "3.11" and python_version < "4.0"
67 | pyasn1==0.6.1 ; python_version >= "3.11" and python_version < "4.0"
68 | pycparser==2.22 ; python_version >= "3.11" and python_version < "4.0" and (sys_platform == "darwin" or sys_platform == "linux") and (sys_platform == "darwin" or platform_python_implementation != "PyPy")
69 | pydantic-core==2.23.3 ; python_version >= "3.11" and python_version < "4.0"
70 | pydantic==2.9.1 ; python_version >= "3.11" and python_version < "4.0"
71 | pyproject-hooks==1.1.0 ; python_version >= "3.11" and python_version < "4.0"
72 | pywin32-ctypes==0.2.3 ; python_version >= "3.11" and python_version < "4.0" and sys_platform == "win32"
73 | rapidfuzz==3.9.7 ; python_version >= "3.11" and python_version < "4.0"
74 | requests-toolbelt==1.0.0 ; python_version >= "3.11" and python_version < "4.0"
75 | requests==2.32.3 ; python_version >= "3.11" and python_version < "4.0"
76 | rsa==4.9 ; python_version >= "3.11" and python_version < "4"
77 | secretstorage==3.3.3 ; python_version >= "3.11" and python_version < "4.0" and sys_platform == "linux"
78 | shellingham==1.5.4 ; python_version >= "3.11" and python_version < "4.0"
79 | sniffio==1.3.1 ; python_version >= "3.11" and python_version < "4.0"
80 | starlette==0.38.5 ; python_version >= "3.11" and python_version < "4.0"
81 | tomlkit==0.13.2 ; python_version >= "3.11" and python_version < "4.0"
82 | trove-classifiers==2024.9.12 ; python_version >= "3.11" and python_version < "4.0"
83 | typing-extensions==4.12.2 ; python_version >= "3.11" and python_version < "4.0"
84 | urllib3==2.2.2 ; python_version >= "3.11" and python_version < "4.0"
85 | uvicorn==0.30.6 ; python_version >= "3.11" and python_version < "4.0"
86 | virtualenv==20.26.4 ; python_version >= "3.11" and python_version < "4.0"
87 | watchdog==5.0.2 ; python_version >= "3.11" and python_version < "4"
88 | werkzeug==3.0.4 ; python_version >= "3.11" and python_version < "4"
89 | wrapt==1.16.0 ; python_version >= "3.11" and python_version < "4.0"
90 | xattr==1.1.0 ; python_version >= "3.11" and python_version < "4.0" and sys_platform == "darwin"
91 | zipp==3.20.1 ; python_version >= "3.11" and python_version < "4.0"
92 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/caizencloud/caizen/7cf35b79205e45db37de75839815e0a4ba9272eb/caizen/gcp_cai_func/src/__init__.py
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/src/message.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from collections import defaultdict
3 |
4 | from flask import Request, Response, abort
5 | from pydantic import ValidationError
6 | from src.schemas import NotificationResponse, PushNotification
7 |
8 |
def validate_request(request: Request):
    """
    Validates the request body using Pydantic, then checks the event
    type, file size, and content type of the referenced GCS object.

    Args:
        request: The Flask request object.

    Returns:
        The validated request body (a PushNotification).

    Aborts:
        422 on schema validation failure; 400 on unexpected event type;
        200 (early success) for empty or non-ndjson files.
    """
    # Validate the request body using Pydantic
    try:
        data = request.get_json()
        body = PushNotification(**data)
    except ValidationError as e:
        # Flatten pydantic's error list into {field: [messages]}
        reformatted_message = defaultdict(list)
        for pydantic_error in e.errors():
            loc, msg = pydantic_error["loc"], pydantic_error["msg"]
            filtered_loc = loc[1:] if loc[0] in ("body", "query", "path") else loc
            field_string = ".".join(filtered_loc)  # nested fields with dot-notation
            reformatted_message[field_string].append(msg)
        msg = {
            "detail": "Invalid request",
            "errors": reformatted_message,
        }
        resp = NotificationResponse(**msg).model_dump_json(exclude_none=True)
        logging.error(resp)
        abort(Response(resp, status=422))

    # Validate the event type, size, and file content
    bucket_id, object_id = extract_bucket_and_object_id(body)
    full_object_name = f"gs://{bucket_id}/{object_id}"
    event_type = body.message.attributes.eventType
    if event_type != "OBJECT_FINALIZE":
        msg = {
            "detail": f"Invalid event type {event_type}. Skipping {full_object_name}"
        }
        resp = NotificationResponse(**msg).model_dump_json(exclude_none=True)
        logging.error(resp)
        abort(Response(resp, status=400))

    # BUG FIX: the original `int(...get("size")) or 0` raised a TypeError
    # when "size" was absent (int(None)); default BEFORE converting.
    file_size = int(body.message.data.get("size") or 0)
    if file_size == 0:
        msg = {"detail": f"Empty file. Skipping {full_object_name}"}
        resp = NotificationResponse(**msg).model_dump_json(exclude_none=True)
        logging.info(resp)
        abort(Response(resp, status=200))

    content_type = body.message.data.get("contentType")
    if content_type != "application/x-ndjson":
        msg = {
            "detail": f"Non-ndjson file. Skipping {full_object_name} of type {content_type}"
        }
        resp = NotificationResponse(**msg).model_dump_json(exclude_none=True)
        logging.info(resp)
        abort(Response(resp, status=200))

    return body
67 |
68 |
def extract_bucket_and_object_id(body):
    """
    Extracts the bucket ID and object ID from the validated data.

    Args:
        body: The validated data containing the message attributes.

    Returns:
        A tuple containing the bucket ID and object ID.
    """
    attributes = body.message.attributes
    return attributes.bucketId, attributes.objectId
82 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/src/processing.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | from concurrent.futures import ThreadPoolExecutor, as_completed
4 | from typing import Generator
5 |
6 | import requests
7 | from google.cloud import storage
8 | from pydantic import ValidationError
9 | from src.schemas import CaiRecord
10 | from src.transform import GCP_ASSET
11 |
12 |
def stream_gcs_file(bucket_id: str, object_id: str) -> Generator[str, None, None]:
    """
    Stream the contents of a file from GCS line by line (asset by asset).

    Args:
        bucket_id: The GCS bucket ID.
        object_id: The GCS object ID.

    Yields:
        Each line of the file as a string (whitespace-stripped).

    Raises:
        Exception: when the file cannot be opened or read.
    """
    client = storage.Client()
    bucket = client.bucket(bucket_id)
    blob = bucket.blob(object_id)
    try:
        with blob.open("r") as file:
            for line in file:
                yield line.strip()
    except Exception as e:
        logging.error(f"Error fetching GCS file: {e}")
        # Chain the original exception so the root cause is preserved
        raise Exception(f"Error fetching GCS file: {e}") from e
35 |
def decode_json_line(line: str):
    """
    Decode one ndjson line into a Python object.

    Args:
        line: A single JSON document as a string.

    Returns:
        The decoded Python object.

    Raises:
        Exception: when the line is not valid JSON.
    """
    try:
        return json.loads(line)
    except json.JSONDecodeError as e:
        logging.error(f"Error decoding JSON: {e}")
        # Chain the original exception so the root cause is preserved
        raise Exception(f"Error decoding JSON: {e}") from e
42 |
43 |
def validate_cai_record(data: dict):
    """Parse a raw dict into a CaiRecord, raising on validation failure."""
    try:
        record = CaiRecord(**data)
    except ValidationError as e:
        logging.error(f"Error validating CAI record: {e}")
        raise Exception(f"Error validating CAI record: {e}")
    return record
50 |
51 |
def transform_cai_record(cai_record):
    """Transform a validated CAI record into a CAIZEN asset model."""
    try:
        return GCP_ASSET().transform(cai_record)
    except ValueError as e:
        logging.error(f"Error transforming CAI record: {e}")
        raise Exception(f"Error transforming CAI record: {e}")
59 |
60 |
def send_to_caizen_api(api_url: str, headers: dict, caizen_asset_json: dict) -> None:
    """
    POST a single CAIZEN asset to the CAIZEN API as an "upsert" request.

    Args:
        api_url: The CAIZEN API endpoint URL.
        headers: HTTP headers for the request.
        caizen_asset_json: The JSON-serializable asset payload.

    Raises:
        Exception: If the request fails or returns a non-2xx status.
    """
    try:
        # Bound the request so a hung API cannot stall a worker thread forever.
        response = requests.post(
            api_url, headers=headers, json=caizen_asset_json, timeout=30
        )
        response.raise_for_status()
    except requests.RequestException as e:
        logging.error(
            f"Error sending request to CAIZEN API: {api_url} {caizen_asset_json} {e}"
        )
        # Chain the original exception so the full traceback is preserved.
        raise Exception(
            f"Error sending request to CAIZEN API: {api_url} {caizen_asset_json} {e}"
        ) from e
76 |
77 |
def process_asset_line(line: str, api_url: str, headers: dict) -> None:
    """
    Process a single line and send it to the CAIZEN API as an "upsert" request.

    The line is decoded from JSON, validated as a CAI record, transformed
    into a CAIZEN asset, and POSTed to the API.

    Args:
        line: The line to process.
        api_url: The CAIZEN API URL.
        headers: The headers for the API request.

    Returns:
        None

    Raises:
        Exception: If any stage (decode, validate, transform, send) fails.
    """
    try:
        data = decode_json_line(line)
        cai_record = validate_cai_record(data)
        caizen_asset = transform_cai_record(cai_record)
        # Round-trip through JSON so non-primitive fields (e.g. datetimes) are
        # serialized and None-valued fields are dropped before sending.
        caizen_asset_json = json.loads(caizen_asset.model_dump_json(exclude_none=True))
        send_to_caizen_api(api_url, headers, caizen_asset_json)
    except Exception as e:
        logging.error(f"Error processing asset line: {e}")
        # Chain the original exception so the full traceback is preserved.
        raise Exception(f"Error processing asset line: {e}") from e
99 |
100 |
def process_gcs_file(
    bucket_id: str,
    object_id: str,
    api_url: str = "http://localhost:8000/v1/asset",
    max_workers: int = 10,
) -> None:
    """
    Stream the ndjson file from GCS and process each line concurrently,
    sending each asset to the CAIZEN API as an "upsert" request.

    Args:
        bucket_id: The GCS bucket ID.
        object_id: The GCS object ID.
        api_url: The CAIZEN API endpoint (defaults to the local dev server).
        max_workers: Number of concurrent worker threads.

    Returns:
        None

    Raises:
        Exception: If any line fails to process.
    """
    headers = {"Content-Type": "application/json"}

    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = [
            executor.submit(process_asset_line, line, api_url, headers)
            for line in stream_gcs_file(bucket_id, object_id)
        ]

        for future in as_completed(futures):
            try:
                future.result()
            except Exception as e:
                logging.error(f"Error processing asset: {e}")
                # Chain the original exception so the full traceback is preserved.
                raise Exception(f"Error processing asset: {e}") from e
127 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/src/schemas.py:
--------------------------------------------------------------------------------
1 | import base64
2 | import json
3 | from datetime import datetime
4 | from typing import List, Optional
5 |
6 | from pydantic import BaseModel, HttpUrl, field_validator
7 |
8 |
class CaiResource(BaseModel):
    """The `resource` sub-object of a GCP Cloud Asset Inventory record."""

    data: Optional[dict] = None  # Raw resource payload; may be absent from the export.
    discovery_document_uri: HttpUrl  # Validated as an HTTP(S) URL.
    discovery_name: str
    location: Optional[str] = "global"  # Defaults to "global" when no location is set.
    parent: Optional[str] = None  # Full CAI name of the parent resource, if any.
    version: str
16 |
17 |
class CaiRecord(BaseModel):
    """A single GCP Cloud Asset Inventory (CAI) export record."""

    ancestors: List[str]  # CRM hierarchy, e.g. ["projects/1", "organizations/2"]
    asset_type: str  # Normalized to "GCP_SERVICE_KIND" by format_asset_type below.
    name: str  # Leading "//" removed by format_asset_name below.
    resource: CaiResource
    update_time: datetime

    @field_validator("asset_type")
    def format_asset_type(cls, v):
        """Convert "service.googleapis.com/Kind" into "GCP_SERVICE_KIND"."""
        parts = v.split("/")
        if len(parts) != 2:
            raise ValueError(f"Invalid asset_type format {v}")

        service = parts[0].split(".")[0].upper()  # "pubsub.googleapis.com" -> "PUBSUB"
        resource = parts[1].upper()  # "Topic" -> "TOPIC"

        return f"GCP_{service}_{resource}"

    @field_validator("name")
    def format_asset_name(cls, v):
        # NOTE(review): lstrip("//") strips *all* leading "/" characters (the
        # argument is a character set, not a prefix). Equivalent to removing
        # the "//" prefix as long as CAI names start with exactly "//".
        return v.lstrip("//")
39 |
40 |
class StorageAttributes(BaseModel):
    """Pub/Sub message attributes of a GCS object-change notification."""

    bucketId: str  # GCS bucket that emitted the event.
    eventTime: datetime
    eventType: str  # e.g. "OBJECT_FINALIZE".
    notificationConfig: str  # Bucket notification config that produced the event.
    objectGeneration: str
    objectId: str  # Object path within the bucket.
    overwroteGeneration: Optional[str] = None  # Set only when an object was overwritten.
    payloadFormat: str  # e.g. "JSON_API_V1".
50 |
51 |
class StorageNotification(BaseModel):
    """The `message` portion of a Pub/Sub push envelope for a GCS notification."""

    attributes: StorageAttributes
    # NOTE(review): annotated as str for the incoming base64 payload, but the
    # validator below replaces the value with the decoded JSON (a dict).
    data: str
    messageId: str
    message_id: str  # Pub/Sub push delivers both spellings; both are required here.
    publishTime: str
    publish_time: str  # Pub/Sub push delivers both spellings; both are required here.

    @field_validator("data")
    def decode_base64_and_load_json(cls, v):
        """Decode the base64 `data` field and parse it as JSON."""
        try:
            # Decode the base64 string
            decoded_bytes = base64.b64decode(v)
            # Load the JSON content
            decoded_json = json.loads(decoded_bytes.decode("utf-8"))
        except (ValueError, json.JSONDecodeError):
            raise ValueError("Invalid base64-encoded JSON data")

        return decoded_json
71 |
72 |
class PushNotification(BaseModel):
    """Envelope of a Pub/Sub push delivery: the message plus its subscription."""

    message: StorageNotification
    subscription: str  # e.g. "projects/<project>/subscriptions/<name>".
76 |
77 |
class NotificationResponse(BaseModel):
    """Response body returned to the Pub/Sub push endpoint."""

    detail: str  # Human-readable status message.
    errors: Optional[dict] = None  # Optional structured error details.
81 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/src/transform.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, timezone
2 |
3 | from common.v1.schemas import CaizenAssetV1
4 | from src.v1.providers import * # noqa
5 |
6 |
class GCP_ASSET:
    def transform(self, cr) -> CaizenAssetV1:
        """
        Transform a CAI record into a CAIZEN asset.

        Args:
            cr: The CAI record to transform.

        Returns:
            The CAIZEN asset as a CaizenAssetV1 object.
        """
        created = self._get_created_time(cr)
        ancestors = self._parse_ancestors(cr.name, cr.ancestors)

        # Strip the leading "//" CAI prefix. Leave parent as None when it is
        # missing instead of producing the literal string "None".
        parent = cr.resource.parent.lstrip("//") if cr.resource.parent else None

        # Create the asset dictionary
        ca = {
            "name": cr.name,
            "type": cr.asset_type,
            "created": created,
            "updated": cr.update_time,
            "attrs": {
                "ancestors": ancestors,
                "parent": parent,
                "location": cr.resource.location,
            },
        }

        # Enrich the asset with custom attributes when a provider-specific
        # model (e.g. GCP_STORAGE_BUCKET_V1) exists for this asset type.
        asset_model = globals().get(f"{cr.asset_type}_V1")
        if asset_model:
            ca = asset_model.enrich_attrs(ca, cr)

        # Wrap into a CaizenAssetV1 object
        cav = {"version": 1, "asset": ca}
        caizen_asset = CaizenAssetV1(**cav)

        return caizen_asset

    def _parse_ancestors(self, asset_name, ancestors_list):
        """
        Transform the ancestors list by prepending 'cloudresourcemanager.googleapis.com/'

        Args:
            asset_name: The (prefix-stripped) name of the asset itself.
            ancestors_list: List of ancestor strings (e.g. "organizations/123").

        Returns:
            List of transformed ancestor strings, excluding the asset itself.
        """
        ancestors = ["cloudresourcemanager.googleapis.com/" + a for a in ancestors_list]

        # Remove self from the ancestors list (CRM assets appear in their own
        # ancestry).
        if asset_name in ancestors:
            ancestors.remove(asset_name)

        return ancestors

    def _get_created_time(self, cr):
        """
        Get the created time of the asset. If not found, return the epoch datetime.

        Args:
            cr: The CAI record.

        Returns:
            created: datetime object of the created time.

        """
        created = datetime.fromtimestamp(0, tz=timezone.utc)
        # resource.data is Optional and may be None (see CaiResource schema).
        data = cr.resource.data or {}
        ct = data.get("createTime") or data.get("creationTime")
        if ct:
            # fromisoformat() on Python < 3.11 rejects a trailing "Z";
            # normalize it to an explicit UTC offset.
            created = datetime.fromisoformat(ct.replace("Z", "+00:00"))

        return created
81 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/src/v1/providers/__init__.py:
--------------------------------------------------------------------------------
import importlib
import os

# Import every provider sub-package (e.g. "gcp") so its exported names are
# available via `from src.v1.providers import *`.
_providers_dir = os.path.dirname(os.path.realpath(__file__))
for _folder in os.listdir(_providers_dir):
    # Skip dunder entries (__init__.py, __pycache__) and plain files.
    if _folder.startswith("__") or not os.path.isdir(
        os.path.join(_providers_dir, _folder)
    ):
        continue
    _module = importlib.import_module(f"src.v1.providers.{_folder}")
    # Emulate `from <module> import *`, honoring __all__ when defined.
    _names = getattr(_module, "__all__", None)
    if _names is None:
        _names = [n for n in dir(_module) if not n.startswith("_")]
    globals().update({n: getattr(_module, n) for n in _names})
7 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/src/v1/providers/gcp/GCP_CLOUDRESOURCEMANAGER_FOLDER.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 |
3 | from common.v1.providers.gcp.GCP_CLOUDRESOURCEMANAGER_FOLDER import (
4 | GCP_CLOUDRESOURCEMANAGER_FOLDER_ASSET_V1,
5 | )
6 | from common.v1.schemas import CaizenAssetV1
7 | from src.schemas import CaiRecord
8 |
9 |
class GCP_CLOUDRESOURCEMANAGER_FOLDER_V1:
    @staticmethod
    def enrich_attrs(
        ca: GCP_CLOUDRESOURCEMANAGER_FOLDER_ASSET_V1, cr: CaiRecord
    ) -> CaizenAssetV1:
        """
        Set the attributes for a GCP CRM Folder asset.

        Args:
            ca: The CAIZEN asset.
            cr: The CAI record.

        Returns:
            The CAIZEN asset with attributes set.

        Raises:
            ValueError: If the enriched asset fails schema validation.
        """
        ca["created"] = datetime.fromisoformat(cr.resource.data.get("createTime"))
        ca["updated"] = cr.update_time
        ca["attrs"]["display_name"] = cr.resource.data.get("displayName")
        # Fall back to "" so a missing lifecycleState cannot raise AttributeError.
        ca["attrs"]["lifecycle_state"] = (
            cr.resource.data.get("lifecycleState") or ""
        ).lower()

        # Validate the enriched asset against its schema before returning.
        try:
            GCP_CLOUDRESOURCEMANAGER_FOLDER_ASSET_V1(**ca)
        except ValueError as e:
            raise ValueError(f"Error enriching GCP CRM Folder asset: {e}") from e

        return ca
35 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/src/v1/providers/gcp/GCP_CLOUDRESOURCEMANAGER_ORGANIZATION.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 |
3 | from common.v1.providers.gcp.GCP_CLOUDRESOURCEMANAGER_ORGANIZATION import (
4 | GCP_CLOUDRESOURCEMANAGER_ORGANIZATION_ASSET_V1,
5 | )
6 | from common.v1.schemas import CaizenAssetV1
7 | from src.schemas import CaiRecord
8 |
9 |
class GCP_CLOUDRESOURCEMANAGER_ORGANIZATION_V1:
    @staticmethod
    def enrich_attrs(
        ca: GCP_CLOUDRESOURCEMANAGER_ORGANIZATION_ASSET_V1, cr: CaiRecord
    ) -> CaizenAssetV1:
        """
        Set the attributes for a GCP CRM Organization asset.

        Args:
            ca: The CAIZEN asset.
            cr: The CAI record.

        Returns:
            The CAIZEN asset with attributes set.

        Raises:
            ValueError: If the enriched asset fails schema validation.
        """
        ca["created"] = datetime.fromisoformat(cr.resource.data.get("creationTime"))
        ca["updated"] = cr.update_time
        ca["attrs"]["display_name"] = cr.resource.data.get("displayName")
        # Fall back to "" so a missing lifecycleState cannot raise AttributeError.
        ca["attrs"]["lifecycle_state"] = (
            cr.resource.data.get("lifecycleState") or ""
        ).lower()

        # Validate the enriched asset against its schema before returning.
        try:
            GCP_CLOUDRESOURCEMANAGER_ORGANIZATION_ASSET_V1(**ca)
        except ValueError as e:
            raise ValueError(f"Error enriching GCP CRM Organization asset: {e}") from e

        return ca
35 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/src/v1/providers/gcp/GCP_CLOUDRESOURCEMANAGER_PROJECT.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 |
3 | from common.v1.providers.gcp.GCP_CLOUDRESOURCEMANAGER_PROJECT import (
4 | GCP_CLOUDRESOURCEMANAGER_PROJECT_ASSET_V1,
5 | )
6 | from common.v1.schemas import CaizenAssetV1
7 | from src.schemas import CaiRecord
8 |
9 |
class GCP_CLOUDRESOURCEMANAGER_PROJECT_V1:
    @staticmethod
    def enrich_attrs(
        ca: GCP_CLOUDRESOURCEMANAGER_PROJECT_ASSET_V1, cr: CaiRecord
    ) -> CaizenAssetV1:
        """
        Set the attributes for a GCP CRM Project asset.

        Args:
            ca: The CAIZEN asset.
            cr: The CAI record.

        Returns:
            The CAIZEN asset with attributes set.

        Raises:
            ValueError: If the enriched asset fails schema validation.
        """
        ca["created"] = datetime.fromisoformat(cr.resource.data.get("createTime"))
        ca["updated"] = cr.update_time
        ca["attrs"]["display_name"] = cr.resource.data.get("name")
        # Fall back to "" so a missing lifecycleState cannot raise AttributeError.
        ca["attrs"]["lifecycle_state"] = (
            cr.resource.data.get("lifecycleState") or ""
        ).lower()
        ca["attrs"]["project_number"] = cr.resource.data.get("projectNumber")
        ca["attrs"]["project_id"] = cr.resource.data.get("projectId")

        # Validate the enriched asset against its schema before returning.
        try:
            GCP_CLOUDRESOURCEMANAGER_PROJECT_ASSET_V1(**ca)
        except ValueError as e:
            raise ValueError(f"Error enriching GCP CRM Project asset: {e}") from e

        return ca
37 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/src/v1/providers/gcp/GCP_PUBSUB_TOPIC.py:
--------------------------------------------------------------------------------
1 | from common.v1.providers.gcp.GCP_PUBSUB_TOPIC import GCP_PUBSUB_TOPIC_ASSET_V1
2 | from common.v1.schemas import CaizenAssetV1
3 | from src.schemas import CaiRecord
4 |
5 |
class GCP_PUBSUB_TOPIC_V1:
    @staticmethod
    def enrich_attrs(ca: GCP_PUBSUB_TOPIC_ASSET_V1, cr: CaiRecord) -> CaizenAssetV1:
        """
        Set the attributes for a GCP PUBSUB TOPIC asset.

        Args:
            ca: The CAIZEN asset.
            cr: The CAI record.

        Returns:
            The CAIZEN asset with attributes set.

        Raises:
            ValueError: If the enriched asset fails schema validation.
        """
        # The topic's short name is the last path segment of the CAI name.
        ca["attrs"]["display_name"] = cr.name.split("/")[-1]

        # Validate the enriched asset against its schema before returning.
        try:
            GCP_PUBSUB_TOPIC_ASSET_V1(**ca)
        except ValueError as e:
            raise ValueError(f"Error enriching GCP_PUBSUB_TOPIC asset: {e}") from e

        return ca
26 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/src/v1/providers/gcp/GCP_STORAGE_BUCKET.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 |
3 | from common.v1.providers.gcp.GCP_STORAGE_BUCKET import GCP_STORAGE_BUCKET_ASSET_V1
4 | from common.v1.schemas import CaizenAssetV1
5 | from src.schemas import CaiRecord
6 |
7 |
class GCP_STORAGE_BUCKET_V1:
    @staticmethod
    def enrich_attrs(ca: GCP_STORAGE_BUCKET_ASSET_V1, cr: CaiRecord) -> CaizenAssetV1:
        """
        Set the attributes for a GCP Storage Bucket asset.

        Args:
            ca: The CAIZEN asset.
            cr: The CAI record.

        Returns:
            The CAIZEN asset with attributes set.

        Raises:
            ValueError: If the enriched asset fails schema validation.
        """
        data = cr.resource.data
        # Hoisted: the IAM configuration sub-dict is read three times below.
        iam_config = data.get("iamConfiguration", {})

        ca["created"] = datetime.fromisoformat(data.get("timeCreated"))
        ca["attrs"]["storage_class"] = data.get(
            "storageClass", "STANDARD"
        )  # "STANDARD", "NEARLINE", "COLDLINE", "ARCHIVE"
        ca["attrs"]["iam"] = {
            "bucket_policy_only": iam_config.get("bucketPolicyOnly", {}).get(
                "enabled", False
            ),  # True, False
            "uniform_bucket_level_access": iam_config.get(
                "uniformBucketLevelAccess", {}
            ).get("enabled", False),  # True, False
            "block_public_access": iam_config.get(
                "publicAccessPrevention", "unspecified"
            ),  # "enforced", "unspecified"
        }
        ca["attrs"]["cors"] = data.get("cors", [])
        ca["attrs"]["labels"] = data.get("labels", {})
        ca["attrs"]["versioning"] = data.get("versioning", {}).get("enabled", False)
        ca["attrs"]["display_name"] = data.get("name", None)

        # Validate the enriched asset against its schema before returning.
        try:
            GCP_STORAGE_BUCKET_ASSET_V1(**ca)
        except ValueError as e:
            raise ValueError(f"Error enriching GCP Storage Bucket asset: {e}") from e

        return ca
48 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/src/v1/providers/gcp/__init__.py:
--------------------------------------------------------------------------------
import importlib
import os

# Import every GCP_* asset module in this directory so the provider classes
# (e.g. GCP_STORAGE_BUCKET_V1) are re-exported from this package.
for _file in os.listdir(os.path.dirname(os.path.realpath(__file__))):
    if _file.startswith("GCP_") and _file.endswith(".py"):
        _module = importlib.import_module(f"src.v1.providers.gcp.{_file[:-3]}")
        # Emulate `from <module> import *`, honoring __all__ when defined.
        _names = getattr(_module, "__all__", None)
        if _names is None:
            _names = [n for n in dir(_module) if not n.startswith("_")]
        globals().update({n: getattr(_module, n) for n in _names})
7 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/tests/badmsg.json:
--------------------------------------------------------------------------------
1 | {"message": {"attributes": {"XXXbucketId": "caizen-data-ingest", "eventTime": "2024-09-12T12:50:59.733210Z", "eventType": "OBJECT_FINALIZE", "notificationConfig": "projects/_/buckets/caizen-data-ingest/notificationConfigs/38", "objectGeneration": "1726145459720093", "objectId": "gcp-cai/organizations/684587186245/resource/pubsub.googleapis.com/Topic/0", "overwroteGeneration": "1726145452115746", "payloadFormat": "JSON_API_V1"}, "data": "ewogICJraW5kIjogInN0b3JhZ2Ujb2JqZWN0IiwKICAiaWQiOiAiY2FpemVuLWRhdGEtaW5nZXN0L2djcC1jYWkvb3JnYW5pemF0aW9ucy82ODQ1ODcxODYyNDUvcmVzb3VyY2UvcHVic3ViLmdvb2dsZWFwaXMuY29tL1RvcGljLzAvMTcyNjE0NTQ1OTcyMDA5MyIsCiAgInNlbGZMaW5rIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL3N0b3JhZ2UvdjEvYi9jYWl6ZW4tZGF0YS1pbmdlc3Qvby9nY3AtY2FpJTJGb3JnYW5pemF0aW9ucyUyRjY4NDU4NzE4NjI0NSUyRnJlc291cmNlJTJGcHVic3ViLmdvb2dsZWFwaXMuY29tJTJGVG9waWMlMkYwIiwKICAibmFtZSI6ICJnY3AtY2FpL29yZ2FuaXphdGlvbnMvNjg0NTg3MTg2MjQ1L3Jlc291cmNlL3B1YnN1Yi5nb29nbGVhcGlzLmNvbS9Ub3BpYy8wIiwKICAiYnVja2V0IjogImNhaXplbi1kYXRhLWluZ2VzdCIsCiAgImdlbmVyYXRpb24iOiAiMTcyNjE0NTQ1OTcyMDA5MyIsCiAgIm1ldGFnZW5lcmF0aW9uIjogIjEiLAogICJjb250ZW50VHlwZSI6ICJhcHBsaWNhdGlvbi94LW5kanNvbiIsCiAgInRpbWVDcmVhdGVkIjogIjIwMjQtMDktMTJUMTI6NTA6NTkuNzMzWiIsCiAgInVwZGF0ZWQiOiAiMjAyNC0wOS0xMlQxMjo1MDo1OS43MzNaIiwKICAic3RvcmFnZUNsYXNzIjogIlNUQU5EQVJEIiwKICAidGltZVN0b3JhZ2VDbGFzc1VwZGF0ZWQiOiAiMjAyNC0wOS0xMlQxMjo1MDo1OS43MzNaIiwKICAic2l6ZSI6ICIyOTMzIiwKICAibWVkaWFMaW5rIjogImh0dHBzOi8vc3RvcmFnZS5nb29nbGVhcGlzLmNvbS9kb3dubG9hZC9zdG9yYWdlL3YxL2IvY2FpemVuLWRhdGEtaW5nZXN0L28vZ2NwLWNhaSUyRm9yZ2FuaXphdGlvbnMlMkY2ODQ1ODcxODYyNDUlMkZyZXNvdXJjZSUyRnB1YnN1Yi5nb29nbGVhcGlzLmNvbSUyRlRvcGljJTJGMD9nZW5lcmF0aW9uPTE3MjYxNDU0NTk3MjAwOTMmYWx0PW1lZGlhIiwKICAiY3JjMzJjIjogIlQvTjNQZz09IiwKICAiY29tcG9uZW50Q291bnQiOiAxLAogICJldGFnIjogIkNKMmYxUGU0dllnREVBRT0iCn0K", "messageId": "12238172258572246", "message_id": "12238172258572246", "publishTime": "2024-09-12T12:50:59.754Z", "publish_time": "2024-09-12T12:50:59.754Z"}, "subscription": 
"projects/caizen-export/subscriptions/gcp-cai-caizen-subscription"}
2 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/tests/buckets.json:
--------------------------------------------------------------------------------
1 | {"message": {"attributes": {"bucketId": "caizen-data-ingest", "eventTime": "2024-09-12T12:50:59.733210Z", "eventType": "OBJECT_FINALIZE", "notificationConfig": "projects/_/buckets/caizen-data-ingest/notificationConfigs/38", "objectGeneration": "1726145459720093", "objectId": "gcp-cai/organizations/684587186245/resource/storage.googleapis.com/Bucket/0", "overwroteGeneration": "1726145452115746", "payloadFormat": "JSON_API_V1"}, "data": "ewogICJraW5kIjogInN0b3JhZ2Ujb2JqZWN0IiwKICAiaWQiOiAiY2FpemVuLWRhdGEtaW5nZXN0L2djcC1jYWkvb3JnYW5pemF0aW9ucy82ODQ1ODcxODYyNDUvcmVzb3VyY2UvcHVic3ViLmdvb2dsZWFwaXMuY29tL1RvcGljLzAvMTcyNjE0NTQ1OTcyMDA5MyIsCiAgInNlbGZMaW5rIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL3N0b3JhZ2UvdjEvYi9jYWl6ZW4tZGF0YS1pbmdlc3Qvby9nY3AtY2FpJTJGb3JnYW5pemF0aW9ucyUyRjY4NDU4NzE4NjI0NSUyRnJlc291cmNlJTJGcHVic3ViLmdvb2dsZWFwaXMuY29tJTJGVG9waWMlMkYwIiwKICAibmFtZSI6ICJnY3AtY2FpL29yZ2FuaXphdGlvbnMvNjg0NTg3MTg2MjQ1L3Jlc291cmNlL3B1YnN1Yi5nb29nbGVhcGlzLmNvbS9Ub3BpYy8wIiwKICAiYnVja2V0IjogImNhaXplbi1kYXRhLWluZ2VzdCIsCiAgImdlbmVyYXRpb24iOiAiMTcyNjE0NTQ1OTcyMDA5MyIsCiAgIm1ldGFnZW5lcmF0aW9uIjogIjEiLAogICJjb250ZW50VHlwZSI6ICJhcHBsaWNhdGlvbi94LW5kanNvbiIsCiAgInRpbWVDcmVhdGVkIjogIjIwMjQtMDktMTJUMTI6NTA6NTkuNzMzWiIsCiAgInVwZGF0ZWQiOiAiMjAyNC0wOS0xMlQxMjo1MDo1OS43MzNaIiwKICAic3RvcmFnZUNsYXNzIjogIlNUQU5EQVJEIiwKICAidGltZVN0b3JhZ2VDbGFzc1VwZGF0ZWQiOiAiMjAyNC0wOS0xMlQxMjo1MDo1OS43MzNaIiwKICAic2l6ZSI6ICIyOTMzIiwKICAibWVkaWFMaW5rIjogImh0dHBzOi8vc3RvcmFnZS5nb29nbGVhcGlzLmNvbS9kb3dubG9hZC9zdG9yYWdlL3YxL2IvY2FpemVuLWRhdGEtaW5nZXN0L28vZ2NwLWNhaSUyRm9yZ2FuaXphdGlvbnMlMkY2ODQ1ODcxODYyNDUlMkZyZXNvdXJjZSUyRnB1YnN1Yi5nb29nbGVhcGlzLmNvbSUyRlRvcGljJTJGMD9nZW5lcmF0aW9uPTE3MjYxNDU0NTk3MjAwOTMmYWx0PW1lZGlhIiwKICAiY3JjMzJjIjogIlQvTjNQZz09IiwKICAiY29tcG9uZW50Q291bnQiOiAxLAogICJldGFnIjogIkNKMmYxUGU0dllnREVBRT0iCn0K", "messageId": "12238172258572246", "message_id": "12238172258572246", "publishTime": "2024-09-12T12:50:59.754Z", "publish_time": "2024-09-12T12:50:59.754Z"}, "subscription": 
"projects/caizen-export/subscriptions/gcp-cai-caizen-subscription"}
2 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/tests/emptymsg.json:
--------------------------------------------------------------------------------
1 | {"message": {"attributes": {"bucketId": "caizen-data-ingest", "eventTime": "2024-09-12T12:50:59.733210Z", "eventType": "OBJECT_FINALIZE", "notificationConfig": "projects/_/buckets/caizen-data-ingest/notificationConfigs/38", "objectGeneration": "1726145459720093", "objectId": "gcp-cai/organizations/684587186245/resource/pubsub.googleapis.com/Topic/0", "overwroteGeneration": "1726145452115746", "payloadFormat": "JSON_API_V1"}, "data": "ewogICJraW5kIjogInN0b3JhZ2Ujb2JqZWN0IiwKICAiaWQiOiAiY2FpemVuLWRhdGEtaW5nZXN0L2djcC1jYWkvb3JnYW5pemF0aW9ucy82ODQ1ODcxODYyNDUvcmVzb3VyY2UvcHVic3ViLmdvb2dsZWFwaXMuY29tL1RvcGljLzAvMTcyNjE0NTQ1OTcyMDA5MyIsCiAgInNlbGZMaW5rIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL3N0b3JhZ2UvdjEvYi9jYWl6ZW4tZGF0YS1pbmdlc3Qvby9nY3AtY2FpJTJGb3JnYW5pemF0aW9ucyUyRjY4NDU4NzE4NjI0NSUyRnJlc291cmNlJTJGcHVic3ViLmdvb2dsZWFwaXMuY29tJTJGVG9waWMlMkYwIiwKICAibmFtZSI6ICJnY3AtY2FpL29yZ2FuaXphdGlvbnMvNjg0NTg3MTg2MjQ1L3Jlc291cmNlL3B1YnN1Yi5nb29nbGVhcGlzLmNvbS9Ub3BpYy8wIiwKICAiYnVja2V0IjogImNhaXplbi1kYXRhLWluZ2VzdCIsCiAgImdlbmVyYXRpb24iOiAiMTcyNjE0NTQ1OTcyMDA5MyIsCiAgIm1ldGFnZW5lcmF0aW9uIjogIjEiLAogICJjb250ZW50VHlwZSI6ICJhcHBsaWNhdGlvbi94LW5kanNvbiIsCiAgInRpbWVDcmVhdGVkIjogIjIwMjQtMDktMTJUMTI6NTA6NTkuNzMzWiIsCiAgInVwZGF0ZWQiOiAiMjAyNC0wOS0xMlQxMjo1MDo1OS43MzNaIiwKICAic3RvcmFnZUNsYXNzIjogIlNUQU5EQVJEIiwKICAidGltZVN0b3JhZ2VDbGFzc1VwZGF0ZWQiOiAiMjAyNC0wOS0xMlQxMjo1MDo1OS43MzNaIiwKICAic2l6ZSI6ICIwIiwKICAibWVkaWFMaW5rIjogImh0dHBzOi8vc3RvcmFnZS5nb29nbGVhcGlzLmNvbS9kb3dubG9hZC9zdG9yYWdlL3YxL2IvY2FpemVuLWRhdGEtaW5nZXN0L28vZ2NwLWNhaSUyRm9yZ2FuaXphdGlvbnMlMkY2ODQ1ODcxODYyNDUlMkZyZXNvdXJjZSUyRnB1YnN1Yi5nb29nbGVhcGlzLmNvbSUyRlRvcGljJTJGMD9nZW5lcmF0aW9uPTE3MjYxNDU0NTk3MjAwOTMmYWx0PW1lZGlhIiwKICAiY3JjMzJjIjogIlQvTjNQZz09IiwKICAiY29tcG9uZW50Q291bnQiOiAxLAogICJldGFnIjogIkNKMmYxUGU0dllnREVBRT0iCn0K", "messageId": "12238172258572246", "message_id": "12238172258572246", "publishTime": "2024-09-12T12:50:59.754Z", "publish_time": "2024-09-12T12:50:59.754Z"}, "subscription": 
"projects/caizen-export/subscriptions/gcp-cai-caizen-subscription"}
2 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/tests/folders.json:
--------------------------------------------------------------------------------
1 | {"message": {"attributes": {"bucketId": "caizen-data-ingest", "eventTime": "2024-09-12T12:50:59.733210Z", "eventType": "OBJECT_FINALIZE", "notificationConfig": "projects/_/buckets/caizen-data-ingest/notificationConfigs/38", "objectGeneration": "1726145459720093", "objectId": "gcp-cai/organizations/684587186245/resource/cloudresourcemanager.googleapis.com/Folder/0", "overwroteGeneration": "1726145452115746", "payloadFormat": "JSON_API_V1"}, "data": "ewogICJraW5kIjogInN0b3JhZ2Ujb2JqZWN0IiwKICAiaWQiOiAiY2FpemVuLWRhdGEtaW5nZXN0L2djcC1jYWkvb3JnYW5pemF0aW9ucy82ODQ1ODcxODYyNDUvcmVzb3VyY2UvcHVic3ViLmdvb2dsZWFwaXMuY29tL1RvcGljLzAvMTcyNjE0NTQ1OTcyMDA5MyIsCiAgInNlbGZMaW5rIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL3N0b3JhZ2UvdjEvYi9jYWl6ZW4tZGF0YS1pbmdlc3Qvby9nY3AtY2FpJTJGb3JnYW5pemF0aW9ucyUyRjY4NDU4NzE4NjI0NSUyRnJlc291cmNlJTJGcHVic3ViLmdvb2dsZWFwaXMuY29tJTJGVG9waWMlMkYwIiwKICAibmFtZSI6ICJnY3AtY2FpL29yZ2FuaXphdGlvbnMvNjg0NTg3MTg2MjQ1L3Jlc291cmNlL3B1YnN1Yi5nb29nbGVhcGlzLmNvbS9Ub3BpYy8wIiwKICAiYnVja2V0IjogImNhaXplbi1kYXRhLWluZ2VzdCIsCiAgImdlbmVyYXRpb24iOiAiMTcyNjE0NTQ1OTcyMDA5MyIsCiAgIm1ldGFnZW5lcmF0aW9uIjogIjEiLAogICJjb250ZW50VHlwZSI6ICJhcHBsaWNhdGlvbi94LW5kanNvbiIsCiAgInRpbWVDcmVhdGVkIjogIjIwMjQtMDktMTJUMTI6NTA6NTkuNzMzWiIsCiAgInVwZGF0ZWQiOiAiMjAyNC0wOS0xMlQxMjo1MDo1OS43MzNaIiwKICAic3RvcmFnZUNsYXNzIjogIlNUQU5EQVJEIiwKICAidGltZVN0b3JhZ2VDbGFzc1VwZGF0ZWQiOiAiMjAyNC0wOS0xMlQxMjo1MDo1OS43MzNaIiwKICAic2l6ZSI6ICIyOTMzIiwKICAibWVkaWFMaW5rIjogImh0dHBzOi8vc3RvcmFnZS5nb29nbGVhcGlzLmNvbS9kb3dubG9hZC9zdG9yYWdlL3YxL2IvY2FpemVuLWRhdGEtaW5nZXN0L28vZ2NwLWNhaSUyRm9yZ2FuaXphdGlvbnMlMkY2ODQ1ODcxODYyNDUlMkZyZXNvdXJjZSUyRnB1YnN1Yi5nb29nbGVhcGlzLmNvbSUyRlRvcGljJTJGMD9nZW5lcmF0aW9uPTE3MjYxNDU0NTk3MjAwOTMmYWx0PW1lZGlhIiwKICAiY3JjMzJjIjogIlQvTjNQZz09IiwKICAiY29tcG9uZW50Q291bnQiOiAxLAogICJldGFnIjogIkNKMmYxUGU0dllnREVBRT0iCn0K", "messageId": "12238172258572246", "message_id": "12238172258572246", "publishTime": "2024-09-12T12:50:59.754Z", "publish_time": "2024-09-12T12:50:59.754Z"}, "subscription": 
"projects/caizen-export/subscriptions/gcp-cai-caizen-subscription"}
2 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/tests/orgs.json:
--------------------------------------------------------------------------------
1 | {"message": {"attributes": {"bucketId": "caizen-data-ingest", "eventTime": "2024-09-12T12:50:59.733210Z", "eventType": "OBJECT_FINALIZE", "notificationConfig": "projects/_/buckets/caizen-data-ingest/notificationConfigs/38", "objectGeneration": "1726145459720093", "objectId": "gcp-cai/organizations/684587186245/resource/cloudresourcemanager.googleapis.com/Organization/0", "overwroteGeneration": "1726145452115746", "payloadFormat": "JSON_API_V1"}, "data": "ewogICJraW5kIjogInN0b3JhZ2Ujb2JqZWN0IiwKICAiaWQiOiAiY2FpemVuLWRhdGEtaW5nZXN0L2djcC1jYWkvb3JnYW5pemF0aW9ucy82ODQ1ODcxODYyNDUvcmVzb3VyY2UvcHVic3ViLmdvb2dsZWFwaXMuY29tL1RvcGljLzAvMTcyNjE0NTQ1OTcyMDA5MyIsCiAgInNlbGZMaW5rIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL3N0b3JhZ2UvdjEvYi9jYWl6ZW4tZGF0YS1pbmdlc3Qvby9nY3AtY2FpJTJGb3JnYW5pemF0aW9ucyUyRjY4NDU4NzE4NjI0NSUyRnJlc291cmNlJTJGcHVic3ViLmdvb2dsZWFwaXMuY29tJTJGVG9waWMlMkYwIiwKICAibmFtZSI6ICJnY3AtY2FpL29yZ2FuaXphdGlvbnMvNjg0NTg3MTg2MjQ1L3Jlc291cmNlL3B1YnN1Yi5nb29nbGVhcGlzLmNvbS9Ub3BpYy8wIiwKICAiYnVja2V0IjogImNhaXplbi1kYXRhLWluZ2VzdCIsCiAgImdlbmVyYXRpb24iOiAiMTcyNjE0NTQ1OTcyMDA5MyIsCiAgIm1ldGFnZW5lcmF0aW9uIjogIjEiLAogICJjb250ZW50VHlwZSI6ICJhcHBsaWNhdGlvbi94LW5kanNvbiIsCiAgInRpbWVDcmVhdGVkIjogIjIwMjQtMDktMTJUMTI6NTA6NTkuNzMzWiIsCiAgInVwZGF0ZWQiOiAiMjAyNC0wOS0xMlQxMjo1MDo1OS43MzNaIiwKICAic3RvcmFnZUNsYXNzIjogIlNUQU5EQVJEIiwKICAidGltZVN0b3JhZ2VDbGFzc1VwZGF0ZWQiOiAiMjAyNC0wOS0xMlQxMjo1MDo1OS43MzNaIiwKICAic2l6ZSI6ICIyOTMzIiwKICAibWVkaWFMaW5rIjogImh0dHBzOi8vc3RvcmFnZS5nb29nbGVhcGlzLmNvbS9kb3dubG9hZC9zdG9yYWdlL3YxL2IvY2FpemVuLWRhdGEtaW5nZXN0L28vZ2NwLWNhaSUyRm9yZ2FuaXphdGlvbnMlMkY2ODQ1ODcxODYyNDUlMkZyZXNvdXJjZSUyRnB1YnN1Yi5nb29nbGVhcGlzLmNvbSUyRlRvcGljJTJGMD9nZW5lcmF0aW9uPTE3MjYxNDU0NTk3MjAwOTMmYWx0PW1lZGlhIiwKICAiY3JjMzJjIjogIlQvTjNQZz09IiwKICAiY29tcG9uZW50Q291bnQiOiAxLAogICJldGFnIjogIkNKMmYxUGU0dllnREVBRT0iCn0K", "messageId": "12238172258572246", "message_id": "12238172258572246", "publishTime": "2024-09-12T12:50:59.754Z", "publish_time": "2024-09-12T12:50:59.754Z"}, 
"subscription": "projects/caizen-export/subscriptions/gcp-cai-caizen-subscription"}
2 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/tests/projects.json:
--------------------------------------------------------------------------------
1 | {"message": {"attributes": {"bucketId": "caizen-data-ingest", "eventTime": "2024-09-12T12:50:59.733210Z", "eventType": "OBJECT_FINALIZE", "notificationConfig": "projects/_/buckets/caizen-data-ingest/notificationConfigs/38", "objectGeneration": "1726145459720093", "objectId": "gcp-cai/organizations/684587186245/resource/cloudresourcemanager.googleapis.com/Project/0", "overwroteGeneration": "1726145452115746", "payloadFormat": "JSON_API_V1"}, "data": "ewogICJraW5kIjogInN0b3JhZ2Ujb2JqZWN0IiwKICAiaWQiOiAiY2FpemVuLWRhdGEtaW5nZXN0L2djcC1jYWkvb3JnYW5pemF0aW9ucy82ODQ1ODcxODYyNDUvcmVzb3VyY2UvcHVic3ViLmdvb2dsZWFwaXMuY29tL1RvcGljLzAvMTcyNjE0NTQ1OTcyMDA5MyIsCiAgInNlbGZMaW5rIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL3N0b3JhZ2UvdjEvYi9jYWl6ZW4tZGF0YS1pbmdlc3Qvby9nY3AtY2FpJTJGb3JnYW5pemF0aW9ucyUyRjY4NDU4NzE4NjI0NSUyRnJlc291cmNlJTJGcHVic3ViLmdvb2dsZWFwaXMuY29tJTJGVG9waWMlMkYwIiwKICAibmFtZSI6ICJnY3AtY2FpL29yZ2FuaXphdGlvbnMvNjg0NTg3MTg2MjQ1L3Jlc291cmNlL3B1YnN1Yi5nb29nbGVhcGlzLmNvbS9Ub3BpYy8wIiwKICAiYnVja2V0IjogImNhaXplbi1kYXRhLWluZ2VzdCIsCiAgImdlbmVyYXRpb24iOiAiMTcyNjE0NTQ1OTcyMDA5MyIsCiAgIm1ldGFnZW5lcmF0aW9uIjogIjEiLAogICJjb250ZW50VHlwZSI6ICJhcHBsaWNhdGlvbi94LW5kanNvbiIsCiAgInRpbWVDcmVhdGVkIjogIjIwMjQtMDktMTJUMTI6NTA6NTkuNzMzWiIsCiAgInVwZGF0ZWQiOiAiMjAyNC0wOS0xMlQxMjo1MDo1OS43MzNaIiwKICAic3RvcmFnZUNsYXNzIjogIlNUQU5EQVJEIiwKICAidGltZVN0b3JhZ2VDbGFzc1VwZGF0ZWQiOiAiMjAyNC0wOS0xMlQxMjo1MDo1OS43MzNaIiwKICAic2l6ZSI6ICIyOTMzIiwKICAibWVkaWFMaW5rIjogImh0dHBzOi8vc3RvcmFnZS5nb29nbGVhcGlzLmNvbS9kb3dubG9hZC9zdG9yYWdlL3YxL2IvY2FpemVuLWRhdGEtaW5nZXN0L28vZ2NwLWNhaSUyRm9yZ2FuaXphdGlvbnMlMkY2ODQ1ODcxODYyNDUlMkZyZXNvdXJjZSUyRnB1YnN1Yi5nb29nbGVhcGlzLmNvbSUyRlRvcGljJTJGMD9nZW5lcmF0aW9uPTE3MjYxNDU0NTk3MjAwOTMmYWx0PW1lZGlhIiwKICAiY3JjMzJjIjogIlQvTjNQZz09IiwKICAiY29tcG9uZW50Q291bnQiOiAxLAogICJldGFnIjogIkNKMmYxUGU0dllnREVBRT0iCn0K", "messageId": "12238172258572246", "message_id": "12238172258572246", "publishTime": "2024-09-12T12:50:59.754Z", "publish_time": "2024-09-12T12:50:59.754Z"}, "subscription": 
"projects/caizen-export/subscriptions/gcp-cai-caizen-subscription"}
2 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/tests/routes.json:
--------------------------------------------------------------------------------
1 | {"message": {"attributes": {"bucketId": "caizen-data-ingest", "eventTime": "2024-09-12T12:50:59.733210Z", "eventType": "OBJECT_FINALIZE", "notificationConfig": "projects/_/buckets/caizen-data-ingest/notificationConfigs/38", "objectGeneration": "1726145459720093", "objectId": "gcp-cai/organizations/684587186245/resource/compute.googleapis.com/Route/0", "overwroteGeneration": "1726145452115746", "payloadFormat": "JSON_API_V1"}, "data": "ewogICJraW5kIjogInN0b3JhZ2Ujb2JqZWN0IiwKICAiaWQiOiAiY2FpemVuLWRhdGEtaW5nZXN0L2djcC1jYWkvb3JnYW5pemF0aW9ucy82ODQ1ODcxODYyNDUvcmVzb3VyY2UvcHVic3ViLmdvb2dsZWFwaXMuY29tL1RvcGljLzAvMTcyNjE0NTQ1OTcyMDA5MyIsCiAgInNlbGZMaW5rIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL3N0b3JhZ2UvdjEvYi9jYWl6ZW4tZGF0YS1pbmdlc3Qvby9nY3AtY2FpJTJGb3JnYW5pemF0aW9ucyUyRjY4NDU4NzE4NjI0NSUyRnJlc291cmNlJTJGcHVic3ViLmdvb2dsZWFwaXMuY29tJTJGVG9waWMlMkYwIiwKICAibmFtZSI6ICJnY3AtY2FpL29yZ2FuaXphdGlvbnMvNjg0NTg3MTg2MjQ1L3Jlc291cmNlL3B1YnN1Yi5nb29nbGVhcGlzLmNvbS9Ub3BpYy8wIiwKICAiYnVja2V0IjogImNhaXplbi1kYXRhLWluZ2VzdCIsCiAgImdlbmVyYXRpb24iOiAiMTcyNjE0NTQ1OTcyMDA5MyIsCiAgIm1ldGFnZW5lcmF0aW9uIjogIjEiLAogICJjb250ZW50VHlwZSI6ICJhcHBsaWNhdGlvbi94LW5kanNvbiIsCiAgInRpbWVDcmVhdGVkIjogIjIwMjQtMDktMTJUMTI6NTA6NTkuNzMzWiIsCiAgInVwZGF0ZWQiOiAiMjAyNC0wOS0xMlQxMjo1MDo1OS43MzNaIiwKICAic3RvcmFnZUNsYXNzIjogIlNUQU5EQVJEIiwKICAidGltZVN0b3JhZ2VDbGFzc1VwZGF0ZWQiOiAiMjAyNC0wOS0xMlQxMjo1MDo1OS43MzNaIiwKICAic2l6ZSI6ICIyOTMzIiwKICAibWVkaWFMaW5rIjogImh0dHBzOi8vc3RvcmFnZS5nb29nbGVhcGlzLmNvbS9kb3dubG9hZC9zdG9yYWdlL3YxL2IvY2FpemVuLWRhdGEtaW5nZXN0L28vZ2NwLWNhaSUyRm9yZ2FuaXphdGlvbnMlMkY2ODQ1ODcxODYyNDUlMkZyZXNvdXJjZSUyRnB1YnN1Yi5nb29nbGVhcGlzLmNvbSUyRlRvcGljJTJGMD9nZW5lcmF0aW9uPTE3MjYxNDU0NTk3MjAwOTMmYWx0PW1lZGlhIiwKICAiY3JjMzJjIjogIlQvTjNQZz09IiwKICAiY29tcG9uZW50Q291bnQiOiAxLAogICJldGFnIjogIkNKMmYxUGU0dllnREVBRT0iCn0K", "messageId": "12238172258572246", "message_id": "12238172258572246", "publishTime": "2024-09-12T12:50:59.754Z", "publish_time": "2024-09-12T12:50:59.754Z"}, "subscription": 
"projects/caizen-export/subscriptions/gcp-cai-caizen-subscription"}
2 |
--------------------------------------------------------------------------------
/caizen/gcp_cai_func/tests/topics.json:
--------------------------------------------------------------------------------
1 | {"message": {"attributes": {"bucketId": "caizen-data-ingest", "eventTime": "2024-09-12T12:50:59.733210Z", "eventType": "OBJECT_FINALIZE", "notificationConfig": "projects/_/buckets/caizen-data-ingest/notificationConfigs/38", "objectGeneration": "1726145459720093", "objectId": "gcp-cai/organizations/684587186245/resource/pubsub.googleapis.com/Topic/0", "overwroteGeneration": "1726145452115746", "payloadFormat": "JSON_API_V1"}, "data": "ewogICJraW5kIjogInN0b3JhZ2Ujb2JqZWN0IiwKICAiaWQiOiAiY2FpemVuLWRhdGEtaW5nZXN0L2djcC1jYWkvb3JnYW5pemF0aW9ucy82ODQ1ODcxODYyNDUvcmVzb3VyY2UvcHVic3ViLmdvb2dsZWFwaXMuY29tL1RvcGljLzAvMTcyNjE0NTQ1OTcyMDA5MyIsCiAgInNlbGZMaW5rIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL3N0b3JhZ2UvdjEvYi9jYWl6ZW4tZGF0YS1pbmdlc3Qvby9nY3AtY2FpJTJGb3JnYW5pemF0aW9ucyUyRjY4NDU4NzE4NjI0NSUyRnJlc291cmNlJTJGcHVic3ViLmdvb2dsZWFwaXMuY29tJTJGVG9waWMlMkYwIiwKICAibmFtZSI6ICJnY3AtY2FpL29yZ2FuaXphdGlvbnMvNjg0NTg3MTg2MjQ1L3Jlc291cmNlL3B1YnN1Yi5nb29nbGVhcGlzLmNvbS9Ub3BpYy8wIiwKICAiYnVja2V0IjogImNhaXplbi1kYXRhLWluZ2VzdCIsCiAgImdlbmVyYXRpb24iOiAiMTcyNjE0NTQ1OTcyMDA5MyIsCiAgIm1ldGFnZW5lcmF0aW9uIjogIjEiLAogICJjb250ZW50VHlwZSI6ICJhcHBsaWNhdGlvbi94LW5kanNvbiIsCiAgInRpbWVDcmVhdGVkIjogIjIwMjQtMDktMTJUMTI6NTA6NTkuNzMzWiIsCiAgInVwZGF0ZWQiOiAiMjAyNC0wOS0xMlQxMjo1MDo1OS43MzNaIiwKICAic3RvcmFnZUNsYXNzIjogIlNUQU5EQVJEIiwKICAidGltZVN0b3JhZ2VDbGFzc1VwZGF0ZWQiOiAiMjAyNC0wOS0xMlQxMjo1MDo1OS43MzNaIiwKICAic2l6ZSI6ICIyOTMzIiwKICAibWVkaWFMaW5rIjogImh0dHBzOi8vc3RvcmFnZS5nb29nbGVhcGlzLmNvbS9kb3dubG9hZC9zdG9yYWdlL3YxL2IvY2FpemVuLWRhdGEtaW5nZXN0L28vZ2NwLWNhaSUyRm9yZ2FuaXphdGlvbnMlMkY2ODQ1ODcxODYyNDUlMkZyZXNvdXJjZSUyRnB1YnN1Yi5nb29nbGVhcGlzLmNvbSUyRlRvcGljJTJGMD9nZW5lcmF0aW9uPTE3MjYxNDU0NTk3MjAwOTMmYWx0PW1lZGlhIiwKICAiY3JjMzJjIjogIlQvTjNQZz09IiwKICAiY29tcG9uZW50Q291bnQiOiAxLAogICJldGFnIjogIkNKMmYxUGU0dllnREVBRT0iCn0K", "messageId": "12238172258572246", "message_id": "12238172258572246", "publishTime": "2024-09-12T12:50:59.754Z", "publish_time": "2024-09-12T12:50:59.754Z"}, "subscription": 
"projects/caizen-export/subscriptions/gcp-cai-caizen-subscription"}
2 |
--------------------------------------------------------------------------------
/docker/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: "3.9"
2 | services:
3 | memgraph:
4 | image: memgraph/memgraph-mage:latest
5 | container_name: memgraph-mage
6 | ports:
7 | - "7687:7687"
8 | - "7444:7444"
9 | command: ["--log-level=DEBUG"]
10 |
11 | ui:
12 | image: memgraph/lab:latest
13 | container_name: memgraph-lab
14 | ports:
15 | - "3000:3000"
16 | depends_on:
17 | - memgraph
18 | environment:
19 | - QUICK_CONNECT_MG_HOST=memgraph
20 | - QUICK_CONNECT_MG_PORT=7687
21 |
--------------------------------------------------------------------------------
/docs/CNAME:
--------------------------------------------------------------------------------
1 | caizen.cloud
2 |
--------------------------------------------------------------------------------
/docs/img/attackpaths.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/caizencloud/caizen/7cf35b79205e45db37de75839815e0a4ba9272eb/docs/img/attackpaths.png
--------------------------------------------------------------------------------
/docs/img/caizen-logo-dark.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/caizencloud/caizen/7cf35b79205e45db37de75839815e0a4ba9272eb/docs/img/caizen-logo-dark.png
--------------------------------------------------------------------------------
/docs/img/pathscores.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/caizencloud/caizen/7cf35b79205e45db37de75839815e0a4ba9272eb/docs/img/pathscores.png
--------------------------------------------------------------------------------
/docs/img/resources.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/caizencloud/caizen/7cf35b79205e45db37de75839815e0a4ba9272eb/docs/img/resources.png
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | {: style="height:153px;width:193px"}
2 |
3 | > Harness the security superpowers of your cloud asset inventory
4 |
5 | ```mermaid
6 | graph TD
7 | A[O: My GCP Org] --> B[F: Tenants]
8 | A --> Z[Caizen Project]
9 | Z --> Dep(Caizen Deployment)
10 | B --> G[P: Tenant Dev]
11 | B --> H[P: Tenant Stage]
12 | B --> I[P: Tenant Prod]
13 | ```
14 |
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | site_name: Caizen.Cloud
2 | theme:
3 | name: material
4 | language: en
5 | logo: img/mono-light-caizen-logo.svg
6 | favicon: img/favicon.png
7 | features:
8 | - content.code.copy
9 | - content.code.annotate
10 | - toc.follow
11 | - toc.integrate
12 | - navigation.instant
13 | - navigation.instant.progress
14 | - navigation.tracking
15 | - navigation.path
16 | palette:
17 | - media: "(prefers-color-scheme: light)"
18 | scheme: default
19 | toggle:
20 | icon: material/toggle-switch-off-outline
21 | name: Switch to dark mode
22 | primary: indigo
23 | accent: indigo
24 | - media: "(prefers-color-scheme: dark)"
25 | scheme: slate
26 | toggle:
27 | icon: material/toggle-switch
28 | name: Switch to light mode
29 | primary: indigo
30 | accent: indigo
31 |
32 | markdown_extensions:
33 | - pymdownx.highlight:
34 | anchor_linenums: true
35 | - pymdownx.inlinehilite
36 | - pymdownx.snippets
37 | - admonition
38 | - pymdownx.arithmatex:
39 | generic: true
40 | - footnotes
41 | - pymdownx.details
42 | - pymdownx.superfences
43 | - pymdownx.mark
44 | - attr_list
45 | - md_in_html
46 | - pymdownx.emoji:
47 | emoji_index: !!python/name:material.extensions.emoji.twemoji
48 |       emoji_generator: !!python/name:material.extensions.emoji.to_svg
49 | - pymdownx.superfences:
50 | custom_fences:
51 | - name: mermaid
52 | class: mermaid
53 | format: !!python/name:pymdownx.superfences.fence_code_format
54 | extra:
55 | social:
56 | - icon: fontawesome/brands/github-alt
57 | link: https://github.com/caizencloud/caizen
58 | - icon: fontawesome/brands/twitter
59 | link: https://twitter.com/bradgeesaman
60 | - icon: fontawesome/brands/linkedin
61 | link: https://linkedin.com/in/bradgeesaman
62 |
63 | copyright: |
64 | © 2023 Brad Geesaman
65 |
66 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.black]
2 | target-version = ['py311']
3 | include = '\.pyi?$'
4 | exclude = '''
5 | /(
6 | \.git
7 | | \__pycache__
8 | | \.hg
9 | | \.mypy_cache
10 | | \.tox
11 | | \.venv
12 | | _build
13 | | buck-out
14 | | build
15 | | dist
16 | | blib2to3
17 | | tests/data
18 | )/
19 | '''
20 |
--------------------------------------------------------------------------------
/terraform/main.tf:
--------------------------------------------------------------------------------
1 | # !!! Change the bucket name first
2 | # gcloud storage buckets create gs://caizen-tfstate/ --location nam4 --uniform-bucket-level-access
3 | # gcloud storage buckets update gs://caizen-tfstate/ --versioning --update-labels=owner=caizen
4 | # terraform init
5 | # terraform plan
6 | # terraform apply
7 |
8 | terraform {
9 | required_version = ">= 1.5.0"
10 | backend "gcs" {
11 | bucket = "caizen-tfstate"
12 | prefix = "caizen"
13 | }
14 |
15 | required_providers {
16 | google = {
17 | source = "hashicorp/google"
18 | version = "~> 6.2"
19 | }
20 | }
21 | }
22 |
23 | module "caizen_core" {
24 | source = "./modules/caizen_core"
25 |
26 | project_id = "caizen-export"
27 | location = "us-central1"
28 | name_prefix = "caizen"
29 | }
30 |
31 | module "cai_org" {
32 | source = "./modules/gcp_cai"
33 |
34 | project_id = module.caizen_core.project_id
35 | location = module.caizen_core.location
36 | name_prefix = module.caizen_core.name_prefix
37 | ingest_bucket_name = module.caizen_core.ingest_bucket_name
38 |
39 | target_type = "organization"
40 | target_id = "684587186245"
41 | schedule = "50 */12 * * *"
42 | }
43 |
--------------------------------------------------------------------------------
/terraform/modules/caizen_core/LICENSE:
--------------------------------------------------------------------------------
1 | ../../../LICENSE
--------------------------------------------------------------------------------
/terraform/modules/caizen_core/README.md:
--------------------------------------------------------------------------------
1 | # Caizen Core
2 |
3 | Resources:
4 | - google_storage_bucket.data_ingest - data collection bucket to be ingested into Caizen
--------------------------------------------------------------------------------
/terraform/modules/caizen_core/iam.tf:
--------------------------------------------------------------------------------
1 | // Service Account for the CAIZEN VM
2 | resource "google_service_account" "vm" {
3 | project = var.project_id
4 | account_id = "${var.name_prefix}-vm"
5 | display_name = "CAIZEN VM Service Account"
6 | }
7 |
8 | // Grant the project SA the ability to use pubsub and token creator
9 | data "google_project" "project" {
10 | project_id = var.project_id
11 | }
12 | resource "google_project_iam_member" "eventarc_token_creator" {
13 | project = var.project_id
14 | role = "roles/iam.serviceAccountTokenCreator"
15 | member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-pubsub.iam.gserviceaccount.com"
16 | }
17 | resource "google_project_iam_member" "eventarc_pubsub_publisher" {
18 | project = var.project_id
19 | role = "roles/pubsub.publisher"
20 | member = "serviceAccount:service-${data.google_project.project.number}@gs-project-accounts.iam.gserviceaccount.com"
21 | }
22 |
--------------------------------------------------------------------------------
/terraform/modules/caizen_core/main.tf:
--------------------------------------------------------------------------------
1 | // Enable required APIs
2 | resource "google_project_service" "enabled_apis" {
3 | for_each = toset(var.enabled_apis)
4 | project = var.project_id
5 | service = each.value
6 |
7 |   // Disable the API service again when this resource is destroyed
8 |   // NOTE(review): to prevent removal on destroy, this would need to be false
8 | disable_on_destroy = true
9 | }
10 |
--------------------------------------------------------------------------------
/terraform/modules/caizen_core/network.tf:
--------------------------------------------------------------------------------
1 | // VPC Network
2 | resource "google_compute_network" "vpc" {
3 | project = var.project_id
4 | name = "${var.name_prefix}-vpc"
5 |
6 | auto_create_subnetworks = false
7 | }
8 | // VPC Subnet
9 | resource "google_compute_subnetwork" "subnet" {
10 | project = var.project_id
11 | name = "${var.name_prefix}-${var.location}-subnet"
12 | region = var.location
13 | network = google_compute_network.vpc.self_link
14 |
15 | private_ip_google_access = true
16 | ip_cidr_range = var.vpc_subnet_ip_cidr_range
17 | }
18 |
--------------------------------------------------------------------------------
/terraform/modules/caizen_core/outputs.tf:
--------------------------------------------------------------------------------
1 | output "project_id" {
2 | value = var.project_id
3 | }
4 | output "location" {
5 | value = var.location
6 | }
7 | output "name_prefix" {
8 | value = var.name_prefix
9 | }
10 | output "ingest_bucket_name" {
11 | value = "${var.name_prefix}-${var.ingest_bucket_name}"
12 | }
13 |
--------------------------------------------------------------------------------
/terraform/modules/caizen_core/storage.tf:
--------------------------------------------------------------------------------
1 | // Main data_ingest bucket for collecting all data
2 | resource "google_storage_bucket" "data_ingest" {
3 | project = var.project_id
4 | location = var.location
5 |
6 | name = "${var.name_prefix}-${var.ingest_bucket_name}"
7 | labels = var.labels
8 |
9 | storage_class = var.ingest_bucket_storage_class
10 | public_access_prevention = "enforced"
11 | uniform_bucket_level_access = true
12 |
13 | // Object versioning to prevent accidental overwrites
14 | versioning {
15 | enabled = true
16 | }
17 |
18 | // Move to next storage class if not the latest version
19 | lifecycle_rule {
20 | action {
21 | type = "SetStorageClass"
22 | storage_class = var.ingest_next_storage_class
23 | }
24 | condition {
25 | num_newer_versions = 1
26 | with_state = "ARCHIVED"
27 | }
28 | }
29 |
30 | // Delete all objects after N days if they aren't the latest version
31 | lifecycle_rule {
32 | action {
33 | type = "Delete"
34 | }
35 |
36 | condition {
37 | age = var.ingest_bucket_name_delete_age_days
38 | num_newer_versions = 2
39 | with_state = "ARCHIVED"
40 | }
41 | }
42 |
43 | // Delete all objects after a year regardless
44 | lifecycle_rule {
45 | action {
46 | type = "Delete"
47 | }
48 |
49 | condition {
50 | age = 365
51 | }
52 | }
53 |
54 | // Prevent accidental deletion
55 | lifecycle {
56 | prevent_destroy = true
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/terraform/modules/caizen_core/variables.tf:
--------------------------------------------------------------------------------
1 | variable "project_id" {
2 | description = "The GCP project ID to deploy the resources to"
3 | type = string
4 | }
5 |
6 | variable "location" {
7 | description = "The GCP region to deploy the resources to"
8 | type = string
9 | }
10 |
11 | variable "name_prefix" {
12 | description = "The prefix to use for all resources"
13 | type = string
14 | default = "caizen"
15 | validation {
16 | condition = can(regex("^\\p{L}.*$", var.name_prefix))
17 | error_message = "name_prefix must start with a letter"
18 | }
19 | }
20 | variable "labels" {
21 | description = "The labels to apply to all resources"
22 | type = map(string)
23 | default = {
24 | "owner" = "caizen"
25 | }
26 | }
27 |
28 | variable "ingest_bucket_name" {
29 | description = "The name of the GCS bucket for all Caizen data ingest"
30 | type = string
31 | default = "data-ingest"
32 | }
33 | variable "ingest_bucket_storage_class" {
34 | description = "The storage class for the ingest bucket"
35 | type = string
36 | default = "STANDARD"
37 | }
38 | variable "ingest_next_storage_class" {
39 | description = "The storage class to transition objects to after the initial storage class"
40 | type = string
41 | default = "NEARLINE"
42 | }
43 | variable "ingest_next_storage_class_age_days" {
44 | description = "The age in days to transition the ingest bucket objects to the next storage class"
45 | type = number
46 | default = 7
47 | }
48 | variable "ingest_bucket_name_delete_age_days" {
49 | description = "The age in days to delete all ingest bucket objects"
50 | type = number
51 | default = 30
52 | }
53 |
54 | variable "enabled_apis" {
55 | description = "The list of APIs to enable for the project"
56 | type = list(string)
57 | default = [
58 | "cloudasset.googleapis.com",
59 | "cloudbuild.googleapis.com",
60 | "cloudresourcemanager.googleapis.com",
61 | "cloudfunctions.googleapis.com",
62 | "cloudscheduler.googleapis.com",
63 | "eventarc.googleapis.com",
64 | "iam.googleapis.com",
65 | "logging.googleapis.com",
66 | "monitoring.googleapis.com",
67 | "pubsub.googleapis.com",
68 | "run.googleapis.com",
69 | "serviceusage.googleapis.com",
70 | "storage.googleapis.com",
71 | "workflows.googleapis.com",
72 | ]
73 | }
74 |
75 | variable "vpc_subnet_ip_cidr_range" {
76 | description = "The IP CIDR range for the VPC subnet"
77 | type = string
78 | default = "10.100.0.0/24"
79 | }
80 |
--------------------------------------------------------------------------------
/terraform/modules/gcp_cai/LICENSE:
--------------------------------------------------------------------------------
1 | ../../../LICENSE
--------------------------------------------------------------------------------
/terraform/modules/gcp_cai/README.md:
--------------------------------------------------------------------------------
1 | # GCP Cloud Asset Inventory
2 |
--------------------------------------------------------------------------------
/terraform/modules/gcp_cai/function.tf:
--------------------------------------------------------------------------------
1 | resource "google_storage_bucket" "func_bucket" {
2 | project = var.project_id
3 | name = "${var.project_id}-gcf-source"
4 | location = var.location
5 |
6 | uniform_bucket_level_access = true
7 | versioning {
8 | enabled = true
9 | }
10 | }
11 |
12 | data "archive_file" "gcp_cai_function" {
13 | type = "zip"
14 | output_path = "/tmp/gcp_cai-function.zip"
15 | source_dir = "../caizen/gcp_cai_func/"
16 | excludes = [
17 | "*.terraform*",
18 | "*.git*",
19 | "*.DS_Store*",
20 | "*.pyc*",
21 | "__pycache__*",
22 | "*.lock",
23 | "*.toml",
24 | "*.zi*",
25 | "tests",
26 | "Makefile",
27 | "README.md"
28 | ]
29 | }
30 |
31 | resource "google_storage_bucket_object" "gcp_cai_function" {
32 | name = "gcp_cai/source.zip"
33 | bucket = google_storage_bucket.func_bucket.name
34 | source = data.archive_file.gcp_cai_function.output_path
35 | }
36 |
37 | locals {
38 | gcp_cai_function_name = "${var.module_name}-${var.name_prefix}-function"
39 | }
40 | resource "google_cloudfunctions2_function" "gcp_cai_function" {
41 | project = var.project_id
42 | name = local.gcp_cai_function_name
43 | location = var.location
44 | description = "GCP CAI Function"
45 |
46 | build_config {
47 | runtime = "python311"
48 | entry_point = "main"
49 | source {
50 | storage_source {
51 | bucket = google_storage_bucket.func_bucket.name
52 | object = google_storage_bucket_object.gcp_cai_function.name
53 | }
54 | }
55 | }
56 |
57 | service_config {
58 | max_instance_count = 10
59 | available_memory = "512M"
60 | timeout_seconds = 540
61 | environment_variables = {
62 | CAIZEN_API_URL = "https://localhost:8080"
63 | }
64 | ingress_settings = "ALLOW_INTERNAL_ONLY"
65 | all_traffic_on_latest_revision = true
66 | service_account_email = google_service_account.gcp_cai_function.email
67 | }
68 |
69 | depends_on = [google_storage_bucket_object.gcp_cai_function]
70 | lifecycle {
71 | replace_triggered_by = [
72 | google_storage_bucket_object.gcp_cai_function
73 | ]
74 | }
75 | }
76 |
77 |
--------------------------------------------------------------------------------
/terraform/modules/gcp_cai/iam.tf:
--------------------------------------------------------------------------------
1 | // Target specific GCP SA
2 | resource "google_service_account" "cai_sa" {
3 | project = var.project_id
4 | account_id = "${var.name_prefix}-c-${substr(var.target_type, 0, 1)}-${var.target_id}"
5 | display_name = "CAI Export - ${var.name_prefix}-${var.target_type}-${var.target_id}"
6 | description = "SA to fetch CAI data"
7 | }
8 |
9 | // If target_type is organization
10 | resource "google_organization_iam_member" "cai_org" {
11 | for_each = var.target_type == "organization" ? toset(var.target_roles) : []
12 | org_id = var.target_id
13 | role = each.value
14 | member = "serviceAccount:${google_service_account.cai_sa.email}"
15 | }
16 |
17 | // If target_type is folder
18 | resource "google_folder_iam_member" "cai_folder" {
19 | for_each = var.target_type == "folder" ? toset(var.target_roles) : []
20 | folder = "folders/${var.target_id}"
21 | role = each.value
22 | member = "serviceAccount:${google_service_account.cai_sa.email}"
23 | }
24 |
25 | // If target_type is project
26 | resource "google_project_iam_member" "cai_project" {
27 | for_each = var.target_type == "project" ? toset(var.target_roles) : []
28 | project = "projects/${var.target_id}"
29 | role = each.value
30 | member = "serviceAccount:${google_service_account.cai_sa.email}"
31 | }
32 |
33 | // Grant the SA local project roles
34 | resource "google_project_iam_member" "cai_local" {
35 | project = var.project_id
36 | for_each = toset(var.project_roles)
37 | role = each.value
38 | member = "serviceAccount:${google_service_account.cai_sa.email}"
39 | }
40 |
41 | locals {
42 | // The name of the GCS bucket for all CAI data ingest
43 | target_path = "${var.module_name}/${var.target_type}s/${var.target_id}"
44 | }
45 | // Grant the SA the ability to read/write to the GCS bucket
46 | resource "google_storage_bucket_iam_member" "ingest_bucket" {
47 | bucket = var.ingest_bucket_name
48 | role = "roles/storage.objectAdmin"
49 | member = "serviceAccount:${google_service_account.cai_sa.email}"
50 |
51 | condition {
52 | title = "CAI Export prefix"
53 | description = "Grant the SA the ability to read/write to the GCS bucket"
54 | expression = "resource.name.startsWith('projects/_/buckets/${var.ingest_bucket_name}/objects/${local.target_path}/')"
55 | }
56 | }
57 |
58 | // Scheduler SA and permissions
59 | resource "google_service_account" "scheduler_sa" {
60 | project = var.project_id
61 | account_id = "${var.name_prefix}-s-${substr(var.target_type, 0, 1)}-${var.target_id}"
62 | display_name = "Scheduler - ${var.name_prefix}-${var.target_type}-${var.target_id}"
63 | description = "SA to trigger the workflow on a schedule"
64 | }
65 | resource "google_project_iam_member" "schedulder_invoke_workflow" {
66 | project = var.project_id
67 | role = "roles/workflows.invoker"
68 | member = "serviceAccount:${google_service_account.scheduler_sa.email}"
69 | }
70 |
71 | // Allow GCS to send pubsub messages
72 | data "google_storage_project_service_account" "default" {
73 | project = var.project_id
74 | }
75 | resource "google_project_iam_member" "gcs_pubsub_publishing" {
76 | project = var.project_id
77 | role = "roles/pubsub.publisher"
78 | member = "serviceAccount:${data.google_storage_project_service_account.default.email_address}"
79 | }
80 |
81 | // Pubsub Push subscription SA to trigger the function
82 | resource "google_service_account" "pushsub_sa" {
83 | project = var.project_id
84 | account_id = "${var.name_prefix}-ps-${substr(var.target_type, 0, 1)}-${var.target_id}"
85 | display_name = "Push Sub - ${var.name_prefix}-${var.target_type}-${var.target_id}"
86 | description = "SA to trigger the function on object finalization"
87 | }
88 | resource "google_project_iam_member" "pushsub_invoker" {
89 | project = var.project_id
90 | role = "roles/run.invoker"
91 | member = "serviceAccount:${google_service_account.pushsub_sa.email}"
92 | }
93 |
94 | // Function SA
95 | resource "google_service_account" "gcp_cai_function" {
96 | project = var.project_id
97 | account_id = "${var.name_prefix}-gcp-cai-func"
98 | display_name = "GCP CAI Function SA"
99 | }
100 | data "google_project" "project" {
101 | project_id = var.project_id
102 | }
103 |
104 | // Cloudbuild permissions to deploy the function
105 | locals {
106 | roles = [
107 | "roles/cloudfunctions.developer",
108 | "roles/storage.admin",
109 | "roles/iam.serviceAccountUser",
110 | ]
111 | }
112 | resource "google_project_iam_member" "gcp_cai_cloudbuild" {
113 | project = var.project_id
114 | for_each = toset(local.roles)
115 | role = each.value
116 | member = "serviceAccount:${data.google_project.project.number}@cloudbuild.gserviceaccount.com"
117 | }
118 | resource "google_project_iam_member" "cloudbuild" {
119 | project = var.project_id
120 | role = "roles/cloudbuild.builds.builder"
121 | member = "serviceAccount:${data.google_project.project.number}-compute@developer.gserviceaccount.com"
122 | }
123 |
--------------------------------------------------------------------------------
/terraform/modules/gcp_cai/pubsub.tf:
--------------------------------------------------------------------------------
1 | // Pubsub topic for storage notifications
2 | resource "google_pubsub_topic" "gcp_cai_topic" {
3 | project = var.project_id
4 | name = "${var.module_name}-${var.name_prefix}-topic"
5 | labels = var.labels
6 |
7 | message_retention_duration = "86400s"
8 | }
9 |
10 | // GCS notification to pubsub topic
11 | resource "google_storage_notification" "gcp_cai_notification" {
12 | bucket = var.ingest_bucket_name
13 | topic = google_pubsub_topic.gcp_cai_topic.id
14 |
15 | payload_format = "JSON_API_V1"
16 | event_types = ["OBJECT_FINALIZE"]
17 | }
18 |
19 | // Pubsub subscription to trigger the function
20 | resource "google_pubsub_subscription" "gcp_cai_subscription" {
21 | project = var.project_id
22 | name = "${var.module_name}-${var.name_prefix}-subscription"
23 | topic = google_pubsub_topic.gcp_cai_topic.name
24 |
25 | ack_deadline_seconds = 600
26 | message_retention_duration = "86400s"
27 |
28 | filter = "(attributes.eventType = \"OBJECT_FINALIZE\") AND (attributes.bucketId = \"${var.ingest_bucket_name}\") AND (hasPrefix(attributes.objectId, \"${var.module_name}/\"))"
29 |
30 | push_config {
31 | push_endpoint = "https://${local.gcp_cai_function_name}-${data.google_project.project.number}.${var.location}.run.app"
32 | oidc_token {
33 | service_account_email = google_service_account.pushsub_sa.email
34 | }
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/terraform/modules/gcp_cai/scheduler.tf:
--------------------------------------------------------------------------------
1 |
2 | locals {
3 | input_data = {
4 | "content_type" : var.target_content_type,
5 | "max_retries" : var.workflow_max_retries,
6 | "output_bucket" : var.ingest_bucket_name,
7 | "output_prefix" : var.module_name,
8 | "sleep_seconds" : var.workflow_sleep_seconds,
9 | "target_id" : var.target_id,
10 | "target_type" : var.target_type
11 | }
12 | }
13 |
14 | resource "google_cloud_scheduler_job" "workflow_trigger" {
15 | project = var.project_id
16 | name = "${var.module_name}-${var.target_type}s-${var.target_id}"
17 | description = "Caizen ${var.module_name}-${var.target_type}s-${var.target_id} workflow trigger"
18 | region = var.location
19 |
20 | schedule = var.schedule
21 | time_zone = "Etc/UTC"
22 |
23 | http_target {
24 | http_method = "POST"
25 | uri = "https://workflowexecutions.googleapis.com/v1/projects/${var.project_id}/locations/${var.location}/workflows/${google_workflows_workflow.gcp_cai.name}/executions"
26 |
27 | # Pass input data to the workflow
28 | body = base64encode(<<-EOM
29 | { "argument": ${jsonencode(jsonencode((local.input_data)))}}
30 | EOM
31 | )
32 |
33 | oauth_token {
34 | service_account_email = google_service_account.scheduler_sa.email
35 | }
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/terraform/modules/gcp_cai/variables.tf:
--------------------------------------------------------------------------------
1 | variable "project_id" {
2 | description = "The project ID to deploy the resources"
3 | type = string
4 | }
5 | variable "location" {
6 | description = "The location to deploy the resources"
7 | type = string
8 | }
9 | variable "name_prefix" {
10 | description = "The prefix to use for all resources"
11 | type = string
12 | default = "caizen"
13 | validation {
14 | condition = can(regex("^\\p{L}.*$", var.name_prefix))
15 | error_message = "name_prefix must start with a letter"
16 | }
17 | }
18 | variable "ingest_bucket_name" {
19 | description = "The name of the GCS bucket for all Caizen data ingest"
20 | type = string
21 | }
22 | variable "module_name" {
23 | description = "The name of the module"
24 | type = string
25 | default = "gcp-cai"
26 | }
27 | variable "labels" {
28 | description = "The labels to apply to all resources"
29 | type = map(string)
30 | default = {
31 | "owner" = "caizen"
32 | }
33 | }
34 | variable "workflow_call_log_level" {
35 | description = "The log level for the workflow"
36 | type = string
37 | default = "LOG_ERRORS_ONLY"
38 | validation {
39 | condition = can(regex("^(CALL_LOG_LEVEL_UNSPECIFIED|LOG_ALL_CALLS|LOG_ERRORS_ONLY|LOG_NONE)$", var.workflow_call_log_level))
40 | error_message = "workflow_call_log_level must be one of CALL_LOG_LEVEL_UNSPECIFIED, LOG_ALL_CALLS, LOG_ERRORS_ONLY, or LOG_NONE"
41 | }
42 | }
43 | variable "schedule" {
44 | description = "The schedule for the workflow"
45 | type = string
46 |   default     = "*/6 * * * *" # Every 6 minutes
47 | validation {
48 | condition = can(regex("^\\S+ \\S+ \\S+ \\S+ \\S+$", var.schedule))
49 | error_message = "schedule must be in the format 'minute hour day month weekday'"
50 | }
51 | }
52 | variable "workflow_max_retries" {
53 | description = "The maximum number of retries for the workflow"
54 | type = number
55 | default = 30
56 | validation {
57 | condition = can(regex("^[0-9]+$", var.workflow_max_retries))
58 | error_message = "workflow_max_retries must be a number"
59 | }
60 | }
61 | variable "workflow_sleep_seconds" {
62 | description = "The number of seconds to sleep between retries"
63 | type = number
64 | default = 30
65 | validation {
66 | condition = can(regex("^[0-9]+$", var.workflow_sleep_seconds))
67 | error_message = "workflow_sleep_seconds must be a number"
68 | }
69 | }
70 |
71 | variable "target_type" {
72 | description = "The type of the target resource"
73 | type = string
74 | validation {
75 | condition = can(regex("^(organization|folder|project)$", var.target_type))
76 | error_message = "target_type must be one of organization, folder, or project"
77 | }
78 | }
79 | variable "target_id" {
80 | description = "The ID of the target resource"
81 | type = string
82 | validation {
83 | condition = can(regex("^[0-9]{10,12}$", var.target_id))
84 | error_message = "target_id must be a 10-12 digit number"
85 | }
86 | }
87 | variable "target_content_type" {
88 | description = "The content type for the export"
89 | type = string
90 | default = "resource"
91 | validation {
92 | condition = can(regex("^(resource|iam_policy|org_policy|os_inventory)$", var.target_content_type))
93 | error_message = "content_type must be one of resource, iam_policy, org_policy, or os_inventory"
94 | }
95 | }
96 | variable "target_roles" {
97 | description = "The roles to grant to get the CAI data"
98 | type = list(string)
99 | default = ["roles/cloudasset.viewer"]
100 | }
101 |
102 | variable "project_roles" {
103 | description = "The roles to grant to the SA in the local project"
104 | type = list(string)
105 | default = [
106 | "roles/logging.logWriter",
107 | "roles/monitoring.metricWriter",
108 | ]
109 | }
110 |
--------------------------------------------------------------------------------
/terraform/modules/gcp_cai/workflow.tf:
--------------------------------------------------------------------------------
1 | resource "google_workflows_workflow" "gcp_cai" {
2 | project = var.project_id
3 | name = "${var.module_name}-${var.target_type}s-${var.target_id}"
4 | region = var.location
5 | description = "GCP Workflow for ${var.module_name}/${var.target_type}s/${var.target_id}"
6 |
7 | labels = var.labels
8 | service_account = google_service_account.cai_sa.email
9 | call_log_level = var.workflow_call_log_level
10 | source_contents = <= max_retries}
64 | next: error
65 | - condition: true
66 | next: increment_retry
67 | - increment_retry:
68 | assign:
69 | - retry_count: $${retry_count + 1}
70 | next: wait_for_operation_status
71 | - error:
72 | raise: "Max retries reached and operation is not complete."
73 | - finish_workflow:
74 | return: $${operation_status.body.response.outputResult}
75 |
76 | execute_export:
77 | params: [target_type, target_id, output_bucket, export_prefix, content_type]
78 | steps:
79 | - export_assets:
80 | call: http.post
81 | args:
82 | url: $${"https://cloudasset.googleapis.com/v1/"+ target_type +"/"+ target_id +":exportAssets"}
83 | auth:
84 | type: OAuth2
85 | scopes: "https://www.googleapis.com/auth/cloud-platform"
86 | body:
87 | outputConfig:
88 | gcsDestination:
89 | uri_prefix: $${"gs://"+ output_bucket +"/"+ export_prefix}
90 | contentType: "$${text.to_upper(content_type)}"
91 | result: export_result
92 | - return:
93 | return: $${export_result}
94 | EOF
95 | }
96 |
--------------------------------------------------------------------------------