├── tests
│   ├── __init__.py
│   ├── cli
│   │   ├── __init__.py
│   │   ├── test_list.py
│   │   ├── test_delete.py
│   │   ├── test_config.py
│   │   ├── test_file_ignore.py
│   │   ├── test_download.py
│   │   ├── test_trigger.py
│   │   ├── test_update.py
│   │   ├── base.py
│   │   ├── test_diffs.py
│   │   ├── test_upload.py
│   │   └── test_create.py
│   ├── graph
│   │   ├── __init__.py
│   │   └── test_lookup.py
│   ├── configuration
│   │   ├── __init__.py
│   │   ├── utils.py
│   │   ├── test_config_editor.py
│   │   └── test_directory_editor.py
│   └── test_metadata.py
├── patterns
│   ├── cli
│   │   ├── __init__.py
│   │   ├── commands
│   │   │   ├── __init__.py
│   │   │   ├── _common.py
│   │   │   ├── logout.py
│   │   │   ├── delete.py
│   │   │   ├── login.py
│   │   │   ├── update.py
│   │   │   ├── config.py
│   │   │   ├── trigger.py
│   │   │   ├── download.py
│   │   │   ├── list.py
│   │   │   ├── upload.py
│   │   │   └── create.py
│   │   ├── services
│   │   │   ├── __init__.py
│   │   │   ├── accounts.py
│   │   │   ├── delete.py
│   │   │   ├── paths.py
│   │   │   ├── webhooks.py
│   │   │   ├── graph_list.py
│   │   │   ├── trigger.py
│   │   │   ├── graph_components.py
│   │   │   ├── download.py
│   │   │   ├── organizations.py
│   │   │   ├── upload.py
│   │   │   ├── secrets.py
│   │   │   ├── graph_versions.py
│   │   │   ├── pagination.py
│   │   │   ├── logout.py
│   │   │   ├── graph_path.py
│   │   │   ├── versions.py
│   │   │   ├── diffs.py
│   │   │   ├── output.py
│   │   │   ├── login.py
│   │   │   ├── auth.py
│   │   │   ├── api.py
│   │   │   └── lookup.py
│   │   ├── configuration
│   │   │   ├── __init__.py
│   │   │   └── edit.py
│   │   ├── config.py
│   │   ├── helpers.py
│   │   └── main.py
│   ├── node
│   │   ├── __init__.py
│   │   └── node.py
│   └── __init__.py
├── .flake8
├── scripts
│   └── make_docs.py
├── pyproject.toml
├── .github
│   └── workflows
│       └── test.yml
├── LICENSE
├── .gitignore
├── assets
│   └── logo.svg
└── README.md

/tests/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/tests/cli/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/tests/graph/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/patterns/cli/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/patterns/node/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/patterns/cli/commands/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/patterns/cli/services/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/tests/configuration/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/patterns/cli/configuration/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
[flake8]
ignore = E203, E266, E501, W503, F403, F401, F811, F541, F722
max-line-length = 88
max-complexity = 18
select = B,C,E,F,W,T4,B9
exclude = examples,.venv
--------------------------------------------------------------------------------
/patterns/cli/commands/_common.py:
--------------------------------------------------------------------------------
from typer import Argument

app_argument_help = (
    "The slug or uid of an app or app version, or the path to an app's graph.yml"
)
app_argument = Argument(None, help=app_argument_help, show_default=False)
--------------------------------------------------------------------------------
/patterns/cli/commands/logout.py:
--------------------------------------------------------------------------------
from patterns.cli.services import logout as logout_service
from patterns.cli.services.api import reset_session_auth


def logout():
    """Log out of your Patterns account"""
    reset_session_auth()
    logout_service.logout()
--------------------------------------------------------------------------------
/patterns/cli/services/accounts.py:
--------------------------------------------------------------------------------
from __future__ import annotations

from requests import Session

from patterns.cli.services.api import Endpoints, get_json


def me(session: Session = None) -> dict:
    return get_json(Endpoints.ACCOUNTS_ME, session=session)
--------------------------------------------------------------------------------
/patterns/cli/services/delete.py:
--------------------------------------------------------------------------------
from __future__ import annotations

from requests import Session

from patterns.cli.services.api import Endpoints, delete


def delete_graph(graph_uid: str, session: Session = None):
    delete(Endpoints.graph_delete(graph_uid), session=session)
--------------------------------------------------------------------------------
/patterns/cli/services/paths.py:
--------------------------------------------------------------------------------
from pathlib import Path


def is_relative_to(self: Path, other: Path) -> bool:
    """Backport of Path.is_relative_to, which was added in Python 3.9"""
    try:
        self.relative_to(other)
        return True
    except ValueError:
        return False
--------------------------------------------------------------------------------
/scripts/make_docs.py:
--------------------------------------------------------------------------------
import re

s = open("node.md").read()
s += open("meth.md").read()
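# Clean up the concatenated docs: drop @classmethod decorators and cls
# parameters from the signatures, promote h4 headings to h3, and start a
# fenced python block at each "::" marker.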
s = re.sub(r"@classmethod\s*", "", s)
s = re.sub(r"cls,?\s*", "", s)
s = re.sub(r"####", "###", s)
s = re.sub(r"::\s*", "\n\n```python #FIXME\n", s)

with open("docs.md", "w") as f:
    f.write(s)
--------------------------------------------------------------------------------
/patterns/__init__.py:
--------------------------------------------------------------------------------
from .node.node import (
    Connection,
    Parameter,
    State,
    Stream,
    Table,
    TableVersion,
)


__all__ = [
    "Connection",
    "Parameter",
    "State",
    "Stream",
    "Table",
    "TableVersion",
]

__version__ = "1.7.0"
--------------------------------------------------------------------------------
/patterns/cli/services/webhooks.py:
--------------------------------------------------------------------------------
from __future__ import annotations

from requests import Session

from patterns.cli.services.api import Endpoints, get_json
from patterns.cli.services.pagination import paginated


@paginated
def paginated_webhooks(graph_uid: str, session: Session = None):
    return get_json(Endpoints.webhooks_list(graph_uid), session=session)
--------------------------------------------------------------------------------
/tests/test_metadata.py:
--------------------------------------------------------------------------------
import re
from pathlib import Path
import patterns


def test_pyproject_and_package_versions_are_in_sync():
    path = (Path(__file__).parent.parent / "pyproject.toml").resolve()

    pyproject_version = re.findall(
        r'^version = "(\d+\.\d+\.\d+)"$', path.read_text(), re.M
    )
    assert patterns.__version__ == pyproject_version[0]
--------------------------------------------------------------------------------
/patterns/cli/services/graph_list.py:
--------------------------------------------------------------------------------
from __future__ import annotations

from requests import Session

from patterns.cli.services.api import Endpoints, get_json
from patterns.cli.services.pagination import paginated


@paginated
def paginated_graphs(organization_uid: str, session: Session = None):
    return get_json(Endpoints.graphs_list(organization_uid), session=session)
--------------------------------------------------------------------------------
/tests/cli/test_list.py:
--------------------------------------------------------------------------------
from pathlib import Path

from patterns.cli.services.api import Endpoints
from tests.cli.base import request_mocker, set_tmp_dir, run_cli


def test_list_graphs(tmp_path: Path):
    set_tmp_dir(tmp_path)
    with request_mocker() as m:
        m.get(
            Endpoints.graphs_list("test-org-uid"),
            json={"results": [{"name": "name"}], "next": None},
        )
        result = run_cli("list apps --json")
        assert "name" in result.output
--------------------------------------------------------------------------------
/tests/cli/test_delete.py:
--------------------------------------------------------------------------------
from pathlib import Path

from patterns.cli.services.api import Endpoints
from tests.cli.base import request_mocker, set_tmp_dir, run_cli


def test_delete(tmp_path: Path):
    set_tmp_dir(tmp_path)

    with request_mocker() as m:
        m.delete(Endpoints.graph_delete("2"))
        m.get(Endpoints.graph_by_slug("test-org-uid", "test-graph"), json={"uid": "2"})

        result = run_cli("delete -f test-graph")
        assert "App deleted" in result.output
--------------------------------------------------------------------------------
/patterns/cli/services/trigger.py:
--------------------------------------------------------------------------------
from __future__ import annotations

from typing import List

from requests import Session

from patterns.cli.services.api import Endpoints, post_for_json


def trigger_node(
    graph_uid: str,
    node_id: str,
    execution_type: str,
    session: Session = None,
) -> List[dict]:
    return post_for_json(
        Endpoints.trigger_node(graph_uid, node_id),
        json={
            "execution_type": execution_type,
        },
        session=session,
    )
--------------------------------------------------------------------------------
/patterns/cli/services/graph_components.py:
--------------------------------------------------------------------------------
from __future__ import annotations

from requests import Session

from patterns.cli.services.api import Endpoints, post_for_json, patch


def create_graph_component(graph_version_uid: str, session: Session = None) -> dict:
    body = {"graph_version_uid": graph_version_uid}
    return post_for_json(Endpoints.COMPONENTS_CREATE, json=body, session=session)


def update_graph_component(graph_uid: str, deprecated: bool, session: Session = None):
    body = {"deprecated": deprecated}
    return patch(Endpoints.component_update(graph_uid), json=body, session=session)
--------------------------------------------------------------------------------
/tests/cli/test_config.py:
--------------------------------------------------------------------------------
from pathlib import Path

from patterns.cli.config import read_devkit_config
from patterns.cli.services.api import Endpoints
from tests.cli.base import set_tmp_dir, run_cli, request_mocker


def test_config_org_and_env(tmp_path: Path):
    set_tmp_dir(tmp_path)
    old_cfg = read_devkit_config()
    assert old_cfg.organization_id == "test-org-uid"

    with request_mocker() as m:
        m.get(Endpoints.organization_by_slug("org"), json={"uid": "org-uid"})
        run_cli("config -o org")
        new_cfg = read_devkit_config()
        assert new_cfg.organization_id == "org-uid"
--------------------------------------------------------------------------------
/tests/configuration/utils.py:
--------------------------------------------------------------------------------
import textwrap
from pathlib import Path
from typing import Dict


def setup_graph_files(root: Path, files: Dict[str, str]):
    for path, content in files.items():
        content = textwrap.dedent(content).strip()
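        # .py files may be given as just a parameter list; wrap them in a
        # minimal @node function and prepend the patterns import so they are
        # valid node files.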
        if path.endswith(".py"):
            if not content.startswith("@node"):
                content = f"@node\ndef generated_node(\n{content}\n):\n pass"
            content = "from patterns import *\n\n" + content
        abspath = root / path
        if len(Path(path).parts) > 1:
            abspath.parent.mkdir(parents=True, exist_ok=True)
        abspath.write_text(content)
--------------------------------------------------------------------------------
/tests/cli/test_file_ignore.py:
--------------------------------------------------------------------------------
from pathlib import Path

from patterns.cli.helpers import _all_files_not_ignored


def test_ignore_file(tmp_path: Path):
    f1 = tmp_path / "__pycache__" / "settings.xml"
    f1.parent.mkdir()
    f1.write_text("<>")

    f2 = tmp_path / ".DS_Store" / "foo"
    f2.parent.mkdir()
    f2.write_text("foo")

    f3 = tmp_path / "my.venv" / "foo.txt"
    f3.parent.mkdir()
    f3.write_text("foo")

    f4 = tmp_path / "p.pyc"
    f4.write_text("foo")

    f5 = tmp_path / "my.pycx"
    f5.write_text("foo")

    assert sorted(_all_files_not_ignored(tmp_path)) == sorted([f3, f5])
--------------------------------------------------------------------------------
/patterns/cli/services/download.py:
--------------------------------------------------------------------------------
from __future__ import annotations

import re

from requests import Session

from patterns.cli.services.api import Endpoints, get

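# Component keys have the form "organization-slug/component-slug@version".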
COMPONENT_RE = re.compile(r"([\w\-]+)/([\w\-]+)@([\w\-.]+)")


def download_graph_zip(graph_version_uid: str, session: Session = None) -> bytes:
    resp = get(Endpoints.graph_version_download(graph_version_uid), session=session)
    return resp.content


def download_component_zip(component_key: str, session: Session = None) -> bytes:
    org, comp, v = COMPONENT_RE.fullmatch(component_key).groups()
    resp = get(Endpoints.component_download(org, comp, v), session=session)
    return resp.content
--------------------------------------------------------------------------------
/patterns/cli/services/organizations.py:
--------------------------------------------------------------------------------
from __future__ import annotations

from requests import Session

from patterns.cli.services.api import Endpoints, get_json
from patterns.cli.services.pagination import paginated


def get_organization_by_name(name: str, session: Session = None) -> dict:
    return get_json(Endpoints.organization_by_slug(name), session=session)


def get_organization_by_id(organization_uid: str, session: Session = None) -> dict:
    return get_json(Endpoints.organization_by_id(organization_uid), session=session)


@paginated
def paginated_organizations(session: Session = None):
    return get_json(Endpoints.ORGANIZATIONS_LIST, session=session)
--------------------------------------------------------------------------------
/patterns/cli/services/upload.py:
--------------------------------------------------------------------------------
import json
from pathlib import Path

from requests import Session

from patterns.cli.configuration.edit import GraphDirectoryEditor
from patterns.cli.services.api import Endpoints, post_for_json


def upload_graph_version(
    graph_yaml_path: Path,
    organization_uid: str,
    add_missing_node_ids: bool,
    slug: str = None,
    session: Session = None,
) -> dict:
    editor = GraphDirectoryEditor(graph_yaml_path)
    if add_missing_node_ids:
        editor.add_missing_node_ids()
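    # The version create endpoint takes a multipart request: a JSON "payload"
    # field plus the zipped graph directory as the uploaded file.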
    payload = {
        "slug": slug or editor.graph_slug(),
        "root_yaml_path": editor.yml_path.name,
    }
    return post_for_json(
        Endpoints.graph_version_create(organization_uid),
        data={"payload": json.dumps(payload)},
        files={"file": editor.compress_directory()},
        session=session,
    )
--------------------------------------------------------------------------------
/patterns/cli/services/secrets.py:
--------------------------------------------------------------------------------
from __future__ import annotations

from typing import List

from requests import Session

from patterns.cli.services.api import Endpoints, post_for_json, get_json
from patterns.cli.services.pagination import paginated


def create_secret(
    organization_uid: str,
    name: str,
    value: str,
    description: str,
    sensitive: bool,
    session: Session = None,
) -> List[dict]:
    return post_for_json(
        Endpoints.org_secrets(organization_uid),
        json={
            "name": name,
            "value": value,
            "description": description,
            "sensitive": sensitive,
        },
        session=session,
    )


@paginated
def paginated_secrets(organization_uid: str, session: Session = None):
    return get_json(Endpoints.org_secrets(organization_uid), session=session)
--------------------------------------------------------------------------------
/patterns/cli/services/graph_versions.py:
--------------------------------------------------------------------------------
from __future__ import annotations

from requests import Session

from patterns.cli.services.api import Endpoints, get_json, patch


def get_graph_by_slug(
    organization_uid: str, slug: str, session: Session = None
) -> dict:
    return get_json(Endpoints.graph_by_slug(organization_uid, slug), session=session)


def get_graph_by_uid(graph_uid: str, session: Session = None) -> dict:
    return get_json(Endpoints.graphs_latest(graph_uid), session=session)


def update_graph(graph_uid: str, public: bool):
    patch(Endpoints.graph_update(graph_uid), json={"public": public})


def get_graph_version_by_uid(graph_version_uid, session: Session = None) -> dict:
    return get_json(Endpoints.graph_version_by_id(graph_version_uid), session=session)


def get_latest_graph_version(graph_uid: str, session: Session = None) -> dict:
    return get_graph_by_uid(graph_uid, session=session)["active_graph_version"]
--------------------------------------------------------------------------------
/tests/cli/test_download.py:
--------------------------------------------------------------------------------
import io
from pathlib import Path
from zipfile import ZipFile

from patterns.cli.services.api import Endpoints
from tests.cli.base import request_mocker, set_tmp_dir, run_cli


def test_download(tmp_path: Path):
    dr = set_tmp_dir(tmp_path).parent
    path = dr / "name"
    content = "nodes: []"

    with request_mocker() as m:
        b = io.BytesIO()
        with ZipFile(b, "w") as zf:
            zf.writestr("graph.yml", content)
        m.get(Endpoints.graph_version_download("uid"), content=b.getvalue())
        m.get(
            Endpoints.graph_by_slug("test-org-uid", "uid"),
            json={"slug": "uid", "uid": "uid"},
        )
        m.get(
            Endpoints.graphs_latest("uid"),
            json={"active_graph_version": {"uid": "uid"}},
        )

        result = run_cli(f"download uid {path}")
        assert "Downloaded app" in result.output
        assert (path / "graph.yml").read_text() == content
--------------------------------------------------------------------------------
/tests/graph/test_lookup.py:
--------------------------------------------------------------------------------
from pathlib import Path

from patterns.cli.services.lookup import IdLookup
from tests.configuration.utils import setup_graph_files


def test_find_graph_from_node(tmp_path: Path):
    setup_graph_files(
        tmp_path,
        {
            "graph.yml": """
            nodes:
              - node_file: node1.py
              - node_file: dir/node2.py
              - node_file: sub/graph.yml
            """,
            "node1.py": "t3=OutputTable",
            "dir/node2.py": "t1=OutputTable",
            "sub/graph.yml": """
            nodes:
              - node_file: node3.py
            """,
            "sub/node3.py": "t2=OutputTable",
        },
    )
    for p in [
        tmp_path / "dir" / "node1.py",
        tmp_path / "sub" / "node2.py",
        tmp_path / "node3.py",
    ]:
        actual = IdLookup(node_file_path=p).graph_file_path
        assert actual == tmp_path / "graph.yml"
--------------------------------------------------------------------------------
/tests/cli/test_trigger.py:
--------------------------------------------------------------------------------
import re
from pathlib import Path

from patterns.cli.services.api import Endpoints
from tests.cli.base import set_tmp_dir, run_cli, request_mocker


def test_trigger_node_in_subgraph(tmp_path: Path):
    dr = set_tmp_dir(tmp_path).parent / "graph"
    name = "sub/graph.yml"
    run_cli("create app", f"{dr}\n")
    path = dr / name
    run_cli("create node", f"{path}\n")
    name = (dr / "sub/p.py").as_posix()
    run_cli("create node", f"{name}\n")

    with request_mocker() as m:
        id = re.search(r"id: (\w+)", path.read_text()).group(1)
        m.post(Endpoints.trigger_node("2", id), json={"uid": "1"})
        m.get(Endpoints.graph_by_slug("test-org-uid", "graph"), json={"uid": "2"})
        m.get(Endpoints.graphs_latest("2"), json={"active_graph_version": {"uid": "3"}})
        result = run_cli(f"trigger {name}")
        assert "Triggered node" in result.output

        result = run_cli(f"trigger --app=graph --node-id={id}")
        assert "Triggered node" in result.output
--------------------------------------------------------------------------------
/patterns/cli/services/pagination.py:
--------------------------------------------------------------------------------
import functools
import itertools
from typing import Callable, Iterable, List

from patterns.cli.services.api import get_json


class PaginatedCall:
    def __init__(self, initial_request: Callable[[], dict]):
        self._initial_request = initial_request

    def pages(self) -> Iterable[List[dict]]:
        """Iterate over pages returned from the endpoint"""
        data = self._initial_request()
        yield data["results"]
        while data["next"]:
            data = get_json(data["next"], base_url="")
            if data["results"]:
                yield data["results"]

    def __iter__(self) -> Iterable[dict]:
        """Iterate over all objects returned from the endpoint"""
        return itertools.chain.from_iterable(self.pages())


def paginated(fn: Callable[..., dict]) -> Callable[..., PaginatedCall]:
    """Decorator that makes calls to a paginated endpoint return a PaginatedCall"""

    @functools.wraps(fn)
    def f(*args, **kwargs):
        return PaginatedCall(functools.partial(fn, *args, **kwargs))

    return f
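

# Illustrative usage sketch — any @paginated endpoint can be consumed either
# object-by-object or page-by-page (paginated_secrets lives in secrets.py;
# org_uid stands in for a real organization uid):
#
#     for secret in paginated_secrets(org_uid):
#         ...
#
#     for page in paginated_secrets(org_uid).pages():
#         ...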
--------------------------------------------------------------------------------
/patterns/cli/commands/delete.py:
--------------------------------------------------------------------------------
from rich.prompt import Confirm
from typer import Option

from patterns.cli.commands._common import app_argument
from patterns.cli.services.delete import delete_graph
from patterns.cli.services.lookup import IdLookup
from patterns.cli.services.output import sprint, abort, abort_on_error

_force_help = "Don't prompt before deleting an app"
_organization_help = "The Patterns organization to delete from"


def delete(
    force: bool = Option(False, "-f", "--force", help=_force_help),
    organization: str = Option(
        "", "-o", "--organization", metavar="SLUG", help=_organization_help
    ),
    app: str = app_argument,
):
    """Delete an app from the Patterns studio.

    This will not delete any files locally.
    """
    ids = IdLookup(organization_slug=organization, graph_slug_or_uid_or_path=app)

    with abort_on_error("Deleting app failed"):
        if not force and not Confirm.ask(f"Delete app {ids.graph_slug}?"):
            abort("Cancelled")
        delete_graph(ids.graph_uid)

    sprint("[success]App deleted from Patterns studio.")
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
[tool.poetry]
authors = ["AJ Alt", "Ken Van Haren"]
description = "Data pipelines from re-usable components"
license = "BSD-3-Clause"
name = "patterns-devkit"
packages = [
    {include = "patterns"},
]
version = "1.7.0"

[tool.poetry.dependencies]
platformdirs = "^2.4.0"
pydantic = "^1.8.1"
python = "^3.8.1"
rich = "^12.0.1"
ruyaml = "^0.91.0"
click = "^8.1.0"
typer = {extras = ["all"], version = "^0.7.0"}
pyyaml = "^6.0"
requests = "^2.27.1"

[tool.poetry.dev-dependencies]
black = "^22.12.0"
flake8 = "^6.0.0"
pytest = "^7.2.0"
requests-mock = "^1.10.0"

[tool.poetry.scripts]
patterns = "patterns.cli.main:main"

[tool.black]
exclude = '''
/(
    \.git
  | \.mypy_cache
  | \.pytest_cache
  | \.tox
  | \.venv
  | build
  | dist
  | examples
)/
'''

[tool.pytest.ini_options]
addopts = "-ra -q"
norecursedirs = []
testpaths = [
    "tests",
]

[build-system]
build-backend = "poetry.masonry.api"
requires = ["poetry>=0.12"]
--------------------------------------------------------------------------------
/patterns/cli/services/logout.py:
--------------------------------------------------------------------------------
from __future__ import annotations

import urllib.parse
from urllib.parse import ParseResult

from patterns.cli.config import read_devkit_config, update_devkit_config
from patterns.cli.services.auth import (
    LOCAL_OAUTH_PORT,
    BaseOAuthRequestHandler,
    execute_oauth_flow,
)


def logout():
    cfg = read_devkit_config()
    if not cfg.auth_server:
        return

    params = {
        "client_id": cfg.auth_server.devkit_client_id,
        "returnTo": f"http://localhost:{LOCAL_OAUTH_PORT}{LogoutRequestHandler.handled_path}",
    }

    query = urllib.parse.urlencode(params)
    url = f"https://{cfg.auth_server.domain}/v2/logout?{query}"

    execute_oauth_flow(url, LogoutRequestHandler)


class LogoutRequestHandler(BaseOAuthRequestHandler):
    handled_path: str = "/logout_callback"

    def handle_callback(self, parsed_url: ParseResult):
        update_devkit_config(refresh=None, token=None, auth_server=None)
        self.finish_with_success(
            "Successfully logged out", "You have successfully logged out"
        )
--------------------------------------------------------------------------------
/patterns/cli/services/graph_path.py:
--------------------------------------------------------------------------------
from pathlib import Path


def resolve_graph_path(
    path: Path, exists: bool, create_parents_if_necessary: bool = True
) -> Path:
    """Resolve an explicitly given graph location to a yaml"""
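    # Resolution rules: a directory resolves to "<dir>/graph.yml"; a .yml or
    # .yaml path resolves to itself; any other suffix is rejected. `exists`
    # says whether the target must already be present on disk.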
    if path.is_dir():
        f = path / "graph.yml"
        if f.is_file():
            if exists:
                return f.absolute()
            raise ValueError(f"File '{f}' already exists")
        if exists:
            raise ValueError(f"File '{f}' does not exist")
        return f.absolute()
    if path.suffix and path.suffix not in (".yml", ".yaml"):
        raise ValueError(f"Invalid graph file name: {path.name}")
    if path.is_file():
        if not exists:
            raise ValueError(f"Graph '{path}' already exists")
        return path.absolute()
    if exists:
        raise ValueError(f"Graph '{path}' does not exist")
    if path.suffix:
        if create_parents_if_necessary:
            path.parent.mkdir(parents=True, exist_ok=True)
        return path.absolute()
    if create_parents_if_necessary:
        path.mkdir(parents=True, exist_ok=True)
    graph_path = (path / "graph.yml").absolute()
    return graph_path
--------------------------------------------------------------------------------
/tests/cli/test_update.py:
--------------------------------------------------------------------------------
import io
from pathlib import Path
from zipfile import ZipFile

from patterns.cli.services.api import Endpoints
from tests.cli.base import request_mocker, set_tmp_dir, run_cli


def test_update(tmp_path: Path):
    set_tmp_dir(tmp_path)

    with request_mocker() as m:
        m.patch(Endpoints.component_update("uid"))
        m.patch(Endpoints.graph_update("uid"))
        m.get(
            Endpoints.graph_by_slug("test-org-uid", "mygraph"),
            json={"slug": "uid", "uid": "uid"},
        )
        result = run_cli("update app mygraph --deprecated")
        assert "Updated app" in result.output
        assert m.last_request.json() == {"deprecated": True}

        result = run_cli("update app mygraph --public")
        assert "Updated app" in result.output
        assert m.last_request.json() == {"public": True}

        result = run_cli("update app mygraph --private")
        assert "Updated app" in result.output
        assert m.last_request.json() == {"public": False}

        result = run_cli("update app mygraph --no-deprecated")
        assert "Updated app" in result.output
        assert m.last_request.json() == {"deprecated": False}
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
name: test

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.8", "3.9", "3.10"]

    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Poetry
        uses: snok/install-poetry@v1
        with:
          virtualenvs-create: true
          virtualenvs-in-project: true
          installer-parallel: true
      - name: Install dependencies
        run: poetry install --no-interaction --no-root
      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          poetry run flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          poetry run flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
      - name: Test with pytest
        run: |
          poetry run pytest tests
--------------------------------------------------------------------------------
/patterns/cli/services/versions.py:
--------------------------------------------------------------------------------
from __future__ import annotations

import requests
from requests import JSONDecodeError
import patterns
from patterns.cli.services.output import sprint

# Set to true to disable version checking
DISABLE_VERSION_CHECK = False


def get_newer_devkit_version() -> str | None:
    """Return the version number of the latest devkit version on PyPI, or None if
    the local version is up-to-date.
    """

    response = requests.get("https://pypi.python.org/pypi/patterns-devkit/json")
    if not response.ok:
        return None

    try:
        data = response.json()
    except JSONDecodeError:
        return None

    releases = data.get("releases")
    if not releases or not isinstance(releases, dict):
        return None

    # Compare versions numerically: a plain max() would compare the strings
    # lexicographically and sort "1.9.0" after "1.10.0".
    def _key(version: str) -> list:
        return [int(p) if p.isdigit() else 0 for p in version.split(".")]

    latest = max(releases, key=_key)
    if _key(latest) <= _key(patterns.__version__):
        return None
    return latest


def print_message_if_devkit_needs_update():
    if DISABLE_VERSION_CHECK:
        return
    latest = get_newer_devkit_version()
    if not latest:
        return

    sprint(
        "\n[info]A newer version of the Patterns devkit "
        f"([error]{patterns.__version__}[/error] -> [success]{latest}[/success]) is available."
    )
    sprint(
        "[info]Run [code]pip install --upgrade patterns-devkit[/code] "
        "to get the latest version."
    )
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
Copyright (c) 2020 Patterns Data Systems Inc. and individual contributors.
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice,
   this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may
   be used to endorse or promote products derived from this software without
   specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
/tests/cli/base.py:
--------------------------------------------------------------------------------
from __future__ import annotations

import os
import shlex
from contextlib import contextmanager
from pathlib import Path

import click
import requests_mock
import typer.testing
from click.testing import Result

from patterns.cli.config import DEVKIT_CONFIG_ENV_VAR, update_devkit_config
from patterns.cli.main import app
from patterns.cli.services.api import API_BASE_URL, Endpoints, build_url


def run_cli(argv: str, input: str = None, **kwargs) -> click.testing.Result:
    args = ["--stacktrace", "--disable-version-check"] + shlex.split(
        argv.replace("\\", "/")
    )
    runner = typer.testing.CliRunner()
    result = runner.invoke(app, args, input, catch_exceptions=False, **kwargs)
    print(result.output)
    return result


def set_tmp_dir(tmp_dir: Path, create_devkit_config: bool = True) -> Path:
    cfg_pth = Path(tmp_dir) / ".test-config.json"
    os.environ[DEVKIT_CONFIG_ENV_VAR] = str(cfg_pth)
    if create_devkit_config:
        update_devkit_config(
            token="test-token",
            organization_id="test-org-uid",
        )
    return cfg_pth


class _BaseUrlMocker(requests_mock.Mocker):
    def __init__(self, base_url):
        super().__init__()
        self.base_url = base_url

    def request(self, method, url, *args, **kwargs):
        if isinstance(url, str):
            url = build_url(self.base_url, url)
        return super().request(method, url, *args, **kwargs)


@contextmanager
def request_mocker():
    with _BaseUrlMocker(API_BASE_URL) as m:
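        # CLI commands verify the session token on startup, so pre-register
        # that endpoint for every test.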
        m.post(Endpoints.TOKEN_VERIFY)
        yield m
--------------------------------------------------------------------------------
/patterns/cli/commands/login.py:
--------------------------------------------------------------------------------
from rich.progress import Progress, SpinnerColumn, TextColumn

from patterns.cli.config import (
    update_devkit_config,
    get_devkit_config_path,
)
from patterns.cli.services import login as login_service
from patterns.cli.services.accounts import me
from patterns.cli.services.api import reset_session_auth
from patterns.cli.services.lookup import IdLookup
from patterns.cli.services.output import sprint, abort_on_error, console


def login():
    """Log in to your Patterns account"""
    reset_session_auth()

    with abort_on_error("Login failed"):
        sprint("[info]Logging in to Patterns...")
        url, login_config = login_service.make_login_config()
        sprint("[info]Opening url:")
        sprint(url)

        progress = Progress(
            SpinnerColumn(),
            TextColumn("Waiting for authorization..."),
            console=console,
            transient=True,
        )
        with progress:
            progress.add_task("")
            login_service.login(url, login_config)

    ids = IdLookup(ignore_local_cfg=True)
    with abort_on_error("Saving authorization token failed"):
        update_devkit_config(organization_id=ids.organization_uid)

    with abort_on_error("Fetching user profile failed"):
        profile = me()

    sprint(
        f"\n[success]Logged in to Patterns organization [b]{ids.organization_name}[/b] "
        f"as [b]{profile['username']}[/b] ([b]{profile['email']}[/b])"
    )
    sprint(
        "\n[info]Your login information is stored at "
        f"{get_devkit_config_path().as_posix()}"
    )
    sprint(
        "\n[info]If you want to create a new app, run "
        "[code]patterns create app[/code] to get started"
    )
--------------------------------------------------------------------------------
/patterns/cli/commands/update.py:
--------------------------------------------------------------------------------
from typing import Optional

import typer
from typer import Option

from patterns.cli.commands._common import app_argument
from patterns.cli.services.graph_components import update_graph_component
from patterns.cli.services.graph_versions import update_graph
from patterns.cli.services.lookup import IdLookup
from patterns.cli.services.output import sprint, abort_on_error

_organization_help = "The Patterns organization that the app belongs to"
_public_help = "Set the app to public or private. Public apps may be viewed by anyone."
_deprecated_help = "Set the app's component as deprecated or not. Deprecated components cannot be added to new apps, but continue to function in existing apps that use them."
_organization_option = Option(
    "", "--organization", "-o", metavar="SLUG", help=_organization_help
)

update_command = typer.Typer(name="update", help="Update an object of a given type")


@update_command.command()
def app(
    organization: str = _organization_option,
    public: Optional[bool] = Option(
        None, "--public/--private", show_default=False, help=_public_help
    ),
    deprecated: Optional[bool] = Option(
        None, "--deprecated/--no-deprecated", show_default=False, help=_deprecated_help
    ),
    app_location: str = app_argument,
):
    """Update properties of an app"""
    ids = IdLookup(
        organization_slug=organization, graph_slug_or_uid_or_path=app_location
    )
    with abort_on_error("Error updating app"):
        if public is not None:
            update_graph(ids.graph_uid, public=public)
        if deprecated is not None:
            update_graph_component(ids.graph_uid, deprecated=deprecated)

    if public is not None or deprecated is not None:
        sprint("[success]Updated app successfully")
--------------------------------------------------------------------------------
/patterns/cli/commands/config.py:
--------------------------------------------------------------------------------
import json

from rich.table import Table
from typer import Option

from patterns.cli.config import (
    write_devkit_config,
    get_devkit_config_path,
)
from patterns.cli.services.api import API_BASE_URL
from patterns.cli.services.lookup import IdLookup
from patterns.cli.services.output import sprint

_config_help = "Set the name of the organization to use by default"
_json_help = "Output the config as JSON"


def config(
    organization: str = Option("", "-o", "--organization", help=_config_help),
    verbose: bool = Option(False, "-v", "--verbose", help="Include all config values"),
    print_json: bool = Option(False, "--json", help=_json_help),
):
    """Get or set the default values used by other commands"""
    ids = IdLookup(
        organization_slug=organization,
    )
    if organization:
        ids.cfg.organization_id = ids.organization_uid
        write_devkit_config(ids.cfg)
    config_path = get_devkit_config_path().as_posix()

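    # Collect the values to display: dumped as JSON with --json, or rendered
    # as a table otherwise.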
    rows = {}

    if ids.cfg.token:
        try:
            rows["organization"] = ids.organization_name
        except Exception:
            rows["organization_id"] = ids.organization_uid
    if verbose:
        if ids.cfg.auth_server:
            rows["auth_server.domain"] = ids.cfg.auth_server.domain
            rows["auth_server.audience"] = ids.cfg.auth_server.audience
            rows["auth_server.devkit_client_id"] = ids.cfg.auth_server.devkit_client_id
        rows["api host"] = API_BASE_URL.rstrip("/")
    if print_json:
        rows["config file"] = config_path
        print(json.dumps(rows))
    else:
        sprint(f"[info]Your patterns config is located at [code]{config_path}")
        t = Table(show_header=False)
        for k, v in rows.items():
            t.add_row(k, v)
        sprint(t)
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
pip-wheel-metadata
poetry.lock

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Profiling
*.stats

# Translations
*.mo
*.pot

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# Misc
.DS_Store
tmp/*

# Editors
.vscode
*~
.idea

# sqlite
*.db
--------------------------------------------------------------------------------
/patterns/cli/commands/trigger.py:
--------------------------------------------------------------------------------
from pathlib import Path

from typer import Option, Argument

from patterns.cli.commands._common import app_argument_help
from patterns.cli.services.lookup import IdLookup
from patterns.cli.services.output import sprint, abort_on_error, abort
from patterns.cli.services.trigger import trigger_node

_organization_help = (
    "The name of the Patterns organization that the graph specified "
    "with --app was uploaded to"
)
_node_id_help = "The id of the node to trigger"
_node_help = "The path to the node to trigger"


def trigger(
    organization: str = Option(
        "", "-o", "--organization", metavar="SLUG", help=_organization_help
    ),
    app: str = Option(None, exists=True, help=app_argument_help, show_default=False),
    type: str = Option("pubsub", hidden=True),
    node_id: str = Option(None, help=_node_id_help, show_default=False),
    node: Path = Argument(None, exists=True, help=_node_help, show_default=False),
):
    """Trigger a node on an uploaded app to run immediately

    You can either pass a path to the node to trigger:

        patterns trigger ./app/my_node.py

    Or the id or slug of an app and the id of the node:

        patterns trigger --app=my-app --node-id=a1b2c3
    """
    if node and node_id:
        abort("Cannot specify both --node-id and NODE path argument")
    if node is None and node_id is None:
        abort("Must specify one of --node-id or NODE path argument")

    ids = IdLookup(
        organization_slug=organization,
        graph_slug_or_uid_or_path=app,
        node_file_path=node,
        node_id=node_id,
        find_nearest_graph=True,
    )
    with abort_on_error("Error triggering node"):
        trigger_node(
            ids.graph_uid,
            ids.node_id,
            execution_type=type,
        )

    sprint(f"[success]Triggered node {node or node_id}")
--------------------------------------------------------------------------------
/patterns/cli/config.py:
--------------------------------------------------------------------------------
import os
from pathlib import Path
from typing import Optional

import platformdirs
import pydantic

DEVKIT_CONFIG_ENV_VAR = "PATTERNS_CONFIG"
DEVKIT_CONFIG_NAME = "config.json"


class AuthServer(pydantic.BaseModel):
    domain: str
    audience: str
    devkit_client_id: str


class CliConfig(pydantic.BaseModel):
    organization_id: str = None
    token: str = None
    refresh: str = None
    auth_server: AuthServer = None

    class Config:
        extra = "ignore"
        allow_population_by_field_name = True


def get_devkit_config_path() -> Path:
    path = os.environ.get(DEVKIT_CONFIG_ENV_VAR)
    if path:
        return Path(path)
    config_dir = platformdirs.user_config_dir("patterns", appauthor=False, roaming=True)
    return Path(config_dir) / DEVKIT_CONFIG_NAME


def read_devkit_config() -> CliConfig:
    path = get_devkit_config_path()
    if path.exists():
        return CliConfig.parse_file(path)
    return CliConfig()


def write_devkit_config(config: CliConfig):
    path = get_devkit_config_path()
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(config.json(indent=" "))

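# Sentinel default that lets update_devkit_config distinguish "leave this
# field unchanged" from an explicit None, which clears the field (as logout
# does for the token).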
_UNCHANGED = object()


def update_devkit_config(
    organization_id: Optional[str] = _UNCHANGED,
    token: Optional[str] = _UNCHANGED,
    refresh: Optional[str] = _UNCHANGED,
    auth_server: Optional[AuthServer] = _UNCHANGED,
) -> CliConfig:
    cfg = read_devkit_config()
    update = {}
    if organization_id != _UNCHANGED:
        update["organization_id"] = organization_id
    if token != _UNCHANGED:
        update["token"] = token
    if refresh != _UNCHANGED:
        update["refresh"] = refresh
    if auth_server != _UNCHANGED:
        update["auth_server"] = auth_server
    copy = cfg.copy(update=update)
    write_devkit_config(copy)
    return copy
--------------------------------------------------------------------------------
/tests/cli/test_diffs.py:
--------------------------------------------------------------------------------
import io
from pathlib import Path
from zipfile import ZipFile

from patterns.cli.services.diffs import get_diffs_between_zip_and_dir


def test_diffs(tmp_path: Path):
    txt = tmp_path / "t.txt"
    txt2 = tmp_path / "t2.txt"
    bin = tmp_path / "b.bin"
    txt.write_text("foo\nbar\nbaz")
    txt2.write_text("foo\nbar\nbaz")
    bin.write_bytes(b"\xf1\xf2\xf3")
    zfbytes = io.BytesIO()
    with ZipFile(zfbytes, "w") as zf:
        zf.write(txt, "t.txt")
        zf.write(txt2, "t2.txt")
        zf.write(bin, "b.bin")

        diffs = get_diffs_between_zip_and_dir(zf, tmp_path, False)
        assert diffs.added == []
        assert diffs.removed == []
        assert diffs.changed == {}

        txt2.unlink()
        (tmp_path / "t3.txt").write_text("t3")
        txt.write_text("foo\nbar2\nbaz\nqux")
        bin.write_bytes(b"\xf1\xff")

        diffs = get_diffs_between_zip_and_dir(zf, tmp_path, False)
        assert diffs.added == ["t3.txt"]
        assert diffs.removed == ["t2.txt"]
        changed = {k: list(v) for k, v in diffs.changed.items()}
        assert changed == {
            "b.bin": [
                "--- b.bin",
                "+++ b.bin",
                "Binary contents differ",
            ],
            "t.txt": [
                "--- t.txt",
                "+++ t.txt",
                "@@ -1,3 +1,4 @@",
                " foo",
                "-bar",
                "+bar2",
                " baz",
                "+qux",
            ],
        }

        diffs = get_diffs_between_zip_and_dir(zf, tmp_path, True)
        assert diffs.added == ["t2.txt"]
        assert diffs.removed == ["t3.txt"]
        changed = {k: list(v) for k, v in diffs.changed.items()}
        assert changed == {
            "b.bin": [
                "--- b.bin",
                "+++ b.bin",
                "Binary contents differ",
            ],
            "t.txt": [
                "--- t.txt",
                "+++ t.txt",
                "@@ -1,4 +1,3 @@",
                " foo",
                "-bar2",
                "+bar",
                " baz",
                "-qux",
            ],
        }
--------------------------------------------------------------------------------
/patterns/cli/commands/download.py:
--------------------------------------------------------------------------------
import io
from pathlib import Path
from zipfile import ZipFile

import typer
from typer import Option, Argument

from patterns.cli.commands._common import app_argument
from patterns.cli.services.diffs import get_diffs_between_zip_and_dir, print_diffs
from patterns.cli.services.download import (
    download_graph_zip,
)
from patterns.cli.services.lookup import IdLookup
from patterns.cli.services.output import sprint, abort_on_error

_directory_help = "The directory to download the app to"
_organization_help = "The Patterns organization that the graph belongs to"
_force_help = "Overwrite existing files without prompting"
_diff_help = "Show a full diff of file conflicts"


def download(
    organization: str = Option(
        "", "-o", "--organization", metavar="SLUG", help=_organization_help
    ),
    force: bool = Option(False, "-f", "--force", help=_force_help),
    diff: bool = Option(False, "-d", "--diff", help=_diff_help),
    app: str = app_argument,
    directory: Path = Argument(None, help=_directory_help, file_okay=False),
):
    """Download the code for a Patterns app

    Call this command like [bold cyan]patterns download my-app[/] to download the app named "my-app"
    to a new folder.

    If you are in the directory of an app you've already downloaded, you can get the
    latest version of the app by calling [bold cyan]patterns download[/] with no extra arguments.

    This command will never overwrite data by default. You can call this command with
    [bold cyan]--force[/] to overwrite local files.

    This command will never delete files, no matter if they're part of the app or not.
    """
    ids = IdLookup(organization_slug=organization, graph_slug_or_uid_or_path=app)

    with abort_on_error("Error downloading app"):
        content = io.BytesIO(download_graph_zip(ids.graph_version_uid))

    root = (
        directory
        if directory
        # If a graph is specified, download it to a folder matching its slug.
        else Path(ids.graph_slug).resolve()
        if app
        # Otherwise download the current graph
        else ids.graph_directory
    )
    with ZipFile(content, "r") as zf:
        if force:
            zf.extractall(root)
        else:
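            # Diff from the remote's perspective so only files the download
            # would actually change count as conflicts.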
            conflicts = get_diffs_between_zip_and_dir(zf, root, True)
            if not conflicts.changed:
                zf.extractall(root)
                sprint(f"[success]Downloaded app {ids.graph_slug}")
                return
            sprint("[error]Download would overwrite the following files:\n")
            print_diffs(conflicts, diff, False)
            msg = "\n[info]Run this command with [code]--force[/code] to overwrite local files"
            if not diff:
                msg += ", or [code]--diff[/code] to see detailed differences"
            sprint(msg)
            raise typer.Exit(1)
--------------------------------------------------------------------------------
/tests/cli/test_upload.py:
--------------------------------------------------------------------------------
import io
from pathlib import Path
from zipfile import ZipFile

from patterns.cli.services.api import Endpoints
from tests.cli.base import request_mocker, set_tmp_dir, run_cli


def test_upload(tmp_path: Path):
    dr = set_tmp_dir(tmp_path).parent
    path = dr / "name"
    path.mkdir()
    graph_file = path / "graph.yml"
    text_before = """
name: name
slug: test-graph
exposes:
  outputs:
    - output
functions:
  - node_file: p.py
""".lstrip()
    graph_file.write_text(text_before)
    (path / "p.py").write_text(
        """
from patterns import *
@node
def node_fn(output=OutputTable):
    pass
"""
    )

    with request_mocker() as m:
        m.post(
            Endpoints.graph_version_create("test-org-uid"),
            json={
                "uid": "1",
                "ui_url": "url.com",
                "graph": {"name": "g"},
                "errors": [{"node_id": "n1", "message": "Test Error"}],
            },
        )
        result = run_cli(f"upload {path} --force")
        assert "Uploaded new app" in result.output
        assert "Test Error" in result.output
        assert "url.com" in result.output

        text_after = graph_file.read_text()
        assert text_after[: len(text_before)] == text_before
        assert "id: " in text_after


def test_upload_component(tmp_path: Path):
    dr = set_tmp_dir(tmp_path).parent
    path = "/".join((dr / "name").parts)
    run_cli(f"create app {path}")
    run_cli(f"create node {path}/node.py")

    with request_mocker() as m:
        m.post(
            Endpoints.graph_version_create("test-org-uid"),
            json={
                "uid": "1",
                "ui_url": "url.com",
                "graph": {"name": "g"},
                "errors": [],
            },
        )
        m.post(
            Endpoints.COMPONENTS_CREATE,
            json={
                "uid": "2",
                "version_name": "1.1.1",
                "component": {"uid": "3", "slug": "c"},
                "organization": {"uid": "4", "slug": "o"},
            },
        )
        result = run_cli(f"upload --publish-component {path} --force")
        assert "Uploaded new app" in result.output
        assert "Published app component" in result.output


def test_upload_custom_yaml_name(tmp_path: Path):
    dr = set_tmp_dir(tmp_path).parent
    path = dr / "name"
    path.mkdir()
    graph_file = path / "custom.yml"
    graph_file.write_text(
        """
name: name
stores:
  - table: t
""".lstrip()
    )

    with request_mocker() as m:
        m.post(
            Endpoints.graph_version_create("test-org-uid"),
            json={
                "uid": "1",
                "ui_url": "url.com",
                "graph": {"name": "g"},
                "manifest": {},
            },
        )
        result = run_cli(f"upload {graph_file.as_posix()} --force")
        assert "Uploaded new app" in result.output
--------------------------------------------------------------------------------
/patterns/cli/services/diffs.py:
--------------------------------------------------------------------------------
import difflib
from dataclasses import dataclass
from pathlib import Path
from typing import Iterator, List, Dict
from zipfile import ZipFile

from rich.markdown import Markdown

from patterns.cli.helpers import directory_contents_to_upload
from patterns.cli.services.output import sprint


@dataclass
class DiffResult:
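    """Files that differ between a graph zip and a local directory.

    `added` and `removed` hold relative paths; `changed` maps a relative path
    to its unified diff lines.
    """
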
    added: List[str]
    removed: List[str]
    changed: Dict[str, Iterator[str]]

    @property
    def is_not_empty(self) -> bool:
        return bool(self.added or self.removed or self.changed)

    @property
    def is_empty(self) -> bool:
        return not self.is_not_empty


def get_diffs_between_zip_and_dir(
    zf: ZipFile, root: Path, from_remote: bool
) -> DiffResult:
    """Return a map of {filename: diff} where the contents differ between zf and root"""
    result = DiffResult([], [], {})
    all_in_zip = set()
    for zipinfo in zf.infolist():
        dst = root / zipinfo.filename
        if zipinfo.is_dir():
            continue
        all_in_zip.add(zipinfo.filename)
        if not dst.is_file():
            (result.added if from_remote else result.removed).append(zipinfo.filename)
            continue
        zip_bytes = zf.read(zipinfo)
        try:
            zip_content = zip_bytes.decode().splitlines(keepends=False)
            fs_content = dst.read_text().splitlines(keepends=False)
        except UnicodeDecodeError:
            if zip_bytes != dst.read_bytes():
                result.changed[zipinfo.filename] = [
                    f"--- {zipinfo.filename}",
                    f"+++ {zipinfo.filename}",
                    "Binary contents differ",
                ]
        else:
            if zip_content != fs_content:
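                # For the remote's perspective, swap sides so the "+" lines
                # show the remote contents that a download would write.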
                if from_remote:
                    zip_content, fs_content = fs_content, zip_content
                diff = difflib.unified_diff(
                    zip_content,
                    fs_content,
                    fromfile=zipinfo.filename,
                    tofile=zipinfo.filename,
                    lineterm="",
                )
                result.changed[zipinfo.filename] = diff
    for path in directory_contents_to_upload(root):
        file_name = path.relative_to(root).as_posix()
        if file_name not in all_in_zip:
            (result.removed if from_remote else result.added).append(file_name)

    return result


def print_diffs(diffs: DiffResult, context: bool, full: bool):
    if full:
        if diffs.added:
            sprint("Added:")
            sprint(Markdown("\n".join(f"- {a}" for a in diffs.added), style="success"))
            print()
        if diffs.removed:
            sprint("Deleted:")
            sprint(Markdown("\n".join(f"- {a}" for a in diffs.removed), style="error"))
            print()
    if not diffs.changed:
        return
    sprint("Modified:")
    if not context:
        sprint(Markdown("\n".join(f"- {a}" for a in diffs.changed), style="info"))
        return

    print()
    diff = "\n\n".join("\n".join(d) for d in diffs.changed.values())
    sprint(
        Markdown(
            f"""
```diff
{diff}
```
""",
            code_theme="vim",
        )
    )
--------------------------------------------------------------------------------
/tests/cli/test_create.py:
--------------------------------------------------------------------------------
from pathlib import Path

from tests.cli.base import set_tmp_dir, run_cli


test_create_graph(tmp_path: Path): 7 | dr = set_tmp_dir(tmp_path).parent 8 | name = "testgraph" 9 | run_cli("create app", f"{dr / name}\n") 10 | assert name in (dr / name / "graph.yml").read_text() 11 | 12 | 13 | def test_create_graph_explicit(tmp_path: Path): 14 | dr = set_tmp_dir(tmp_path).parent 15 | name = "testgraph" 16 | path = dr / "pth" / "projname" 17 | run_cli(f"create app --name={name} '{path}'") 18 | assert name in (dr / path / "graph.yml").read_text() 19 | 20 | 21 | def test_create_node(tmp_path: Path): 22 | dr = set_tmp_dir(tmp_path).parent / "graph" 23 | name = "mynode.py" 24 | run_cli("create app", f"{dr}\n") 25 | path = dr / name 26 | run_cli(f"create node", f"{path}\n") 27 | assert name in (dr / "graph.yml").read_text() 28 | 29 | 30 | def test_create_subgraph(tmp_path: Path): 31 | dr = set_tmp_dir(tmp_path).parent / "graph" 32 | name = "sub/graph.yml" 33 | run_cli("create app", f"{dr}\n") 34 | path = dr / name 35 | run_cli(f"create node", f"{path}\n") 36 | assert name in (dr / "graph.yml").read_text() 37 | assert "sub" in path.read_text() 38 | 39 | name = (dr / "sub/p.py").as_posix() 40 | run_cli(f"create node", f"{name}\n") 41 | assert "node_file: p.py" in (dr / "sub/graph.yml").read_text() 42 | 43 | 44 | def test_create_node_explicit(tmp_path: Path): 45 | dr = set_tmp_dir(tmp_path).parent / "graph" 46 | name = "mynode.py" 47 | run_cli("create app", f"{dr}\n") 48 | path = dr / name 49 | run_cli(f"create node '{path}'") 50 | assert name in (dr / "graph.yml").read_text() 51 | assert "from patterns import" in path.read_text() 52 | 53 | 54 | def test_create_node_invalid_py_name(tmp_path: Path): 55 | dr = set_tmp_dir(tmp_path).parent / "graph" 56 | name = "0-foo.py" 57 | run_cli("create app", f"{dr}\n") 58 | path = dr / name 59 | run_cli(f"create node", f"{path}\n") 60 | assert name in (dr / "graph.yml").read_text() 61 | assert "from patterns import" in path.read_text() 62 | 63 | 64 | def test_create_webhook(tmp_path: Path): 65 | dr = set_tmp_dir(tmp_path).parent / "graph" 66 | run_cli("create app", f"{dr}\n") 67 | run_cli(f"create node --app={dr} --type=webhook hook") 68 | text = (dr / "graph.yml").read_text() 69 | assert "webhook: hook" in text 70 | assert "table: hook" in text 71 | 72 | 73 | def test_create_component(tmp_path: Path): 74 | dr = set_tmp_dir(tmp_path).parent / "graph" 75 | run_cli("create app", f"{dr}\n") 76 | run_cli(f"create node --type=component --app={dr} foo/bar@v1") 77 | assert f"uses: foo/bar@v1" in (dr / "graph.yml").read_text() 78 | 79 | 80 | def test_create_webhook_deprecated(tmp_path: Path): 81 | dr = set_tmp_dir(tmp_path).parent / "graph" 82 | run_cli("create app", f"{dr}\n") 83 | run_cli(f"create webhook --app={dr} hook") 84 | assert f"webhook: hook" in (dr / "graph.yml").read_text() 85 | 86 | 87 | def test_create_component_deprecated(tmp_path: Path): 88 | dr = set_tmp_dir(tmp_path).parent / "graph" 89 | run_cli("create app", f"{dr}\n") 90 | run_cli(f"create node --component=foo/bar@v1 --app={dr}") 91 | assert f"uses: foo/bar@v1" in (dr / "graph.yml").read_text() 92 | 93 | 94 | def test_create_table(tmp_path: Path): 95 | dr = set_tmp_dir(tmp_path).parent / "graph" 96 | run_cli("create app", f"{dr}\n") 97 | run_cli(f"create node --app={dr} --type=table tbl") 98 | text = (dr / "graph.yml").read_text() 99 | assert "table: tbl" in text 100 | -------------------------------------------------------------------------------- /patterns/cli/helpers.py: -------------------------------------------------------------------------------- 1 | from __future__ 
import annotations 2 | 3 | import os 4 | import re 5 | import secrets 6 | import string 7 | import subprocess 8 | import zipfile 9 | from io import BytesIO 10 | from pathlib import Path 11 | from typing import Iterable 12 | 13 | # standard gitignore entries from https://github.com/github/gitignore 14 | _IGNORE_DIRS = { 15 | ".com.apple.timemachine.donotpresent", 16 | ".DocumentRevisions-V100", 17 | ".DS_Store", 18 | ".eggs", 19 | ".fseventsd", 20 | ".hypothesis", 21 | ".idea", 22 | ".mypy_cache", 23 | ".nox", 24 | ".pybuilder", 25 | ".pyre", 26 | ".pytest_cache", 27 | ".pytype", 28 | ".Spotlight-V100", 29 | ".TemporaryItems", 30 | ".tox", 31 | ".Trashes", 32 | ".VolumeIcon.icns", 33 | ".vscode", 34 | "__pycache__", 35 | "__pypackages__", 36 | "cython_debug", 37 | "develop-eggs", 38 | "docs_build", 39 | "ENV", 40 | "htmlcov", 41 | "instance", 42 | "profile_default", 43 | "sharepython-wheels", 44 | } 45 | _IGNORE_FILES = [ 46 | r".*\$py\.class", 47 | r".*\.cover", 48 | r".*\.egg", 49 | r".*\.log", 50 | r".*\.manifest", 51 | r".*\.mo", 52 | r".*\.pot", 53 | r".*\.py,cover", 54 | r".*\.py[cod]", 55 | r".*\.sage\.py", 56 | r".*\.so", 57 | r".*\.spec", 58 | r"\.cache", 59 | r"\.coverage", 60 | r"\.coverage\..*", 61 | r"\.dmypy\.json", 62 | r"\.env", 63 | r"\.installed\.cfg", 64 | r"\.ipynb_checkpoints", 65 | r"\.pdm\.toml", 66 | r"\.Python", 67 | r"\.ropeproject", 68 | r"\.scrapy", 69 | r"\.spyderproject", 70 | r"\.spyproject", 71 | r"\.venv", 72 | r"\.webassets-cache", 73 | r"celerybeat-schedule", 74 | r"celerybeat\.pid", 75 | r"coverage\.xml", 76 | r"db\.sqlite3", 77 | r"db\.sqlite3-journal", 78 | r"dmypy\.json", 79 | r"ipython_config\.py", 80 | r"MANIFEST", 81 | r"nosetests\.xml", 82 | r"pip-delete-this-directory\.txt", 83 | r"pip-log\.txt", 84 | ] 85 | 86 | _IGNORE_RE = re.compile(f"(?:{'|'.join(_IGNORE_FILES)})$") 87 | 88 | 89 | def _is_git_directory(path: Path) -> bool: 90 | return (path / ".git").is_dir() 91 | 92 | 93 | def _all_files_not_gitignored(path: Path) -> Iterable[Path]: 94 | files = subprocess.check_output( 95 | ["git", "-C", str(path), "ls-files", "-co", "--exclude-standard"] 96 | ).splitlines() 97 | for f in files: 98 | yield path / Path(f.decode()) 99 | 100 | 101 | def _all_files_not_ignored(path: Path) -> Iterable[Path]: 102 | for dirname, dirnames, files in os.walk(path, followlinks=True): 103 | dirnames[:] = [d for d in dirnames if d not in _IGNORE_DIRS] 104 | for f in files: 105 | if _IGNORE_RE.fullmatch(f): 106 | continue 107 | yield Path(dirname) / f 108 | 109 | 110 | def directory_contents_to_upload(directory: Path) -> Iterable[Path]: 111 | if _is_git_directory(directory): 112 | return _all_files_not_gitignored(directory) 113 | else: 114 | return _all_files_not_ignored(directory) 115 | 116 | 117 | def compress_directory(directory: Path) -> BytesIO: 118 | io = BytesIO() 119 | zipf = zipfile.ZipFile(io, "w", zipfile.ZIP_DEFLATED) 120 | for f in directory_contents_to_upload(directory): 121 | zipf.write(f, f.relative_to(directory)) 122 | zipf.close() 123 | io.seek(0) 124 | io.name = "graph_manifest.zip" 125 | return io 126 | 127 | 128 | _alphabet = string.digits + string.ascii_lowercase 129 | 130 | 131 | def random_node_id() -> str: 132 | return "".join(secrets.choice(_alphabet) for _ in range(8)) 133 | -------------------------------------------------------------------------------- /patterns/cli/commands/list.py: -------------------------------------------------------------------------------- 1 | import json 2 | from typing import Iterable 3 | 4 | import typer 5 | from 
rich.table import Table 6 | from typer import Option 7 | 8 | from patterns.cli.commands._common import app_argument 9 | from patterns.cli.services.graph_list import paginated_graphs 10 | from patterns.cli.services.lookup import IdLookup 11 | from patterns.cli.services.organizations import paginated_organizations 12 | from patterns.cli.services.output import sprint, abort_on_error 13 | from patterns.cli.services.secrets import paginated_secrets 14 | from patterns.cli.services.webhooks import paginated_webhooks 15 | 16 | _type_help = "The type of object to list" 17 | _json_help = "Output the object as JSON Lines" 18 | _organization_help = "The Patterns organization to use" 19 | 20 | _organization_option = Option( 21 | "", "--organization", "-o", metavar="SLUG", help=_organization_help 22 | ) 23 | 24 | list_command = typer.Typer(name="list", help="List objects of a given type") 25 | 26 | 27 | @list_command.command() 28 | def apps( 29 | organization: str = Option("", help=_organization_help), 30 | print_json: bool = Option(False, "--json", help=_json_help), 31 | ): 32 | """List all apps in your organization""" 33 | ids = IdLookup(organization_slug=organization) 34 | with abort_on_error("Error listing apps"): 35 | gs = list(paginated_graphs(ids.organization_uid)) 36 | for g in gs: 37 | g.pop("organization_uid", None) # all graphs are for the current org 38 | g.pop("updated_at", None) # not very useful; leave space for the ui_url 39 | _print_objects("apps", gs, print_json, ("title", "slug", "uid")) 40 | 41 | 42 | @list_command.command() 43 | def organizations( 44 | print_json: bool = Option(False, "--json", help=_json_help), 45 | ): 46 | """List all organizations you are a member of""" 47 | with abort_on_error("Error listing organizations"): 48 | es = list(paginated_organizations()) 49 | _print_objects("organizations", es, print_json) 50 | 51 | 52 | @list_command.command() 53 | def secrets( 54 | organization: str = _organization_option, 55 | print_json: bool = Option(False, "--json", help=_json_help), 56 | ): 57 | """List all secrets in your organization""" 58 | 59 | def clean(r): 60 | return {k: "" if v is None else v for k, v in r.items()} 61 | 62 | ids = IdLookup(organization_slug=organization) 63 | with abort_on_error("Error listing secrets"): 64 | ss = list(map(clean, paginated_secrets(ids.organization_uid))) 65 | _print_objects("secrets", ss, print_json) 66 | 67 | 68 | @list_command.command() 69 | def webhooks( 70 | print_json: bool = Option(False, "--json", help=_json_help), 71 | app: str = app_argument, 72 | ): 73 | """List all webhooks for an app""" 74 | ids = IdLookup(graph_slug_or_uid_or_path=app) 75 | with abort_on_error("Error listing webhooks"): 76 | ws = list(paginated_webhooks(ids.graph_uid)) 77 | _print_objects("webhooks", ws, print_json) 78 | 79 | 80 | def _print_objects( 81 | name: str, objects: list, print_json: bool, headers: Iterable[str] = () 82 | ): 83 | if not objects: 84 | if not print_json: 85 | sprint(f"[info]No {name} found") 86 | return 87 | 88 | if print_json: 89 | for o in objects: 90 | print(json.dumps(o)) 91 | else: 92 | table = Table() 93 | for k in headers: 94 | table.add_column(k) 95 | for k in objects[0].keys(): 96 | if k not in headers: 97 | table.add_column(k) 98 | columns = [str(c.header) for c in table.columns] 99 | for o in objects: 100 | table.add_row(*(str(o.get(c, "")) for c in columns)) 101 | sprint(table) 102 | -------------------------------------------------------------------------------- /patterns/cli/services/output.py: 
-------------------------------------------------------------------------------- 1 | import contextlib 2 | import typing 3 | from pathlib import Path 4 | 5 | import rich.prompt 6 | import typer 7 | from requests import HTTPError 8 | from rich.console import Console 9 | from rich.theme import Theme 10 | 11 | """Set to True to raise exceptions from cli commands rather than printing the message""" 12 | DEBUG = False 13 | 14 | console = Console( 15 | theme=( 16 | Theme( 17 | { 18 | "info": "italic cyan", 19 | "warning": "magenta", 20 | "success": "green", 21 | "error": "red", 22 | "code": "bold cyan", 23 | } 24 | ) 25 | ) 26 | ) 27 | 28 | 29 | class _PathPrompt(rich.prompt.Prompt): 30 | response_type = Path 31 | validate_error_message = "[prompt.invalid]Please enter a valid file path" 32 | 33 | 34 | def prompt_path( 35 | message: str, default: typing.Union[Path, str] = None, exists: bool = None 36 | ) -> Path: 37 | while True: 38 | p = _PathPrompt.ask(message, default=default) 39 | if not p: 40 | continue 41 | if exists is True and not p.exists(): 42 | sprint("[prompt.invalid]Path does not exist") 43 | elif exists is False and p.exists(): 44 | sprint("[prompt.invalid]Path already exists") 45 | else: 46 | break 47 | return p 48 | 49 | 50 | def prompt_str(message: str, default: str = None, password: bool = False) -> str: 51 | return rich.prompt.Prompt.ask(message, default=default, password=password) 52 | 53 | 54 | def prompt_choices( 55 | choice_message: str, 56 | prompt_message: str, 57 | choices: typing.Iterable[str], 58 | default: typing.Any = ..., 59 | ) -> str: 60 | sprint(f"[info]{choice_message}:") 61 | for c in choices: 62 | sprint(f" [info]{c}") 63 | return rich.prompt.Prompt.ask( 64 | prompt_message, choices=list(choices), show_choices=False, default=default 65 | ) 66 | 67 | 68 | def sprint(message): 69 | """Print styled content""" 70 | console.print(message) 71 | 72 | 73 | def abort(message: str) -> typing.NoReturn: 74 | """Print an error message and raise an Exit exception""" 75 | sprint(f"[error]{message}") 76 | raise typer.Exit(1) 77 | 78 | 79 | @contextlib.contextmanager 80 | def abort_on_error(message: str, prefix=": ", suffix=""): 81 | """Catch any exceptions that occur and call `abort` with their message""" 82 | if DEBUG: 83 | yield 84 | return 85 | try: 86 | yield 87 | except HTTPError as e: 88 | try: 89 | details = e.response.json()["detail"] 90 | except Exception: 91 | details = e.response.text 92 | if not details: 93 | details = f"HTTP {e.response.status_code}" 94 | 95 | # check 403 error message for unverified email / unsetup account and display message 96 | # we give them the home page, since the webapp will redirect them to the proper setup page automatically 97 | if e.response.status_code == 403: 98 | if details == "unverified email": 99 | abort( 100 | f"Please verify your email address before using Patterns - https://studio.patterns.app" 101 | ) 102 | elif details == "incomplete setup": 103 | abort( 104 | f"Please finish account setup before using Patterns - https://studio.patterns.app" 105 | ) 106 | elif e.response.status_code == 401: 107 | abort( 108 | "You are not logged in to the devkit.\n" 109 | "[info]You can log in with [code]patterns login" 110 | ) 111 | 112 | abort(f"{message}{prefix}{details}{suffix}") 113 | except (typer.Exit, typer.Abort) as e: 114 | raise e 115 | except KeyError as e: 116 | abort(f"{message}{prefix}KeyError: {e}{suffix}") 117 | except Exception as e: 118 | abort(f"{message}{prefix}{e}{suffix}") 119 | 
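A minimal usage sketch of these helpers (the `describe_app` command and its input are hypothetical; `abort_on_error`, `abort`, and `sprint` behave as defined above):

```python
from patterns.cli.services.output import abort_on_error, sprint

def describe_app(resp: dict):
    # Any exception raised in this block is turned into a styled error
    # message followed by typer.Exit(1) instead of a raw traceback.
    with abort_on_error("Error describing app"):
        slug = resp["slug"]  # a missing key aborts with "Error describing app: KeyError: 'slug'"
    sprint(f"[success]Found app [code]{slug}")
```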
-------------------------------------------------------------------------------- /patterns/cli/commands/upload.py: -------------------------------------------------------------------------------- 1 | import io 2 | from pathlib import Path 3 | from zipfile import ZipFile 4 | 5 | import typer 6 | from requests import HTTPError 7 | from typer import Option, Argument 8 | 9 | from patterns.cli.commands._common import app_argument_help 10 | from patterns.cli.services.diffs import get_diffs_between_zip_and_dir, print_diffs 11 | from patterns.cli.services.download import download_graph_zip 12 | from patterns.cli.services.graph_components import create_graph_component 13 | from patterns.cli.services.lookup import IdLookup 14 | from patterns.cli.services.output import sprint, abort_on_error 15 | from patterns.cli.services.upload import upload_graph_version 16 | 17 | _app_help = "The location of the graph.yml file of the app to upload" 18 | _organization_help = "The Patterns organization to upload to" 19 | _component_help = "After uploading, publish the app version as a public component" 20 | _force_help = "Overwrite existing files without prompting" 21 | _diff_help = "Show a full diff of file conflicts" 22 | 23 | 24 | def upload( 25 | organization: str = Option( 26 | "", "-o", "--organization", metavar="SLUG", help=_organization_help 27 | ), 28 | force: bool = Option(False, "-f", "--force", help=_force_help), 29 | diff: bool = Option(False, "-d", "--diff", help=_diff_help), 30 | publish_component: bool = Option(False, help=_component_help), 31 | app: Path = Argument(None, exists=True, help=_app_help), 32 | ): 33 | """Upload a new version of an app to Patterns 34 | 35 | This command will never overwrite data by default. You can call this command with 36 | [bold cyan]--force[/] to overwrite files in Patterns Studio.
37 | """ 38 | ids = IdLookup( 39 | organization_slug=organization, 40 | graph_path=app, 41 | ) 42 | 43 | if not force: 44 | try: 45 | content = io.BytesIO(download_graph_zip(ids.graph_version_uid)) 46 | except HTTPError: 47 | # No graph version yet 48 | pass 49 | else: 50 | with ZipFile(content, "r") as zf: 51 | conflicts = get_diffs_between_zip_and_dir( 52 | zf, ids.graph_directory, False 53 | ) 54 | if conflicts.is_not_empty: 55 | sprint("[info]Upload would change the following files:\n") 56 | print_diffs(conflicts, diff, True) 57 | msg = "\n[info]Run this command with [code]--force[/code] to upload the app" 58 | if not diff: 59 | msg += ", or [code]--diff[/code] to see detailed differences" 60 | sprint(msg) 61 | raise typer.Exit(1) 62 | 63 | with abort_on_error("Upload failed"): 64 | resp = upload_graph_version( 65 | ids.graph_file_path, 66 | ids.organization_uid, 67 | add_missing_node_ids=not publish_component, 68 | ) 69 | 70 | graph_version_id = resp["uid"] 71 | ui_url = resp["ui_url"] 72 | sprint(f"\n[success]Uploaded new app version with id [b]{graph_version_id}") 73 | errors = resp.get("errors", []) 74 | if publish_component: 75 | errors = [ 76 | e 77 | for e in errors 78 | if not e["message"].startswith("Top level input is not connected") 79 | and not ( 80 | e["message"].startswith("Parameter") 81 | and e["message"].endswith("has no default or value") 82 | ) 83 | ] 84 | if errors: 85 | sprint(f"[error]App contains the following errors:") 86 | for error in errors: 87 | sprint(f"\t[error]{error}") 88 | 89 | if publish_component: 90 | with abort_on_error("Error creating component"): 91 | resp = create_graph_component(graph_version_id) 92 | resp_org = resp["organization"]["slug"] 93 | resp_version = resp["version_name"] 94 | resp_component = resp["component"]["slug"] 95 | sprint( 96 | f"[success]Published app component " 97 | f"[b]{resp_org}/{resp_component}@{resp_version}[/b]" 98 | ) 99 | 100 | sprint(f"\n[info]Visit [code]{ui_url}[/code] to view your app") 101 | -------------------------------------------------------------------------------- /patterns/cli/main.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from types import MethodType 4 | from typing import List, Optional 5 | 6 | import click 7 | import typer 8 | from click import Context, MultiCommand, Command 9 | from typer import Typer 10 | from typer.core import TyperGroup 11 | 12 | from .commands.config import config 13 | from .commands.create import create 14 | from .commands.delete import delete 15 | from .commands.download import download 16 | from .commands.list import list_command 17 | from .commands.login import login 18 | from .commands.logout import logout 19 | from .commands.trigger import trigger 20 | from .commands.update import update_command 21 | from .commands.upload import upload 22 | from .services import versions 23 | from .services.output import sprint 24 | from .services.versions import ( 25 | print_message_if_devkit_needs_update, 26 | ) 27 | from .. 
import __version__ 28 | from ..cli.services import output 29 | 30 | 31 | def version_cb(value: bool): 32 | if not value: 33 | return 34 | sprint(f"Patterns Devkit CLI version [code]{__version__}") 35 | 36 | print_message_if_devkit_needs_update() 37 | 38 | raise typer.Exit() 39 | 40 | 41 | def cb( 42 | stacktrace: bool = typer.Option(False, hidden=True), 43 | _: bool = typer.Option( 44 | False, 45 | "--version", 46 | help="Print version information and exit.", 47 | callback=version_cb, 48 | is_eager=True, 49 | ), 50 | disable_version_check: bool = typer.Option( 51 | False, 52 | "--disable-version-check", 53 | help="Don't periodically check if a new devkit version is available for download", 54 | ), 55 | ): 56 | if stacktrace: 57 | output.DEBUG = True 58 | if disable_version_check: 59 | versions.DISABLE_VERSION_CHECK = True 60 | 61 | 62 | def result_cb(*_, **__): 63 | print_message_if_devkit_needs_update() 64 | 65 | 66 | app = Typer( 67 | name="patterns", 68 | no_args_is_help=True, 69 | add_completion=False, 70 | rich_markup_mode="rich", 71 | callback=cb, 72 | result_callback=result_cb, 73 | help=f"""[cyan]Patterns Devkit {__version__} 74 | 75 | [not dim][green]Read the docs:[/] https://www.patterns.app/docs/devkit 76 | """, 77 | ) 78 | 79 | for command in ( 80 | config, 81 | create, 82 | delete, 83 | list_command, 84 | update_command, 85 | login, 86 | logout, 87 | trigger, 88 | upload, 89 | download, 90 | ): 91 | if isinstance(command, typer.Typer): 92 | command._add_completion = False 93 | app.add_typer(command) 94 | else: 95 | app.command()(command) 96 | 97 | 98 | def main(): 99 | def _get_group(*args, **kwargs) -> click.Command: 100 | group = _old_typer_get_group(*args, **kwargs) 101 | 102 | def _list_commands(self, ctx: Context) -> List[str]: 103 | l = super(TyperGroup, self).list_commands(ctx) 104 | for c in l: 105 | sub = super(TyperGroup, self).get_command(ctx, c) 106 | if isinstance(sub, MultiCommand): 107 | l.extend(f"{c} {s}" for s in sub.list_commands(ctx)) 108 | return l 109 | 110 | def _get_command(self, ctx: Context, cmd_name: str) -> Optional[Command]: 111 | parts = cmd_name.split() 112 | base = super(TyperGroup, self).get_command(ctx, parts[0]) 113 | if len(parts) == 1: 114 | return base 115 | assert len(parts) == 2 116 | assert isinstance(base, MultiCommand) 117 | cmd = base.get_command(ctx, parts[1]) 118 | cmd.name = cmd_name 119 | return cmd 120 | 121 | def format_help( 122 | self, ctx: click.Context, formatter: click.HelpFormatter 123 | ) -> None: 124 | old_list = self.list_commands 125 | old_get = self.get_command 126 | 127 | self.list_commands = MethodType(_list_commands, self) 128 | self.get_command = MethodType(_get_command, self) 129 | 130 | typer.core.rich_utils.rich_format_help( 131 | obj=self, 132 | ctx=ctx, 133 | markup_mode=self.rich_markup_mode, 134 | ) 135 | 136 | self.list_commands = old_list 137 | self.get_command = old_get 138 | 139 | group.format_help = MethodType(format_help, group) 140 | 141 | return group 142 | 143 | _old_typer_get_group = typer.main.get_group 144 | typer.main.get_group = _get_group 145 | app() 146 | typer.main.get_group = _old_typer_get_group 147 | -------------------------------------------------------------------------------- /tests/configuration/test_config_editor.py: -------------------------------------------------------------------------------- 1 | import re 2 | import textwrap 3 | from pathlib import Path 4 | 5 | from patterns.cli.configuration.edit import GraphConfigEditor 6 | 7 | 8 | def test_round_trip(tmp_path: Path): 9 | s 
= """ 10 | title: graph 11 | functions: 12 | - webhook: out # eol comment 13 | # node 1 14 | - node_file: node_1.py 15 | inputs: 16 | in: out 17 | """ 18 | get_editor(tmp_path, s).assert_dump(s) 19 | 20 | 21 | def test_round_trip_no_indent(tmp_path: Path): 22 | s = """ 23 | functions: 24 | - webhook: out # eol comment 25 | - node_file: node_1.py 26 | inputs: 27 | in: out 28 | """ 29 | get_editor(tmp_path, s).assert_dump(s) 30 | 31 | 32 | def test_add_node_to_existing_nodes(tmp_path: Path): 33 | before = """ 34 | functions: 35 | - webhook: out # eol comment 36 | """ 37 | after = """ 38 | functions: 39 | - webhook: out # eol comment 40 | - node_file: node.py 41 | id: 42 | """ 43 | get_editor(tmp_path, before).add_node("node.py").assert_dump(after) 44 | 45 | 46 | def test_add_node_to_empty_graph(tmp_path: Path): 47 | before = """ 48 | title: graph 49 | """ 50 | after = """ 51 | title: graph 52 | functions: 53 | - node_file: node.py 54 | id: 55 | """ 56 | get_editor(tmp_path, before).add_node("node.py").assert_dump(after) 57 | 58 | 59 | def test_add_webhook_with_all_fields(tmp_path: Path): 60 | before = """ 61 | title: graph 62 | """ 63 | after = """ 64 | title: graph 65 | functions: 66 | - webhook: hook 67 | title: n 68 | id: ab234567 69 | description_file: desc.md 70 | """ 71 | get_editor(tmp_path, before).add_webhook( 72 | "hook", "n", "ab234567", "desc.md" 73 | ).assert_dump(after) 74 | 75 | 76 | def test_add_store_with_all_fields(tmp_path: Path): 77 | before = """ 78 | title: graph 79 | """ 80 | after = """ 81 | title: graph 82 | stores: 83 | - table: st 84 | id: ab234567 85 | schema: sc 86 | """ 87 | get_editor(tmp_path, before).add_table("st", "ab234567", "sc").assert_dump( 88 | after 89 | ) 90 | 91 | 92 | def test_add_node_with_all_fields(tmp_path: Path): 93 | before = """ 94 | title: graph 95 | functions: 96 | - webhook: hook 97 | """ 98 | after = """ 99 | title: graph 100 | functions: 101 | - webhook: hook 102 | - node_file: node.py 103 | trigger: 1 * * * * 104 | inputs: 105 | node_in: hook 106 | outputs: 107 | node_out: my_table 108 | parameters: 109 | limit: 2 110 | title: my node 111 | id: ab234567 112 | """ 113 | get_editor(tmp_path, before).add_node( 114 | "node.py", 115 | trigger="1 * * * *", 116 | inputs={"node_in": "hook"}, 117 | outputs={"node_out": "my_table"}, 118 | parameters={"limit": 2}, 119 | title="my node", 120 | id="ab234567", 121 | ).assert_dump(after) 122 | 123 | 124 | def test_add_component_with_all_fields(tmp_path: Path): 125 | before = """ 126 | title: graph 127 | functions: 128 | - webhook: hook 129 | """ 130 | after = """ 131 | title: graph 132 | functions: 133 | - webhook: hook 134 | - uses: org/component@v1 135 | trigger: 1 * * * * 136 | inputs: 137 | node_in: hook 138 | outputs: 139 | node_out: my_table 140 | parameters: 141 | limit: 2 142 | title: my node 143 | id: ab234567 144 | """ 145 | get_editor(tmp_path, before).add_component_uses( 146 | "org/component@v1", 147 | trigger="1 * * * *", 148 | inputs={"node_in": "hook"}, 149 | outputs={"node_out": "my_table"}, 150 | parameters={"limit": 2}, 151 | title="my node", 152 | id="ab234567", 153 | ).assert_dump(after) 154 | 155 | 156 | def test_add_missing_node_ids(tmp_path: Path): 157 | before = """ 158 | functions: 159 | - node_file: a.py 160 | title: a 161 | - node_file: b.py 162 | id: foo 163 | - node_file: c.py 164 | stores: 165 | - table: t 166 | - stream: s 167 | """ 168 | after = """ 169 | functions: 170 | - node_file: a.py 171 | title: a 172 | id: 173 | - node_file: b.py 174 | id: 175 | - node_file: 
c.py 176 | id: 177 | stores: 178 | - table: t 179 | id: 180 | - stream: s 181 | id: 182 | """ 183 | editor = get_editor(tmp_path, before).add_missing_node_ids() 184 | dump = editor.assert_dump(after) 185 | assert "id: foo" in dump 186 | 187 | 188 | def get_editor(tmp_path: Path, s: str) -> "_EditorTester": 189 | f = tmp_path / "graph.yml" 190 | s = textwrap.dedent(s).strip() 191 | f.write_text(s) 192 | return _EditorTester(f) 193 | 194 | 195 | class _EditorTester(GraphConfigEditor): 196 | def assert_dump(self, s: str) -> str: 197 | s = textwrap.dedent(s).strip() 198 | dump = self.dump().strip() 199 | if "" in s: 200 | dump = re.sub(r"id: \w+", "id: ", dump) 201 | assert dump == s 202 | return self.dump().strip() 203 | -------------------------------------------------------------------------------- /patterns/cli/services/login.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import base64 4 | import dataclasses 5 | import hashlib 6 | import os 7 | import urllib.parse 8 | from socketserver import BaseRequestHandler 9 | from typing import Tuple 10 | from urllib.parse import ParseResult 11 | 12 | import requests 13 | 14 | from patterns.cli.config import update_devkit_config 15 | from patterns.cli.services.api import ( 16 | AuthServer, 17 | get_auth_server, 18 | ) 19 | from patterns.cli.services.auth import ( 20 | LOCAL_OAUTH_PORT, 21 | BaseOAuthRequestHandler, 22 | execute_oauth_flow, 23 | ) 24 | from patterns.cli.services.output import sprint 25 | 26 | 27 | @dataclasses.dataclass 28 | class LoginConfig: 29 | auth_server: AuthServer 30 | state: str 31 | code_verifier: str 32 | redirect_url: str 33 | 34 | 35 | def make_login_config() -> Tuple[str, LoginConfig]: 36 | auth_server = get_auth_server() 37 | 38 | # The code_verifier and code_challenge are part of the OAuth PKCE spec. 39 | # https://datatracker.ietf.org/doc/html/rfc7636#section-4.1 40 | # https://developer.okta.com/blog/2019/08/22/okta-authjs-pkce 41 | # 42 | # We make some random bits (code_verifier), hash it (code_challenge) and send 43 | # the challenge with our initial authorize request. When the user is redirected 44 | # back, we post the unhashed value (code_verifier) when obtaining the 45 | # token. The server then hashes the code_verifier to match the initial value that 46 | # was sent by the redirected client. 47 | # 48 | # This flow stands in the place of having a client secret, since the devkit is 49 | # Open Source we cannot use client secret based auth. 
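# For a concrete pair, see the worked example in RFC 7636 appendix B: the verifier # "dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk" hashes (SHA-256, then # base64url without padding) to the challenge # "E9Melhoa2OwvFrEMTJguCHaoeK1t8URWbuGJSstw-cM".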
50 | # 51 | # The encoded verifier only needs 32 bytes of entropy, but we use 33 because 52 | # otherwise we end up with padding on our base64 encoded value 53 | code_verifier = base64.urlsafe_b64encode(os.urandom(33)).decode("utf-8") 54 | 55 | code_challenge = hashlib.sha256(code_verifier.encode("utf-8")).digest() 56 | code_challenge = base64.urlsafe_b64encode(code_challenge).decode("utf-8") 57 | # Remove padding (per Auth0 samples) 58 | code_challenge = code_challenge.replace("=", "") 59 | 60 | # Random state is sent with the initial request and received on the redirect 61 | # and is supposed to mitigate CSRF attacks: 62 | # https://auth0.com/docs/secure/attack-protection/state-parameters 63 | state = os.urandom(50).hex() 64 | 65 | redirect_url = ( 66 | f"http://localhost:{LOCAL_OAUTH_PORT}{LoginRequestHandler.handled_path}" 67 | ) 68 | 69 | params = { 70 | "response_type": "code", 71 | "code_challenge_method": "S256", 72 | "code_challenge": code_challenge, 73 | "client_id": auth_server.devkit_client_id, 74 | "redirect_uri": redirect_url, 75 | "scope": "profile email openid offline_access", 76 | "audience": auth_server.audience, 77 | "state": state, 78 | } 79 | query = urllib.parse.urlencode(params) 80 | url = f"https://{auth_server.domain}/authorize?{query}" 81 | 82 | login_config = LoginConfig( 83 | auth_server=auth_server, 84 | state=state, 85 | code_verifier=code_verifier, 86 | redirect_url=redirect_url, 87 | ) 88 | return url, login_config 89 | 90 | 91 | def login(url: str, login_config: LoginConfig): 92 | def on_request(handler: BaseRequestHandler): 93 | handler._login_config = login_config 94 | 95 | execute_oauth_flow(url, LoginRequestHandler, on_request) 96 | 97 | 98 | class LoginRequestHandler(BaseOAuthRequestHandler): 99 | handled_path: str = "/auth_callback" 100 | _login_config: LoginConfig = None 101 | 102 | def handle_callback(self, parsed_url: ParseResult): 103 | qs = urllib.parse.parse_qs(parsed_url.query) 104 | if not (code := self.get_single_queryparam("code", qs)): 105 | return 106 | 107 | if not (state := self.get_single_queryparam("state", qs)): 108 | return 109 | 110 | login_config = self._login_config 111 | expected_state = login_config.state 112 | if state != expected_state: 113 | return self.finish_with_error( 114 | 401, 115 | f"An invalid state was returned. Expected {expected_state} but was {state}. Unable to login", 116 | ) 117 | 118 | # Exchange code for access & refresh tokens 119 | response = requests.post( 120 | f"https://{login_config.auth_server.domain}/oauth/token", 121 | json={ 122 | "client_id": login_config.auth_server.devkit_client_id, 123 | "code_verifier": login_config.code_verifier, 124 | "code": code, 125 | "grant_type": "authorization_code", 126 | "redirect_uri": login_config.redirect_url, 127 | }, 128 | ) 129 | response.raise_for_status() 130 | 131 | json = response.json() 132 | if "refresh_token" not in json or "access_token" not in json: 133 | return self.finish_with_error( 134 | 401, f"We did not receive a valid authorization result: {json}" 135 | ) 136 | 137 | update_devkit_config( 138 | auth_server=login_config.auth_server, 139 | refresh=json["refresh_token"], 140 | token=json["access_token"], 141 | ) 142 | 143 | return self.finish_with_success( 144 | "Login successful", "Successfully logged in! You may close this window."
145 | ) 146 | -------------------------------------------------------------------------------- /patterns/cli/services/auth.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import threading 4 | import urllib.parse 5 | import webbrowser 6 | from abc import ABCMeta, abstractmethod 7 | from datetime import timedelta 8 | from http.server import HTTPServer, BaseHTTPRequestHandler 9 | from socketserver import BaseRequestHandler 10 | from typing import Tuple, Callable, Optional, List, Dict, Type 11 | from urllib.parse import ParseResult 12 | 13 | from patterns.cli.services.output import abort, sprint 14 | 15 | LOCAL_OAUTH_PORT = 30420 16 | 17 | # Long-ish timeout. If someone is signing up with an email / password, it can take a while. 18 | REQUEST_TIMEOUT = timedelta(minutes=1) 19 | 20 | 21 | def execute_oauth_flow( 22 | url: str, 23 | request_handler_class: Type[BaseOAuthRequestHandler], 24 | on_request: Callable[[BaseOAuthRequestHandler], None] | None = None, 25 | ): 26 | """Execute an oauth flow, opening a web browser and handling callbacks. 27 | 28 | Parameters: 29 | url: URL to open in the browser 30 | request_handler_class: Class which will handle the web request made by the 31 | browser (for an oauth callback) 32 | on_request: Optional callback which is invoked before the actual handler is run 33 | """ 34 | webbrowser.open(url, new=1, autoraise=True) 35 | 36 | with OAuthHttpServer( 37 | ("localhost", LOCAL_OAUTH_PORT), request_handler_class, on_request 38 | ) as server: 39 | server.serve_forever() 40 | 41 | if server.error_result: 42 | abort(server.error_result) 43 | elif server.success_result: 44 | sprint(f"[success]{server.success_result}\n") 45 | else: 46 | abort("OAuth server finished without an error or success result") 47 | 48 | 49 | class OAuthHttpServer(HTTPServer): 50 | """Utility base class for coordinating between handlers and the CLI""" 51 | 52 | def __init__( 53 | self, 54 | server_address: Tuple[str, int], 55 | request_handler_class: Callable[..., BaseRequestHandler], 56 | on_request_cb: Callable[[BaseRequestHandler], None] | None, 57 | ): 58 | super().__init__(server_address, request_handler_class) 59 | self._on_request_cb = on_request_cb 60 | self.error_result = None 61 | self.success_result = None 62 | 63 | def on_request(self, handler: BaseRequestHandler): 64 | if self._on_request_cb: 65 | self._on_request_cb(handler) 66 | 67 | def finish_with_error(self, error_result: str): 68 | self.error_result = error_result 69 | 70 | def finish_with_success(self, success_result: str): 71 | self.success_result = success_result 72 | 73 | 74 | class BaseOAuthRequestHandler(BaseHTTPRequestHandler, metaclass=ABCMeta): 75 | @property 76 | @abstractmethod 77 | def handled_path(self) -> str: 78 | """The path which this handler will process ('/some_callback')""" 79 | ... 80 | 81 | @abstractmethod 82 | def handle_callback(self, parsed_url: ParseResult): 83 | ... 84 | 85 | @property 86 | def oauth_http_server(self) -> OAuthHttpServer: 87 | # noinspection PyTypeChecker 88 | return self.server 89 | 90 | def do_GET(self): 91 | parsed_url = urllib.parse.urlparse(self.path) 92 | 93 | if parsed_url.path == self.handled_path: 94 | self.oauth_http_server.on_request(self) 95 | self.handle_callback(parsed_url) 96 | else: 97 | # We don't finish_with_error here, because the browser might request 98 | # all kinds of things (CORS OPTIONS or favicon.ico, etc.) in addition 99 | # to the request we really want. 
If there is a real problem here, 100 | # the managing thread will shut down the server 101 | self.send_html_response(404, f"Unhandled path: {parsed_url.path}") 102 | 103 | def get_single_queryparam( 104 | self, param_name: str, params: Dict[str, List[str]] 105 | ) -> Optional[str]: 106 | if ( 107 | param_name not in params 108 | or len(params[param_name]) != 1 109 | or not isinstance(params[param_name][0], str) 110 | ): 111 | self.finish_with_error( 112 | 500, f"Invalid {param_name} in response: {params.get(param_name)}" 113 | ) 114 | return None 115 | return params[param_name][0] 116 | 117 | def send_html_response(self, code: int, html: str): 118 | self.send_response(code) 119 | self.send_header("Content-type", "text/html") 120 | self.end_headers() 121 | 122 | self.wfile.write(bytes(f"{html}", "utf-8")) 123 | 124 | def finish_with_error(self, status_code: int, error_result: str): 125 | self.send_html_response( 126 | status_code, "An error occurred. See the console logs for details." 127 | ) 128 | self.oauth_http_server.finish_with_error(error_result) 129 | self._shutdown_self() 130 | 131 | def finish_with_success(self, success_result: str, success_browser_html: str): 132 | self.send_html_response(201, success_browser_html) 133 | self.oauth_http_server.finish_with_success(success_result) 134 | self._shutdown_self() 135 | 136 | def _shutdown_self(self): 137 | # Need to shut down in a separate thread since `shutdown` blocks 138 | threading.Thread(target=self.oauth_http_server.shutdown, daemon=True).start() 139 | 140 | def log_request(self, code: int | str = ..., size: int | str = ...) -> None: 141 | # Override with a noop to prevent printing request urls that come in 142 | pass 143 | -------------------------------------------------------------------------------- /assets/logo.svg: -------------------------------------------------------------------------------- 1 | (SVG logo; markup omitted) -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
7 | Patterns - Build data systems from reusable SQL and Python components

9 | 10 | --- 11 | 12 | ### What is the Patterns Devkit? 13 | 14 | The Patterns Devkit is a CLI and lightweight SDK to build, version, and deploy data graphs made of reusable SQL and Python nodes. It helps you: 15 | - Scaffold apps (graphs) and nodes quickly 16 | - Define connections between nodes and storage tables in `graph.yml` 17 | - Manage secrets and configuration 18 | - Upload, list, and trigger runs in the Patterns platform 19 | 20 | Documentation: `https://www.patterns.app/docs/devkit` 21 | 22 | ### Features 23 | - Create graphs and nodes (`python`, `sql`, subgraphs) from the CLI 24 | - Describe graph topology declaratively in `graph.yml` 25 | - Write nodes using `patterns.Table`, `patterns.Parameter`, and `patterns.State` 26 | - Manage secrets, auth, and uploads to the Patterns platform 27 | - Trigger and inspect graphs remotely 28 | 29 | ### Installation 30 | 31 | `pip install patterns-devkit` 32 | 33 | ### Quickstart: Build a Leads Ingestion and Scoring Graph 34 | 35 | 1) Create an app (graph) 36 | 37 | ```bash 38 | patterns create app my-leads-app 39 | cd my-leads-app 40 | ``` 41 | 42 | This creates: 43 | 44 | ```text 45 | my-leads-app/ 46 | graph.yml 47 | ``` 48 | 49 | 2) Add two Python nodes 50 | 51 | ```bash 52 | patterns create node --title "Ingest Leads" ingest_leads.py 53 | patterns create node --title "Score Leads" score_leads.py 54 | ``` 55 | 56 | This adds: 57 | 58 | ```text 59 | my-leads-app/ 60 | graph.yml 61 | ingest_leads.py 62 | score_leads.py 63 | ``` 64 | 65 | 3) Wire the graph in `graph.yml` 66 | 67 | Open `graph.yml` and connect node inputs/outputs to tables: 68 | 69 | ```yaml 70 | title: Leads Scoring 71 | 72 | stores: 73 | - table: raw_leads 74 | - table: scored_leads 75 | 76 | functions: 77 | - node_file: ingest_leads.py 78 | title: Ingest Leads 79 | trigger: manual 80 | outputs: 81 | leads: raw_leads 82 | 83 | - node_file: score_leads.py 84 | title: Score Leads 85 | inputs: 86 | leads: raw_leads 87 | outputs: 88 | scored: scored_leads 89 | ``` 90 | 91 | 4) Implement the nodes 92 | 93 | `ingest_leads.py` (writes raw leads): 94 | 95 | ```python 96 | from patterns import Table, Parameter 97 | 98 | def run(): 99 | # Optionally parameterize where to ingest from 100 | source = Parameter("leads_source", description="Lead source label", type=str, default="marketing_form") 101 | 102 | raw_leads = Table("raw_leads", mode="w", description="Raw inbound leads") 103 | # Provide schema and helpful ordering for downstream streaming if desired 104 | raw_leads.init( 105 | schema={"id": "Text", "email": "Text", "source": "Text", "created_at": "Datetime"}, 106 | unique_on="id", 107 | add_created="created_at", 108 | ) 109 | 110 | # Replace this with real ingestion (API/CSV/etc.) 
111 |     sample = [ 112 |         {"id": "L-001", "email": "user1@example.com", "source": source}, 113 |         {"id": "L-002", "email": "user2@corp.com", "source": source}, 114 |         {"id": "L-003", "email": "ceo@enterprise.com", "source": source}, 115 |     ] 116 |     raw_leads.upsert(sample) 117 | ``` 118 | 119 | `score_leads.py` (reads raw leads, writes scored leads): 120 | 121 | ```python 122 | from patterns import Table 123 | 124 | def lead_score(email: str) -> float: 125 |     # Simple heuristic: enterprise domains score higher 126 |     domain = email.split("@")[-1].lower() 127 |     if domain.endswith("enterprise.com"): 128 |         return 0.95 129 |     if domain.endswith("corp.com"): 130 |         return 0.8 131 |     return 0.4 132 | 133 | def run(): 134 |     raw = Table("raw_leads")  # read mode by default 135 |     scored = Table("scored_leads", "w")  # write mode 136 |     scored.init( 137 |         schema={"id": "Text", "email": "Text", "score": "Float", "created_at": "Datetime"}, 138 |         unique_on="id", 139 |         add_created="created_at", 140 |     ) 141 | 142 |     rows = raw.read()  # list[dict] or dataframe if configured 143 |     for r in rows: 144 |         r["score"] = lead_score(r["email"]) 145 |     scored.upsert(rows) 146 | ``` 147 | 148 | 5) Visualize the example graph topology 149 | 150 | ```mermaid 151 | flowchart TD 152 |     A["Ingest Leads (Python)"] -->|raw_leads| B["Score Leads (Python)"] 153 |     B -->|scored_leads| C[(scored_leads)] 154 | ``` 155 | 156 | 6) Authenticate and upload 157 | 158 | - Sign up or sign in at `https://studio.patterns.app` 159 | - Authenticate the CLI: 160 | 161 | ```bash 162 | patterns login 163 | ``` 164 | 165 | - Upload your graph: 166 | 167 | ```bash 168 | patterns upload 169 | ``` 170 | 171 | 7) Trigger runs 172 | 173 | ```bash 174 | # Trigger any node by title or id (see list commands below to find ids) 175 | patterns trigger node "Ingest Leads" 176 | patterns trigger node "Score Leads" 177 | ``` 178 | 179 | ### Command overview 180 | 181 | - `patterns create app <path>`: scaffold a new app directory with `graph.yml` 182 | - `patterns create node <path>`: add a function node (Python/SQL/subgraph) 183 | - `patterns create node --type table <name>`: add a table store 184 | - `patterns create secret <name>`: create an organization secret 185 | - `patterns upload`: upload current app to the platform 186 | - `patterns list apps|organizations|secrets|webhooks`: list resources 187 | - `patterns trigger node <node>`: manually trigger a node 188 | - `patterns download <app>`: download app contents from the platform 189 | - `patterns update`: update local metadata from remote 190 | - `patterns delete <app>`: delete remote resources 191 | - `patterns config --json`: print CLI configuration 192 | - `patterns login` / `patterns logout`: authenticate the CLI 193 | 194 | See full help: 195 | 196 | ``` 197 | patterns --help 198 | ``` 199 | 200 | ### Node development APIs (Python) 201 | Nodes use a small SDK provided by the platform when running: 202 | - `Table(name, mode="r"|"w")`: read/write table abstraction.
Common methods: 203 | - `init(schema=..., unique_on=..., add_created=..., add_monotonic_id=...)` 204 | - `read(as_format="records"|"dataframe", chunksize=...)` 205 | - `read_sql(sql, ...)` 206 | - `append(records)`, `upsert(records)`, `replace(records)`, `truncate()`, `flush()` 207 | - `Parameter(name, description=None, type=str|int|float|bool|datetime|date|list, default=MISSING)`: declare runtime parameters 208 | - `State`: simple key-value state for long-running or iterative jobs 209 | 210 | For more, visit the docs: `https://docs.patterns.app/docs/node-development/python/` 211 | 212 | ### Tips 213 | - Prefer explicit schemas on write tables via `Table.init` to control types and indexes 214 | - Use `unique_on` and `upsert` to deduplicate reliably 215 | - Add `add_created` or `add_monotonic_id` to enable robust downstream streaming 216 | - Keep node code small, composable, and parameterized for reuse 217 | 218 | ### License 219 | 220 | BSD-3-Clause (see `LICENSE`) 221 | -------------------------------------------------------------------------------- /tests/configuration/test_directory_editor.py: -------------------------------------------------------------------------------- 1 | import io 2 | import re 3 | import textwrap 4 | import zipfile 5 | from pathlib import Path 6 | from typing import Dict 7 | 8 | import pytest 9 | 10 | from patterns.cli.configuration.edit import GraphDirectoryEditor, FileOverwriteError 11 | from tests.configuration.utils import setup_graph_files 12 | 13 | 14 | def test_add_new_node(tmp_path: Path): 15 | do_add_zip_test( 16 | tmp_path, 17 | before={"graph.yml": "functions: []"}, 18 | zip={"graph.yml": 'functions: [{"node_file": "node.py"}]', "node.py": "foo"}, 19 | src="node.py", 20 | dst="new.py", 21 | after={ 22 | "graph.yml": """ 23 | functions: 24 | - node_file: new.py 25 | id: 26 | """, 27 | "new.py": "foo", 28 | }, 29 | ) 30 | 31 | 32 | def test_add_unchanged_node(tmp_path: Path): 33 | do_add_zip_test( 34 | tmp_path, 35 | before={"graph.yml": 'functions: [{"node_file": "old.sql"}]', "old.sql": "foo"}, 36 | zip={ 37 | "graph.yml": 'functions: [{"d/node_file": "node.sql"}]', 38 | "d/node.sql": "foo", 39 | }, 40 | src="d/node.sql", 41 | dst="old.sql", 42 | after={"graph.yml": 'functions: [{"node_file": "old.sql"}]', "old.sql": "foo"}, 43 | ) 44 | 45 | 46 | def test_err_add_changed_node(tmp_path: Path): 47 | with pytest.raises(FileOverwriteError) as exc_info: 48 | do_add_zip_test( 49 | tmp_path, 50 | before={ 51 | "graph.yml": 'functions: [{"node_file": "old.sql"}]', 52 | "old.sql": "foo", 53 | }, 54 | zip={ 55 | "graph.yml": 'functions: [{"node_file": "node.sql"}]', 56 | "node.sql": "bar", 57 | }, 58 | src="node.sql", 59 | dst="old.sql", 60 | ) 61 | assert exc_info.value.file_path == tmp_path / "old.sql" 62 | 63 | 64 | def test_overwrite_node(tmp_path: Path): 65 | do_add_zip_test( 66 | tmp_path, 67 | before={"graph.yml": 'functions: [{"node_file": "old.sql"}]', "old.sql": "foo"}, 68 | zip={"graph.yml": 'functions: [{"node_file": "node.sql"}]', "node.sql": "bar"}, 69 | src="node.sql", 70 | dst="old.sql", 71 | after={"graph.yml": 'functions: [{"node_file": "old.sql"}]', "old.sql": "bar"}, 72 | overwrite=True, 73 | ) 74 | 75 | 76 | def test_full_clone(tmp_path: Path): 77 | do_add_zip_test( 78 | tmp_path, 79 | before={}, 80 | zip={"graph.yml": 'functions: [{"node_file": "node.sql"}]', "node.sql": "bar"}, 81 | src="graph.yml", 82 | dst="graph.yml", 83 | after={ 84 | "graph.yml": 'functions: [{"node_file": "node.sql"}]', 85 | "node.sql": "bar", 86 | }, 87 | 
overwrite=True, 88 | ) 89 | 90 | 91 | def test_add_subgraph(tmp_path: Path): 92 | do_add_zip_test( 93 | tmp_path, 94 | before={ 95 | "graph.yml": """ 96 | functions: 97 | - node_file: s.sql 98 | """, 99 | "s.sql": "foo", 100 | }, 101 | zip={ 102 | "graph.yml": 'functions: [{"node_file": "sub/graph.yml"}]', 103 | "sub/graph.yml": 'functions: [{"node_file": "s.sql"}]', 104 | "sub/s.sql": "bar", 105 | }, 106 | src="sub/graph.yml", 107 | dst="new/graph.yml", 108 | after={ 109 | "graph.yml": """ 110 | functions: 111 | - node_file: s.sql 112 | - node_file: new/graph.yml 113 | id: 114 | """, 115 | "s.sql": "foo", 116 | "new/graph.yml": 'functions: [{"node_file": "s.sql"}]', 117 | "new/s.sql": "bar", 118 | }, 119 | ) 120 | 121 | 122 | def test_add_single_file(tmp_path: Path): 123 | before = { 124 | "graph.yml": """ 125 | functions: 126 | - node_file: s.sql 127 | """, 128 | "s.sql": "foo", 129 | } 130 | after = { 131 | "graph.yml": """ 132 | functions: 133 | - node_file: s.sql 134 | - node_file: new.sql 135 | id: 136 | """, 137 | "s.sql": "foo", 138 | "new.sql": "bar", 139 | } 140 | setup_graph_files(tmp_path, before) 141 | editor = GraphDirectoryEditor(tmp_path, overwrite=False) 142 | content = "bar" 143 | editor.add_node_from_file("new.sql", io.BytesIO(content.encode())) 144 | assert_files(tmp_path, after) 145 | 146 | 147 | def test_add_missing_node_ids(tmp_path: Path): 148 | before = { 149 | "graph.yml": """ 150 | functions: 151 | - node_file: s.sql 152 | - node_file: sub/graph.yml 153 | """, 154 | "s.sql": "foo", 155 | "sub/graph.yml": """ 156 | functions: 157 | - node_file: s.sql 158 | """, 159 | "sub/s.sql": "foo", 160 | } 161 | after = { 162 | "graph.yml": """ 163 | functions: 164 | - node_file: s.sql 165 | id: 166 | - node_file: sub/graph.yml 167 | id: 168 | """, 169 | "s.sql": "foo", 170 | "sub/graph.yml": """ 171 | functions: 172 | - node_file: s.sql 173 | id: 174 | """, 175 | "sub/s.sql": "foo", 176 | } 177 | setup_graph_files(tmp_path, before) 178 | editor = GraphDirectoryEditor(tmp_path, overwrite=True) 179 | editor.add_missing_node_ids() 180 | assert_files(tmp_path, after) 181 | 182 | 183 | def do_add_zip_test( 184 | tmp_path: Path, 185 | before: Dict[str, str], 186 | zip: Dict[str, str], 187 | src: str, 188 | dst: str, 189 | after: Dict[str, str] = None, 190 | overwrite: bool = False, 191 | ): 192 | if before: 193 | setup_graph_files(tmp_path, before) 194 | editor = GraphDirectoryEditor(tmp_path, overwrite=overwrite) 195 | with create_zip(zip) as z: 196 | editor.add_node_from_zip(src, dst, z) 197 | if after: 198 | assert_files(tmp_path, after) 199 | 200 | 201 | def assert_files(root: Path, files: Dict[str, str]): 202 | for path, content in files.items(): 203 | p = root / path 204 | assert p.is_file(), f"{p} does not exist" 205 | content = textwrap.dedent(content).strip() 206 | actual = p.read_text().strip() 207 | if path.endswith(".yml"): 208 | actual = re.sub(r"id: \w+", "id: ", actual) 209 | assert actual == content 210 | for p in root.rglob("*"): 211 | name = "/".join(p.relative_to(root).parts) 212 | if p.is_file(): 213 | assert name in files, f"extra file {p}" 214 | 215 | 216 | def create_zip(files: Dict[str, str]) -> zipfile.ZipFile: 217 | b = io.BytesIO() 218 | with zipfile.ZipFile(b, "w") as f: 219 | for path, content in files.items(): 220 | content = textwrap.dedent(content).strip() 221 | f.writestr(path, content) 222 | b.seek(0) 223 | return zipfile.ZipFile(b, "r") 224 | -------------------------------------------------------------------------------- 
/patterns/cli/services/api.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import Any 3 | 4 | import requests 5 | from requests import Response, Session 6 | 7 | from patterns.cli.config import ( 8 | read_devkit_config, 9 | CliConfig, 10 | write_devkit_config, 11 | AuthServer, 12 | ) 13 | from patterns.cli.services.output import abort, abort_on_error 14 | 15 | API_BASE_URL = os.environ.get( 16 | "PATTERNS_API_URL", 17 | "https://api-production.patterns.app/", 18 | ) 19 | 20 | AUTH_TOKEN_ENV_VAR = "PATTERNS_AUTH_TOKEN" 21 | AUTH_TOKEN_PREFIX = "JWT" 22 | 23 | PUBLIC_API_BASE_URL = "api/v1" 24 | 25 | s = requests.Session() 26 | s.headers["Accept"] = "application/json" 27 | 28 | 29 | def _get_api_session() -> Session: 30 | if "Authorization" not in s.headers: 31 | auth_token = _get_auth_token() 32 | s.headers["Authorization"] = f"{AUTH_TOKEN_PREFIX} {auth_token}" 33 | return s 34 | 35 | 36 | def reset_session_auth(): 37 | s.headers.pop("Authorization", None) 38 | 39 | 40 | def get_auth_server() -> AuthServer: 41 | with abort_on_error("Failed getting the auth server"): 42 | resp = requests.get(build_url(API_BASE_URL, Endpoints.TOKEN_AUTHSERVER)) 43 | resp.raise_for_status() 44 | return AuthServer(**resp.json()) 45 | 46 | 47 | def _get_auth_token(base_url: str = API_BASE_URL) -> str: 48 | override = os.environ.get(AUTH_TOKEN_ENV_VAR) 49 | if override: 50 | return override 51 | 52 | cfg = read_devkit_config() 53 | if not cfg.token: 54 | abort("You must be logged in to use this command. Run [code]patterns login") 55 | 56 | with abort_on_error("Failed verifying auth token"): 57 | resp = requests.post( 58 | build_url(base_url, Endpoints.TOKEN_VERIFY), json={"token": cfg.token} 59 | ) 60 | if resp.status_code == 401: 61 | if cfg.refresh: 62 | cfg = _refresh_token(cfg) 63 | else: 64 | resp.raise_for_status() 65 | return cfg.token 66 | 67 | 68 | def _refresh_token(cfg: CliConfig) -> CliConfig: 69 | auth_server = cfg.auth_server 70 | if not auth_server or not cfg.refresh: 71 | abort("Not logged in\n[info]You can log in with [code]patterns login") 72 | 73 | with abort_on_error("Failed to refresh access token"): 74 | resp = requests.post( 75 | f"https://{auth_server.domain}/oauth/token", 76 | data={ 77 | "client_id": auth_server.devkit_client_id, 78 | "refresh_token": cfg.refresh, 79 | "grant_type": "refresh_token", 80 | }, 81 | ) 82 | resp.raise_for_status() 83 | 84 | cfg.token = resp.json()["access_token"] 85 | write_devkit_config(cfg) 86 | 87 | return cfg 88 | 89 | 90 | def get_json( 91 | path: str, 92 | params: dict = None, 93 | session: Session = None, 94 | base_url: str = API_BASE_URL, 95 | **kwargs, 96 | ) -> Any: 97 | resp = get(path, params, session, base_url, **kwargs) 98 | resp.raise_for_status() 99 | return resp.json() 100 | 101 | 102 | def get( 103 | path: str, 104 | params: dict = None, 105 | session: Session = None, 106 | base_url: str = API_BASE_URL, 107 | **kwargs, 108 | ) -> Response: 109 | session = session or _get_api_session() 110 | resp = session.get(build_url(base_url, path), params=params or {}, **kwargs) 111 | resp.raise_for_status() 112 | return resp 113 | 114 | 115 | def post_for_json( 116 | path: str, 117 | json: dict = None, 118 | session: Session = None, 119 | base_url: str = API_BASE_URL, 120 | **kwargs, 121 | ) -> Any: 122 | resp = post(path, json, session, base_url, **kwargs) 123 | resp.raise_for_status() 124 | return resp.json() 125 | 126 | 127 | def post( 128 | path: str, 129 | json: dict = None, 130 | 
session: Session = None, 131 | base_url: str = API_BASE_URL, 132 | **kwargs, 133 | ) -> Response: 134 | session = session or _get_api_session() 135 | resp = session.post(build_url(base_url, path), json=json or {}, **kwargs) 136 | resp.raise_for_status() 137 | return resp 138 | 139 | 140 | def delete( 141 | path: str, 142 | params: dict = None, 143 | session: Session = None, 144 | base_url: str = API_BASE_URL, 145 | **kwargs, 146 | ) -> Response: 147 | session = session or _get_api_session() 148 | resp = session.delete(build_url(base_url, path), params=params or {}, **kwargs) 149 | resp.raise_for_status() 150 | return resp 151 | 152 | 153 | def patch( 154 | path: str, 155 | json: dict = None, 156 | session: Session = None, 157 | base_url: str = API_BASE_URL, 158 | **kwargs, 159 | ) -> Response: 160 | session = session or _get_api_session() 161 | resp = session.patch(build_url(base_url, path), json=json or {}, **kwargs) 162 | resp.raise_for_status() 163 | return resp 164 | 165 | 166 | def build_url(base_url: str, url: str) -> str: 167 | return "/".join((base_url.rstrip("/"), url.lstrip("/"))) 168 | 169 | 170 | class Endpoints: 171 | TOKEN_CREATE = "auth/jwt/create" 172 | TOKEN_AUTHSERVER = f"{PUBLIC_API_BASE_URL}/auth/jwt/authserver" 173 | TOKEN_VERIFY = f"{PUBLIC_API_BASE_URL}/auth/jwt/verify" 174 | ACCOUNTS_ME = f"{PUBLIC_API_BASE_URL}/accounts/me" 175 | ORGANIZATIONS_LIST = f"{PUBLIC_API_BASE_URL}/organizations" 176 | COMPONENTS_CREATE = f"{PUBLIC_API_BASE_URL}/marketplace/components/versions" 177 | 178 | @classmethod 179 | def organization_by_slug(cls, slug: str) -> str: 180 | return f"{PUBLIC_API_BASE_URL}/organizations/slug/{slug}" 181 | 182 | @classmethod 183 | def organization_by_id(cls, organization_uid: str) -> str: 184 | return f"{PUBLIC_API_BASE_URL}/organizations/{organization_uid}" 185 | 186 | @classmethod 187 | def webhooks_list(cls, graph_uid: str) -> str: 188 | return f"{PUBLIC_API_BASE_URL}/graphs/{graph_uid}/webhooks" 189 | 190 | @classmethod 191 | def graphs_list(cls, organization_uid: str) -> str: 192 | return f"{PUBLIC_API_BASE_URL}/organizations/{organization_uid}/graphs" 193 | 194 | @classmethod 195 | def graphs_latest(cls, graph_uid: str) -> str: 196 | return f"{PUBLIC_API_BASE_URL}/graphs/{graph_uid}/latest" 197 | 198 | @classmethod 199 | def graph_update(cls, graph_uid: str) -> str: 200 | return f"{PUBLIC_API_BASE_URL}/graphs/{graph_uid}" 201 | 202 | @classmethod 203 | def graph_version_download(cls, graph_version_uid: str) -> str: 204 | return f"{PUBLIC_API_BASE_URL}/graph_versions/{graph_version_uid}/zip" 205 | 206 | @classmethod 207 | def graph_delete(cls, graph_uid: str) -> str: 208 | return f"{PUBLIC_API_BASE_URL}/graphs/{graph_uid}" 209 | 210 | @classmethod 211 | def component_download(cls, organization: str, component: str, version: str) -> str: 212 | return f"{PUBLIC_API_BASE_URL}/marketplace/components/{organization}/{component}/{version}/zip" 213 | 214 | @classmethod 215 | def component_update(cls, graph_uid: str) -> str: 216 | return f"{PUBLIC_API_BASE_URL}/marketplace/components/graph/{graph_uid}" 217 | 218 | @classmethod 219 | def graph_by_slug(cls, organization_uid: str, slug: str) -> str: 220 | return ( 221 | f"{PUBLIC_API_BASE_URL}/organizations/{organization_uid}/graphs/slug/{slug}" 222 | ) 223 | 224 | @classmethod 225 | def graph_version_by_id(cls, graph_version_uid: str) -> str: 226 | return f"{PUBLIC_API_BASE_URL}/graph_versions/{graph_version_uid}" 227 | 228 | @classmethod 229 | def graph_version_create(cls, organization_uid: str) -> str: 230 | 
return f"{PUBLIC_API_BASE_URL}/organizations/{organization_uid}/graph_versions" 231 | 232 | @classmethod 233 | def trigger_node(cls, graph_uid: str, node_id: str) -> str: 234 | return f"{PUBLIC_API_BASE_URL}/graphs/{graph_uid}/nodes/{node_id}/trigger" 235 | 236 | @classmethod 237 | def org_secrets(cls, organization_uid: str) -> str: 238 | return f"{PUBLIC_API_BASE_URL}/organizations/{organization_uid}/secrets" 239 | -------------------------------------------------------------------------------- /patterns/cli/services/lookup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | from functools import cached_property 5 | from pathlib import Path 6 | from typing import Optional 7 | 8 | import yaml 9 | from requests import HTTPError 10 | 11 | from patterns.cli.config import ( 12 | read_devkit_config, 13 | CliConfig, 14 | update_devkit_config, 15 | ) 16 | from patterns.cli.services.graph_path import resolve_graph_path 17 | from patterns.cli.services.graph_versions import ( 18 | get_graph_by_slug, 19 | get_latest_graph_version, 20 | get_graph_version_by_uid, 21 | get_graph_by_uid, 22 | ) 23 | from patterns.cli.services.organizations import ( 24 | get_organization_by_name, 25 | paginated_organizations, 26 | get_organization_by_id, 27 | ) 28 | from patterns.cli.services.output import prompt_choices, prompt_path 29 | 30 | try: 31 | from yaml import CLoader as Loader, CDumper as Dumper 32 | except ImportError: 33 | from yaml import Loader, Dumper 34 | 35 | 36 | class IdLookup: 37 | """A cached lookup of various ids from whatever info you can give it.""" 38 | 39 | def __init__( 40 | self, 41 | organization_slug: str = None, 42 | graph_path: Path = None, 43 | node_file_path: Path = None, 44 | node_id: str = None, 45 | graph_slug_or_uid_or_path: str = None, 46 | ignore_local_cfg: bool = False, 47 | find_nearest_graph: bool = False, 48 | ): 49 | self._given_org_slug = organization_slug 50 | self._given_node_id = node_id 51 | self._node_file_path = node_file_path 52 | self._ignore_local_cfg = ignore_local_cfg 53 | self._find_nearest_graph = find_nearest_graph 54 | self._given_graph_path = graph_path 55 | self._given_graph_slug_or_uid = None 56 | 57 | if ( 58 | graph_slug_or_uid_or_path 59 | and not self._given_graph_path 60 | and (any(c in graph_slug_or_uid_or_path for c in "./\\")) 61 | ): 62 | self._given_graph_path = Path(graph_slug_or_uid_or_path).absolute() 63 | else: 64 | self._given_graph_slug_or_uid = graph_slug_or_uid_or_path 65 | 66 | @cached_property 67 | def organization_name(self) -> str: 68 | if self._given_org_slug: 69 | return self._given_org_slug 70 | return get_organization_by_id(self.organization_uid)["name"] 71 | 72 | @cached_property 73 | def organization_uid(self) -> str: 74 | if self._given_org_slug: 75 | return get_organization_by_name(self._given_org_slug)["uid"] 76 | if self.cfg.organization_id: 77 | return self.cfg.organization_id 78 | organizations = list(paginated_organizations()) 79 | orgs_by_name = {org["name"]: org for org in organizations} 80 | if len(organizations) == 1: 81 | org = organizations[0] 82 | else: 83 | existing = read_devkit_config().organization_id 84 | default = next( 85 | ( 86 | org["name"] 87 | for org in orgs_by_name.values() 88 | if org["uid"] == existing 89 | ), 90 | ..., 91 | ) 92 | org_name = prompt_choices( 93 | "Available organizations", 94 | "Select an organization", 95 | choices=orgs_by_name.keys(), 96 | default=default, 97 | ) 98 | org = 
orgs_by_name[org_name] 99 | update_devkit_config(organization_id=org["uid"]) 100 | return org["uid"] 101 | 102 | @cached_property 103 | def graph_uid(self) -> str: 104 | if self._given_graph_slug_or_uid: 105 | return self._graph_by_slug_or_uid["uid"] 106 | return get_graph_by_slug(self.organization_uid, self.graph_slug)["uid"] 107 | 108 | @cached_property 109 | def graph_version_uid(self): 110 | return get_latest_graph_version(self.graph_uid)["uid"] 111 | 112 | @cached_property 113 | def node_id(self) -> str: 114 | if self._given_node_id: 115 | return self._given_node_id 116 | 117 | graph = self.graph_file_path 118 | node = self._node_file_path 119 | if not graph or not node: 120 | raise ValueError("Must specify a node") 121 | err_msg = f"Node {node} is not part of the graph at {graph}" 122 | 123 | try: 124 | node_path = node.absolute().relative_to(graph.parent) 125 | except Exception as e: 126 | raise Exception(err_msg) from e 127 | cfg = self._load_yaml(graph) or {} 128 | for function in cfg.get("functions", []): 129 | if function.get("node_file") == node_path.as_posix(): 130 | declared_id = function.get("id") 131 | if declared_id: 132 | return declared_id 133 | raise Exception("Node does not have an id. Run [code]patterns upload") 134 | raise Exception(err_msg) 135 | 136 | @cached_property 137 | def graph_file_path(self) -> Path: 138 | return self.graph_file_path_or_null or _find_graph_file( 139 | given_path=None, prompt=True, nearest=self._find_nearest_graph 140 | ) 141 | 142 | @cached_property 143 | def graph_file_path_or_null(self) -> Path | None: 144 | """Return the file path if possible without prompting""" 145 | if self._given_graph_path: 146 | return resolve_graph_path(self._given_graph_path, exists=True) 147 | if self._node_file_path: 148 | return _find_graph_file( 149 | self._node_file_path.parent, 150 | prompt=False, 151 | nearest=self._find_nearest_graph, 152 | ) 153 | try: 154 | return _find_graph_file( 155 | given_path=None, prompt=False, nearest=self._find_nearest_graph 156 | ) 157 | except ValueError: 158 | return None 159 | 160 | @cached_property 161 | def graph_directory(self) -> Path: 162 | return self.graph_file_path.parent 163 | 164 | @cached_property 165 | def root_graph_file(self) -> Path: 166 | return _find_graph_file(self.graph_directory, nearest=False) 167 | 168 | @cached_property 169 | def cfg(self) -> CliConfig: 170 | if self._ignore_local_cfg: 171 | return CliConfig() 172 | return read_devkit_config() 173 | 174 | @cached_property 175 | def graph_slug(self) -> str: 176 | def from_yaml(): 177 | graph = self._load_yaml(self.root_graph_file) 178 | return graph.get( 179 | "slug", 180 | self.root_graph_file.parent.name.replace("_", "-").replace(" ", "-"), 181 | ) 182 | 183 | if self._given_graph_path or self._node_file_path: 184 | return from_yaml() 185 | if self._given_graph_slug_or_uid: 186 | return self._graph_by_slug_or_uid["slug"] 187 | return from_yaml() 188 | 189 | def _load_yaml(self, path: Path) -> dict: 190 | with open(path) as f: 191 | return yaml.load(f.read(), Loader=Loader) 192 | 193 | @cached_property 194 | def _graph_by_slug_or_uid(self) -> dict: 195 | """Return the graph response json""" 196 | try: 197 | return get_graph_by_slug( 198 | self.organization_uid, self._given_graph_slug_or_uid 199 | ) 200 | except HTTPError: 201 | pass 202 | try: 203 | return get_graph_by_uid(self._given_graph_slug_or_uid) 204 | except HTTPError: 205 | pass 206 | try: 207 | return get_graph_version_by_uid(self._given_graph_slug_or_uid)["graph"] 208 | except HTTPError: 209 | pass 210 | raise Exception(f"No 
graph with slug or id {self._given_graph_slug_or_uid}") 211 | 212 | 213 | def _find_graph_file( 214 | given_path: Optional[Path], prompt: bool = True, nearest: bool = False 215 | ) -> Path: 216 | """Walk up a directory tree looking for a graph 217 | 218 | :param given_path: The location to start the search 219 | :param prompt: If True, ask the user to enter a path if it can't be found 220 | :param nearest: If False, keep walking up until there's no graph.yml in the parent 221 | directory. If True, stop as soon as one is found. 222 | """ 223 | if given_path and given_path.is_file(): 224 | return resolve_graph_path(given_path, exists=True) 225 | if given_path: 226 | path = given_path.absolute() 227 | else: 228 | path = Path(os.getcwd()).absolute() 229 | 230 | found = None 231 | 232 | for _ in range(100): 233 | if not path or path == path.parent: 234 | break 235 | p = path / "graph.yml" 236 | if p.is_file(): 237 | found = p 238 | if nearest: 239 | break 240 | elif found: 241 | break 242 | path = path.parent 243 | if found: 244 | return found 245 | 246 | if prompt: 247 | resp = prompt_path("Enter the path to the graph yaml file", exists=True) 248 | return resolve_graph_path(resp, exists=True) 249 | else: 250 | raise ValueError(f"Cannot find graph.yml{f' at {given_path}' if given_path else ''}") 251 | -------------------------------------------------------------------------------- /patterns/cli/commands/create.py: -------------------------------------------------------------------------------- 1 | import re 2 | from enum import Enum 3 | from pathlib import Path 4 | from typing import Optional 5 | 6 | import typer 7 | from typer import Option, Argument 8 | 9 | from patterns.cli.configuration.edit import GraphConfigEditor 10 | from patterns.cli.helpers import random_node_id 11 | from patterns.cli.services.graph_path import resolve_graph_path 12 | from patterns.cli.services.lookup import IdLookup 13 | from patterns.cli.services.output import abort, prompt_path, abort_on_error, prompt_str 14 | from patterns.cli.services.output import sprint 15 | from patterns.cli.services.secrets import create_secret 16 | 17 | create = typer.Typer(name="create", help="Create a new app or node") 18 | 19 | _name_help = "The name of the app. The location will be used as a name by default" 20 | _location_help = "The directory to create for the app" 21 | 22 | 23 | @create.command() 24 | def app( 25 | name: str = Option("", "--name", "-n", help=_name_help), 26 | location: Path = Argument( 27 | None, metavar="APP", help=_location_help, show_default=False 28 | ), 29 | ): 30 | """Create a new empty app""" 31 | if not location: 32 | prompt = "Enter a name for the new app directory [prompt.default](e.g. my-app)" 33 | location = prompt_path(prompt, exists=False) 34 | with abort_on_error("Error creating app"): 35 | path = resolve_graph_path(location, exists=False) 36 | name = name or location.stem 37 | slug = re.sub("[_ ]+", "-", name) 38 | slug = re.sub("[^a-zA-Z0-9-]+", "", slug).lower() 39 | GraphConfigEditor(path, read=False).set_name(name).set_slug(slug).write() 40 | 41 | sprint(f"\n[success]Created app [b]{name}") 42 | sprint( 43 | f"\n[info]You can add nodes with [code]cd {location}[/code]," 44 | f" then [code]patterns create node[/code]" 45 | ) 46 | 47 | 48 | _app_help = "The slug or directory of the app to add this node to" 49 | _title_help = "The title of the node. 
The location will be used as a title by default" 50 | _component_help = "The name of the component to use to create this node" 51 | _type_help = "The type of node to create" 52 | _location_help = "The file to create for the node (for function nodes), or the name of the component or webhook" 53 | _app_default_help = "the app in the current directory" 54 | 55 | 56 | class _NodeType(str, Enum): 57 | function = "function" 58 | component = "component" 59 | webhook = "webhook" 60 | table = "table" 61 | 62 | 63 | @create.command() 64 | def node( 65 | explicit_app: Path = Option( 66 | # the type annotation is wrong: typer accepts a str 67 | None, "--app", "-a", exists=True, help=_app_help, show_default=_app_default_help 68 | ), 69 | title: str = Option("", "-n", "--title", help=_title_help), 70 | component: str = Option("", "-c", "--component", help=_component_help, hidden=True), 71 | type: _NodeType = Option(_NodeType.function.value, "-t", "--type", help=_type_help), 72 | location: str = Argument("", help=_location_help), 73 | ): 74 | """Add a new node to an app 75 | 76 | patterns create node --title='My Node' mynode.py 77 | patterns create node --type=table my_table 78 | patterns create node --type=webhook my_webhook 79 | patterns create node --type=component patterns/component@v1 80 | """ 81 | # --component option is deprecated 82 | if component and location: 83 | abort("Specify either a component or a node location, not both") 84 | if component: 85 | _add_component_node(explicit_app, component, title) 86 | return 87 | 88 | if type == _NodeType.function: 89 | location = Path(location) if location else location 90 | _add_function_node(explicit_app, location, title) 91 | elif type == _NodeType.component: 92 | _add_component_node(explicit_app, location, title) 93 | elif type == _NodeType.webhook: 94 | _add_webhook_node(explicit_app, location, title) 95 | elif type == _NodeType.table: 96 | if title: 97 | abort("Tables cannot have titles") 98 | _add_table_node(explicit_app, location) 99 | else: 100 | raise NotImplementedError(f"Unexpected node type {type}") 101 | 102 | 103 | def _add_component_node(explicit_app: Optional[Path], component: str, title: str): 104 | if not component: 105 | sprint("[info]Component names look like [code]patterns/component@v1") 106 | component = prompt_str("Enter the name of the component to add") 107 | ids = IdLookup(find_nearest_graph=True, graph_path=explicit_app) 108 | GraphConfigEditor(ids.graph_file_path).add_component_uses( 109 | component_key=component, title=title 110 | ).write() 111 | sprint(f"[success]Added component {component} to app") 112 | 113 | 114 | def _add_table_node(explicit_app: Optional[Path], name: str): 115 | ids = IdLookup(find_nearest_graph=True, graph_path=explicit_app) 116 | GraphConfigEditor(ids.graph_file_path).add_table(name=name).write() 117 | sprint(f"[success]Added table {name} to app") 118 | 119 | 120 | def _add_function_node( 121 | explicit_app: Optional[Path], location: Optional[Path], title: str 122 | ): 123 | if not location: 124 | sprint("[info]Nodes can be python files like [code]ingest.py") 125 | sprint("[info]Nodes can be sql files like [code]aggregate.sql") 126 | sprint("[info]You can also add a subgraph like [code]processor/graph.yml") 127 | location = prompt_path("Enter a name for the new node file", exists=False) 128 | 129 | if location.exists(): 130 | abort(f"Cannot create node: {location} already exists") 131 | 132 | ids = IdLookup( 133 | node_file_path=location, find_nearest_graph=True, graph_path=explicit_app 134 | ) 135 | 
# Update the graph yaml 136 | node_file = "/".join(location.absolute().relative_to(ids.graph_directory).parts) 137 | node_title = title or ( 138 | location.parent.name if location.name == "graph.yml" else location.stem 139 | ) 140 | with abort_on_error("Adding node failed"): 141 | editor = GraphConfigEditor(ids.graph_file_path) 142 | editor.add_node( 143 | title=node_title, 144 | node_file=node_file, 145 | id=str(random_node_id()), 146 | ) 147 | 148 | # Write to disk last to avoid partial updates 149 | if location.suffix == ".py": 150 | location.write_text(_PY_FILE_TEMPLATE) 151 | elif location.suffix == ".sql": 152 | location.write_text(_SQL_FILE_TEMPLATE) 153 | elif location.name == "graph.yml": 154 | location.parent.mkdir(exist_ok=True, parents=True) 155 | GraphConfigEditor(location, read=False).set_name(node_title).write() 156 | else: 157 | abort("Node file must be graph.yml or end in .py or .sql") 158 | editor.write() 159 | 160 | sprint(f"\n[success]Created node [b]{location}") 161 | sprint( 162 | f"\n[info]Once you've edited the node and are ready to run the app, " 163 | f"use [code]patterns upload" 164 | ) 165 | 166 | 167 | # deprecated 168 | @create.command(hidden=True) 169 | def webhook( 170 | explicit_app: Path = Option(None, "--app", "-a", exists=True, help=_app_help), 171 | name: str = Argument(..., help="The name of the webhook output stream"), 172 | ): 173 | """Add a new webhook node to an app""" 174 | _add_webhook_node(explicit_app, name, None) 175 | 176 | 177 | def _add_webhook_node(explicit_app: Optional[Path], name: str, title: Optional[str]): 178 | ids = IdLookup(graph_path=explicit_app) 179 | 180 | if not name: 181 | name = prompt_str("Enter the table that the webhook will write to") 182 | 183 | with abort_on_error("Adding webhook failed"): 184 | editor = GraphConfigEditor(ids.graph_file_path) 185 | editor.add_webhook(name, id=random_node_id(), title=title) 186 | 187 | # Add the output table if it doesn't exist already 188 | if not any(n.get("table") == name for n in editor.store_nodes()): 189 | editor.add_table(name, id=random_node_id()) 190 | 191 | editor.write() 192 | 193 | sprint(f"\n[success]Created webhook [b]{name}") 194 | sprint( 195 | f"\n[info]Once you've uploaded the app, use " 196 | f"[code]patterns list webhooks[/code] to get the url of the webhook" 197 | ) 198 | 199 | 200 | _organization_help = "The Patterns organization to add a secret to" 201 | _secret_name_help = ( 202 | "The name of the secret. Can only contain letters, numbers, and underscores." 203 | ) 204 | _secret_value_help = "The value of the secret" 205 | _secret_desc_help = "A description for the secret" 206 | _sensitive_help = "Mark the secret value as sensitive. This value won't be visible to the UI or devkit." 
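# An illustrative aside (not part of the original file): example invocations of
# the `secret` command defined below, assuming a configured organization. The
# names and values here are hypothetical.
#
#   patterns create secret --sensitive --description "Prod API key" MY_API_KEY abc123
#   patterns create secret -o my-org DB_PASSWORD hunter2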
207 | 208 | 209 | @create.command() 210 | def secret( 211 | organization: str = Option( 212 | "", "-o", "--organization", metavar="SLUG", help=_organization_help 213 | ), 214 | sensitive: bool = Option(False, "--sensitive", "-s", help=_sensitive_help), 215 | description: str = Option( 216 | None, "-d", "--description", help=_secret_desc_help, show_default=False 217 | ), 218 | name: str = Argument(..., help=_secret_name_help), 219 | value: str = Argument(..., help=_secret_value_help), 220 | ): 221 | """Create a new secret value in your organization""" 222 | ids = IdLookup(organization_slug=organization) 223 | 224 | with abort_on_error("Creating secret failed"): 225 | create_secret(ids.organization_uid, name, value, description, sensitive) 226 | sprint(f"\n[success]Created secret [b]{name}") 227 | 228 | 229 | _PY_FILE_TEMPLATE = """ 230 | # Documentation: https://docs.patterns.app/docs/node-development/python/ 231 | 232 | from patterns import ( 233 | Parameter, 234 | State, 235 | Table, 236 | ) 237 | """ 238 | 239 | _SQL_FILE_TEMPLATE = """ 240 | -- Type '{{' to use Tables and Parameters 241 | -- Documentation: https://docs.patterns.app/docs/node-development/sql/ 242 | 243 | select 244 | """ 245 | -------------------------------------------------------------------------------- /patterns/cli/configuration/edit.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import functools 4 | import io 5 | import re 6 | from io import StringIO 7 | from pathlib import Path 8 | from typing import Dict, Any, Optional, Union, IO, Iterator 9 | from zipfile import ZipFile, ZipInfo 10 | 11 | import ruyaml 12 | 13 | from patterns.cli.helpers import compress_directory, random_node_id 14 | from patterns.cli.services.graph_path import resolve_graph_path 15 | 16 | MISSING = object() 17 | 18 | 19 | class GraphConfigEditor: 20 | """Edit a graph.yml file, preserving comments 21 | 22 | By default, constructing an instance of this class will raise an exception if the 23 | yaml file doesn't exist or can't be parsed. 24 | 25 | If you pass `read=False` to the constructor, the file won't be read, and you'll 26 | start with an empty config. 27 | 28 | You can also pass `path_to_graph_yml=None` if you aren't going to write back to 29 | disk. 30 | """ 31 | 32 | def __init__(self, path_to_graph_yml: Optional[Path], read: bool = True): 33 | self._yaml = ruyaml.YAML() 34 | self._path_to_graph_yml = path_to_graph_yml 35 | self._yaml.indent(mapping=2, sequence=4, offset=2) 36 | # read text manually instead of loading the Path directly to normalize line 37 | # breaks. Ruyaml opens files in binary mode (bypassing universal newline 38 | # support), then proceeds to behave incorrectly in the presence of \r\n, adding 39 | # extra line breaks in the output. 40 | if read: 41 | with self._path_to_graph_yml.open() as f: 42 | text = f.read() 43 | self._cfg = self._yaml.load(text) or {} 44 | # ruyaml doesn't provide a way to preserve indentation, 45 | # so pick a value that matches the least-indented list item we see. 
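# An illustrative note (not part of the original file): for yaml text like
#   "functions:\n  - node_file: a.py\n"
# the regex below captures the two spaces before "-", so indent becomes
# 2 + 2 = 4, and the dump settings work out to mapping=2, sequence=4, offset=2.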
46 | matches = ( 47 | len(m.group(1)) for m in re.finditer(r"^( *)-", text, re.MULTILINE) 48 | ) 49 | # ruyaml's indent number includes the length of "- " for some reason 50 | indent = min(matches, default=2) + 2 51 | else: 52 | self._cfg = {} 53 | indent = 4 54 | 55 | self._yaml.indent( 56 | mapping=int(indent / 2), sequence=indent, offset=max(0, indent - 2) 57 | ) 58 | 59 | def write(self): 60 | """Write the config back to the file""" 61 | self._yaml.dump(self._cfg, self._path_to_graph_yml) 62 | 63 | def dump(self) -> str: 64 | """Return the edited config as a yaml string""" 65 | s = StringIO() 66 | self._yaml.dump(self._cfg, s) 67 | return s.getvalue() 68 | 69 | def set_name(self, name: str) -> GraphConfigEditor: 70 | self._cfg["title"] = name 71 | return self 72 | 73 | def get_title(self) -> Optional[str]: 74 | return self._cfg.get("title") 75 | 76 | def set_slug(self, slug: str) -> GraphConfigEditor: 77 | self._cfg["slug"] = slug 78 | return self 79 | 80 | def get_slug(self) -> Optional[str]: 81 | """Return the slug value listed in the yaml, or None if it's not present""" 82 | return self._cfg.get("slug") 83 | 84 | def add_function_node_dict(self, node: dict) -> GraphConfigEditor: 85 | d = {k: v for (k, v) in node.items() if v is not None} 86 | for k in ("node_file", "id", "webhook"): 87 | if ( 88 | k in d 89 | and d[k] 90 | and any(it.get(k) == d[k] for it in self._cfg.get("functions", [])) 91 | ): 92 | raise ValueError( 93 | f"{k} '{d[k]}' already defined in the graph configuration" 94 | ) 95 | 96 | if "functions" not in self._cfg: 97 | self._cfg["functions"] = [] 98 | self._cfg["functions"].append(d) 99 | return self 100 | 101 | def add_node( 102 | self, 103 | node_file: str, 104 | trigger: str = None, 105 | inputs: Dict[str, str] = None, 106 | outputs: Dict[str, str] = None, 107 | parameters: Dict[str, Any] = None, 108 | title: str = None, 109 | id: Optional[str] = MISSING, 110 | description_file: str = None, 111 | ) -> GraphConfigEditor: 112 | if id is MISSING: 113 | id = random_node_id() 114 | self.add_function_node_dict( 115 | { 116 | "node_file": node_file, 117 | "trigger": trigger, 118 | "inputs": inputs, 119 | "outputs": outputs, 120 | "parameters": parameters, 121 | "title": title, 122 | "id": str(id) if id else id, 123 | "description_file": description_file, 124 | } 125 | ) 126 | return self 127 | 128 | def add_table( 129 | self, 130 | name: str, 131 | id: Optional[str] = MISSING, 132 | schema: str = None, 133 | ): 134 | if id is MISSING: 135 | id = random_node_id() 136 | d = { 137 | "table": name, 138 | "id": str(id) if id else id, 139 | "schema": schema, 140 | } 141 | d = {k: v for (k, v) in d.items() if v is not None} 142 | 143 | for k in ("table", "stream", "id"): 144 | if d.get(k) and any( 145 | it.get(k) == d[k] for it in self._cfg.get("stores", []) 146 | ): 147 | raise ValueError( 148 | f"{k} '{d[k]}' already defined in the graph configuration" 149 | ) 150 | 151 | if "stores" not in self._cfg: 152 | self._cfg["stores"] = [] 153 | self._cfg["stores"].append(d) 154 | return self 155 | 156 | def add_webhook( 157 | self, 158 | webhook: str, 159 | title: str = None, 160 | id: Optional[str] = MISSING, 161 | description_file: str = None, 162 | ) -> GraphConfigEditor: 163 | if id is MISSING: 164 | id = random_node_id() 165 | self.add_function_node_dict( 166 | { 167 | "webhook": webhook, 168 | "title": title or None, 169 | "id": str(id) if id else id, 170 | "description_file": description_file, 171 | } 172 | ) 173 | return self 174 | 175 | def add_component_uses( 176 | self, 
177 | component_key: str, 178 | trigger: str = None, 179 | inputs: Dict[str, str] = None, 180 | outputs: Dict[str, str] = None, 181 | parameters: Dict[str, Any] = None, 182 | title: str = None, 183 | id: Optional[str] = MISSING, 184 | description_file: str = None, 185 | ) -> GraphConfigEditor: 186 | if id is MISSING: 187 | id = random_node_id() 188 | self.add_function_node_dict( 189 | { 190 | "uses": component_key, 191 | "trigger": trigger, 192 | "inputs": inputs, 193 | "outputs": outputs, 194 | "parameters": parameters, 195 | "title": title, 196 | "id": str(id) if id else id, 197 | "description_file": description_file, 198 | } 199 | ) 200 | return self 201 | 202 | def add_missing_node_ids(self) -> GraphConfigEditor: 203 | """Add a random id to any node entry that doesn't specify one""" 204 | for node in self.all_nodes(): 205 | if "id" not in node: 206 | node["id"] = random_node_id() 207 | return self 208 | 209 | def all_nodes(self) -> Iterator[dict]: 210 | """Return an iterator over all function and store node declarations""" 211 | yield from self.function_nodes() 212 | yield from self.store_nodes() 213 | 214 | def function_nodes(self) -> Iterator[dict]: 215 | nodes = self._cfg.get("functions") 216 | if not isinstance(nodes, list): 217 | return 218 | 219 | for node in nodes: 220 | if not isinstance(node, dict): 221 | continue 222 | yield node 223 | 224 | def store_nodes(self) -> Iterator[dict]: 225 | nodes = self._cfg.get("stores") 226 | if not isinstance(nodes, list): 227 | return 228 | 229 | for node in nodes: 230 | if not isinstance(node, dict): 231 | continue 232 | yield node 233 | 234 | 235 | class GraphDirectoryEditor: 236 | def __init__(self, graph_path: Path, overwrite: bool = False): 237 | """ 238 | :param graph_path: The path to a graph.yml file, or a directory containing one 239 | :param overwrite: If False, raise an exception in add_node_from_zip if a node 240 | exists and differs from the extracted content. 
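
        A minimal usage sketch (paths and names here are hypothetical)::

            editor = GraphDirectoryEditor(Path("my-app"), overwrite=True)
            editor.add_node_from_zip("graph.yml", "ingest/graph.yml", Path("component.zip"))
            editor.add_missing_node_ids()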
241 | """ 242 | try: 243 | self.yml_path = resolve_graph_path(graph_path, exists=True) 244 | except ValueError: 245 | self.yml_path = resolve_graph_path(graph_path, exists=False) 246 | self.dir = self.yml_path.parent 247 | self.overwrite = overwrite 248 | if self.yml_path.is_file(): 249 | self._cfg = self._editor(self.yml_path) 250 | else: 251 | self._cfg: Optional[GraphConfigEditor] = None 252 | 253 | def graph_slug(self) -> str: 254 | """Return the slug from the graph config if set, otherwise one derived from the directory name""" 255 | if self._cfg and self._cfg.get_slug(): 256 | name = self._cfg.get_slug() 257 | else: 258 | name = self.yml_path.parent.name 259 | return re.sub(r"[^a-zA-Z0-9]", "-", name) 260 | 261 | def compress_directory(self) -> io.BytesIO: 262 | """Return an in-memory zip file containing the compressed graph directory""" 263 | return compress_directory(self.dir) 264 | 265 | def add_node_from_file(self, dst_path: Union[Path, str], file: IO[bytes]): 266 | """Write the content of a file to dst_path 267 | 268 | :param dst_path: Path relative to the output graph directory 269 | :param file: A file-like object open in read mode 270 | """ 271 | dst_path = Path(dst_path) 272 | self._write_file(dst_path, file) 273 | self._add_cfg_node(dst_path) 274 | 275 | def add_node_from_zip( 276 | self, 277 | src_path: Union[Path, str], 278 | dst_path: Union[Path, str], 279 | zf: Union[ZipFile, Path, IO[bytes]], 280 | ) -> GraphDirectoryEditor: 281 | """Copy the node or subgraph located at src_path in zipfile to dst_path 282 | 283 | :param src_path: Path relative to the root of zipfile 284 | :param dst_path: Path relative to the output graph directory 285 | :param zf: A ZipFile open in read mode, or a path or binary file object for a zip file to open 286 | """ 287 | src_path = Path(src_path) 288 | dst_path = Path(dst_path) 289 | if isinstance(zf, ZipFile): 290 | self._add(src_path, dst_path, zf) 291 | else: 292 | with ZipFile(zf, "r") as f: 293 | self._add(src_path, dst_path, f) 294 | return self 295 | 296 | def add_missing_node_ids(self) -> GraphDirectoryEditor: 297 | """Add a random id to any node entry that doesn't specify one 298 | 299 | This will update all graph.yml files in the directory 300 | """ 301 | for editor in self._graph_editors(): 302 | editor.add_missing_node_ids() 303 | editor.write() 304 | return self 305 | 306 | def _add(self, src_path: Path, dst_path: Path, zf: ZipFile): 307 | if src_path.name == "graph.yml": 308 | 309 | def dirname(p): 310 | if len(p.parts) <= 1: 311 | return "" 312 | return _zip_name(p.parent) + "/" 313 | 314 | src_dir = dirname(src_path) 315 | dst_dir = dirname(dst_path) 316 | 317 | for info in zf.infolist(): 318 | if info.filename.startswith(src_dir) and not info.is_dir(): 319 | new_name = dst_dir + info.filename[len(src_dir) :] 320 | self._extract_file(info, Path(new_name), zf) 321 | else: 322 | self._extract_file(zf.getinfo(_zip_name(src_path)), dst_path, zf) 323 | self._add_cfg_node(dst_path) 324 | 325 | def _add_cfg_node(self, dst_path: Path): 326 | if not self._cfg or str(dst_path) == "graph.yml": 327 | return 328 | try: 329 | self._cfg.add_node(_zip_name(dst_path)).write() 330 | except ValueError: 331 | pass # node already exists, leave it unchanged 332 | 333 | def _extract_file(self, member: ZipInfo, dst_path: Path, zf: ZipFile): 334 | full_dst_path = self.dir / dst_path 335 | if full_dst_path.is_dir(): 336 | raise ValueError( 337 | f"Cannot extract {dst_path}: a directory by that name exists" 338 | ) 339 | if self.overwrite or not full_dst_path.is_file(): 340 | member.filename = 
_zip_name(dst_path) 341 | zf.extract(member, self.dir) 342 | else: 343 | with zf.open(member, "r") as f: 344 | self._write_file(dst_path, f) 345 | 346 | def _write_file(self, dst_path: Path, file: IO[bytes]): 347 | full_dst_path = self.dir / dst_path 348 | new_content = io.TextIOWrapper(file).read() 349 | 350 | if not self.overwrite: 351 | try: 352 | old_content = full_dst_path.read_text() 353 | except FileNotFoundError: 354 | pass 355 | else: 356 | if new_content != old_content: 357 | raise FileOverwriteError( 358 | full_dst_path, 359 | f"Cannot extract {dst_path}: would overwrite existing file", 360 | ) 361 | full_dst_path.write_text(new_content) 362 | 363 | def _graph_editors(self) -> Iterator[GraphConfigEditor]: 364 | for p in self.dir.rglob("graph.yml"): 365 | yield self._editor(p) 366 | 367 | @functools.lru_cache(maxsize=None) 368 | def _editor(self, yaml_path: Path) -> GraphConfigEditor: 369 | return GraphConfigEditor(yaml_path) 370 | 371 | 372 | class FileOverwriteError(Exception): 373 | def __init__(self, file_path: Path, message: str) -> None: 374 | super().__init__(message) 375 | self.file_path = file_path 376 | 377 | 378 | def _zip_name(p: Path) -> str: 379 | return "/".join(p.parts) 380 | -------------------------------------------------------------------------------- /patterns/node/node.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from datetime import datetime, date 4 | from typing import Iterator, Any, List, TYPE_CHECKING, Union 5 | from typing import TypeVar, Type 6 | 7 | if TYPE_CHECKING: 8 | try: 9 | from commonmodel import Schema 10 | except ImportError: 11 | Schema = None 12 | try: 13 | from pandas import DataFrame 14 | except ImportError: 15 | DataFrame = None 16 | 17 | 18 | class Stream: 19 | """A stateful view of a Table that supports consuming the table in a 20 | one-record-at-a-time manner in a given ordering, preserving progress 21 | across executions for the given node. 22 | 23 | Example:: 24 | 25 | table = Table("my_table") 26 | stream = table.as_stream(order_by="id") 27 | for record in stream.consume_records(): 28 | print(record) 29 | 30 | # Rewind and the stream will consume from the beginning again 31 | stream.rewind() 32 | for record in stream.consume_records(): 33 | print(record) 34 | 35 | stream.seek(42) 36 | for record in stream.consume_records(): 37 | print(record) # only values *greater* than 42 38 | """ 39 | 40 | @classmethod 41 | def consume_records(cls, with_metadata: bool = False) -> Iterator[dict]: 42 | """Iterate over records in this stream one at a time. 43 | 44 | When a record is yielded it is marked as consumed, regardless of what happens after. 45 | If you want to recover from errors and have the option to re-process records, 46 | you can use ``rollback`` and ``checkpoint`` explicitly in a try / except block. 47 | """ 48 | ... 49 | 50 | def __iter__(self) -> Iterator[dict]: 51 | """Equivalent to ``consume_records``""" 52 | ... 53 | 54 | @classmethod 55 | def checkpoint(cls): 56 | """Save the stream state (which records have been consumed from the iterator) 57 | to disk.""" 58 | ... 59 | 60 | @classmethod 61 | def rollback(cls): 62 | """Roll back stream to beginning of execution or last ``checkpoint``.""" 63 | ... 64 | 65 | @classmethod 66 | def rewind(cls): 67 | """Reset the stream to consume from the beginning again""" 68 | ... 69 | 70 | @classmethod 71 | def seek(cls, value: Any): 72 | """Seek to the given value (of the order_by field). 
73 | 74 | Stream will consume values strictly *greater* than the given value, not including 75 | any record equal to the given value.""" 76 | ... 77 | 78 | @property 79 | def order_by_field(self) -> str: 80 | """Return the ordering field for this stream""" 81 | ... 82 | 83 | 84 | class TableVersion: 85 | """A specific version of a Table, representing an actual database table 86 | that may or may not be stored on disk yet. 87 | 88 | A Table may have many TableVersions, one or zero of which will be active 89 | at any given time.""" 90 | 91 | @property 92 | def name(self) -> str: 93 | """The unqualified name of the table.""" 94 | ... 95 | 96 | @property 97 | def storage(self): 98 | """The dcp Storage object this table is stored on.""" 99 | ... 100 | 101 | @property 102 | def schema(self) -> Schema | None: 103 | """The realized schema of this TableVersion. None if it does not exist on disk.""" 104 | ... 105 | 106 | @property 107 | def record_count(self) -> int | None: 108 | """The record count of this TableVersion. None if it does not exist on disk.""" 109 | ... 110 | 111 | @property 112 | def exists(self) -> bool: 113 | """True if this version exists on disk.""" 114 | ... 115 | 116 | 117 | class Table: 118 | def __init__( 119 | self, 120 | name: str, 121 | mode: str = "r", 122 | description: str = None, 123 | schema: str = None, 124 | required: bool = True, 125 | ): 126 | """Table is a thin abstraction over a database table that 127 | provides a stable reference across versions of the table. 128 | 129 | Args: 130 | name: The Patterns name for the table. The actual database table 131 | on disk will include this name and a hash. 132 | mode: Whether to use the table in "read" mode ("r") or "write" mode ("w") 133 | description: An optional short description of this table 134 | schema: An optional explicit Schema for this table. If not provided the 135 | schema will be inferred, or can be set with the table's `init` method. 136 | required: Whether this table is a required table for the operation of the 137 | node, or is optional. 138 | """ 139 | pass 140 | 141 | @classmethod 142 | def read( 143 | cls, 144 | as_format: str = "records", 145 | chunksize: int | None = None, 146 | ) -> List[dict] | DataFrame | Iterator[List[dict]] | Iterator[DataFrame]: 147 | """Read records from this table. 148 | 149 | Args: 150 | as_format: Format to return records in. Defaults to list of dicts ('records'). 151 | Set to 'dataframe' to get pandas dataframe (equivalent to ``read_dataframe``) 152 | chunksize: If specified, returns an iterator of the requested format in chunks of given size 153 | """ 154 | ... 155 | 156 | @classmethod 157 | def read_dataframe( 158 | cls, 159 | chunksize: int | None = None, 160 | ) -> Iterator[DataFrame] | DataFrame: 161 | """Return records as a pandas dataframe. Equivalent to `.read(as_format='dataframe')` 162 | 163 | Args: 164 | chunksize: If specified, returns an iterator of the dataframes of given size 165 | """ 166 | ... 167 | 168 | @classmethod 169 | def read_sql( 170 | cls, 171 | sql: str, 172 | as_format: str = "records", 173 | chunksize: int | None = None, 174 | ) -> List[dict] | DataFrame | Iterator[List[dict]] | Iterator[DataFrame]: 175 | """Read records resulting from the given sql expression, in the same manner as ``read``. 
176 | 177 | To reference tables in the sql, you can get their current (fully qualified and quoted) 178 | sql name by referencing `.sql_name` or, equivalently, taking their str() representation:: 179 | 180 | my_table = Table("my_table") 181 | my_table.read_sql(f'select * from {my_table} limit 10') 182 | 183 | Args: 184 | sql: The sql select statement to execute 185 | as_format: Format to return records in. Defaults to list of dicts ('records'). 186 | Set to 'dataframe' to get pandas dataframe. 187 | chunksize: If specified, returns an iterator of the requested format in chunks of given size 188 | """ 189 | ... 190 | 191 | @classmethod 192 | def as_stream(cls, order_by: str = None, starting_value: Any = None) -> Stream: 193 | """Return a Stream over the given table that will consume each record in the 194 | table exactly once, in order. 195 | 196 | Progress along the stream is stored in the node's state. A table may have 197 | multiple simultaneous streams with different orderings. The stream is ordered 198 | by the `order_by` parameter if provided, otherwise it defaults to the schema's 199 | `strictly_monotonic_ordering` if defined or its `created_ordering` if defined. 200 | If none of those orderings exist, an exception is thrown. 201 | 202 | To add a convenient ordering to records when writing (if you plan on streaming 203 | the table downstream), you can use `table.init(add_monotonic_id="id")` or 204 | `table.init(add_created="created_at")`. 205 | 206 | Args: 207 | order_by: Optional, the field to order the stream by. If not provided, 208 | defaults to schema-defined orderings 209 | starting_value: Optional, value on the order by field at which to start the stream 210 | 211 | Returns: 212 | Stream object. 213 | """ 214 | ... 215 | 216 | @classmethod 217 | def reset(cls): 218 | """Reset the table. 219 | 220 | No data is deleted on disk, but the active version of the table is reset to None. 221 | """ 222 | ... 223 | 224 | @classmethod 225 | def get_active_version(cls) -> TableVersion | None: 226 | ... 227 | 228 | @classmethod 229 | def has_active_version(cls) -> bool: 230 | ... 231 | 232 | @property 233 | def is_connected(self) -> bool: 234 | """Return True if this table port is connected to a store in the graph. 235 | 236 | Operations on unconnected tables are no-ops and return dummy objects. 237 | """ 238 | ... 239 | 240 | @property 241 | def sql_name(self) -> str | None: 242 | """The fully qualified and quoted sql name of the active table version. 243 | 244 | The table may or may not exist on disk yet. 245 | """ 246 | ... 247 | 248 | @property 249 | def schema(self) -> Schema | None: 250 | """The Schema of the active table version. 251 | 252 | May be None. 253 | """ 254 | ... 255 | 256 | @property 257 | def record_count(self) -> int | None: 258 | """The record count of the active table version. 259 | 260 | May be None. 261 | """ 262 | ... 263 | 264 | @property 265 | def exists(self) -> bool: 266 | """True if the table has been created on disk.""" 267 | ... 268 | 269 | @classmethod 270 | def init( 271 | cls, 272 | schema: Schema | str | dict | None = None, 273 | schema_hints: dict[str, str] | None = None, 274 | unique_on: str | list[str] | None = None, 275 | add_created: str | None = None, 276 | add_monotonic_id: str | None = None, 277 | auto_indexes: bool = True, 278 | ): 279 | """Provide properties for this table that are used when a table version is first created on disk. 
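
        A typical call, sketched with hypothetical field names::

            table = Table("events", "w")
            table.init(
                schema_hints={"value": "Integer"},
                unique_on="id",
                add_created="created_at",
            )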
280 | 281 | Args: 282 | schema: A CommonModel Schema object or str name, or a dictionary of field names to field types 283 | schema_hints: A dictionary of field names to CommonModel field types that are used to override any inferred types. e.g. {"field1": "Text", "field2": "Integer"} 284 | unique_on: A field name or list of field names that records should be unique on. Used by components 285 | to operate efficiently and correctly on the table. 286 | add_created: If specified, is the field name that an "auto_now" timestamp will be added to each 287 | record when `append` or `upsert` is called. This field 288 | will be the default streaming order for the table (by automatically filling the 289 | `created_ordering` role on the associated Schema), but only if add_monotonic_id is NOT specified 290 | and the associated schema defines no monotonic ordering. 291 | add_monotonic_id: If specified, is the field name that a unique, strictly monotonically increasing 292 | base32 string will be added to each record when `append` or `upsert` is called. This field 293 | will be the default streaming order for the table (by automatically filling the 294 | `strictly_monotonic_ordering` role on the associated Schema). 295 | auto_indexes: If true (the default), an index is automatically created on new table 296 | versions for the `unique_on` property 297 | """ 298 | 299 | @classmethod 300 | def append(cls, records: DataFrame | List[dict] | dict): 301 | """Append the records to the end of this table. 302 | 303 | If this is the first write to this table then any schema provided is used to 304 | create the table, otherwise the schema is inferred from the passed-in records. 305 | 306 | Records are buffered and written to disk in batches. To force an immediate write, 307 | call `table.flush()`. 308 | 309 | To replace a table with a new (empty) version and append from there, call 310 | `table.reset()`. 311 | 312 | Args: 313 | records: May be a list of records (list of dicts with str keys), 314 | a single record (dict), or a pandas dataframe. 315 | """ 316 | ... 317 | 318 | @classmethod 319 | def upsert(cls, records: DataFrame | List[dict] | dict): 320 | """Upsert the records into this table, inserting new rows or 321 | updating on unique key conflicts. 322 | 323 | Unique fields must be provided by the Schema or passed to ``init``. If this is 324 | the first write to this table then any schema provided is used to create the table, 325 | otherwise the schema is inferred from the passed-in records. 326 | 327 | Records are buffered and written to disk in batches. To force an immediate write, 328 | call `table.flush()`. 329 | 330 | Args: 331 | records: May be a list of records (list of dicts with str keys), 332 | a single record (dict), or a pandas dataframe. 333 | """ 334 | ... 335 | 336 | @classmethod 337 | def replace(cls, records: DataFrame | List[dict]): 338 | """Replace the current table version (if any) with a new one containing just `records`. 339 | 340 | Equivalent to `table.reset(); table.append(records)`. 341 | 342 | Args: 343 | records: May be a list of records (list of dicts with str keys) 344 | or a pandas dataframe. 345 | """ 346 | 347 | @classmethod 348 | def truncate(cls): 349 | """Truncate this table, preserving the table and schema on disk, but deleting all rows. 350 | 351 | Unlike ``reset``, which sets the active TableVersion to a new version, this action is 352 | destructive and cannot be undone. 353 | """ 354 | ... 
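# An illustrative aside (not part of the original file) contrasting the write
# methods documented above, using a hypothetical table:
#
#   t = Table("users", "w")
#   t.append([{"id": 1}])          # add rows to the active version
#   t.upsert([{"id": 1, "n": 2}])  # insert, or update on unique key conflict
#   t.replace([{"id": 9}])         # new version containing only these rows
#   t.truncate()                   # keep the version, destructively delete all rows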
355 | 356 | @classmethod 357 | def execute_sql(cls, sql: str): 358 | """Execute the given sql against the database this table is stored on. 359 | 360 | The sql is inspected to determine if it creates new tables or only modifies them, 361 | and appropriate events are recorded. The sql should ONLY create or update THIS table. 362 | Creating or updating other tables will result in incorrect event propagation. 363 | 364 | To reference tables in the sql, you can get their current (fully qualified and quoted) 365 | sql name by referencing `.sql_name` or, equivalently, taking their str() representation:: 366 | 367 | my_table = Table("my_table", "w") 368 | my_table.execute_sql(f'create table {my_table} as select 1 as a, 2 as b') 369 | 370 | Args: 371 | sql: Any valid sql statement that creates, inserts, updates, or otherwise alters this table. 372 | """ 373 | ... 374 | 375 | @classmethod 376 | def create_new_version(cls) -> TableVersion: 377 | ... 378 | 379 | @classmethod 380 | def get_active_version(cls) -> TableVersion | None: 381 | ... 382 | 383 | @classmethod 384 | def set_active_version(cls, table: TableVersion): 385 | ... 386 | 387 | @classmethod 388 | def signal_create(cls): 389 | ... 390 | 391 | @classmethod 392 | def signal_update(cls): 393 | ... 394 | 395 | @classmethod 396 | def signal_reset(cls): 397 | ... 398 | 399 | @classmethod 400 | def reset(cls): 401 | """Reset this table to point to a new (null) TableVersion with no Schema or data. 402 | 403 | Schema and data of the previous version still exist on disk until garbage collected according to the 404 | table's retention policy.""" 405 | ... 406 | 407 | @classmethod 408 | def flush(cls): 409 | """Flush any buffered records to disk. 410 | 411 | Calls to table.append and table.upsert are buffered and flushed periodically 412 | and at the end of an execution. Use this method to force an immediate write. 413 | """ 414 | ... 415 | 416 | 417 | class State: 418 | """ 419 | State is a wrapper around a Table that supports quickly storing 420 | and retrieving single values from the database. 421 | """ 422 | 423 | @classmethod 424 | def set(cls, state: dict): 425 | """Replace the whole state dict with the provided one""" 426 | ... 427 | 428 | @classmethod 429 | def set_value(cls, key: str, value: Any): 430 | """Set the given value for the given key on this node's state.""" 431 | ... 432 | 433 | @classmethod 434 | def get(cls) -> dict: 435 | """Get the current state dict""" 436 | ... 437 | 438 | @classmethod 439 | def get_value(cls, key: str, default: Any = None) -> Any: 440 | """Get the latest value from state for this node for the given key. 441 | 442 | Args: 443 | key: key for state value 444 | default: default value if key is not present in state 445 | 446 | Returns: 447 | value from state 448 | """ 449 | ... 450 | 451 | @classmethod 452 | def get_datetime(cls, key: str, default: datetime = None) -> datetime | None: 453 | """Get the latest value from state for the given key and try to cast it 454 | to a python datetime. 455 | 456 | Args: 457 | key: key for state 458 | default: default datetime if key is not present in state 459 | 460 | Returns: 461 | datetime from state or None 462 | """ 463 | ... 464 | 465 | @classmethod 466 | def should_continue( 467 | cls, pct_of_limit: float = None, seconds_till_limit: int = None 468 | ) -> bool: 469 | """Return False if execution is near its hard time limit (10 minutes typically), 470 | otherwise return True. 
471 | 472 | Used to exit gracefully from long-running jobs, typically in conjunction with 473 | ``request_new_run``. Defaults to 80% of limit or 120 seconds before the 474 | hard limit, whichever is greater. 475 | 476 | Args: 477 | pct_of_limit: percent of time limit to trigger at 478 | seconds_till_limit: seconds before time limit to trigger at 479 | """ 480 | ... 481 | 482 | @classmethod 483 | def request_new_run( 484 | cls, trigger_downstream: bool = True, wait_atleast_seconds: int = None 485 | ): 486 | """Request a new run from the server for this node, to be started 487 | once the current execution finishes. 488 | 489 | Often used in conjunction with ``should_continue`` to run long jobs 490 | over multiple executions safely. 491 | 492 | The requested run can be delayed with `wait_atleast_seconds` to space out 493 | the executions. 494 | 495 | Args: 496 | trigger_downstream: Whether the new run should trigger downstream nodes too 497 | wait_atleast_seconds: Time to wait until starting the new run 498 | 499 | """ 500 | ... 501 | 502 | @classmethod 503 | def reset(cls): 504 | """Reset (clear) the state for this node.""" 505 | ... 506 | 507 | 508 | class Connection(dict): 509 | def __init__(self, connection_type: str): 510 | super().__init__() 511 | 512 | 513 | T = TypeVar("T", str, int, float, bool, datetime, date, list, Connection) 514 | 515 | 516 | def Parameter( 517 | name: str, 518 | description: str = None, 519 | type: Union[Type[T], Connection] = str, 520 | default: T = "MISSING", 521 | ) -> T: 522 | """Parameters let a python script take values from the end user / UI. 523 | 524 | Allowed parameter types: 525 | 526 | * str 527 | * int 528 | * float 529 | * bool 530 | * datetime 531 | * date 532 | * list 533 | * Connection 534 | 535 | Args: 536 | name: The parameter name. Must be a valid python identifier. 537 | description: Description / help text 538 | type: should be the actual python type, e.g. `type=str` or `type=datetime` 539 | default: default value. If not set explicitly, the parameter is assumed to be required. 540 | May be set to None 541 | """ 542 | pass 543 | 544 | def respond_to_request(request_key: str, payload: dict): 545 | """Respond with a payload to a request identified by a request_key 546 | 547 | Args: 548 | request_key: The key of the request to respond to. This is the value of the 549 | "patterns_request_key" field added to the table for requests that wait for a 550 | response. 551 | payload: Data that will be json encoded and used as the body of the response. 552 | """ 553 | pass 554 | --------------------------------------------------------------------------------
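# Appendix (an illustrative sketch, not part of the repository): how the stubs
# documented in patterns/node/node.py compose in a node script. The table,
# field, and parameter names here are hypothetical; inside the Patterns runtime
# the stub methods above are replaced with real implementations.

from patterns import Parameter, State, Table

batch_size = Parameter("batch_size", type=int, default=100)
source = Table("raw_events")        # read mode ("r") is the default
sink = Table("clean_events", "w")   # write mode

# Consume the source incrementally; stream progress persists across executions.
stream = source.as_stream(order_by="id")
for i, record in enumerate(stream.consume_records()):
    sink.append(record)
    if i + 1 >= batch_size or not State.should_continue():
        # Stop early and ask the platform for a follow-up run.
        State.request_new_run()
        break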