├── .github └── workflows │ ├── docs-publish.yml │ ├── pr_linting.yml │ └── python-publish.yml ├── .gitignore ├── LICENSE ├── README.md ├── automated_api.py ├── ayon_api ├── __init__.py ├── _api.py ├── constants.py ├── entity_hub.py ├── events.py ├── exceptions.py ├── graphql.py ├── graphql_queries.py ├── operations.py ├── server_api.py ├── typing.py ├── utils.py └── version.py ├── docs ├── .nojekyll ├── Makefile ├── check_docstrings.bat ├── make_api.bat ├── make_html.bat └── source │ ├── _static │ ├── AYON_blackG_dot.svg │ └── favicon.ico │ ├── ayon_api.rst │ ├── conf.py │ └── index.rst ├── poetry.lock ├── pyproject.toml ├── ruff.toml ├── setup.py └── tests ├── __init__.py ├── conftest.py ├── resources ├── addon │ ├── .gitignore │ ├── create_package.py │ ├── package.py │ ├── private │ │ └── ayon-symbol.png │ └── server │ │ └── __init__.py └── ayon-symbol.png ├── test_entity_hub.py ├── test_folder_hierarchy.py ├── test_get_events.py ├── test_graphql_queries.py └── test_server.py /.github/workflows/docs-publish.yml: -------------------------------------------------------------------------------- 1 | name: Build sphinx documentation 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | pull_request: 7 | branches: ["main", "develop"] 8 | 9 | permissions: 10 | contents: write 11 | 12 | jobs: 13 | docs: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v4 17 | - uses: actions/setup-python@v5 18 | with: 19 | python-version: "3.10.x" 20 | - uses: abatilo/actions-poetry@v2 21 | - name: Install dependencies 22 | run: | 23 | poetry install 24 | - name: Sphinx build HTML 25 | run: | 26 | poetry run sphinx-apidoc -f -e -M -o ./docs/source/ ./ayon_api/ 27 | - name: Sphinx build HTML 28 | run: | 29 | poetry run sphinx-build -M html ./docs/source ./docs/build 30 | 31 | - name: Deploy to GitHub Pages 32 | if: github.event_name == 'push' 33 | uses: peaceiris/actions-gh-pages@v4 34 | with: 35 | publish_branch: gh-pages 36 | github_token: ${{ secrets.GITHUB_TOKEN }} 37 
| publish_dir: ./docs/build/html/ 38 | force_orphan: true -------------------------------------------------------------------------------- /.github/workflows/pr_linting.yml: -------------------------------------------------------------------------------- 1 | name: 📇 Code Linting 2 | 3 | on: 4 | push: 5 | branches: [ develop ] 6 | pull_request: 7 | branches: [ develop ] 8 | 9 | workflow_dispatch: 10 | 11 | concurrency: 12 | group: ${{ github.workflow }}-${{ github.event.pull_request.number}} 13 | cancel-in-progress: true 14 | 15 | permissions: 16 | contents: read 17 | pull-requests: write 18 | 19 | jobs: 20 | linting: 21 | runs-on: ubuntu-latest 22 | steps: 23 | - uses: actions/checkout@v4 24 | - uses: chartboost/ruff-action@v1 25 | -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries 3 | 4 | # This workflow uses actions that are not certified by GitHub. 5 | # They are provided by a third-party and are governed by 6 | # separate terms of service, privacy policy, and support 7 | # documentation. 
8 | 9 | name: Upload Python Package 10 | 11 | on: 12 | release: 13 | types: [published] 14 | 15 | permissions: 16 | contents: read 17 | 18 | jobs: 19 | deploy: 20 | 21 | runs-on: ubuntu-latest 22 | 23 | steps: 24 | - uses: actions/checkout@v3 25 | - name: Set up Python 26 | uses: actions/setup-python@v3 27 | with: 28 | python-version: '3.10.x' 29 | - name: Install dependencies 30 | run: | 31 | python -m pip install --upgrade pip 32 | pip install wheel 33 | - name: Build package 34 | run: python setup.py sdist bdist_wheel 35 | - name: Publish package 36 | uses: pypa/gh-action-pypi-publish@release/v1 37 | with: 38 | password: ${{ secrets.PYPI_API_TOKEN }} 39 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | wheels/ 22 | pip-wheel-metadata/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | 53 | # Translations 54 | *.mo 55 | *.pot 56 | 57 | # Django stuff: 58 | *.log 59 | local_settings.py 60 | db.sqlite3 61 | db.sqlite3-journal 62 | 63 | # Flask stuff: 64 | instance/ 65 | .webassets-cache 66 | 67 | # Scrapy stuff: 68 | .scrapy 69 | 70 | # Sphinx documentation 71 | docs/_build/ 72 | 73 | # PyBuilder 74 | target/ 75 | 76 | # Jupyter Notebook 77 | .ipynb_checkpoints 78 | 79 | # IPython 80 | profile_default/ 81 | ipython_config.py 82 | 83 | # pyenv 84 | .python-version 85 | 86 | # pipenv 87 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 88 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 89 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 90 | # install all needed dependencies. 91 | #Pipfile.lock 92 | 93 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 94 | __pypackages__/ 95 | 96 | # Celery stuff 97 | celerybeat-schedule 98 | celerybeat.pid 99 | 100 | # SageMath parsed files 101 | *.sage.py 102 | 103 | # Environments 104 | .env 105 | .venv 106 | env/ 107 | venv/ 108 | ENV/ 109 | env.bak/ 110 | venv.bak/ 111 | .poetry/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | # JetBrains 132 | .idea/ -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # AYON server API 2 | Python client for connection server. The client is using REST and GraphQl to communicate with server with `requests` module. 3 | 4 | AYON Python api should support connection to server with raw REST functions and prepared functionality for work with entities. Must not contain only functionality that can be used with core server functionality. 5 | 6 | Module support singleton connection which is using `AYON_SERVER_URL` and `AYON_API_KEY` environment variables as source for connection. The singleton connection is using `ServerAPI` object. There can be created multiple connection to different server at one time, for that purpose use `ServerAPIBase` object. 
7 | 8 | ## Install 9 | AYON python api is available on PyPi: 10 | 11 | pip install ayon-python-api 12 | 13 | For development purposes you may follow [build](#build-wheel) guide to build and install custom wheels. 14 | 15 | 16 | ## Cloning the repository 17 | Repository does not have submodules or special cases. Clone is simple as: 18 | 19 | git clone git@github.com:ynput/ayon-python-api.git 20 | 21 | 22 | ## Build wheel 23 | For wheel build is required a `wheel` module from PyPi: 24 | 25 | pip install wheel 26 | 27 | Open terminal and change directory to ayon-python-api repository and build wheel: 28 | 29 | cd /ayon-python-api 30 | python setup.py sdist bdist_wheel 31 | 32 | 33 | Once finished a wheel should be created in `./dist/ayon_python_api--py3-none-any`. 34 | 35 | --- 36 | 37 | ### Wheel installation 38 | The wheel file can be used to install using pip: 39 | 40 | pip install /dist/ayon_python_api--py3-none-any 41 | 42 | If pip complain that `ayon-python-api` is already installed just uninstall existing one first: 43 | 44 | pip uninstall ayon-python-api 45 | 46 | 47 | ## TODOs 48 | - Find more suitable name of `ServerAPI` objects (right now is used `con` or `connection`) 49 | - Add all available CRUD operation on entities using REST 50 | - Add folder and task changes to operations 51 | - Enhance entity hub 52 | - Missing docstrings in EntityHub -> especially entity arguments are missing 53 | - Better order of arguments for entity classes 54 | - Move entity hub to first place 55 | - Skip those which are invalid for the entity and fake it for base or remove it from base 56 | - Entity hub should use operations session to do changes 57 | - Entity hub could also handle 'product', 'version' and 'representation' entities 58 | - Missing 'status' on folders 59 | - Missing assignees on tasks 60 | - Pass docstrings and arguments definitions from `ServerAPI` methods to global functions 61 | - Split `ServerAPI` into smaller chunks (somehow), the class has 4k+ lines of 
code 62 | - Add .pyi stub for ServerAPI 63 | - Missing websockets connection 64 | -------------------------------------------------------------------------------- /automated_api.py: -------------------------------------------------------------------------------- 1 | """Create public API functions based on ServerAPI methods. 2 | 3 | Public functions are created in '_api.py' file and imported in '__init_.py'. 4 | The script reads the 'ServerAPI' class and creates functions with the same 5 | signature and docstring in '_api.py' and '__init__.py' with new/removed 6 | functions. 7 | 8 | The script is executed by running 'python automated_api.py' in the terminal. 9 | 10 | TODOs: 11 | Use same signature in api functions as is used in 'ServerAPI' methods. 12 | Right now is used only '(*args, **kwargs)' signature. 13 | Prepare CI or pre-commit hook to run the script automatically. 14 | """ 15 | 16 | import os 17 | import sys 18 | import re 19 | import inspect 20 | import typing 21 | 22 | # Fake modules to avoid import errors 23 | 24 | requests = type(sys)("requests") 25 | requests.__dict__["Response"] = type( 26 | "Response", (), {"__module__": "requests"} 27 | ) 28 | 29 | sys.modules["requests"] = requests 30 | sys.modules["unidecode"] = type(sys)("unidecode") 31 | 32 | import ayon_api # noqa: E402 33 | from ayon_api.server_api import ServerAPI, _PLACEHOLDER # noqa: E402 34 | from ayon_api.utils import NOT_SET # noqa: E402 35 | 36 | EXCLUDED_METHODS = { 37 | "get_default_service_username", 38 | "get_default_settings_variant", 39 | "validate_token", 40 | "set_token", 41 | "reset_token", 42 | "create_session", 43 | "close_session", 44 | "as_username", 45 | "validate_server_availability", 46 | "get_headers", 47 | "login", 48 | "logout", 49 | "set_default_service_username", 50 | } 51 | EXCLUDED_IMPORT_NAMES = {"GlobalContext"} 52 | AUTOMATED_COMMENT = """ 53 | # ------------------------------------------------ 54 | # This content is generated automatically. 
55 | # ------------------------------------------------ 56 | """.strip() 57 | 58 | 59 | # Read init file and remove ._api imports 60 | def prepare_init_without_api(init_filepath): 61 | with open(init_filepath, "r") as stream: 62 | content = stream.read() 63 | 64 | api_regex = re.compile(r"from \._api import \((?P[^\)]*)\)") 65 | api_imports = api_regex.search(content) 66 | start, end = api_imports.span() 67 | api_imports_text = content[start:end] 68 | functions_text = api_imports.group("functions") 69 | function_names = [ 70 | line.strip().rstrip(",") 71 | for line in functions_text.split("\n") 72 | if line.strip() 73 | ] 74 | function_names_q = { 75 | f'"{name}"' for name in function_names 76 | } 77 | 78 | all_regex = re.compile(r"__all__ = \([^\)]*\)") 79 | all_content = all_regex.search(content) 80 | start, end = all_content.span() 81 | all_content_text = content[start:end] 82 | filtered_lines = [] 83 | for line in content[start:end].split("\n"): 84 | found = False 85 | for name in function_names_q: 86 | if name in line: 87 | found = True 88 | break 89 | if not found: 90 | filtered_lines.append(line) 91 | new_all_content_text = ( 92 | "\n".join(filtered_lines).rstrip(") \n") + "\n\n{all_content}\n)" 93 | ) 94 | 95 | return ( 96 | content 97 | .replace(api_imports_text, "{api_imports}") 98 | .replace(all_content_text, new_all_content_text) 99 | ).rstrip("\n") 100 | 101 | 102 | # Creation of _api.py content 103 | def indent_lines(src_str, indent=1): 104 | new_lines = [] 105 | for line in src_str.split("\n"): 106 | if line: 107 | line = f"{' ' * indent}{line}" 108 | new_lines.append(line) 109 | return "\n".join(new_lines) 110 | 111 | 112 | def prepare_docstring(func): 113 | docstring = inspect.getdoc(func) 114 | if not docstring: 115 | return "" 116 | 117 | line_char = "" 118 | if "\n" in docstring: 119 | line_char = "\n" 120 | return f'"""{docstring}{line_char}\n"""' 121 | 122 | 123 | def _get_typehint(annotation, api_globals): 124 | if 
inspect.isclass(annotation): 125 | module_name_parts = list(str(annotation.__module__).split(".")) 126 | module_name_parts.append(annotation.__name__) 127 | module_name_parts.reverse() 128 | options = [] 129 | _name = None 130 | for name in module_name_parts: 131 | if _name is None: 132 | _name = name 133 | options.append(name) 134 | else: 135 | _name = f"{name}.{_name}" 136 | options.append(_name) 137 | 138 | options.reverse() 139 | for option in options: 140 | try: 141 | # Test if typehint is valid for known '_api' content 142 | exec(f"_: {option} = None", api_globals) 143 | return option 144 | except NameError: 145 | pass 146 | 147 | typehint = options[0] 148 | print("Unknown typehint:", typehint) 149 | typehint = f'"{typehint}"' 150 | return typehint 151 | 152 | typehint = ( 153 | str(annotation) 154 | .replace("NoneType", "None") 155 | ) 156 | full_path_regex = re.compile( 157 | r"(?P(?P[a-zA-Z0-9_\.]+))" 158 | ) 159 | for item in full_path_regex.finditer(str(typehint)): 160 | groups = item.groupdict() 161 | name = groups["name"].split(".")[-1] 162 | typehint = typehint.replace(groups["full"], name) 163 | 164 | forwardref_regex = re.compile( 165 | r"(?PForwardRef\('(?P[a-zA-Z0-9]+)'\))" 166 | ) 167 | for item in forwardref_regex.finditer(str(typehint)): 168 | groups = item.groupdict() 169 | name = groups["name"].split(".")[-1] 170 | typehint = typehint.replace(groups["full"], f'"{name}"') 171 | 172 | try: 173 | # Test if typehint is valid for known '_api' content 174 | exec(f"_: {typehint} = None", api_globals) 175 | except NameError: 176 | print("Unknown typehint:", typehint) 177 | typehint = f'"{typehint}"' 178 | return typehint 179 | 180 | 181 | def _get_param_typehint(param, api_globals): 182 | if param.annotation is inspect.Parameter.empty: 183 | return None 184 | return _get_typehint(param.annotation, api_globals) 185 | 186 | 187 | def _add_typehint(param_name, param, api_globals): 188 | typehint = _get_param_typehint(param, api_globals) 189 | if not 
typehint: 190 | return param_name 191 | return f"{param_name}: {typehint}" 192 | 193 | 194 | def _kw_default_to_str(param_name, param, api_globals): 195 | if param.default is inspect.Parameter.empty: 196 | return _add_typehint(param_name, param, api_globals) 197 | 198 | default = param.default 199 | if default is _PLACEHOLDER: 200 | default = "_PLACEHOLDER" 201 | elif default is NOT_SET: 202 | default = "NOT_SET" 203 | elif ( 204 | default is not None 205 | and not isinstance(default, (str, bool, int, float)) 206 | ): 207 | raise TypeError("Unknown default value type") 208 | else: 209 | default = repr(default) 210 | typehint = _get_param_typehint(param, api_globals) 211 | if typehint: 212 | return f"{param_name}: {typehint} = {default}" 213 | return f"{param_name}={default}" 214 | 215 | 216 | def sig_params_to_str(sig, param_names, api_globals, indent=0): 217 | pos_only = [] 218 | pos_or_kw = [] 219 | var_positional = None 220 | kw_only = [] 221 | var_keyword = None 222 | for param_name in param_names: 223 | param = sig.parameters[param_name] 224 | if param.kind == inspect.Parameter.POSITIONAL_ONLY: 225 | pos_only.append((param_name, param)) 226 | elif param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD: 227 | pos_or_kw.append((param_name, param)) 228 | elif param.kind == inspect.Parameter.VAR_POSITIONAL: 229 | var_positional = param_name 230 | elif param.kind == inspect.Parameter.KEYWORD_ONLY: 231 | kw_only.append((param_name, param)) 232 | elif param.kind == inspect.Parameter.VAR_KEYWORD: 233 | var_keyword = param_name 234 | 235 | func_params = [] 236 | body_params = [] 237 | for param_name, param in pos_only: 238 | body_params.append(param_name) 239 | func_params.append(_add_typehint(param_name, param, api_globals)) 240 | 241 | if pos_only: 242 | func_params.append("/") 243 | 244 | for param_name, param in pos_or_kw: 245 | body_params.append(f"{param_name}={param_name}") 246 | func_params.append(_kw_default_to_str(param_name, param, api_globals)) 247 | 248 | 
if var_positional: 249 | body_params.append(f"*{var_positional}") 250 | func_params.append(f"*{var_positional}") 251 | 252 | for param_name, param in kw_only: 253 | body_params.append(f"{param_name}={param_name}") 254 | func_params.append(_kw_default_to_str(param_name, param, api_globals)) 255 | 256 | if var_keyword is not None: 257 | body_params.append(f"**{var_keyword}") 258 | func_params.append(f"**{var_keyword}") 259 | 260 | base_indent_str = " " * indent 261 | param_indent_str = " " * (indent + 4) 262 | 263 | func_params_str = "()" 264 | if func_params: 265 | lines_str = "\n".join([ 266 | f"{param_indent_str}{line}," 267 | for line in func_params 268 | ]) 269 | func_params_str = f"(\n{lines_str}\n{base_indent_str})" 270 | 271 | if sig.return_annotation is not inspect.Signature.empty: 272 | return_typehint = _get_typehint(sig.return_annotation, api_globals) 273 | func_params_str += f" -> {return_typehint}" 274 | 275 | body_params_str = "()" 276 | if body_params: 277 | lines_str = "\n".join([ 278 | f"{param_indent_str}{line}," 279 | for line in body_params 280 | ]) 281 | body_params_str = f"(\n{lines_str}\n{base_indent_str})" 282 | 283 | return func_params_str, body_params_str 284 | 285 | 286 | def prepare_api_functions(api_globals): 287 | functions = [] 288 | for attr_name, attr in ServerAPI.__dict__.items(): 289 | if ( 290 | attr_name.startswith("_") 291 | or attr_name in EXCLUDED_METHODS 292 | or not callable(attr) 293 | ): 294 | continue 295 | 296 | sig = inspect.signature(attr) 297 | param_names = list(sig.parameters) 298 | if inspect.isfunction(attr): 299 | param_names.pop(0) 300 | 301 | func_def_params, func_body_params = sig_params_to_str( 302 | sig, param_names, api_globals 303 | ) 304 | 305 | func_def = f"def {attr_name}{func_def_params}:\n" 306 | 307 | func_body_parts = [] 308 | docstring = prepare_docstring(attr) 309 | if docstring: 310 | func_body_parts.append(docstring) 311 | 312 | func_body_parts.extend([ 313 | "con = get_server_api_connection()", 
def main():
    """Regenerate public API wrappers in '_api.py' and '__init__.py'.

    Rewrites the automated section of 'ayon_api/_api.py' with wrapper
    functions generated from 'ServerAPI' methods and refreshes the
    imports and '__all__' content in 'ayon_api/__init__.py'.

    Raises:
        RuntimeError: When the automated comment marker is missing or
            present multiple times in '_api.py'.

    """
    print("Creating public API functions based on ServerAPI methods")
    # TODO order methods in some order
    dirpath = os.path.dirname(os.path.dirname(
        os.path.abspath(ayon_api.__file__)
    ))
    ayon_api_root = os.path.join(dirpath, "ayon_api")
    init_filepath = os.path.join(ayon_api_root, "__init__.py")
    api_filepath = os.path.join(ayon_api_root, "_api.py")

    print("(1/5) Reading current content of '_api.py'")
    with open(api_filepath, "r") as stream:
        old_content = stream.read()

    parts = old_content.split(AUTOMATED_COMMENT)
    if len(parts) == 1:
        raise RuntimeError(
            "Automated comment not found in '_api.py'"
        )
    if len(parts) > 2:
        raise RuntimeError(
            "Automated comment found multiple times in '_api.py'"
        )

    print("(2/5) Parsing current '__init__.py' content")
    formatting_init_content = prepare_init_without_api(init_filepath)

    # Read content of first part of `_api.py` to get global variables
    # - disable type checking so imports done only during typechecking are
    #   not executed
    old_value = typing.TYPE_CHECKING
    typing.TYPE_CHECKING = False
    api_globals = {"__name__": "ayon_api._api"}
    try:
        exec(parts[0], api_globals)
        for attr_name in dir(__builtins__):
            api_globals[attr_name] = getattr(__builtins__, attr_name)
    finally:
        # Restore even if 'exec' fails so the running interpreter is not
        #   left with typechecking disabled.
        typing.TYPE_CHECKING = old_value

    print("(3/5) Preparing functions body based on 'ServerAPI' class")
    result = prepare_api_functions(api_globals)

    print("(4/5) Store new functions body to '_api.py'")
    new_content = f"{parts[0]}{AUTOMATED_COMMENT}\n{result}"
    with open(api_filepath, "w") as stream:
        print(new_content, file=stream)

    # Find all functions and classes available in '_api.py'
    # - named group 'name' is required, it is accessed below with
    #   'match.group("name")'
    func_regex = re.compile(r"^(def|class) (?P<name>[^\(]*)(\(|:).*")
    func_names = []
    for line in new_content.split("\n"):
        match = func_regex.search(line)
        if match:
            name = match.group("name")
            if name.startswith("_") or name in EXCLUDED_IMPORT_NAMES:
                continue
            func_names.append(name)

    print("(5/5) Updating imports in '__init__.py'")
    import_lines = ["from ._api import ("]
    for name in func_names:
        import_lines.append(f"    {name},")
    import_lines.append(")")

    all_lines = [
        f'    "{name}",'
        for name in func_names
    ]
    new_init_content = formatting_init_content.format(
        api_imports="\n".join(import_lines),
        all_content="\n".join(all_lines),
    )

    with open(init_filepath, "w") as stream:
        print(new_init_content, file=stream)

    print("Public API functions created successfully")
31 | set_environments, 32 | get_server_api_connection, 33 | get_default_settings_variant, 34 | get_base_url, 35 | get_rest_url, 36 | get_ssl_verify, 37 | set_ssl_verify, 38 | get_cert, 39 | set_cert, 40 | get_timeout, 41 | set_timeout, 42 | get_max_retries, 43 | set_max_retries, 44 | is_service_user, 45 | get_site_id, 46 | set_site_id, 47 | get_client_version, 48 | set_client_version, 49 | set_default_settings_variant, 50 | get_sender, 51 | set_sender, 52 | get_sender_type, 53 | set_sender_type, 54 | get_info, 55 | get_server_version, 56 | get_server_version_tuple, 57 | get_users, 58 | get_user_by_name, 59 | get_user, 60 | raw_post, 61 | raw_put, 62 | raw_patch, 63 | raw_get, 64 | raw_delete, 65 | post, 66 | put, 67 | patch, 68 | get, 69 | delete, 70 | get_event, 71 | get_events, 72 | update_event, 73 | dispatch_event, 74 | delete_event, 75 | enroll_event_job, 76 | get_activities, 77 | get_activity_by_id, 78 | create_activity, 79 | update_activity, 80 | delete_activity, 81 | download_file_to_stream, 82 | download_file, 83 | upload_file_from_stream, 84 | upload_file, 85 | upload_reviewable, 86 | trigger_server_restart, 87 | query_graphql, 88 | get_graphql_schema, 89 | get_server_schema, 90 | get_schemas, 91 | get_attributes_schema, 92 | reset_attributes_schema, 93 | set_attribute_config, 94 | remove_attribute_config, 95 | get_attributes_for_type, 96 | get_attributes_fields_for_type, 97 | get_default_fields_for_type, 98 | get_addons_info, 99 | get_addon_endpoint, 100 | get_addon_url, 101 | download_addon_private_file, 102 | get_installers, 103 | create_installer, 104 | update_installer, 105 | delete_installer, 106 | download_installer, 107 | upload_installer, 108 | get_dependency_packages, 109 | create_dependency_package, 110 | update_dependency_package, 111 | delete_dependency_package, 112 | download_dependency_package, 113 | upload_dependency_package, 114 | delete_addon, 115 | delete_addon_version, 116 | upload_addon_zip, 117 | get_bundles, 118 | create_bundle, 119 
| update_bundle, 120 | check_bundle_compatibility, 121 | delete_bundle, 122 | get_project_anatomy_presets, 123 | get_default_anatomy_preset_name, 124 | get_project_anatomy_preset, 125 | get_built_in_anatomy_preset, 126 | get_build_in_anatomy_preset, 127 | get_project_root_overrides, 128 | get_project_roots_by_site, 129 | get_project_root_overrides_by_site_id, 130 | get_project_roots_for_site, 131 | get_project_roots_by_site_id, 132 | get_project_roots_by_platform, 133 | get_addon_settings_schema, 134 | get_addon_site_settings_schema, 135 | get_addon_studio_settings, 136 | get_addon_project_settings, 137 | get_addon_settings, 138 | get_addon_site_settings, 139 | get_bundle_settings, 140 | get_addons_studio_settings, 141 | get_addons_project_settings, 142 | get_addons_settings, 143 | get_secrets, 144 | get_secret, 145 | save_secret, 146 | delete_secret, 147 | get_rest_project, 148 | get_rest_projects, 149 | get_rest_entity_by_id, 150 | get_rest_folder, 151 | get_rest_folders, 152 | get_rest_task, 153 | get_rest_product, 154 | get_rest_version, 155 | get_rest_representation, 156 | get_project_names, 157 | get_projects, 158 | get_project, 159 | get_folders_hierarchy, 160 | get_folders_rest, 161 | get_folders, 162 | get_folder_by_id, 163 | get_folder_by_path, 164 | get_folder_by_name, 165 | get_folder_ids_with_products, 166 | create_folder, 167 | update_folder, 168 | delete_folder, 169 | get_tasks, 170 | get_task_by_name, 171 | get_task_by_id, 172 | get_tasks_by_folder_paths, 173 | get_tasks_by_folder_path, 174 | get_task_by_folder_path, 175 | create_task, 176 | update_task, 177 | delete_task, 178 | get_products, 179 | get_product_by_id, 180 | get_product_by_name, 181 | get_product_types, 182 | get_project_product_types, 183 | get_product_type_names, 184 | create_product, 185 | update_product, 186 | delete_product, 187 | get_versions, 188 | get_version_by_id, 189 | get_version_by_name, 190 | get_hero_version_by_id, 191 | get_hero_version_by_product_id, 192 | 
get_hero_versions, 193 | get_last_versions, 194 | get_last_version_by_product_id, 195 | get_last_version_by_product_name, 196 | version_is_latest, 197 | create_version, 198 | update_version, 199 | delete_version, 200 | get_representations, 201 | get_representation_by_id, 202 | get_representation_by_name, 203 | get_representations_hierarchy, 204 | get_representation_hierarchy, 205 | get_representations_parents, 206 | get_representation_parents, 207 | get_repre_ids_by_context_filters, 208 | create_representation, 209 | update_representation, 210 | delete_representation, 211 | get_workfiles_info, 212 | get_workfile_info, 213 | get_workfile_info_by_id, 214 | get_thumbnail_by_id, 215 | get_thumbnail, 216 | get_folder_thumbnail, 217 | get_task_thumbnail, 218 | get_version_thumbnail, 219 | get_workfile_thumbnail, 220 | create_thumbnail, 221 | update_thumbnail, 222 | create_project, 223 | update_project, 224 | delete_project, 225 | get_full_link_type_name, 226 | get_link_types, 227 | get_link_type, 228 | create_link_type, 229 | delete_link_type, 230 | make_sure_link_type_exists, 231 | create_link, 232 | delete_link, 233 | get_entities_links, 234 | get_folders_links, 235 | get_folder_links, 236 | get_tasks_links, 237 | get_task_links, 238 | get_products_links, 239 | get_product_links, 240 | get_versions_links, 241 | get_version_links, 242 | get_representations_links, 243 | get_representation_links, 244 | send_batch_operations, 245 | send_activities_batch_operations, 246 | ) 247 | 248 | 249 | __all__ = ( 250 | "__version__", 251 | 252 | "TransferProgress", 253 | "slugify_string", 254 | "create_dependency_package_basename", 255 | "get_user_by_token", 256 | "is_token_valid", 257 | "validate_url", 258 | "login_to_server", 259 | "take_web_action_event", 260 | "abort_web_action_event", 261 | "SortOrder", 262 | 263 | "RequestTypes", 264 | "ServerAPI", 265 | 266 | "GlobalServerAPI", 267 | "ServiceContext", 268 | "init_service", 269 | "get_service_addon_name", 270 | 
"get_service_addon_version", 271 | "get_service_name", 272 | "get_service_addon_settings", 273 | "is_connection_created", 274 | "create_connection", 275 | "close_connection", 276 | "change_token", 277 | "set_environments", 278 | "get_server_api_connection", 279 | "get_default_settings_variant", 280 | "get_base_url", 281 | "get_rest_url", 282 | "get_ssl_verify", 283 | "set_ssl_verify", 284 | "get_cert", 285 | "set_cert", 286 | "get_timeout", 287 | "set_timeout", 288 | "get_max_retries", 289 | "set_max_retries", 290 | "is_service_user", 291 | "get_site_id", 292 | "set_site_id", 293 | "get_client_version", 294 | "set_client_version", 295 | "set_default_settings_variant", 296 | "get_sender", 297 | "set_sender", 298 | "get_sender_type", 299 | "set_sender_type", 300 | "get_info", 301 | "get_server_version", 302 | "get_server_version_tuple", 303 | "get_users", 304 | "get_user_by_name", 305 | "get_user", 306 | "raw_post", 307 | "raw_put", 308 | "raw_patch", 309 | "raw_get", 310 | "raw_delete", 311 | "post", 312 | "put", 313 | "patch", 314 | "get", 315 | "delete", 316 | "get_event", 317 | "get_events", 318 | "update_event", 319 | "dispatch_event", 320 | "delete_event", 321 | "enroll_event_job", 322 | "get_activities", 323 | "get_activity_by_id", 324 | "create_activity", 325 | "update_activity", 326 | "delete_activity", 327 | "download_file_to_stream", 328 | "download_file", 329 | "upload_file_from_stream", 330 | "upload_file", 331 | "upload_reviewable", 332 | "trigger_server_restart", 333 | "query_graphql", 334 | "get_graphql_schema", 335 | "get_server_schema", 336 | "get_schemas", 337 | "get_attributes_schema", 338 | "reset_attributes_schema", 339 | "set_attribute_config", 340 | "remove_attribute_config", 341 | "get_attributes_for_type", 342 | "get_attributes_fields_for_type", 343 | "get_default_fields_for_type", 344 | "get_addons_info", 345 | "get_addon_endpoint", 346 | "get_addon_url", 347 | "download_addon_private_file", 348 | "get_installers", 349 | "create_installer", 
350 | "update_installer", 351 | "delete_installer", 352 | "download_installer", 353 | "upload_installer", 354 | "get_dependency_packages", 355 | "create_dependency_package", 356 | "update_dependency_package", 357 | "delete_dependency_package", 358 | "download_dependency_package", 359 | "upload_dependency_package", 360 | "delete_addon", 361 | "delete_addon_version", 362 | "upload_addon_zip", 363 | "get_bundles", 364 | "create_bundle", 365 | "update_bundle", 366 | "check_bundle_compatibility", 367 | "delete_bundle", 368 | "get_project_anatomy_presets", 369 | "get_default_anatomy_preset_name", 370 | "get_project_anatomy_preset", 371 | "get_built_in_anatomy_preset", 372 | "get_build_in_anatomy_preset", 373 | "get_project_root_overrides", 374 | "get_project_roots_by_site", 375 | "get_project_root_overrides_by_site_id", 376 | "get_project_roots_for_site", 377 | "get_project_roots_by_site_id", 378 | "get_project_roots_by_platform", 379 | "get_addon_settings_schema", 380 | "get_addon_site_settings_schema", 381 | "get_addon_studio_settings", 382 | "get_addon_project_settings", 383 | "get_addon_settings", 384 | "get_addon_site_settings", 385 | "get_bundle_settings", 386 | "get_addons_studio_settings", 387 | "get_addons_project_settings", 388 | "get_addons_settings", 389 | "get_secrets", 390 | "get_secret", 391 | "save_secret", 392 | "delete_secret", 393 | "get_rest_project", 394 | "get_rest_projects", 395 | "get_rest_entity_by_id", 396 | "get_rest_folder", 397 | "get_rest_folders", 398 | "get_rest_task", 399 | "get_rest_product", 400 | "get_rest_version", 401 | "get_rest_representation", 402 | "get_project_names", 403 | "get_projects", 404 | "get_project", 405 | "get_folders_hierarchy", 406 | "get_folders_rest", 407 | "get_folders", 408 | "get_folder_by_id", 409 | "get_folder_by_path", 410 | "get_folder_by_name", 411 | "get_folder_ids_with_products", 412 | "create_folder", 413 | "update_folder", 414 | "delete_folder", 415 | "get_tasks", 416 | "get_task_by_name", 417 | 
"get_task_by_id", 418 | "get_tasks_by_folder_paths", 419 | "get_tasks_by_folder_path", 420 | "get_task_by_folder_path", 421 | "create_task", 422 | "update_task", 423 | "delete_task", 424 | "get_products", 425 | "get_product_by_id", 426 | "get_product_by_name", 427 | "get_product_types", 428 | "get_project_product_types", 429 | "get_product_type_names", 430 | "create_product", 431 | "update_product", 432 | "delete_product", 433 | "get_versions", 434 | "get_version_by_id", 435 | "get_version_by_name", 436 | "get_hero_version_by_id", 437 | "get_hero_version_by_product_id", 438 | "get_hero_versions", 439 | "get_last_versions", 440 | "get_last_version_by_product_id", 441 | "get_last_version_by_product_name", 442 | "version_is_latest", 443 | "create_version", 444 | "update_version", 445 | "delete_version", 446 | "get_representations", 447 | "get_representation_by_id", 448 | "get_representation_by_name", 449 | "get_representations_hierarchy", 450 | "get_representation_hierarchy", 451 | "get_representations_parents", 452 | "get_representation_parents", 453 | "get_repre_ids_by_context_filters", 454 | "create_representation", 455 | "update_representation", 456 | "delete_representation", 457 | "get_workfiles_info", 458 | "get_workfile_info", 459 | "get_workfile_info_by_id", 460 | "get_thumbnail_by_id", 461 | "get_thumbnail", 462 | "get_folder_thumbnail", 463 | "get_task_thumbnail", 464 | "get_version_thumbnail", 465 | "get_workfile_thumbnail", 466 | "create_thumbnail", 467 | "update_thumbnail", 468 | "create_project", 469 | "update_project", 470 | "delete_project", 471 | "get_full_link_type_name", 472 | "get_link_types", 473 | "get_link_type", 474 | "create_link_type", 475 | "delete_link_type", 476 | "make_sure_link_type_exists", 477 | "create_link", 478 | "delete_link", 479 | "get_entities_links", 480 | "get_folders_links", 481 | "get_folder_links", 482 | "get_tasks_links", 483 | "get_task_links", 484 | "get_products_links", 485 | "get_product_links", 486 | 
"get_versions_links", 487 | "get_version_links", 488 | "get_representations_links", 489 | "get_representation_links", 490 | "send_batch_operations", 491 | "send_activities_batch_operations", 492 | ) 493 | -------------------------------------------------------------------------------- /ayon_api/constants.py: -------------------------------------------------------------------------------- 1 | # Environments where server url and api key are stored for global connection 2 | SERVER_URL_ENV_KEY = "AYON_SERVER_URL" 3 | SERVER_API_ENV_KEY = "AYON_API_KEY" 4 | SERVER_TIMEOUT_ENV_KEY = "AYON_SERVER_TIMEOUT" 5 | SERVER_RETRIES_ENV_KEY = "AYON_SERVER_RETRIES" 6 | # Default variant used for settings 7 | DEFAULT_VARIANT_ENV_KEY = "AYON_DEFAULT_SETTINGS_VARIANT" 8 | # Default site id used for connection 9 | SITE_ID_ENV_KEY = "AYON_SITE_ID" 10 | 11 | # Backwards compatibility 12 | SERVER_TOKEN_ENV_KEY = SERVER_API_ENV_KEY 13 | 14 | # --- User --- 15 | DEFAULT_USER_FIELDS = { 16 | "accessGroups", 17 | "defaultAccessGroups", 18 | "name", 19 | "isService", 20 | "isManager", 21 | "isGuest", 22 | "isAdmin", 23 | "createdAt", 24 | "active", 25 | "hasPassword", 26 | "updatedAt", 27 | "apiKeyPreview", 28 | "attrib.avatarUrl", 29 | "attrib.email", 30 | "attrib.fullName", 31 | } 32 | 33 | # --- Folder types --- 34 | DEFAULT_FOLDER_TYPE_FIELDS = { 35 | "name", 36 | "icon", 37 | } 38 | 39 | # --- Task types --- 40 | DEFAULT_TASK_TYPE_FIELDS = { 41 | "name", 42 | } 43 | 44 | # --- Product types --- 45 | DEFAULT_PRODUCT_TYPE_FIELDS = { 46 | "name", 47 | "icon", 48 | "color", 49 | } 50 | 51 | # --- Project --- 52 | DEFAULT_PROJECT_FIELDS = { 53 | "active", 54 | "name", 55 | "code", 56 | "config", 57 | "createdAt", 58 | "data", 59 | "folderTypes", 60 | "taskTypes", 61 | "productTypes", 62 | } 63 | 64 | # --- Folders --- 65 | DEFAULT_FOLDER_FIELDS = { 66 | "id", 67 | "name", 68 | "label", 69 | "folderType", 70 | "path", 71 | "parentId", 72 | "active", 73 | "thumbnailId", 74 | "data", 75 | 
"status", 76 | "tags", 77 | } 78 | 79 | # --- Tasks --- 80 | DEFAULT_TASK_FIELDS = { 81 | "id", 82 | "name", 83 | "label", 84 | "taskType", 85 | "folderId", 86 | "active", 87 | "thumbnailId", 88 | "assignees", 89 | "data", 90 | "status", 91 | "tags", 92 | } 93 | 94 | # --- Products --- 95 | DEFAULT_PRODUCT_FIELDS = { 96 | "id", 97 | "name", 98 | "folderId", 99 | "active", 100 | "productType", 101 | "data", 102 | "status", 103 | "tags", 104 | } 105 | 106 | # --- Versions --- 107 | DEFAULT_VERSION_FIELDS = { 108 | "id", 109 | "name", 110 | "version", 111 | "productId", 112 | "taskId", 113 | "active", 114 | "author", 115 | "thumbnailId", 116 | "createdAt", 117 | "updatedAt", 118 | "data", 119 | "status", 120 | "tags", 121 | } 122 | 123 | # --- Representations --- 124 | DEFAULT_REPRESENTATION_FIELDS = { 125 | "id", 126 | "name", 127 | "context", 128 | "createdAt", 129 | "active", 130 | "versionId", 131 | "data", 132 | "status", 133 | "tags", 134 | "traits", 135 | } 136 | 137 | REPRESENTATION_FILES_FIELDS = { 138 | "files.name", 139 | "files.hash", 140 | "files.id", 141 | "files.path", 142 | "files.size", 143 | } 144 | 145 | # --- Workfile info --- 146 | DEFAULT_WORKFILE_INFO_FIELDS = { 147 | "active", 148 | "createdAt", 149 | "createdBy", 150 | "id", 151 | "name", 152 | "path", 153 | "projectName", 154 | "taskId", 155 | "thumbnailId", 156 | "updatedAt", 157 | "updatedBy", 158 | "data", 159 | "status", 160 | "tags", 161 | } 162 | 163 | DEFAULT_EVENT_FIELDS = { 164 | "id", 165 | "hash", 166 | "createdAt", 167 | "dependsOn", 168 | "description", 169 | "project", 170 | "retries", 171 | "sender", 172 | "status", 173 | "topic", 174 | "updatedAt", 175 | "user", 176 | } 177 | 178 | DEFAULT_LINK_FIELDS = { 179 | "id", 180 | "linkType", 181 | "projectName", 182 | "entityType", 183 | "entityId", 184 | "name", 185 | "direction", 186 | "description", 187 | "author", 188 | } 189 | 190 | DEFAULT_ACTIVITY_FIELDS = { 191 | "activityId", 192 | "activityType", 193 | "activityData", 194 | 
class ServerEvent(object):
    """Payload container of an event for AYON server.

    Holds all fields an event can carry and converts them to the
    dictionary structure the server expects.
    """

    def __init__(
        self,
        topic,
        sender=None,
        event_hash=None,
        project_name=None,
        username=None,
        dependencies=None,
        description=None,
        summary=None,
        payload=None,
        finished=True,
        store=True,
    ):
        self.topic = topic
        self.sender = sender
        self.event_hash = event_hash
        self.project_name = project_name
        self.username = username
        # Fresh containers for unset mutable arguments so instances
        #   never share state.
        self.dependencies = [] if dependencies is None else dependencies
        self.description = description
        self.summary = {} if summary is None else summary
        self.payload = {} if payload is None else payload
        self.finished = finished
        self.store = store

    def to_data(self):
        """Convert event to data sent to server.

        Returns:
            dict: Event content with server-side key names.

        """
        return {
            "topic": self.topic,
            "sender": self.sender,
            "hash": self.event_hash,
            "project": self.project_name,
            "user": self.username,
            "dependencies": copy.deepcopy(self.dependencies),
            "description": self.description,
            "summary": copy.deepcopy(self.summary),
            "payload": self.payload,
            "finished": self.finished,
            "store": self.store
        }
class GraphQlQueryFailed(Exception):
    """Raised when a GraphQl response contains error messages.

    Collects all error messages (with item path and line/column
    locations when available) into a single exception message and keeps
    the failing query with a deep copy of its variables for debugging.
    """

    def __init__(self, errors, query, variables):
        error_messages = []
        for error in errors:
            msg = error["message"]
            path = error.get("path")
            if path:
                # Convert path items to strings before joining
                joined_path = "/".join(str(part) for part in path)
                msg += " on item '{}'".format(joined_path)

            locations = error.get("locations")
            if locations:
                joined_locations = " and ".join(
                    "Line {} Column {}".format(
                        loc["line"], loc["column"]
                    )
                    for loc in locations
                )
                msg += " ({})".format(joined_locations)

            error_messages.append(msg)

        message = "GraphQl query Failed"
        if error_messages:
            message = "{}: {}".format(message, " | ".join(error_messages))

        self.errors = errors
        self.query = query
        # Copy variables so later mutation by caller does not change them
        self.variables = (
            {} if variables is None else copy.deepcopy(variables)
        )
        super(GraphQlQueryFailed, self).__init__(message)
class MissingEntityError(Exception):
    """Base class for errors caused by an entity missing on server."""
    pass


class ProjectNotFound(MissingEntityError):
    """Queried project does not exist on server.

    Args:
        project_name (str): Name of the project.
        message (Optional[str]): Custom exception message.

    """

    def __init__(self, project_name, message=None):
        self.project_name = project_name
        super(ProjectNotFound, self).__init__(
            message or "Project \"{}\" was not found".format(project_name)
        )


class FolderNotFound(MissingEntityError):
    """Queried folder does not exist in project.

    Args:
        project_name (str): Name of the project.
        folder_id (str): Id of the folder.
        message (Optional[str]): Custom exception message.

    """

    def __init__(self, project_name, folder_id, message=None):
        self.project_name = project_name
        self.folder_id = folder_id
        super(FolderNotFound, self).__init__(
            message or (
                "Folder with id \"{}\" was not found in project \"{}\""
            ).format(folder_id, project_name)
        )


class FailedOperations(Exception):
    """Batch operations request failed."""
    pass


class FailedServiceInit(Exception):
    """Service connection could not be initialized."""
    pass
def add_links_fields(entity_field, nested_fields):
    """Add 'links' sub-query with filters to an entity field.

    Pops the "links" entry from 'nested_fields' (the caller's dict is
    mutated) and, when present, adds a "links" edges field with link
    name/type/direction filter variables to 'entity_field'.

    When the requested link fields are all standard edge fields
    (subset of 'DEFAULT_LINK_FIELDS'), only edge fields are added.
    Otherwise the nested structure is expanded breadth-first into
    sub-fields.

    Args:
        entity_field: Query field to which the links field is added.
        nested_fields (dict): Nested field structure, presumably output
            of 'fields_to_dict' — values are 'FIELD_VALUE' leafs or
            nested dicts.

    """
    if "links" not in nested_fields:
        return
    links_fields = nested_fields.pop("links")

    link_edge_fields = set(DEFAULT_LINK_FIELDS)
    if isinstance(links_fields, dict):
        simple_fields = set(links_fields)
        # Simple variant: every requested field is a known edge field
        simple_variant = len(simple_fields - link_edge_fields) == 0
    else:
        # Links requested as plain leaf -> use all default edge fields
        simple_variant = True
        simple_fields = link_edge_fields

    link_field = entity_field.add_field_with_edges("links")

    # Filter variables available on the links field
    link_name_var = link_field.add_variable("linkNames", "[String!]")
    link_name_regex_var = link_field.add_variable("linkNameRegex", "String!")
    link_type_var = link_field.add_variable("linkTypes", "[String!]")
    link_dir_var = link_field.add_variable("linkDirection", "String!")
    link_field.set_filter("names", link_name_var)
    link_field.set_filter("nameEx", link_name_regex_var)
    link_field.set_filter("linkTypes", link_type_var)
    link_field.set_filter("direction", link_dir_var)

    if simple_variant:
        for key in simple_fields:
            link_field.add_edge_field(key)
        return

    # Edge fields are added directly, the rest is expanded breadth-first
    query_queue = collections.deque()
    for key, value in links_fields.items():
        if key in link_edge_fields:
            link_field.add_edge_field(key)
            continue
        query_queue.append((key, value, link_field))

    while query_queue:
        item = query_queue.popleft()
        key, value, parent = item
        field = parent.add_field(key)
        if value is FIELD_VALUE:
            continue

        for k, v in value.items():
            query_queue.append((k, v, field))
def product_types_query(fields):
    """Build GraphQl query for all product types on server.

    Args:
        fields (Iterable[str]): Dot separated fields to be queried.

    Returns:
        GraphQlQuery: Prepared product types query.

    """
    query = GraphQlQuery("ProductTypes")
    types_field = query.add_field("productTypes")

    # Expand dot-separated fields breadth-first into nested sub-fields
    fill_queue = collections.deque(
        (key, value, types_field)
        for key, value in fields_to_dict(fields).items()
    )
    while fill_queue:
        key, value, parent = fill_queue.popleft()
        new_field = parent.add_field(key)
        if value is not FIELD_VALUE:
            fill_queue.extend(
                (sub_key, sub_value, new_field)
                for sub_key, sub_value in value.items()
            )
    return query
def folders_graphql_query(fields):
    """Build GraphQl query for folders with all supported filters.

    Args:
        fields (Iterable[str]): Dot separated fields to be queried.

    Returns:
        GraphQlQuery: Prepared folders query with filter variables.

    """
    query = GraphQlQuery("FoldersQuery")
    # Filter variables — callers fill only the ones they need
    project_name_var = query.add_variable("projectName", "String!")
    folder_ids_var = query.add_variable("folderIds", "[String!]")
    parent_folder_ids_var = query.add_variable("parentFolderIds", "[String!]")
    folder_paths_var = query.add_variable("folderPaths", "[String!]")
    folder_path_regex_var = query.add_variable("folderPathRegex", "String!")
    folder_names_var = query.add_variable("folderNames", "[String!]")
    folder_types_var = query.add_variable("folderTypes", "[String!]")
    has_products_var = query.add_variable("folderHasProducts", "Boolean!")
    has_tasks_var = query.add_variable("folderHasTasks", "Boolean!")
    has_links_var = query.add_variable("folderHasLinks", "HasLinksFilter")
    has_children_var = query.add_variable("folderHasChildren", "Boolean!")
    statuses_var = query.add_variable("folderStatuses", "[String!]")
    folder_assignees_all_var = query.add_variable(
        "folderAssigneesAll", "[String!]"
    )
    tags_var = query.add_variable("folderTags", "[String!]")

    project_field = query.add_field("project")
    project_field.set_filter("name", project_name_var)

    folders_field = project_field.add_field_with_edges("folders")
    folders_field.set_filter("ids", folder_ids_var)
    folders_field.set_filter("parentIds", parent_folder_ids_var)
    folders_field.set_filter("names", folder_names_var)
    folders_field.set_filter("paths", folder_paths_var)
    folders_field.set_filter("pathEx", folder_path_regex_var)
    folders_field.set_filter("folderTypes", folder_types_var)
    folders_field.set_filter("statuses", statuses_var)
    folders_field.set_filter("assignees", folder_assignees_all_var)
    folders_field.set_filter("tags", tags_var)
    folders_field.set_filter("hasProducts", has_products_var)
    folders_field.set_filter("hasTasks", has_tasks_var)
    folders_field.set_filter("hasLinks", has_links_var)
    folders_field.set_filter("hasChildren", has_children_var)

    nested_fields = fields_to_dict(fields)
    # Handle 'links' separately — mutates 'nested_fields'
    add_links_fields(folders_field, nested_fields)

    # Expand remaining fields breadth-first into nested sub-fields
    query_queue = collections.deque()
    for key, value in nested_fields.items():
        query_queue.append((key, value, folders_field))

    while query_queue:
        item = query_queue.popleft()
        key, value, parent = item
        field = parent.add_field(key)
        if value is FIELD_VALUE:
            continue

        for k, v in value.items():
            query_queue.append((k, v, field))
    return query
def tasks_graphql_query(fields):
    """Build GraphQl query for tasks with all supported filters.

    Args:
        fields (Iterable[str]): Dot separated fields to be queried.

    Returns:
        GraphQlQuery: Prepared tasks query with filter variables.

    """
    query = GraphQlQuery("TasksQuery")
    # Filter variables — callers fill only the ones they need
    project_name_var = query.add_variable("projectName", "String!")
    task_ids_var = query.add_variable("taskIds", "[String!]")
    task_names_var = query.add_variable("taskNames", "[String!]")
    task_types_var = query.add_variable("taskTypes", "[String!]")
    folder_ids_var = query.add_variable("folderIds", "[String!]")
    assignees_any_var = query.add_variable("taskAssigneesAny", "[String!]")
    assignees_all_var = query.add_variable("taskAssigneesAll", "[String!]")
    statuses_var = query.add_variable("taskStatuses", "[String!]")
    tags_var = query.add_variable("taskTags", "[String!]")

    project_field = query.add_field("project")
    project_field.set_filter("name", project_name_var)

    tasks_field = project_field.add_field_with_edges("tasks")
    tasks_field.set_filter("ids", task_ids_var)
    # WARNING: At the moment this was created the 'names' filter was not
    #   supported by server — TODO confirm current server support.
    tasks_field.set_filter("names", task_names_var)
    tasks_field.set_filter("taskTypes", task_types_var)
    tasks_field.set_filter("folderIds", folder_ids_var)
    tasks_field.set_filter("assigneesAny", assignees_any_var)
    tasks_field.set_filter("assignees", assignees_all_var)
    tasks_field.set_filter("statuses", statuses_var)
    tasks_field.set_filter("tags", tags_var)

    nested_fields = fields_to_dict(fields)
    # Handle 'links' separately — mutates 'nested_fields'
    add_links_fields(tasks_field, nested_fields)

    # Expand remaining fields breadth-first into nested sub-fields
    query_queue = collections.deque()
    for key, value in nested_fields.items():
        query_queue.append((key, value, tasks_field))

    while query_queue:
        item = query_queue.popleft()
        key, value, parent = item
        field = parent.add_field(key)
        if value is FIELD_VALUE:
            continue

        for k, v in value.items():
            query_queue.append((k, v, field))
    return query
def tasks_by_folder_paths_graphql_query(fields):
    """Build GraphQl query for tasks nested under folders by path.

    Unlike 'tasks_graphql_query' the tasks are queried under folders
    filtered by paths, so results can be grouped per folder path.

    Args:
        fields (Iterable[str]): Dot separated task fields to be queried.

    Returns:
        GraphQlQuery: Prepared query with filter variables.

    """
    query = GraphQlQuery("TasksByFolderPathQuery")
    # Filter variables — callers fill only the ones they need
    project_name_var = query.add_variable("projectName", "String!")
    task_names_var = query.add_variable("taskNames", "[String!]")
    task_types_var = query.add_variable("taskTypes", "[String!]")
    folder_paths_var = query.add_variable("folderPaths", "[String!]")
    assignees_any_var = query.add_variable("taskAssigneesAny", "[String!]")
    assignees_all_var = query.add_variable("taskAssigneesAll", "[String!]")
    statuses_var = query.add_variable("taskStatuses", "[String!]")
    tags_var = query.add_variable("taskTags", "[String!]")

    project_field = query.add_field("project")
    project_field.set_filter("name", project_name_var)

    # Folders level — 'path' is always queried so tasks can be mapped
    #   back to their folder
    folders_field = project_field.add_field_with_edges("folders")
    folders_field.add_field("path")
    folders_field.set_filter("paths", folder_paths_var)

    tasks_field = folders_field.add_field_with_edges("tasks")
    # WARNING: At the moment this was created the 'names' filter was not
    #   supported by server — TODO confirm current server support.
    tasks_field.set_filter("names", task_names_var)
    tasks_field.set_filter("taskTypes", task_types_var)
    tasks_field.set_filter("assigneesAny", assignees_any_var)
    tasks_field.set_filter("assignees", assignees_all_var)
    tasks_field.set_filter("statuses", statuses_var)
    tasks_field.set_filter("tags", tags_var)

    nested_fields = fields_to_dict(fields)
    # Handle 'links' separately — mutates 'nested_fields'
    add_links_fields(tasks_field, nested_fields)

    # Expand remaining fields breadth-first into nested sub-fields
    query_queue = collections.deque()
    for key, value in nested_fields.items():
        query_queue.append((key, value, tasks_field))

    while query_queue:
        item = query_queue.popleft()
        key, value, parent = item
        field = parent.add_field(key)
        if value is FIELD_VALUE:
            continue

        for k, v in value.items():
            query_queue.append((k, v, field))
    return query
parent = item 284 | field = parent.add_field(key) 285 | if value is FIELD_VALUE: 286 | continue 287 | 288 | for k, v in value.items(): 289 | query_queue.append((k, v, field)) 290 | return query 291 | 292 | 293 | def products_graphql_query(fields): 294 | query = GraphQlQuery("ProductsQuery") 295 | 296 | project_name_var = query.add_variable("projectName", "String!") 297 | product_ids_var = query.add_variable("productIds", "[String!]") 298 | product_names_var = query.add_variable("productNames", "[String!]") 299 | folder_ids_var = query.add_variable("folderIds", "[String!]") 300 | product_types_var = query.add_variable("productTypes", "[String!]") 301 | product_name_regex_var = query.add_variable("productNameRegex", "String!") 302 | product_path_regex_var = query.add_variable("productPathRegex", "String!") 303 | statuses_var = query.add_variable("productStatuses.", "[String!]") 304 | tags_var = query.add_variable("productTags.", "[String!]") 305 | 306 | project_field = query.add_field("project") 307 | project_field.set_filter("name", project_name_var) 308 | 309 | products_field = project_field.add_field_with_edges("products") 310 | products_field.set_filter("ids", product_ids_var) 311 | products_field.set_filter("names", product_names_var) 312 | products_field.set_filter("folderIds", folder_ids_var) 313 | products_field.set_filter("productTypes", product_types_var) 314 | products_field.set_filter("statuses", statuses_var) 315 | products_field.set_filter("tags", tags_var) 316 | products_field.set_filter("nameEx", product_name_regex_var) 317 | products_field.set_filter("pathEx", product_path_regex_var) 318 | 319 | nested_fields = fields_to_dict(set(fields)) 320 | add_links_fields(products_field, nested_fields) 321 | 322 | query_queue = collections.deque() 323 | for key, value in nested_fields.items(): 324 | query_queue.append((key, value, products_field)) 325 | 326 | while query_queue: 327 | item = query_queue.popleft() 328 | key, value, parent = item 329 | field = 
parent.add_field(key) 330 | if value is FIELD_VALUE: 331 | continue 332 | 333 | for k, v in value.items(): 334 | query_queue.append((k, v, field)) 335 | return query 336 | 337 | 338 | def versions_graphql_query(fields): 339 | query = GraphQlQuery("VersionsQuery") 340 | 341 | project_name_var = query.add_variable("projectName", "String!") 342 | product_ids_var = query.add_variable("productIds", "[String!]") 343 | version_ids_var = query.add_variable("versionIds", "[String!]") 344 | task_ids_var = query.add_variable("taskIds", "[String!]") 345 | versions_var = query.add_variable("versions", "[Int!]") 346 | hero_only_var = query.add_variable("heroOnly", "Boolean") 347 | latest_only_var = query.add_variable("latestOnly", "Boolean") 348 | hero_or_latest_only_var = query.add_variable( 349 | "heroOrLatestOnly", "Boolean" 350 | ) 351 | statuses_var = query.add_variable("versionStatuses", "[String!]") 352 | tags_var = query.add_variable("versionTags", "[String!]") 353 | 354 | project_field = query.add_field("project") 355 | project_field.set_filter("name", project_name_var) 356 | 357 | versions_field = project_field.add_field_with_edges("versions") 358 | versions_field.set_filter("ids", version_ids_var) 359 | versions_field.set_filter("productIds", product_ids_var) 360 | versions_field.set_filter("versions", versions_var) 361 | versions_field.set_filter("taskIds", task_ids_var) 362 | versions_field.set_filter("heroOnly", hero_only_var) 363 | versions_field.set_filter("latestOnly", latest_only_var) 364 | versions_field.set_filter("heroOrLatestOnly", hero_or_latest_only_var) 365 | versions_field.set_filter("statuses", statuses_var) 366 | versions_field.set_filter("tags", tags_var) 367 | 368 | nested_fields = fields_to_dict(set(fields)) 369 | add_links_fields(versions_field, nested_fields) 370 | 371 | query_queue = collections.deque() 372 | for key, value in nested_fields.items(): 373 | query_queue.append((key, value, versions_field)) 374 | 375 | while query_queue: 376 | item 
= query_queue.popleft() 377 | key, value, parent = item 378 | field = parent.add_field(key) 379 | if value is FIELD_VALUE: 380 | continue 381 | 382 | for k, v in value.items(): 383 | query_queue.append((k, v, field)) 384 | return query 385 | 386 | 387 | def representations_graphql_query(fields): 388 | query = GraphQlQuery("RepresentationsQuery") 389 | 390 | project_name_var = query.add_variable("projectName", "String!") 391 | repre_ids_var = query.add_variable("representationIds", "[String!]") 392 | repre_names_var = query.add_variable("representationNames", "[String!]") 393 | version_ids_var = query.add_variable("versionIds", "[String!]") 394 | has_links_var = query.add_variable( 395 | "representationHasLinks", "HasLinksFilter" 396 | ) 397 | statuses_var = query.add_variable( 398 | "representationStatuses", "[String!]" 399 | ) 400 | tags_var = query.add_variable( 401 | "representationTags", "[String!]" 402 | ) 403 | 404 | project_field = query.add_field("project") 405 | project_field.set_filter("name", project_name_var) 406 | 407 | repres_field = project_field.add_field_with_edges("representations") 408 | repres_field.set_filter("ids", repre_ids_var) 409 | repres_field.set_filter("versionIds", version_ids_var) 410 | repres_field.set_filter("names", repre_names_var) 411 | repres_field.set_filter("hasLinks", has_links_var) 412 | repres_field.set_filter("statuses", statuses_var) 413 | repres_field.set_filter("tags", tags_var) 414 | 415 | nested_fields = fields_to_dict(set(fields)) 416 | add_links_fields(repres_field, nested_fields) 417 | 418 | query_queue = collections.deque() 419 | for key, value in nested_fields.items(): 420 | query_queue.append((key, value, repres_field)) 421 | 422 | while query_queue: 423 | item = query_queue.popleft() 424 | key, value, parent = item 425 | field = parent.add_field(key) 426 | if value is FIELD_VALUE: 427 | continue 428 | 429 | for k, v in value.items(): 430 | query_queue.append((k, v, field)) 431 | return query 432 | 433 | 434 | 
def representations_parents_qraphql_query( 435 | version_fields, product_fields, folder_fields 436 | ): 437 | query = GraphQlQuery("RepresentationsParentsQuery") 438 | 439 | project_name_var = query.add_variable("projectName", "String!") 440 | repre_ids_var = query.add_variable("representationIds", "[String!]") 441 | 442 | project_field = query.add_field("project") 443 | project_field.set_filter("name", project_name_var) 444 | 445 | repres_field = project_field.add_field_with_edges("representations") 446 | repres_field.add_field("id") 447 | repres_field.set_filter("ids", repre_ids_var) 448 | version_field = repres_field.add_field("version") 449 | 450 | fields_queue = collections.deque() 451 | for key, value in fields_to_dict(version_fields).items(): 452 | fields_queue.append((key, value, version_field)) 453 | 454 | product_field = version_field.add_field("product") 455 | for key, value in fields_to_dict(product_fields).items(): 456 | fields_queue.append((key, value, product_field)) 457 | 458 | folder_field = product_field.add_field("folder") 459 | for key, value in fields_to_dict(folder_fields).items(): 460 | fields_queue.append((key, value, folder_field)) 461 | 462 | while fields_queue: 463 | item = fields_queue.popleft() 464 | key, value, parent = item 465 | field = parent.add_field(key) 466 | if value is FIELD_VALUE: 467 | continue 468 | 469 | for k, v in value.items(): 470 | fields_queue.append((k, v, field)) 471 | 472 | return query 473 | 474 | 475 | def representations_hierarchy_qraphql_query( 476 | folder_fields, 477 | task_fields, 478 | product_fields, 479 | version_fields, 480 | representation_fields, 481 | ): 482 | query = GraphQlQuery("RepresentationsParentsQuery") 483 | 484 | project_name_var = query.add_variable("projectName", "String!") 485 | repre_ids_var = query.add_variable("representationIds", "[String!]") 486 | 487 | project_field = query.add_field("project") 488 | project_field.set_filter("name", project_name_var) 489 | 490 | fields_queue = 
collections.deque() 491 | 492 | repres_field = project_field.add_field_with_edges("representations") 493 | for key, value in fields_to_dict(representation_fields).items(): 494 | fields_queue.append((key, value, repres_field)) 495 | 496 | repres_field.set_filter("ids", repre_ids_var) 497 | version_field = None 498 | if folder_fields or task_fields or product_fields or version_fields: 499 | version_field = repres_field.add_field("version") 500 | if version_fields: 501 | for key, value in fields_to_dict(version_fields).items(): 502 | fields_queue.append((key, value, version_field)) 503 | 504 | if task_fields: 505 | task_field = version_field.add_field("task") 506 | for key, value in fields_to_dict(task_fields).items(): 507 | fields_queue.append((key, value, task_field)) 508 | 509 | product_field = None 510 | if folder_fields or product_fields: 511 | product_field = version_field.add_field("product") 512 | for key, value in fields_to_dict(product_fields).items(): 513 | fields_queue.append((key, value, product_field)) 514 | 515 | if folder_fields: 516 | folder_field = product_field.add_field("folder") 517 | for key, value in fields_to_dict(folder_fields).items(): 518 | fields_queue.append((key, value, folder_field)) 519 | 520 | while fields_queue: 521 | item = fields_queue.popleft() 522 | key, value, parent = item 523 | field = parent.add_field(key) 524 | if value is FIELD_VALUE: 525 | continue 526 | 527 | for k, v in value.items(): 528 | fields_queue.append((k, v, field)) 529 | 530 | return query 531 | 532 | 533 | def workfiles_info_graphql_query(fields): 534 | query = GraphQlQuery("WorkfilesInfo") 535 | project_name_var = query.add_variable("projectName", "String!") 536 | workfiles_info_ids = query.add_variable("workfileIds", "[String!]") 537 | task_ids_var = query.add_variable("taskIds", "[String!]") 538 | paths_var = query.add_variable("paths", "[String!]") 539 | path_regex_var = query.add_variable("workfilePathRegex", "String!") 540 | has_links_var = 
query.add_variable("workfilehasLinks", "HasLinksFilter") 541 | statuses_var = query.add_variable("workfileStatuses", "[String!]") 542 | tags_var = query.add_variable("workfileTags", "[String!]") 543 | 544 | project_field = query.add_field("project") 545 | project_field.set_filter("name", project_name_var) 546 | 547 | workfiles_field = project_field.add_field_with_edges("workfiles") 548 | workfiles_field.set_filter("ids", workfiles_info_ids) 549 | workfiles_field.set_filter("taskIds", task_ids_var) 550 | workfiles_field.set_filter("paths", paths_var) 551 | workfiles_field.set_filter("pathEx", path_regex_var) 552 | workfiles_field.set_filter("hasLinks", has_links_var) 553 | workfiles_field.set_filter("statuses", statuses_var) 554 | workfiles_field.set_filter("tags", tags_var) 555 | 556 | nested_fields = fields_to_dict(set(fields)) 557 | add_links_fields(workfiles_field, nested_fields) 558 | 559 | query_queue = collections.deque() 560 | for key, value in nested_fields.items(): 561 | query_queue.append((key, value, workfiles_field)) 562 | 563 | while query_queue: 564 | item = query_queue.popleft() 565 | key, value, parent = item 566 | field = parent.add_field(key) 567 | if value is FIELD_VALUE: 568 | continue 569 | 570 | for k, v in value.items(): 571 | query_queue.append((k, v, field)) 572 | return query 573 | 574 | 575 | def events_graphql_query(fields, order, use_states=False): 576 | query = GraphQlQuery("Events", order=order) 577 | topics_var = query.add_variable("eventTopics", "[String!]") 578 | ids_var = query.add_variable("eventIds", "[String!]") 579 | projects_var = query.add_variable("projectNames", "[String!]") 580 | statuses_var = query.add_variable("eventStatuses", "[String!]") 581 | users_var = query.add_variable("eventUsers", "[String!]") 582 | include_logs_var = query.add_variable("includeLogsFilter", "Boolean!") 583 | has_children_var = query.add_variable("hasChildrenFilter", "Boolean!") 584 | newer_than_var = query.add_variable("newerThanFilter", 
"String!") 585 | older_than_var = query.add_variable("olderThanFilter", "String!") 586 | 587 | statuses_filter_name = "statuses" 588 | if use_states: 589 | statuses_filter_name = "states" 590 | events_field = query.add_field_with_edges("events") 591 | events_field.set_filter("ids", ids_var) 592 | events_field.set_filter("topics", topics_var) 593 | events_field.set_filter("projects", projects_var) 594 | events_field.set_filter(statuses_filter_name, statuses_var) 595 | events_field.set_filter("users", users_var) 596 | events_field.set_filter("includeLogs", include_logs_var) 597 | events_field.set_filter("hasChildren", has_children_var) 598 | events_field.set_filter("newerThan", newer_than_var) 599 | events_field.set_filter("olderThan", older_than_var) 600 | 601 | nested_fields = fields_to_dict(set(fields)) 602 | 603 | query_queue = collections.deque() 604 | for key, value in nested_fields.items(): 605 | query_queue.append((key, value, events_field)) 606 | 607 | while query_queue: 608 | item = query_queue.popleft() 609 | key, value, parent = item 610 | field = parent.add_field(key) 611 | if value is FIELD_VALUE: 612 | continue 613 | 614 | for k, v in value.items(): 615 | query_queue.append((k, v, field)) 616 | return query 617 | 618 | 619 | def users_graphql_query(fields): 620 | query = GraphQlQuery("Users") 621 | names_var = query.add_variable("userNames", "[String!]") 622 | emails_var = query.add_variable("emails", "[String!]") 623 | project_name_var = query.add_variable("projectName", "String!") 624 | 625 | users_field = query.add_field_with_edges("users") 626 | users_field.set_filter("names", names_var) 627 | users_field.set_filter("emails", emails_var) 628 | users_field.set_filter("projectName", project_name_var) 629 | 630 | nested_fields = fields_to_dict(set(fields)) 631 | 632 | query_queue = collections.deque() 633 | for key, value in nested_fields.items(): 634 | query_queue.append((key, value, users_field)) 635 | 636 | while query_queue: 637 | item = 
query_queue.popleft() 638 | key, value, parent = item 639 | field = parent.add_field(key) 640 | if value is FIELD_VALUE: 641 | continue 642 | 643 | for k, v in value.items(): 644 | query_queue.append((k, v, field)) 645 | return query 646 | 647 | 648 | def activities_graphql_query(fields, order): 649 | query = GraphQlQuery("Activities", order=order) 650 | project_name_var = query.add_variable("projectName", "String!") 651 | activity_ids_var = query.add_variable("activityIds", "[String!]") 652 | activity_types_var = query.add_variable("activityTypes", "[String!]") 653 | entity_ids_var = query.add_variable("entityIds", "[String!]") 654 | entity_names_var = query.add_variable("entityNames", "[String!]") 655 | entity_type_var = query.add_variable("entityType", "String!") 656 | changed_after_var = query.add_variable("changedAfter", "String!") 657 | changed_before_var = query.add_variable("changedBefore", "String!") 658 | reference_types_var = query.add_variable("referenceTypes", "[String!]") 659 | 660 | project_field = query.add_field("project") 661 | project_field.set_filter("name", project_name_var) 662 | 663 | activities_field = project_field.add_field_with_edges("activities") 664 | activities_field.set_filter("activityIds", activity_ids_var) 665 | activities_field.set_filter("activityTypes", activity_types_var) 666 | activities_field.set_filter("entityIds", entity_ids_var) 667 | activities_field.set_filter("entityNames", entity_names_var) 668 | activities_field.set_filter("entityType", entity_type_var) 669 | activities_field.set_filter("changedAfter", changed_after_var) 670 | activities_field.set_filter("changedBefore", changed_before_var) 671 | activities_field.set_filter("referenceTypes", reference_types_var) 672 | 673 | nested_fields = fields_to_dict(set(fields)) 674 | 675 | query_queue = collections.deque() 676 | for key, value in nested_fields.items(): 677 | query_queue.append((key, value, activities_field)) 678 | 679 | while query_queue: 680 | item = 
query_queue.popleft() 681 | key, value, parent = item 682 | field = parent.add_field(key) 683 | if value is FIELD_VALUE: 684 | continue 685 | 686 | for k, v in value.items(): 687 | query_queue.append((k, v, field)) 688 | 689 | return query 690 | -------------------------------------------------------------------------------- /ayon_api/typing.py: -------------------------------------------------------------------------------- 1 | import io 2 | from typing import ( 3 | Literal, 4 | Dict, 5 | List, 6 | Any, 7 | TypedDict, 8 | Union, 9 | Optional, 10 | BinaryIO, 11 | ) 12 | 13 | ActivityType = Literal[ 14 | "comment", 15 | "watch", 16 | "reviewable", 17 | "status.change", 18 | "assignee.add", 19 | "assignee.remove", 20 | "version.publish" 21 | ] 22 | 23 | ActivityReferenceType = Literal[ 24 | "origin", 25 | "mention", 26 | "author", 27 | "relation", 28 | "watching", 29 | ] 30 | 31 | EventFilterValueType = Union[ 32 | None, 33 | str, int, float, 34 | List[str], List[int], List[float], 35 | ] 36 | 37 | 38 | class EventFilterCondition(TypedDict): 39 | key: str 40 | value: EventFilterValueType 41 | operator: Literal[ 42 | "eq", 43 | "lt", 44 | "gt", 45 | "lte", 46 | "gte", 47 | "ne", 48 | "isnull", 49 | "notnull", 50 | "in", 51 | "notin", 52 | "contains", 53 | "excludes", 54 | "like", 55 | ] 56 | 57 | 58 | class EventFilter(TypedDict): 59 | conditions: List[EventFilterCondition] 60 | operator: Literal["and", "or"] 61 | 62 | 63 | AttributeScope = Literal[ 64 | "project", 65 | "folder", 66 | "task", 67 | "product", 68 | "version", 69 | "representation", 70 | "workfile", 71 | "user" 72 | ] 73 | 74 | AttributeType = Literal[ 75 | "string", 76 | "integer", 77 | "float", 78 | "boolean", 79 | "datetime", 80 | "list_of_strings", 81 | "list_of_integers", 82 | "list_of_any", 83 | "list_of_submodels", 84 | "dict", 85 | ] 86 | 87 | LinkDirection = Literal["in", "out"] 88 | 89 | 90 | class AttributeEnumItemDict(TypedDict): 91 | value: Union[str, int, float, bool] 92 | label: str 93 | 
icon: Union[str, None] 94 | color: Union[str, None] 95 | 96 | 97 | class AttributeSchemaDataDict(TypedDict): 98 | type: AttributeType 99 | inherit: bool 100 | title: str 101 | description: Optional[str] 102 | example: Optional[Any] 103 | default: Optional[Any] 104 | gt: Union[int, float, None] 105 | lt: Union[int, float, None] 106 | ge: Union[int, float, None] 107 | le: Union[int, float, None] 108 | minLength: Optional[int] 109 | maxLength: Optional[int] 110 | minItems: Optional[int] 111 | maxItems: Optional[int] 112 | regex: Optional[str] 113 | enum: Optional[List[AttributeEnumItemDict]] 114 | 115 | 116 | class AttributeSchemaDict(TypedDict): 117 | name: str 118 | position: int 119 | scope: List[AttributeScope] 120 | builtin: bool 121 | data: AttributeSchemaDataDict 122 | 123 | 124 | class AttributesSchemaDict(TypedDict): 125 | attributes: List[AttributeSchemaDict] 126 | 127 | 128 | class AddonVersionInfoDict(TypedDict): 129 | hasSettings: bool 130 | hasSiteSettings: bool 131 | frontendScopes: Dict[str, Any] 132 | clientPyproject: Dict[str, Any] 133 | clientSourceInfo: List[Dict[str, Any]] 134 | isBroken: bool 135 | 136 | 137 | class AddonInfoDict(TypedDict): 138 | name: str 139 | title: str 140 | versions: Dict[str, AddonVersionInfoDict] 141 | 142 | 143 | class AddonsInfoDict(TypedDict): 144 | addons: List[AddonInfoDict] 145 | 146 | 147 | class InstallerInfoDict(TypedDict): 148 | filename: str 149 | platform: str 150 | size: int 151 | checksum: str 152 | checksumAlgorithm: str 153 | sources: List[Dict[str, Any]] 154 | version: str 155 | pythonVersion: str 156 | pythonModules: Dict[str, str] 157 | runtimePythonModules: Dict[str, str] 158 | 159 | 160 | class InstallersInfoDict(TypedDict): 161 | installers: List[InstallerInfoDict] 162 | 163 | 164 | class DependencyPackageDict(TypedDict): 165 | filename: str 166 | platform: str 167 | size: int 168 | checksum: str 169 | checksumAlgorithm: str 170 | sources: List[Dict[str, Any]] 171 | installerVersion: str 172 | 
sourceAddons: Dict[str, str] 173 | pythonModules: Dict[str, str] 174 | 175 | 176 | class DependencyPackagesDict(TypedDict): 177 | packages: List[DependencyPackageDict] 178 | 179 | 180 | class DevBundleAddonInfoDict(TypedDict): 181 | enabled: bool 182 | path: str 183 | 184 | 185 | class BundleInfoDict(TypedDict): 186 | name: str 187 | createdAt: str 188 | addons: Dict[str, str] 189 | installerVersion: str 190 | dependencyPackages: Dict[str, str] 191 | addonDevelopment: Dict[str, DevBundleAddonInfoDict] 192 | isProduction: bool 193 | isStaging: bool 194 | isArchived: bool 195 | isDev: bool 196 | activeUser: Optional[str] 197 | 198 | 199 | class BundlesInfoDict(TypedDict): 200 | bundles: List[BundleInfoDict] 201 | productionBundle: str 202 | devBundles: List[str] 203 | 204 | 205 | class AnatomyPresetInfoDict(TypedDict): 206 | name: str 207 | primary: bool 208 | version: str 209 | 210 | 211 | class AnatomyPresetRootDict(TypedDict): 212 | name: str 213 | windows: str 214 | linux: str 215 | darwin: str 216 | 217 | 218 | class AnatomyPresetTemplateDict(TypedDict): 219 | name: str 220 | directory: str 221 | file: str 222 | 223 | 224 | class AnatomyPresetTemplatesDict(TypedDict): 225 | version_padding: int 226 | version: str 227 | frame_padding: int 228 | frame: str 229 | work: List[AnatomyPresetTemplateDict] 230 | publish: List[AnatomyPresetTemplateDict] 231 | hero: List[AnatomyPresetTemplateDict] 232 | delivery: List[AnatomyPresetTemplateDict] 233 | staging: List[AnatomyPresetTemplateDict] 234 | others: List[AnatomyPresetTemplateDict] 235 | 236 | 237 | class AnatomyPresetSubtypeDict(TypedDict): 238 | name: str 239 | shortName: str 240 | icon: str 241 | original_name: str 242 | 243 | 244 | class AnatomyPresetLinkTypeDict(TypedDict): 245 | link_type: str 246 | input_type: str 247 | output_type: str 248 | color: str 249 | style: str 250 | 251 | 252 | StatusScope = Literal[ 253 | "folder", 254 | "task", 255 | "product", 256 | "version", 257 | "representation", 258 | 
"workfile" 259 | ] 260 | 261 | 262 | class AnatomyPresetStatusDict(TypedDict): 263 | name: str 264 | shortName: str 265 | state: str 266 | icon: str 267 | color: str 268 | scope: List[StatusScope] 269 | original_name: str 270 | 271 | 272 | class AnatomyPresetTagDict(TypedDict): 273 | name: str 274 | color: str 275 | original_name: str 276 | 277 | 278 | class AnatomyPresetDict(TypedDict): 279 | roots: List[AnatomyPresetRootDict] 280 | templates: AnatomyPresetTemplatesDict 281 | attributes: Dict[str, Any] 282 | folder_types: List[AnatomyPresetSubtypeDict] 283 | task_types: List[AnatomyPresetSubtypeDict] 284 | link_types: List[AnatomyPresetLinkTypeDict] 285 | statuses: List[AnatomyPresetStatusDict] 286 | tags: List[AnatomyPresetTagDict] 287 | 288 | 289 | class SecretDict(TypedDict): 290 | name: str 291 | value: str 292 | 293 | ProjectDict = Dict[str, Any] 294 | FolderDict = Dict[str, Any] 295 | TaskDict = Dict[str, Any] 296 | ProductDict = Dict[str, Any] 297 | VersionDict = Dict[str, Any] 298 | RepresentationDict = Dict[str, Any] 299 | WorkfileInfoDict = Dict[str, Any] 300 | EventDict = Dict[str, Any] 301 | ActivityDict = Dict[str, Any] 302 | AnyEntityDict = Union[ 303 | ProjectDict, 304 | FolderDict, 305 | TaskDict, 306 | ProductDict, 307 | VersionDict, 308 | RepresentationDict, 309 | WorkfileInfoDict, 310 | EventDict, 311 | ActivityDict, 312 | ] 313 | 314 | 315 | class FlatFolderDict(TypedDict): 316 | id: str 317 | parentId: Optional[str] 318 | path: str 319 | parents: List[str] 320 | name: str 321 | label: Optional[str] 322 | folderType: str 323 | hasTasks: bool 324 | hasChildren: bool 325 | taskNames: List[str] 326 | status: str 327 | attrib: Dict[str, Any] 328 | ownAttrib: List[str] 329 | updatedAt: str 330 | 331 | 332 | class ProjectHierarchyItemDict(TypedDict): 333 | id: str 334 | name: str 335 | label: str 336 | status: str 337 | folderType: str 338 | hasTasks: bool 339 | taskNames: List[str] 340 | parents: List[str] 341 | parentId: Optional[str] 342 | 
children: List["ProjectHierarchyItemDict"] 343 | 344 | 345 | class ProjectHierarchyDict(TypedDict): 346 | hierarchy: List[ProjectHierarchyItemDict] 347 | 348 | 349 | class ProductTypeDict(TypedDict): 350 | name: str 351 | color: Optional[str] 352 | icon: Optional[str] 353 | 354 | 355 | StreamType = Union[io.BytesIO, BinaryIO] 356 | -------------------------------------------------------------------------------- /ayon_api/utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | import datetime 4 | import uuid 5 | import string 6 | import platform 7 | import traceback 8 | import collections 9 | from urllib.parse import urlparse, urlencode 10 | import typing 11 | from typing import Optional, Dict, Set, Any, Iterable 12 | from enum import IntEnum 13 | 14 | import requests 15 | import unidecode 16 | 17 | from .constants import ( 18 | SERVER_TIMEOUT_ENV_KEY, 19 | DEFAULT_VARIANT_ENV_KEY, 20 | SITE_ID_ENV_KEY, 21 | ) 22 | from .exceptions import UrlError 23 | 24 | if typing.TYPE_CHECKING: 25 | from typing import Union 26 | from .typing import AnyEntityDict, StreamType 27 | 28 | REMOVED_VALUE = object() 29 | NOT_SET = object() 30 | SLUGIFY_WHITELIST = string.ascii_letters + string.digits 31 | SLUGIFY_SEP_WHITELIST = " ,./\\;:!|*^#@~+-_=" 32 | 33 | RepresentationParents = collections.namedtuple( 34 | "RepresentationParents", 35 | ("version", "product", "folder", "project") 36 | ) 37 | 38 | RepresentationHierarchy = collections.namedtuple( 39 | "RepresentationHierarchy", 40 | ( 41 | "project", 42 | "folder", 43 | "task", 44 | "product", 45 | "version", 46 | "representation", 47 | ) 48 | ) 49 | 50 | 51 | class SortOrder(IntEnum): 52 | """Sort order for GraphQl requests.""" 53 | ascending = 0 54 | descending = 1 55 | 56 | @classmethod 57 | def parse_value(cls, value, default=None): 58 | if value in (cls.ascending, "ascending", "asc"): 59 | return cls.ascending 60 | if value in (cls.descending, "descending", 
"desc"): 61 | return cls.descending 62 | return default 63 | 64 | 65 | def get_default_timeout() -> float: 66 | """Default value for requests timeout. 67 | 68 | First looks for environment variable SERVER_TIMEOUT_ENV_KEY which 69 | can affect timeout value. If not available then use 10.0 s. 70 | 71 | Returns: 72 | float: Timeout value in seconds. 73 | 74 | """ 75 | try: 76 | return float(os.environ.get(SERVER_TIMEOUT_ENV_KEY)) 77 | except (ValueError, TypeError): 78 | pass 79 | return 10.0 80 | 81 | 82 | def get_default_settings_variant() -> str: 83 | """Default settings variant. 84 | 85 | Returns: 86 | str: Settings variant from environment variable or 'production'. 87 | 88 | """ 89 | return os.environ.get(DEFAULT_VARIANT_ENV_KEY) or "production" 90 | 91 | 92 | def get_machine_name() -> str: 93 | """Get machine name. 94 | 95 | Returns: 96 | str: Machine name. 97 | 98 | """ 99 | return unidecode.unidecode(platform.node()) 100 | 101 | 102 | def get_default_site_id() -> Optional[str]: 103 | """Site id used for server connection. 104 | 105 | Returns: 106 | Optional[str]: Site id from environment variable or None. 107 | 108 | """ 109 | return os.environ.get(SITE_ID_ENV_KEY) 110 | 111 | 112 | class ThumbnailContent: 113 | """Wrapper for thumbnail content. 114 | 115 | Args: 116 | project_name (str): Project name. 117 | thumbnail_id (Optional[str]): Thumbnail id. 118 | content (Optional[bytes]): Thumbnail content. 119 | content_type (Optional[str]): Content type e.g. 'image/png'. 
120 | 121 | """ 122 | def __init__( 123 | self, 124 | project_name: str, 125 | thumbnail_id: Optional[str], 126 | content: Optional[bytes], 127 | content_type: Optional[str], 128 | ): 129 | self.project_name: str = project_name 130 | self.thumbnail_id: Optional[str] = thumbnail_id 131 | self.content_type: Optional[str] = content_type 132 | self.content: bytes = content or b"" 133 | 134 | @property 135 | def id(self) -> str: 136 | """Wrapper for thumbnail id.""" 137 | return self.thumbnail_id 138 | 139 | @property 140 | def is_valid(self) -> bool: 141 | """Content of thumbnail is valid. 142 | 143 | Returns: 144 | bool: Content is valid and can be used. 145 | 146 | """ 147 | return ( 148 | self.thumbnail_id is not None 149 | and self.content_type is not None 150 | ) 151 | 152 | 153 | def prepare_query_string( 154 | key_values: Dict[str, Any], skip_none: bool = True 155 | ) -> str: 156 | """Prepare data to query string. 157 | 158 | If there are any values a query starting with '?' is returned otherwise 159 | an empty string. 160 | 161 | Args: 162 | key_values (dict[str, Any]): Query values. 163 | skip_none (bool): Filter values which are 'None'. 164 | 165 | Returns: 166 | str: Query string. 
167 | 168 | """ 169 | if skip_none: 170 | key_values = { 171 | key: value 172 | for key, value in key_values.items() 173 | if value is not None 174 | } 175 | 176 | if not key_values: 177 | return "" 178 | return "?{}".format(urlencode(key_values)) 179 | 180 | 181 | def create_entity_id() -> str: 182 | return uuid.uuid1().hex 183 | 184 | 185 | def convert_entity_id(entity_id) -> Optional[str]: 186 | if not entity_id: 187 | return None 188 | 189 | if isinstance(entity_id, uuid.UUID): 190 | return entity_id.hex 191 | 192 | try: 193 | return uuid.UUID(entity_id).hex 194 | 195 | except (TypeError, ValueError, AttributeError): 196 | pass 197 | return None 198 | 199 | 200 | def convert_or_create_entity_id(entity_id: Optional[str] = None) -> str: 201 | output = convert_entity_id(entity_id) 202 | if output is None: 203 | output = create_entity_id() 204 | return output 205 | 206 | 207 | def entity_data_json_default(value: Any) -> Any: 208 | if isinstance(value, datetime.datetime): 209 | return int(value.timestamp()) 210 | 211 | raise TypeError( 212 | "Object of type {} is not JSON serializable".format(str(type(value))) 213 | ) 214 | 215 | 216 | def slugify_string( 217 | input_string: str, 218 | separator: Optional[str] = "_", 219 | slug_whitelist: Optional[Iterable[str]] = SLUGIFY_WHITELIST, 220 | split_chars: Optional[Iterable[str]] = SLUGIFY_SEP_WHITELIST, 221 | min_length: int = 1, 222 | lower: bool = False, 223 | make_set: bool = False, 224 | ) -> "Union[str, Set[str]]": 225 | """Slugify a text string. 226 | 227 | This function removes transliterates input string to ASCII, removes 228 | special characters and use join resulting elements using 229 | specified separator. 
230 | 231 | Args: 232 | input_string (str): Input string to slugify 233 | separator (str): A string used to separate returned elements 234 | (default: "_") 235 | slug_whitelist (str): Characters allowed in the output 236 | (default: ascii letters, digits and the separator) 237 | split_chars (str): Set of characters used for word splitting 238 | (there is a sane default) 239 | lower (bool): Convert to lower-case (default: False) 240 | make_set (bool): Return "set" object instead of string. 241 | min_length (int): Minimal length of an element (word). 242 | 243 | Returns: 244 | Union[str, Set[str]]: Based on 'make_set' value returns slugified 245 | string. 246 | 247 | """ 248 | tmp_string = unidecode.unidecode(input_string) 249 | if lower: 250 | tmp_string = tmp_string.lower() 251 | 252 | parts = [ 253 | # Remove all characters that are not in whitelist 254 | re.sub("[^{}]".format(re.escape(slug_whitelist)), "", part) 255 | # Split text into part by split characters 256 | for part in re.split("[{}]".format(re.escape(split_chars)), tmp_string) 257 | ] 258 | # Filter text parts by length 259 | filtered_parts = [ 260 | part 261 | for part in parts 262 | if len(part) >= min_length 263 | ] 264 | if make_set: 265 | return set(filtered_parts) 266 | return separator.join(filtered_parts) 267 | 268 | 269 | def failed_json_default(value: Any) -> str: 270 | return "< Failed value {} > {}".format(type(value), str(value)) 271 | 272 | 273 | def prepare_attribute_changes( 274 | old_entity: "AnyEntityDict", 275 | new_entity: "AnyEntityDict", 276 | replace: bool = False, 277 | ): 278 | attrib_changes = {} 279 | new_attrib = new_entity.get("attrib") 280 | old_attrib = old_entity.get("attrib") 281 | if new_attrib is None: 282 | if not replace: 283 | return attrib_changes 284 | new_attrib = {} 285 | 286 | if old_attrib is None: 287 | return new_attrib 288 | 289 | for attr, new_attr_value in new_attrib.items(): 290 | old_attr_value = old_attrib.get(attr) 291 | if old_attr_value != 
new_attr_value: 292 | attrib_changes[attr] = new_attr_value 293 | 294 | if replace: 295 | for attr in old_attrib: 296 | if attr not in new_attrib: 297 | attrib_changes[attr] = REMOVED_VALUE 298 | 299 | return attrib_changes 300 | 301 | 302 | def prepare_entity_changes( 303 | old_entity: "AnyEntityDict", 304 | new_entity: "AnyEntityDict", 305 | replace: bool = False, 306 | ) -> Dict[str, Any]: 307 | """Prepare changes of entities.""" 308 | changes = {} 309 | for key, new_value in new_entity.items(): 310 | if key == "attrib": 311 | continue 312 | 313 | old_value = old_entity.get(key) 314 | if old_value != new_value: 315 | changes[key] = new_value 316 | 317 | if replace: 318 | for key in old_entity: 319 | if key not in new_entity: 320 | changes[key] = REMOVED_VALUE 321 | 322 | attr_changes = prepare_attribute_changes(old_entity, new_entity, replace) 323 | if attr_changes: 324 | changes["attrib"] = attr_changes 325 | return changes 326 | 327 | 328 | def _try_parse_url(url: str) -> Optional[str]: 329 | try: 330 | return urlparse(url) 331 | except BaseException: 332 | return None 333 | 334 | 335 | def _try_connect_to_server( 336 | url: str, 337 | timeout: Optional[float], 338 | verify: Optional["Union[str, bool]"], 339 | cert: Optional[str], 340 | ) -> Optional[str]: 341 | if timeout is None: 342 | timeout = get_default_timeout() 343 | 344 | if verify is None: 345 | verify = os.environ.get("AYON_CA_FILE") or True 346 | 347 | if cert is None: 348 | cert = os.environ.get("AYON_CERT_FILE") or None 349 | 350 | try: 351 | # TODO add validation if the url lead to AYON server 352 | # - this won't validate if the url lead to 'google.com' 353 | response = requests.get( 354 | url, 355 | timeout=timeout, 356 | verify=verify, 357 | cert=cert, 358 | ) 359 | if response.history: 360 | return response.history[-1].headers["location"].rstrip("/") 361 | return url 362 | 363 | except Exception: 364 | print(f"Failed to connect to '{url}'") 365 | traceback.print_exc() 366 | 367 | return None 
def login_to_server(
    url: str,
    username: str,
    password: str,
    timeout: Optional[float] = None,
) -> Optional[str]:
    """Use login to the server to receive token.

    Args:
        url (str): Server url.
        username (str): User's username.
        password (str): User's password.
        timeout (Optional[float]): Timeout for request. Value from
            'get_default_timeout' is used if not specified.

    Returns:
        Optional[str]: User's token if login was successful.
            Otherwise 'None'.

    """
    if timeout is None:
        timeout = get_default_timeout()
    headers = {"Content-Type": "application/json"}
    response = requests.post(
        "{}/api/auth/login".format(url),
        headers=headers,
        json={
            "name": username,
            "password": password
        },
        timeout=timeout,
    )
    token = None
    # 200 - success
    # 401 - invalid credentials
    # * - other issues
    if response.status_code == 200:
        token = response.json()["token"]
    return token


def logout_from_server(
    url: str,
    token: str,
    timeout: Optional[float] = None,
) -> None:
    """Logout from server and throw token away.

    Args:
        url (str): Url from which should be logged out.
        token (str): Token which should be used to log out.
        timeout (Optional[float]): Timeout for request. Value from
            'get_default_timeout' is used if not specified.

    """
    if timeout is None:
        timeout = get_default_timeout()
    headers = {
        "Content-Type": "application/json",
        "Authorization": "Bearer {}".format(token)
    }
    requests.post(
        url + "/api/auth/logout",
        headers=headers,
        timeout=timeout,
    )


def get_user_by_token(
    url: str,
    token: str,
    timeout: Optional[float] = None,
) -> Optional[Dict[str, Any]]:
    """Get user information by url and token.

    Args:
        url (str): Server url.
        token (str): User's token.
        timeout (Optional[float]): Timeout for request. Value from
            'get_default_timeout' is used if not specified.

    Returns:
        Optional[Dict[str, Any]]: User information if url and token are
            valid.

    """
    if timeout is None:
        timeout = get_default_timeout()

    base_headers = {
        "Content-Type": "application/json",
    }
    # Token may be a user token (Bearer) or a service api key (X-Api-Key)
    for header_value in (
        {"Authorization": "Bearer {}".format(token)},
        {"X-Api-Key": token},
    ):
        headers = base_headers.copy()
        headers.update(header_value)
        response = requests.get(
            "{}/api/users/me".format(url),
            headers=headers,
            timeout=timeout,
        )
        if response.status_code == 200:
            return response.json()
    return None


def is_token_valid(
    url: str,
    token: str,
    timeout: Optional[float] = None,
) -> bool:
    """Check if token is valid.

    Token can be a user token or service api key.

    Args:
        url (str): Server url.
        token (str): User's token.
        timeout (Optional[float]): Timeout for request. Value from
            'get_default_timeout' is used if not specified.

    Returns:
        bool: True if token is valid.

    """
    return bool(get_user_by_token(url, token, timeout=timeout))


def validate_url(
    url: str,
    timeout: Optional[float] = None,
    verify: Optional["Union[str, bool]"] = None,
    cert: Optional[str] = None,
) -> str:
    """Validate url if is valid and server is available.

    Validation checks if can be parsed as url and contains scheme.

    Function will try to autofix url thus will return modified url when
    connection to server works.

    .. highlight:: python
    .. code-block:: python

        my_url = "my.server.url"
        try:
            # Store new url
            validated_url = validate_url(my_url)

        except UrlError:
            # Handle invalid url
            ...

    Args:
        url (str): Server url.
        timeout (Optional[float]): Timeout in seconds for connection to
            server.
        verify (Optional[Union[str, bool]]): SSL verification flag or
            path to CA bundle passed to 'requests'.
        cert (Optional[str]): Path to client certificate passed
            to 'requests'.

    Returns:
        str: Url which was used to connect to server.

    Raises:
        UrlError: Error with short description and hints for user.

    """
    stripped_url = url.strip()
    if not stripped_url:
        raise UrlError(
            "Invalid url format. Url is empty.",
            title="Invalid url format",
            hints=["url seems to be empty"]
        )

    # Not sure if this is good idea?
    modified_url = stripped_url.rstrip("/")
    parsed_url = _try_parse_url(modified_url)
    universal_hints = [
        "does the url work in browser?"
    ]
    if parsed_url is None:
        raise UrlError(
            "Invalid url format. Url cannot be parsed as url \"{}\".".format(
                modified_url
            ),
            title="Invalid url format",
            hints=universal_hints
        )

    # Try to add 'http://' scheme if is missing
    # - this will trigger UrlError if both attempts fail
    if not parsed_url.scheme:
        new_url = _try_connect_to_server(
            "http://" + modified_url,
            timeout=timeout,
            verify=verify,
            cert=cert,
        )
        if new_url:
            return new_url

    new_url = _try_connect_to_server(
        modified_url,
        timeout=timeout,
        verify=verify,
        cert=cert,
    )
    if new_url:
        return new_url

    hints = []
    if "/" in parsed_url.path or not parsed_url.scheme:
        new_path = parsed_url.path.split("/")[0]
        if not parsed_url.scheme:
            new_path = "https://" + new_path

        # NOTE(review): when scheme is present the suggested url drops
        #   "://" and netloc - presumably intended to strip a subpath;
        #   verify the hint output against real inputs.
        hints.append(
            "did you mean \"{}\"?".format(parsed_url.scheme + new_path)
        )

    raise UrlError(
        "Couldn't connect to server on \"{}\"".format(url),
        title="Couldn't connect to server",
        hints=hints + universal_hints
    )
class TransferProgress:
    """Object to store progress of download/upload from/to server."""

    def __init__(self):
        # Lifecycle flags
        self._started: bool = False
        self._transfer_done: bool = False
        # Transferred bytes and total size (None until known)
        self._transferred: int = 0
        self._content_size: Optional[int] = None

        self._failed: bool = False
        self._fail_reason: Optional[str] = None

        # Display-only labels; "N/A" until explicitly set
        self._source_url: str = "N/A"
        self._destination_url: str = "N/A"

    def get_content_size(self) -> Optional[int]:
        """Content size in bytes.

        Returns:
            Optional[int]: Content size in bytes or None
                if is unknown.

        """
        return self._content_size

    def set_content_size(self, content_size: int):
        """Set content size in bytes.

        Args:
            content_size (int): Content size in bytes.

        Raises:
            ValueError: If content size was already set.

        """
        if self._content_size is not None:
            raise ValueError("Content size was set more than once")
        self._content_size = content_size

    def get_started(self) -> bool:
        """Transfer was started.

        Returns:
            bool: True if transfer started.

        """
        return self._started

    def set_started(self):
        """Mark that transfer started.

        Raises:
            ValueError: If transfer was already started.

        """
        if self._started:
            raise ValueError("Progress already started")
        self._started = True

    def get_transfer_done(self) -> bool:
        """Transfer finished.

        Returns:
            bool: Transfer finished.

        """
        return self._transfer_done

    def set_transfer_done(self):
        """Mark progress as transfer finished.

        Raises:
            ValueError: If progress was already marked as done
                or wasn't started yet.

        """
        if self._transfer_done:
            raise ValueError("Progress was already marked as done")
        if not self._started:
            raise ValueError("Progress didn't start yet")
        self._transfer_done = True

    def get_failed(self) -> bool:
        """Transfer failed.

        Returns:
            bool: True if transfer failed.

        """
        return self._failed

    def get_fail_reason(self) -> Optional[str]:
        """Get reason why transfer failed.

        Returns:
            Optional[str]: Reason why transfer
                failed or None.

        """
        return self._fail_reason

    def set_failed(self, reason: str):
        """Mark progress as failed.

        Args:
            reason (str): Reason why transfer failed.

        """
        self._fail_reason = reason
        self._failed = True

    def get_transferred_size(self) -> int:
        """Already transferred size in bytes.

        Returns:
            int: Already transferred size in bytes.

        """
        return self._transferred

    def set_transferred_size(self, transferred: int):
        """Set already transferred size in bytes.

        Args:
            transferred (int): Already transferred size in bytes.

        """
        self._transferred = transferred

    def add_transferred_chunk(self, chunk_size: int):
        """Add transferred chunk size in bytes.

        Args:
            chunk_size (int): Add transferred chunk size
                in bytes.

        """
        self._transferred += chunk_size

    def get_source_url(self) -> str:
        """Source url from where transfer happens.

        Note:
            Consider this as title. Must be set using
            'set_source_url' or 'N/A' will be returned.

        Returns:
            str: Source url from where transfer happens.

        """
        return self._source_url

    def set_source_url(self, url: str):
        """Set source url from where transfer happens.

        Args:
            url (str): Source url from where transfer happens.

        """
        self._source_url = url

    def get_destination_url(self) -> str:
        """Destination url where transfer happens.

        Note:
            Consider this as title. Must be set using
            'set_destination_url' or 'N/A' will be returned.

        Returns:
            str: Destination url where transfer happens.

        """
        return self._destination_url

    def set_destination_url(self, url: str):
        """Set destination url where transfer happens.

        Args:
            url (str): Destination url where transfer happens.

        """
        self._destination_url = url

    @property
    def is_running(self) -> bool:
        """Check if transfer is running.

        Returns:
            bool: True if transfer is running.

        """
        # Running means: started, not finished, not failed
        if (
            not self.started
            or self.transfer_done
            or self.failed
        ):
            return False
        return True

    @property
    def transfer_progress(self) -> Optional[float]:
        """Get transfer progress in percents.

        Returns:
            Optional[float]: Transfer progress in percents or 'None'
                if content size is unknown.

        """
        if self._content_size is None:
            return None
        return (self._transferred * 100.0) / float(self._content_size)

    content_size = property(get_content_size, set_content_size)
    started = property(get_started)
    transfer_done = property(get_transfer_done)
    failed = property(get_failed)
    fail_reason = property(get_fail_reason)
    source_url = property(get_source_url, set_source_url)
    destination_url = property(get_destination_url, set_destination_url)
    transferred_size = property(get_transferred_size, set_transferred_size)
def create_dependency_package_basename(
    platform_name: Optional[str] = None
) -> str:
    """Create basename for dependency package file.

    Args:
        platform_name (Optional[str]): Name of platform for which the
            bundle is targeted. Default value is current platform.

    Returns:
        str: Dependency package name with timestamp and platform.

    """
    if platform_name is None:
        platform_name = platform.system().lower()

    now_date = datetime.datetime.now()
    time_stamp = now_date.strftime("%y%m%d%H%M")
    return "ayon_{}_{}".format(time_stamp, platform_name)


def _get_media_mime_type_from_ftyp(content: bytes) -> Optional[str]:
    """Determine mime type from the 'ftyp' box brand (bytes 8-12)."""
    if content[8:10] == b"qt" or content[8:12] == b"MSNV":
        return "video/quicktime"

    if content[8:12] in (b"3g2a", b"3g2b", b"3g2c", b"KDDI"):
        return "video/3gpp2"

    if content[8:12] in (
        b"isom", b"iso2", b"avc1", b"F4V", b"F4P", b"F4A", b"F4B", b"mmp4",
        # These might be "video/mp4v"
        b"mp41", b"mp42",
        # Nero
        b"NDSC", b"NDSH", b"NDSM", b"NDSP", b"NDSS", b"NDXC", b"NDXH",
        b"NDXM", b"NDXP", b"NDXS",
    ):
        return "video/mp4"

    if content[8:12] in (
        b"3ge6", b"3ge7", b"3gg6",
        b"3gp1", b"3gp2", b"3gp3", b"3gp4", b"3gp5", b"3gp6", b"3gs7",
    ):
        return "video/3gpp"

    if content[8:11] == b"JP2":
        return "image/jp2"

    if content[8:11] == b"jpm":
        return "image/jpm"

    if content[8:11] == b"jpx":
        return "image/jpx"

    if content[8:12] in (b"M4V\x20", b"M4VH", b"M4VP"):
        return "video/x-m4v"

    if content[8:12] in (b"mj2s", b"mjp2"):
        return "video/mj2"
    return None


def _get_media_mime_type_for_content_base(content: bytes) -> Optional[str]:
    """Determine Mime-Type of a file.

    Use header of the file to determine mime type (needs 12 bytes).
    """
    content_len = len(content)
    # Pre-validation (largest definition check)
    # - hopefully there cannot be media defined in less than 12 bytes
    if content_len < 12:
        return None

    # FTYP
    if content[4:8] == b"ftyp":
        return _get_media_mime_type_from_ftyp(content)

    # BMP
    if content[0:2] == b"BM":
        return "image/bmp"

    # Tiff
    # NOTE(review): "tiff" is not a valid MIME type ("image/tiff") -
    #   kept for backward compatibility; confirm with callers.
    if content[0:2] in (b"MM", b"II"):
        return "tiff"

    # PNG
    if content[0:4] == b"\211PNG":
        return "image/png"

    # JPEG
    # - [0:2] is constant b"\xff\xd8"
    #   (ref. https://www.file-recovery.com/jpg-signature-format.htm)
    # - [2:4] Marker identifier b"\xff{?}"
    #   (ref. https://www.disktuna.com/list-of-jpeg-markers/)
    # NOTE: File ends with b"\xff\xd9"
    if content[0:3] == b"\xff\xd8\xff":
        return "image/jpeg"

    # Webp
    if content[0:4] == b"RIFF" and content[8:12] == b"WEBP":
        return "image/webp"

    # Gif
    # NOTE(review): "gif" is not a valid MIME type ("image/gif") -
    #   kept for backward compatibility; confirm with callers.
    if content[0:6] in (b"GIF87a", b"GIF89a"):
        return "gif"

    # Adobe PhotoShop file (8B > Adobe, PS > PhotoShop)
    if content[0:4] == b"8BPS":
        return "image/vnd.adobe.photoshop"

    # Windows ICO > this might be wild guess as multiple files can start
    #   with this header
    if content[0:4] == b"\x00\x00\x01\x00":
        return "image/x-icon"
    return None


def _get_svg_mime_type(content: bytes) -> Optional[str]:
    """Detect SVG by its xml namespace anywhere in the content."""
    # SVG
    if b'xmlns="http://www.w3.org/2000/svg"' in content:
        return "image/svg+xml"
    return None


def get_media_mime_type_for_content(content: bytes) -> Optional[str]:
    """Determine Mime-Type from full file content.

    Args:
        content (bytes): File content.

    Returns:
        Optional[str]: Mime type or None if is unknown mime type.

    """
    mime_type = _get_media_mime_type_for_content_base(content)
    if mime_type is not None:
        return mime_type
    return _get_svg_mime_type(content)


def get_media_mime_type_for_stream(stream: "StreamType") -> Optional[str]:
    """Determine Mime-Type from a readable binary stream.

    Args:
        stream (StreamType): Opened binary stream.

    Returns:
        Optional[str]: Mime type or None if is unknown mime type.

    """
    # Read only 12 bytes to determine mime type
    content = stream.read(12)
    if len(content) < 12:
        return None
    mime_type = _get_media_mime_type_for_content_base(content)
    if mime_type is None:
        # Fallback to SVG check which needs the whole content
        content += stream.read()
        mime_type = _get_svg_mime_type(content)
    return mime_type


def get_media_mime_type(filepath: str) -> Optional[str]:
    """Determine Mime-Type of a file.

    Args:
        filepath (str): Path to file.

    Returns:
        Optional[str]: Mime type or None if is unknown mime type.

    """
    if not filepath or not os.path.exists(filepath):
        return None

    with open(filepath, "rb") as stream:
        return get_media_mime_type_for_stream(stream)


def take_web_action_event(
    server_url: str,
    action_token: str,
    timeout: Optional[float] = None,
) -> Dict[str, Any]:
    """Take web action event using action token.

    Action token is generated by AYON server and passed to AYON launcher.

    Args:
        server_url (str): AYON server url.
        action_token (str): Action token.
        timeout (Optional[float]): Timeout for request. Value from
            'get_default_timeout' is used if not specified.

    Returns:
        Dict[str, Any]: Web action event.

    """
    if timeout is None:
        timeout = get_default_timeout()
    response = requests.get(
        f"{server_url}/api/actions/take/{action_token}",
        timeout=timeout,
    )
    response.raise_for_status()
    return response.json()


def abort_web_action_event(
    server_url: str,
    action_token: str,
    reason: str,
    timeout: Optional[float] = None,
) -> "requests.Response":
    """Abort web action event using action token.

    A web action event could not be processed for some reason.

    Args:
        server_url (str): AYON server url.
        action_token (str): Action token.
        reason (str): Reason why webaction event was aborted.
        timeout (Optional[float]): Timeout for request. Value from
            'get_default_timeout' is used if not specified.

    Returns:
        requests.Response: Response from server.

    """
    if timeout is None:
        timeout = get_default_timeout()
    response = requests.post(
        f"{server_url}/api/actions/abort/{action_token}",
        json={"message": reason},
        timeout=timeout,
    )
    response.raise_for_status()
    return response
1015 | 1016 | """ 1017 | response = requests.post( 1018 | f"{server_url}/api/actions/abort/{action_token}", 1019 | json={"message": reason}, 1020 | ) 1021 | response.raise_for_status() 1022 | return response 1023 | -------------------------------------------------------------------------------- /ayon_api/version.py: -------------------------------------------------------------------------------- 1 | """Package declaring Python API for AYON server.""" 2 | __version__ = "1.1.4-dev" 3 | -------------------------------------------------------------------------------- /docs/.nojekyll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ynput/ayon-python-api/86a991c29bbe4f7eb9a69013ba5d688f1a0bab55/docs/.nojekyll -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/check_docstrings.bat: -------------------------------------------------------------------------------- 1 | poetry run pydocstyle --convention=google --add-ignore=D103,D104,D100 -------------------------------------------------------------------------------- /docs/make_api.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | poetry run sphinx-apidoc -f -e -M -o .\source\ ..\ayon_api\ 6 | 7 | -------------------------------------------------------------------------------- /docs/make_html.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | poetry run sphinx-build -M html .\source .\build 6 | -------------------------------------------------------------------------------- /docs/source/_static/AYON_blackG_dot.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /docs/source/_static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ynput/ayon-python-api/86a991c29bbe4f7eb9a69013ba5d688f1a0bab55/docs/source/_static/favicon.ico -------------------------------------------------------------------------------- /docs/source/ayon_api.rst: -------------------------------------------------------------------------------- 1 | ayon\_api package 2 | ================= 3 | 4 | .. 
automodule:: ayon_api 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | Submodules 10 | ---------- 11 | 12 | .. toctree:: 13 | :maxdepth: 4 14 | 15 | ayon_api.constants 16 | ayon_api.entity_hub 17 | ayon_api.events 18 | ayon_api.exceptions 19 | ayon_api.graphql 20 | ayon_api.graphql_queries 21 | ayon_api.operations 22 | ayon_api.server_api 23 | ayon_api.utils 24 | ayon_api.version 25 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | sys.path.insert(0, os.path.abspath('../../')) 4 | 5 | # Configuration file for the Sphinx documentation builder. 6 | # 7 | # For the full list of built-in configuration values, see the documentation: 8 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 9 | 10 | # -- Project information ----------------------------------------------------- 11 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information 12 | 13 | current_dir = os.path.dirname(os.path.abspath(__file__)) 14 | ayon_api_version_path = os.path.join( 15 | os.path.dirname(os.path.dirname(current_dir)), 16 | "ayon_api", 17 | "version.py" 18 | ) 19 | version_content = {} 20 | with open(ayon_api_version_path, "r") as stream: 21 | exec(stream.read(), version_content) 22 | project = 'ayon-python-api' 23 | copyright = '2024, ynput.io ' 24 | author = 'ynput.io ' 25 | release = version_content["__version__"] 26 | 27 | # -- General configuration --------------------------------------------------- 28 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration 29 | 30 | extensions = [ 31 | 'sphinx.ext.autodoc', 32 | 'sphinx.ext.doctest', 33 | 'sphinx.ext.todo', 34 | 'sphinx.ext.coverage', 35 | 'sphinx.ext.mathjax', 36 | 'sphinx.ext.ifconfig', 37 | 'sphinx.ext.viewcode', 38 | 'sphinx.ext.githubpages', 39 | 'sphinx.ext.napoleon', 40 | 
'revitron_sphinx_theme', 41 | ] 42 | 43 | # -- Napoleon settings ------------------------------------------------------- 44 | add_module_names = False 45 | 46 | napoleon_google_docstring = True 47 | napoleon_numpy_docstring = False 48 | napoleon_include_init_with_doc = False 49 | napoleon_include_private_with_doc = False 50 | napoleon_include_special_with_doc = False 51 | napoleon_use_admonition_for_examples = True 52 | napoleon_use_admonition_for_notes = True 53 | napoleon_use_admonition_for_references = True 54 | napoleon_use_ivar = True 55 | napoleon_use_param = True 56 | napoleon_use_rtype = True 57 | napoleon_preprocess_types = True 58 | napoleon_attr_annotations = True 59 | 60 | templates_path = ['_templates'] 61 | exclude_patterns = ['tests', 'venv', 'build', 'Thumbs.db', '.DS_Store'] 62 | 63 | 64 | 65 | # -- Options for HTML output ------------------------------------------------- 66 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output 67 | 68 | html_theme = "revitron_sphinx_theme" 69 | html_static_path = ['_static'] 70 | html_logo = './_static/AYON_blackG_dot.svg' 71 | html_favicon = './_static/favicon.ico' 72 | 73 | html_context = { 74 | 'landing_page': { 75 | } 76 | } 77 | myst_footnote_transition = False 78 | html_sidebars = {} 79 | 80 | html_theme_options = { 81 | 'color_scheme': '', 82 | 'canonical_url': 'https://github.com/ynput/ayon-python-api', 83 | 'style_external_links': False, 84 | 'collapse_navigation': True, 85 | 'sticky_navigation': True, 86 | 'navigation_depth': 4, 87 | 'includehidden': False, 88 | 'titles_only': False, 89 | 'github_url': 'https://github.com/ynput/ayon-python-api', 90 | } 91 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to AYON Python API documentation! 2 | ========================================= 3 | 4 | .. 
container:: .image 5 | 6 | .. image:: ./_static/AYON_blackG_dot.svg 7 | 8 | .. container:: .large 9 | 10 | This is mainly auto-generated documentation for the AYON Python API. 11 | 12 | .. container:: .buttons 13 | 14 | `Python API Reference <./ayon_api.html>`_ 15 | `REST API `_ 16 | `All AYON Docs `_ 17 | 18 | 19 | Getting Started 20 | =============== 21 | 22 | .. code-block:: text 23 | :caption: Install latest version from PyPi 24 | 25 | pip install ayon-python-api 26 | 27 | .. code-block:: text 28 | :caption: Install from Github sources (Alternatively) 29 | 30 | git clone git@github.com:ynput/ayon-python-api.git 31 | cd ayon-python-api 32 | pip install . 33 | 34 | .. code-block:: text 35 | :caption: Ensure installed properly by printing ayon_api version 36 | 37 | python -c "import ayon_api ; print(ayon_api.__version__)" 38 | 39 | 40 | Python API 41 | ========== 42 | 43 | * `API Reference <./ayon_api.html>`_ 44 | ------------------------------------ 45 | 46 | * `Github Repository `_ 47 | ------------------------------------------------------------------ 48 | 49 | Miscellaneous 50 | ============= 51 | 52 | * :ref:`genindex` 53 | ----------------- 54 | 55 | * :ref:`modindex` 56 | ----------------- 57 | 58 | * :ref:`search` 59 | --------------- 60 | 61 | 62 | Summary 63 | ======= 64 | 65 | .. 
toctree:: 66 | :maxdepth: 4 67 | 68 | modules -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "ayon-python-api" 3 | version = "1.1.4-dev" 4 | description = "AYON Python API" 5 | license = {file = "LICENSE"} 6 | readme = {file = "README.md", content-type = "text/markdown"} 7 | authors = [ 8 | {name = "ynput.io", email = "info@ynput.io"} 9 | ] 10 | keywords = ["AYON", "ynput", "OpenPype", "vfx"] 11 | classifiers = [ 12 | "Development Status :: 5 - Production/Stable", 13 | "Programming Language :: Python", 14 | "Programming Language :: Python :: 3", 15 | ] 16 | dependencies = [ 17 | "requests >= 2.27.1", 18 | "Unidecode >= 1.3.0", 19 | ] 20 | 21 | [project.urls] 22 | Repository = "https://github.com/ynput/ayon-python-api" 23 | Changelog = "https://github.com/ynput/ayon-python-api/releases" 24 | 25 | [build-system] 26 | requires = ["poetry-core>=1.0.0"] 27 | build-backend = "poetry.core.masonry.api" 28 | 29 | [tool.poetry] 30 | name = "ayon-python-api" 31 | version = "1.1.4-dev" 32 | description = "AYON Python API" 33 | authors = [ 34 | "ynput.io " 35 | ] 36 | packages = [ 37 | { include = "ayon_api" } 38 | ] 39 | 40 | [tool.poetry.dependencies] 41 | python = ">=3.6.5" 42 | requests = "^2.27" 43 | Unidecode = "^1.3" 44 | 45 | [tool.poetry.group.dev.dependencies] 46 | sphinx = "*" 47 | mock = "*" 48 | sphinx-autoapi = "*" 49 | revitron-sphinx-theme = { git = "https://github.com/revitron/revitron-sphinx-theme.git", branch = "master" } 50 | pytest = "^6.2.5" 51 | pydocstyle = "^6.3.0" 52 | -------------------------------------------------------------------------------- /ruff.toml: -------------------------------------------------------------------------------- 1 | # Exclude a variety of commonly ignored directories. 
2 | exclude = [ 3 | ".bzr", 4 | ".direnv", 5 | ".eggs", 6 | ".git", 7 | ".git-rewrite", 8 | ".hg", 9 | ".ipynb_checkpoints", 10 | ".mypy_cache", 11 | ".nox", 12 | ".pants.d", 13 | ".pyenv", 14 | ".pytest_cache", 15 | ".pytype", 16 | ".ruff_cache", 17 | ".svn", 18 | ".tox", 19 | ".venv", 20 | ".vscode", 21 | "__pypackages__", 22 | "_build", 23 | "buck-out", 24 | "build", 25 | "dist", 26 | "node_modules", 27 | "site-packages", 28 | "venv", 29 | ] 30 | 31 | # Same as Black. 32 | line-length = 79 33 | indent-width = 4 34 | 35 | [lint] 36 | # Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default. 37 | # Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or 38 | # McCabe complexity (`C901`) by default. 39 | select = ["E", "F", "W"] 40 | ignore = [] 41 | 42 | # Allow fix for all enabled rules (when `--fix`) is provided. 43 | fixable = ["ALL"] 44 | unfixable = [] 45 | 46 | # Allow unused variables when underscore-prefixed. 47 | dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" 48 | 49 | [lint.extend-per-file-ignores] 50 | "tests/*" = ["F401", "F811"] 51 | 52 | [format] 53 | # Like Black, use double quotes for strings. 54 | quote-style = "double" 55 | 56 | # Like Black, indent with spaces, rather than tabs. 57 | indent-style = "space" 58 | 59 | # Like Black, respect magic trailing commas. 60 | skip-magic-trailing-comma = false 61 | 62 | # Like Black, automatically detect the appropriate line ending. 63 | line-ending = "auto" 64 | 65 | # Enable auto-formatting of code examples in docstrings. Markdown, 66 | # reStructuredText code/literal blocks and doctests are all supported. 67 | # 68 | # This is currently disabled by default, but it is planned for this 69 | # to be opt-out in the future. 70 | docstring-code-format = false 71 | 72 | # Set the line length limit used when formatting code snippets in 73 | # docstrings. 74 | # 75 | # This only has an effect when the `docstring-code-format` setting is 76 | # enabled. 
77 | docstring-code-line-length = "dynamic" 78 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import os 4 | from setuptools import setup 5 | 6 | REPO_ROOT = os.path.dirname(os.path.abspath(__file__)) 7 | README_PATH = os.path.join(REPO_ROOT, "README.md") 8 | VERSION_PATH = os.path.join(REPO_ROOT, "ayon_api", "version.py") 9 | _version_content = {} 10 | exec(open(VERSION_PATH).read(), _version_content) 11 | 12 | setup( 13 | name="ayon-python-api", 14 | version=_version_content["__version__"], 15 | py_modules=["ayon_api"], 16 | packages=["ayon_api"], 17 | author="ynput.io", 18 | author_email="info@ynput.io", 19 | license="Apache License (2.0)", 20 | description="AYON Python API", 21 | long_description=open(README_PATH, encoding="utf-8").read(), 22 | long_description_content_type="text/markdown", 23 | url="https://github.com/ynput/ayon-python-api", 24 | include_package_data=True, 25 | # https://pypi.org/classifiers/ 26 | classifiers=[ 27 | "Development Status :: 5 - Production/Stable", 28 | "Programming Language :: Python", 29 | "Programming Language :: Python :: 3", 30 | ], 31 | install_requires=[ 32 | "requests >= 2.27.1", 33 | "Unidecode >= 1.3.0", 34 | "appdirs >=1, <2", 35 | ], 36 | keywords=["AYON", "ynput", "OpenPype", "vfx"], 37 | ) 38 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ynput/ayon-python-api/86a991c29bbe4f7eb9a69013ba5d688f1a0bab55/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta, timezone 2 | import pytest 3 | 4 | from 
ayon_api import ( 5 | get_project, 6 | create_project, 7 | update_project, 8 | delete_project, 9 | get_events, 10 | get_folders, 11 | get_products, 12 | get_tasks 13 | ) 14 | from ayon_api.entity_hub import EntityHub 15 | 16 | 17 | class _Cache: 18 | # Cache project entity as scope 'session' of a fixture does not handle 19 | # parametrized fixtures. 20 | project_entity = None 21 | 22 | 23 | @pytest.fixture(scope="session") 24 | def project_name_fixture(): 25 | return "AYONApiTestProject" 26 | 27 | 28 | @pytest.fixture(scope="session") 29 | def project_entity_fixture(project_name_fixture): 30 | project_entity = _Cache.project_entity 31 | created = False 32 | if _Cache.project_entity is None: 33 | created = True 34 | project_entity = get_project(project_name_fixture) 35 | if project_entity: 36 | delete_project(project_name_fixture) 37 | create_project(project_name_fixture, "AYTP") 38 | update_project( 39 | project_name_fixture, 40 | folder_types=[ 41 | { 42 | "name": "Folder", 43 | "icon": "folder", 44 | "shortName": "" 45 | }, 46 | { 47 | "name": "Episode", 48 | "icon": "live_tv", 49 | "shortName": "" 50 | }, 51 | { 52 | "name": "Sequence", 53 | "icon": "theaters", 54 | "shortName": "" 55 | }, 56 | { 57 | "name": "Shot", 58 | "icon": "movie", 59 | "shortName": "" 60 | } 61 | ] 62 | ) 63 | project_entity = get_project(project_name_fixture) 64 | _Cache.project_entity = project_entity 65 | 66 | yield project_entity 67 | if created: 68 | delete_project(project_name_fixture) 69 | 70 | 71 | @pytest.fixture 72 | def clean_project(project_name_fixture): 73 | hub = EntityHub(project_name_fixture) 74 | hub.fetch_hierarchy_entities() 75 | 76 | folder_ids = { 77 | folder["id"] 78 | for folder in get_folders(project_name_fixture, fields={"id"}) 79 | } 80 | task_ids = { 81 | task["id"] 82 | for task in get_tasks( 83 | project_name_fixture, folder_ids=folder_ids, fields={"id"} 84 | ) 85 | } 86 | product_ids = { 87 | product["id"] 88 | for product in get_products( 89 | 
project_name_fixture, folder_ids=folder_ids, fields={"id"} 90 | ) 91 | } 92 | for product_id in product_ids: 93 | product = hub.get_product_by_id(product_id) 94 | if product is not None: 95 | hub.delete_entity(product) 96 | 97 | for task_id in task_ids: 98 | task = hub.get_task_by_id(task_id) 99 | if task is not None: 100 | hub.delete_entity(task) 101 | 102 | hub.commit_changes() 103 | 104 | for folder_id in folder_ids: 105 | folder = hub.get_folder_by_id(folder_id) 106 | if folder is not None: 107 | hub.delete_entity(folder) 108 | 109 | hub.commit_changes() 110 | 111 | 112 | @pytest.fixture(params=[3, 4, 5]) 113 | def event_ids(request): 114 | length = request.param 115 | if length == 0: 116 | return None 117 | 118 | recent_events = list(get_events( 119 | newer_than=(datetime.now(timezone.utc) - timedelta(days=5)).isoformat() 120 | )) 121 | 122 | return [recent_event["id"] for recent_event in recent_events[:length]] 123 | 124 | 125 | @pytest.fixture 126 | def event_id(): 127 | """Fixture that retrieves the ID of a recent event created within 128 | the last 5 days. 129 | 130 | Returns: 131 | - The event ID of the most recent event within the last 5 days 132 | if available. 133 | - `None` if no recent events are found within this time frame. 
134 | 135 | """ 136 | recent_events = list(get_events( 137 | newer_than=(datetime.now(timezone.utc) - timedelta(days=5)).isoformat() 138 | )) 139 | return recent_events[0]["id"] if recent_events else None 140 | 141 | 142 | class TestEventFilters: 143 | project_names = [ 144 | ([]), 145 | (["demo_Big_Episodic"]), 146 | (["demo_Big_Feature"]), 147 | (["demo_Commercial"]), 148 | (["AY_Tests"]), 149 | ([ 150 | "demo_Big_Episodic", 151 | "demo_Big_Feature", 152 | "demo_Commercial", 153 | "AY_Tests" 154 | ]) 155 | ] 156 | 157 | topics = [ 158 | ([]), 159 | (["entity.folder.attrib_changed"]), 160 | (["entity.task.created", "entity.project.created"]), 161 | (["settings.changed", "entity.version.status_changed"]), 162 | (["entity.task.status_changed", "entity.folder.deleted"]), 163 | ([ 164 | "entity.project.changed", 165 | "entity.task.tags_changed", 166 | "entity.product.created" 167 | ]) 168 | ] 169 | 170 | users = [ 171 | (None), 172 | ([]), 173 | (["admin"]), 174 | (["mkolar", "tadeas.8964"]), 175 | (["roy", "luke.inderwick", "ynbot"]), 176 | ([ 177 | "entity.folder.attrib_changed", 178 | "entity.project.created", 179 | "entity.task.created", 180 | "settings.changed" 181 | ]), 182 | ] 183 | 184 | # states is incorrect name for statuses 185 | states = [ 186 | (None), 187 | ([]), 188 | ([ 189 | "pending", 190 | "in_progress", 191 | "finished", 192 | "failed", 193 | "aborted", 194 | "restarted" 195 | ]), 196 | (["failed", "aborted"]), 197 | (["pending", "in_progress"]), 198 | (["finished", "failed", "restarted"]), 199 | (["finished"]), 200 | ] 201 | 202 | include_logs = [ 203 | (None), 204 | (True), 205 | (False), 206 | ] 207 | 208 | has_children = [ 209 | (None), 210 | (True), 211 | (False), 212 | ] 213 | 214 | now = datetime.now(timezone.utc) 215 | 216 | newer_than = [ 217 | (None), 218 | ((now - timedelta(days=2)).isoformat()), 219 | ((now - timedelta(days=5)).isoformat()), 220 | ((now - timedelta(days=10)).isoformat()), 221 | ((now - timedelta(days=20)).isoformat()), 
222 | ((now - timedelta(days=30)).isoformat()), 223 | ] 224 | 225 | older_than = [ 226 | (None), 227 | ((now - timedelta(days=0)).isoformat()), 228 | ((now - timedelta(days=5)).isoformat()), 229 | ((now - timedelta(days=10)).isoformat()), 230 | ((now - timedelta(days=20)).isoformat()), 231 | ((now - timedelta(days=30)).isoformat()), 232 | ] 233 | 234 | fields = [ 235 | (None), 236 | ([]), 237 | ] 238 | 239 | 240 | class TestInvalidEventFilters: 241 | topics = [ 242 | (None), 243 | (["invalid_topic_name_1", "invalid_topic_name_2"]), 244 | (["invalid_topic_name_1"]), 245 | ] 246 | 247 | project_names = [ 248 | (None), 249 | (["invalid_project"]), 250 | (["invalid_project", "demo_Big_Episodic", "demo_Big_Feature"]), 251 | (["invalid_name_2", "demo_Commercial"]), 252 | (["demo_Commercial"]), 253 | ] 254 | 255 | states = [ 256 | (None), 257 | (["pending_invalid"]), 258 | (["in_progress_invalid"]), 259 | (["finished_invalid", "failed_invalid"]), 260 | ] 261 | 262 | users = [ 263 | (None), 264 | (["ayon_invalid_user"]), 265 | (["ayon_invalid_user1", "ayon_invalid_user2"]), 266 | (["ayon_invalid_user1", "ayon_invalid_user2", "admin"]), 267 | ] 268 | 269 | newer_than = [ 270 | (None), 271 | ((datetime.now(timezone.utc) + timedelta(days=2)).isoformat()), 272 | ((datetime.now(timezone.utc) + timedelta(days=5)).isoformat()), 273 | ((datetime.now(timezone.utc) - timedelta(days=5)).isoformat()), 274 | ] 275 | 276 | 277 | class TestUpdateEventData: 278 | update_sender = [ 279 | ("test.server.api"), 280 | ] 281 | 282 | update_username = [ 283 | ("testing_user"), 284 | ] 285 | 286 | update_status = [ 287 | ("pending"), 288 | ("in_progress"), 289 | ("finished"), 290 | ("failed"), 291 | ("aborted"), 292 | ("restarted") 293 | ] 294 | 295 | update_description = [ 296 | ( 297 | "Lorem ipsum dolor sit amet, consectetur adipiscing elit." 298 | " Fusce vivera." 
299 | ), 300 | ("Updated description test...") 301 | ] 302 | 303 | update_retries = [ 304 | (1), 305 | (0), 306 | (10), 307 | ] 308 | 309 | 310 | class TestProductData: 311 | names = [ 312 | ("test_name"), 313 | ("test_123"), 314 | ] 315 | 316 | product_types = [ 317 | ("animation"), 318 | ("camera"), 319 | ("render"), 320 | ("workfile"), 321 | ] 322 | -------------------------------------------------------------------------------- /tests/resources/addon/.gitignore: -------------------------------------------------------------------------------- 1 | /package/ 2 | /__pycache__/ -------------------------------------------------------------------------------- /tests/resources/addon/create_package.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """Prepares server package from addon repo to upload to server. 4 | 5 | Requires Python 3.9. (Or at least 3.8+). 6 | 7 | This script should be called from cloned addon repo. 8 | 9 | It will produce 'package' subdirectory which could be pasted into server 10 | addon directory directly (eg. into `ayon-backend/addons`). 11 | 12 | Format of package folder: 13 | ADDON_REPO/package/{addon name}/{addon version} 14 | 15 | You can specify `--output_dir` in arguments to change output directory where 16 | package will be created. Existing package directory will always be purged if 17 | already present! This could be used to create package directly in server folder 18 | if available. 19 | 20 | Package contains server side files directly, 21 | client side code zipped in `private` subfolder. 
22 | """ 23 | 24 | import os 25 | import sys 26 | import re 27 | import io 28 | import shutil 29 | import platform 30 | import argparse 31 | import logging 32 | import collections 33 | import zipfile 34 | import subprocess 35 | from typing import Optional, Iterable, Pattern, Union, List, Tuple 36 | 37 | import package 38 | 39 | FileMapping = Tuple[Union[str, io.BytesIO], str] 40 | ADDON_NAME: str = package.name 41 | ADDON_VERSION: str = package.version 42 | ADDON_CLIENT_DIR: Union[str, None] = getattr(package, "client_dir", None) 43 | 44 | CURRENT_ROOT: str = os.path.dirname(os.path.abspath(__file__)) 45 | SERVER_ROOT: str = os.path.join(CURRENT_ROOT, "server") 46 | FRONTEND_ROOT: str = os.path.join(CURRENT_ROOT, "frontend") 47 | FRONTEND_DIST_ROOT: str = os.path.join(FRONTEND_ROOT, "dist") 48 | DST_DIST_DIR: str = os.path.join("frontend", "dist") 49 | PRIVATE_ROOT: str = os.path.join(CURRENT_ROOT, "private") 50 | PUBLIC_ROOT: str = os.path.join(CURRENT_ROOT, "public") 51 | CLIENT_ROOT: str = os.path.join(CURRENT_ROOT, "client") 52 | 53 | VERSION_PY_CONTENT = f'''# -*- coding: utf-8 -*- 54 | """Package declaring AYON addon '{ADDON_NAME}' version.""" 55 | __version__ = "{ADDON_VERSION}" 56 | ''' 57 | 58 | # Patterns of directories to be skipped for server part of addon 59 | IGNORE_DIR_PATTERNS: List[Pattern] = [ 60 | re.compile(pattern) 61 | for pattern in { 62 | # Skip directories starting with '.' 63 | r"^\.", 64 | # Skip any pycache folders 65 | "^__pycache__$" 66 | } 67 | ] 68 | 69 | # Patterns of files to be skipped for server part of addon 70 | IGNORE_FILE_PATTERNS: List[Pattern] = [ 71 | re.compile(pattern) 72 | for pattern in { 73 | # Skip files starting with '.' 74 | # NOTE this could be an issue in some cases 75 | r"^\.", 76 | # Skip '.pyc' files 77 | r"\.pyc$" 78 | } 79 | ] 80 | 81 | 82 | class ZipFileLongPaths(zipfile.ZipFile): 83 | """Allows longer paths in zip files. 
84 | 85 | Regular DOS paths are limited to MAX_PATH (260) characters, including 86 | the string's terminating NUL character. 87 | That limit can be exceeded by using an extended-length path that 88 | starts with the '\\?\' prefix. 89 | """ 90 | _is_windows = platform.system().lower() == "windows" 91 | 92 | def _extract_member(self, member, tpath, pwd): 93 | if self._is_windows: 94 | tpath = os.path.abspath(tpath) 95 | if tpath.startswith("\\\\"): 96 | tpath = "\\\\?\\UNC\\" + tpath[2:] 97 | else: 98 | tpath = "\\\\?\\" + tpath 99 | 100 | return super()._extract_member(member, tpath, pwd) 101 | 102 | 103 | def _get_yarn_executable() -> Union[str, None]: 104 | cmd = "which" 105 | if platform.system().lower() == "windows": 106 | cmd = "where" 107 | 108 | for line in subprocess.check_output( 109 | [cmd, "yarn"], encoding="utf-8" 110 | ).splitlines(): 111 | if not line or not os.path.exists(line): 112 | continue 113 | try: 114 | subprocess.call([line, "--version"]) 115 | return line 116 | except OSError: 117 | continue 118 | return None 119 | 120 | 121 | def safe_copy_file(src_path: str, dst_path: str): 122 | """Copy file and make sure destination directory exists. 123 | 124 | Ignore if destination already contains directories from source. 125 | 126 | Args: 127 | src_path (str): File path that will be copied. 128 | dst_path (str): Path to destination file. 
129 | """ 130 | 131 | if src_path == dst_path: 132 | return 133 | 134 | dst_dir: str = os.path.dirname(dst_path) 135 | os.makedirs(dst_dir, exist_ok=True) 136 | 137 | shutil.copy2(src_path, dst_path) 138 | 139 | 140 | def _value_match_regexes(value: str, regexes: Iterable[Pattern]) -> bool: 141 | return any( 142 | regex.search(value) 143 | for regex in regexes 144 | ) 145 | 146 | 147 | def find_files_in_subdir( 148 | src_path: str, 149 | ignore_file_patterns: Optional[List[Pattern]] = None, 150 | ignore_dir_patterns: Optional[List[Pattern]] = None 151 | ) -> List[Tuple[str, str]]: 152 | """Find all files to copy in subdirectories of given path. 153 | 154 | All files that match any of the patterns in 'ignore_file_patterns' will 155 | be skipped and any directories that match any of the patterns in 156 | 'ignore_dir_patterns' will be skipped with all subfiles. 157 | 158 | Args: 159 | src_path (str): Path to directory to search in. 160 | ignore_file_patterns (Optional[list[Pattern]]): List of regexes 161 | to match files to ignore. 162 | ignore_dir_patterns (Optional[list[Pattern]]): List of regexes 163 | to match directories to ignore. 164 | 165 | Returns: 166 | list[tuple[str, str]]: List of tuples with path to file and parent 167 | directories relative to 'src_path'. 
168 | """ 169 | 170 | if ignore_file_patterns is None: 171 | ignore_file_patterns = IGNORE_FILE_PATTERNS 172 | 173 | if ignore_dir_patterns is None: 174 | ignore_dir_patterns = IGNORE_DIR_PATTERNS 175 | output: List[Tuple[str, str]] = [] 176 | if not os.path.exists(src_path): 177 | return output 178 | 179 | hierarchy_queue: collections.deque = collections.deque() 180 | hierarchy_queue.append((src_path, [])) 181 | while hierarchy_queue: 182 | item: Tuple[str, str] = hierarchy_queue.popleft() 183 | dirpath, parents = item 184 | for name in os.listdir(dirpath): 185 | path: str = os.path.join(dirpath, name) 186 | if os.path.isfile(path): 187 | if not _value_match_regexes(name, ignore_file_patterns): 188 | items: List[str] = list(parents) 189 | items.append(name) 190 | output.append((path, os.path.sep.join(items))) 191 | continue 192 | 193 | if not _value_match_regexes(name, ignore_dir_patterns): 194 | items: List[str] = list(parents) 195 | items.append(name) 196 | hierarchy_queue.append((path, items)) 197 | 198 | return output 199 | 200 | 201 | def update_client_version(logger): 202 | """Update version in client code if version.py is present.""" 203 | if not ADDON_CLIENT_DIR: 204 | return 205 | 206 | version_path: str = os.path.join( 207 | CLIENT_ROOT, ADDON_CLIENT_DIR, "version.py" 208 | ) 209 | if not os.path.exists(version_path): 210 | logger.debug("Did not find version.py in client directory") 211 | return 212 | 213 | logger.info("Updating client version") 214 | with open(version_path, "w") as stream: 215 | stream.write(VERSION_PY_CONTENT) 216 | 217 | 218 | def build_frontend(): 219 | yarn_executable = _get_yarn_executable() 220 | if yarn_executable is None: 221 | raise RuntimeError("Yarn executable was not found.") 222 | 223 | subprocess.run([yarn_executable, "install"], cwd=FRONTEND_ROOT) 224 | subprocess.run([yarn_executable, "build"], cwd=FRONTEND_ROOT) 225 | if not os.path.exists(FRONTEND_DIST_ROOT): 226 | raise RuntimeError( 227 | "Frontend build failed. 
Did not find 'dist' folder." 228 | ) 229 | 230 | 231 | def get_client_files_mapping() -> List[Tuple[str, str]]: 232 | """Mapping of source client code files to destination paths. 233 | 234 | Example output: 235 | [ 236 | ( 237 | "C:/addons/MyAddon/version.py", 238 | "my_addon/version.py" 239 | ), 240 | ( 241 | "C:/addons/MyAddon/client/my_addon/__init__.py", 242 | "my_addon/__init__.py" 243 | ) 244 | ] 245 | 246 | Returns: 247 | list[tuple[str, str]]: List of path mappings to copy. The destination 248 | path is relative to expected output directory. 249 | 250 | """ 251 | # Add client code content to zip 252 | client_code_dir: str = os.path.join(CLIENT_ROOT, ADDON_CLIENT_DIR) 253 | mapping = [ 254 | (path, os.path.join(ADDON_CLIENT_DIR, sub_path)) 255 | for path, sub_path in find_files_in_subdir(client_code_dir) 256 | ] 257 | 258 | license_path = os.path.join(CURRENT_ROOT, "LICENSE") 259 | if os.path.exists(license_path): 260 | mapping.append((license_path, f"{ADDON_CLIENT_DIR}/LICENSE")) 261 | return mapping 262 | 263 | 264 | def get_client_zip_content(log) -> io.BytesIO: 265 | log.info("Preparing client code zip") 266 | files_mapping: List[Tuple[str, str]] = get_client_files_mapping() 267 | stream = io.BytesIO() 268 | with ZipFileLongPaths(stream, "w", zipfile.ZIP_DEFLATED) as zipf: 269 | for src_path, subpath in files_mapping: 270 | zipf.write(src_path, subpath) 271 | stream.seek(0) 272 | return stream 273 | 274 | 275 | def get_base_files_mapping() -> List[FileMapping]: 276 | filepaths_to_copy: List[FileMapping] = [ 277 | ( 278 | os.path.join(CURRENT_ROOT, "package.py"), 279 | "package.py" 280 | ) 281 | ] 282 | # Add license file to package if exists 283 | license_path = os.path.join(CURRENT_ROOT, "LICENSE") 284 | if os.path.exists(license_path): 285 | filepaths_to_copy.append((license_path, "LICENSE")) 286 | 287 | # Go through server, private and public directories and find all files 288 | for dirpath in (SERVER_ROOT, PRIVATE_ROOT, PUBLIC_ROOT): 289 | if not 
os.path.exists(dirpath): 290 | continue 291 | 292 | dirname = os.path.basename(dirpath) 293 | for src_file, subpath in find_files_in_subdir(dirpath): 294 | dst_subpath = os.path.join(dirname, subpath) 295 | filepaths_to_copy.append((src_file, dst_subpath)) 296 | 297 | if os.path.exists(FRONTEND_DIST_ROOT): 298 | for src_file, subpath in find_files_in_subdir(FRONTEND_DIST_ROOT): 299 | dst_subpath = os.path.join(DST_DIST_DIR, subpath) 300 | filepaths_to_copy.append((src_file, dst_subpath)) 301 | 302 | pyproject_toml = os.path.join(CLIENT_ROOT, "pyproject.toml") 303 | if os.path.exists(pyproject_toml): 304 | filepaths_to_copy.append( 305 | (pyproject_toml, "private/pyproject.toml") 306 | ) 307 | 308 | return filepaths_to_copy 309 | 310 | 311 | def copy_client_code(output_dir: str, log: logging.Logger): 312 | """Copies server side folders to 'addon_package_dir' 313 | 314 | Args: 315 | output_dir (str): Output directory path. 316 | log (logging.Logger) 317 | 318 | """ 319 | log.info(f"Copying client for {ADDON_NAME}-{ADDON_VERSION}") 320 | 321 | full_output_path = os.path.join( 322 | output_dir, f"{ADDON_NAME}_{ADDON_VERSION}" 323 | ) 324 | if os.path.exists(full_output_path): 325 | shutil.rmtree(full_output_path) 326 | os.makedirs(full_output_path, exist_ok=True) 327 | 328 | for src_path, dst_subpath in get_client_files_mapping(): 329 | dst_path = os.path.join(full_output_path, dst_subpath) 330 | safe_copy_file(src_path, dst_path) 331 | 332 | log.info("Client copy finished") 333 | 334 | 335 | def copy_addon_package( 336 | output_dir: str, 337 | files_mapping: List[FileMapping], 338 | log: logging.Logger 339 | ): 340 | """Copy client code to output directory. 341 | 342 | Args: 343 | output_dir (str): Directory path to output client code. 344 | files_mapping (List[FileMapping]): List of tuples with source file 345 | and destination subpath. 346 | log (logging.Logger): Logger object. 
347 | 348 | """ 349 | log.info(f"Copying package for {ADDON_NAME}-{ADDON_VERSION}") 350 | 351 | # Add addon name and version to output directory 352 | addon_output_dir: str = os.path.join( 353 | output_dir, ADDON_NAME, ADDON_VERSION 354 | ) 355 | if os.path.isdir(addon_output_dir): 356 | log.info(f"Purging {addon_output_dir}") 357 | shutil.rmtree(addon_output_dir) 358 | 359 | os.makedirs(addon_output_dir, exist_ok=True) 360 | 361 | # Copy server content 362 | for src_file, dst_subpath in files_mapping: 363 | dst_path: str = os.path.join(addon_output_dir, dst_subpath) 364 | dst_dir: str = os.path.dirname(dst_path) 365 | os.makedirs(dst_dir, exist_ok=True) 366 | if isinstance(src_file, io.BytesIO): 367 | with open(dst_path, "wb") as stream: 368 | stream.write(src_file.getvalue()) 369 | else: 370 | safe_copy_file(src_file, dst_path) 371 | 372 | log.info("Package copy finished") 373 | 374 | 375 | def create_addon_package( 376 | output_dir: str, 377 | files_mapping: List[FileMapping], 378 | log: logging.Logger 379 | ): 380 | log.info(f"Creating package for {ADDON_NAME}-{ADDON_VERSION}") 381 | 382 | os.makedirs(output_dir, exist_ok=True) 383 | output_path = os.path.join( 384 | output_dir, f"{ADDON_NAME}-{ADDON_VERSION}.zip" 385 | ) 386 | 387 | with ZipFileLongPaths(output_path, "w", zipfile.ZIP_DEFLATED) as zipf: 388 | # Copy server content 389 | for src_file, dst_subpath in files_mapping: 390 | if isinstance(src_file, io.BytesIO): 391 | zipf.writestr(dst_subpath, src_file.getvalue()) 392 | else: 393 | zipf.write(src_file, dst_subpath) 394 | 395 | log.info("Package created") 396 | 397 | 398 | def main( 399 | output_dir: Optional[str] = None, 400 | skip_zip: Optional[bool] = False, 401 | only_client: Optional[bool] = False 402 | ): 403 | log: logging.Logger = logging.getLogger("create_package") 404 | log.info("Package creation started") 405 | 406 | if not output_dir: 407 | output_dir = os.path.join(CURRENT_ROOT, "package") 408 | 409 | has_client_code = 
bool(ADDON_CLIENT_DIR) 410 | if has_client_code: 411 | client_dir: str = os.path.join(CLIENT_ROOT, ADDON_CLIENT_DIR) 412 | if not os.path.exists(client_dir): 413 | raise RuntimeError( 414 | f"Client directory was not found '{client_dir}'." 415 | " Please check 'client_dir' in 'package.py'." 416 | ) 417 | update_client_version(log) 418 | 419 | if only_client: 420 | if not has_client_code: 421 | raise RuntimeError("Client code is not available. Skipping") 422 | 423 | copy_client_code(output_dir, log) 424 | return 425 | 426 | log.info(f"Preparing package for {ADDON_NAME}-{ADDON_VERSION}") 427 | 428 | if os.path.exists(FRONTEND_ROOT): 429 | build_frontend() 430 | 431 | files_mapping: List[FileMapping] = [] 432 | files_mapping.extend(get_base_files_mapping()) 433 | 434 | if has_client_code: 435 | files_mapping.append( 436 | (get_client_zip_content(log), "private/client.zip") 437 | ) 438 | 439 | # Skip server zipping 440 | if skip_zip: 441 | copy_addon_package(output_dir, files_mapping, log) 442 | else: 443 | create_addon_package(output_dir, files_mapping, log) 444 | 445 | log.info("Package creation finished") 446 | 447 | 448 | if __name__ == "__main__": 449 | parser = argparse.ArgumentParser() 450 | parser.add_argument( 451 | "--skip-zip", 452 | dest="skip_zip", 453 | action="store_true", 454 | help=( 455 | "Skip zipping server package and create only" 456 | " server folder structure." 457 | ) 458 | ) 459 | parser.add_argument( 460 | "-o", "--output", 461 | dest="output_dir", 462 | default=None, 463 | help=( 464 | "Directory path where package will be created" 465 | " (Will be purged if already exists!)" 466 | ) 467 | ) 468 | parser.add_argument( 469 | "--only-client", 470 | dest="only_client", 471 | action="store_true", 472 | help=( 473 | "Extract only client code. This is useful for development." 474 | " Requires '-o', '--output' argument to be filled." 
475 | ) 476 | ) 477 | parser.add_argument( 478 | "--debug", 479 | dest="debug", 480 | action="store_true", 481 | help="Debug log messages." 482 | ) 483 | 484 | args = parser.parse_args(sys.argv[1:]) 485 | level = logging.INFO 486 | if args.debug: 487 | level = logging.DEBUG 488 | logging.basicConfig(level=level) 489 | main(args.output_dir, args.skip_zip, args.only_client) 490 | -------------------------------------------------------------------------------- /tests/resources/addon/package.py: -------------------------------------------------------------------------------- 1 | name = "tests" 2 | title = "Tests" 3 | version = "1.0.0" 4 | 5 | client_dir = None 6 | # ayon_launcher_version = ">=1.0.2" 7 | 8 | ayon_required_addons = {} 9 | ayon_compatible_addons = {} 10 | -------------------------------------------------------------------------------- /tests/resources/addon/private/ayon-symbol.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ynput/ayon-python-api/86a991c29bbe4f7eb9a69013ba5d688f1a0bab55/tests/resources/addon/private/ayon-symbol.png -------------------------------------------------------------------------------- /tests/resources/addon/server/__init__.py: -------------------------------------------------------------------------------- 1 | from ayon_server.addons import BaseServerAddon 2 | from ayon_server.api.dependencies import CurrentUser 3 | 4 | 5 | class TestsAddon(BaseServerAddon): 6 | def initialize(self): 7 | self.add_endpoint( 8 | "test-get", 9 | self.get_test, 10 | method="GET", 11 | ) 12 | 13 | async def get_test( 14 | self, user: CurrentUser, 15 | ): 16 | """Return a random folder from the database""" 17 | return { 18 | "success": True, 19 | } 20 | -------------------------------------------------------------------------------- /tests/resources/ayon-symbol.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ynput/ayon-python-api/86a991c29bbe4f7eb9a69013ba5d688f1a0bab55/tests/resources/ayon-symbol.png -------------------------------------------------------------------------------- /tests/test_folder_hierarchy.py: -------------------------------------------------------------------------------- 1 | """Tests of folder hierarchy - creating, deleting and moving 2 | folders, products, versions, etc. 3 | 4 | To run use: pytest --envfile {environment path}. 5 | Make sure you have set AYON_TOKEN in your environment. 6 | """ 7 | 8 | import pytest 9 | 10 | from ayon_api.operations import ( 11 | OperationsSession, 12 | new_folder_entity, 13 | new_product_entity, 14 | new_version_entity, 15 | new_representation_entity 16 | ) 17 | from ayon_api import ( 18 | get_versions, 19 | get_folder_by_id, 20 | get_product_by_id, 21 | get_folders, 22 | get_products, 23 | get_representations, 24 | ) 25 | from ayon_api.exceptions import ( 26 | FailedOperations 27 | ) 28 | 29 | from .conftest import project_entity_fixture 30 | 31 | 32 | @pytest.mark.parametrize( 33 | "folder_name", 34 | [ 35 | ("operations_with_folder1"), 36 | ("operations_with_folder2"), 37 | ("operations_with_folder3") 38 | ] 39 | ) 40 | def test_operations_with_folder(project_entity_fixture, folder_name): 41 | """Test of attributes updates - folder. 
42 | """ 43 | project_name = project_entity_fixture["name"] 44 | s = OperationsSession() 45 | 46 | # create folder 47 | folder = new_folder_entity(folder_name, "Folder") 48 | folder_id = s.create_entity( 49 | project_name, "folder", folder 50 | ).entity_id 51 | s.commit() 52 | 53 | folder_entity = get_folder_by_id(project_name, folder_id) 54 | 55 | # update entity attributes 56 | s.update_entity( 57 | project_name, 58 | "folder", 59 | folder_entity["id"], 60 | {"attrib": {"frameStart": 1002}} 61 | ) 62 | s.commit() 63 | 64 | folder_entity = get_folder_by_id(project_name, folder_id) 65 | assert folder_entity["attrib"]["frameStart"] == 1002 66 | 67 | s.delete_entity(project_name, "folder", folder_id) 68 | s.commit() 69 | assert get_folder_by_id(project_name, folder_id) is None 70 | 71 | 72 | @pytest.mark.parametrize( 73 | "folder_name", 74 | [ 75 | ("!invalid"), 76 | ("in/valid"), 77 | ("in~valid") 78 | ] 79 | ) 80 | def test_folder_name_invalid_characters(project_entity_fixture, folder_name): 81 | """Tries to create folders with invalid 82 | names and checks if exception was raised. 83 | """ 84 | project_name = project_entity_fixture["name"] 85 | s = OperationsSession() 86 | 87 | # create folder with invalid name 88 | with pytest.raises(FailedOperations): 89 | folder = new_folder_entity(folder_name, "Folder") 90 | _ = s.create_entity(project_name, "folder", folder) 91 | s.commit() 92 | 93 | 94 | @pytest.mark.parametrize( 95 | "folder_name", 96 | [ 97 | ("folder_duplicated_names"), 98 | ] 99 | ) 100 | def test_folder_duplicated_names(project_entity_fixture, folder_name): 101 | """Tries to create folders with duplicated 102 | names and checks if exception was raised. 
103 | """ 104 | project_name = project_entity_fixture["name"] 105 | s = OperationsSession() 106 | 107 | # create folder 108 | folder = new_folder_entity(folder_name, "Folder") 109 | folder_id = s.create_entity( 110 | project_name, "folder", folder 111 | ).entity_id 112 | s.commit() 113 | 114 | assert list(get_folders( 115 | project_name, 116 | folder_ids=[folder_id])) != [] 117 | 118 | # create folder with duplicated names 119 | with pytest.raises(FailedOperations): 120 | folder = new_folder_entity(folder_name, "Folder") 121 | _ = s.create_entity(project_name, "folder", folder) 122 | s.commit() 123 | 124 | s.delete_entity(project_name, "folder", folder_id) 125 | s.commit() 126 | assert get_folder_by_id(project_name, folder_id) is None 127 | 128 | 129 | @pytest.mark.parametrize( 130 | "folder_name, product_names", 131 | [ 132 | ( 133 | "product_duplicated_names", 134 | ["modelMain", "modelProxy", "modelSculpt"] 135 | ), 136 | ] 137 | ) 138 | def test_product_duplicated_names( 139 | project_entity_fixture, 140 | folder_name, 141 | product_names 142 | ): 143 | """Tries to create products with duplicated 144 | names and checks if exception was raised. 
145 | """ 146 | project_name = project_entity_fixture["name"] 147 | s = OperationsSession() 148 | 149 | # create folder 150 | folder = new_folder_entity(folder_name, "Folder") 151 | folder_id = s.create_entity( 152 | project_name, "folder", folder 153 | ).entity_id 154 | s.commit() 155 | 156 | # create products inside the folder 157 | product_ids = [] 158 | for name in product_names: 159 | product = new_product_entity(name, "model", folder_id) 160 | product_id = s.create_entity( 161 | project_name, "product", product 162 | ).entity_id 163 | s.commit() 164 | 165 | assert list(get_products( 166 | project_name, 167 | product_ids=[product_id], 168 | folder_ids=[folder_id])) != [] 169 | 170 | product_ids.append(product_id) 171 | 172 | # create products with duplicated names 173 | for name in product_names: 174 | with pytest.raises(FailedOperations): 175 | product = new_product_entity(name, "model", folder_id) 176 | _ = s.create_entity(project_name, "product", product) 177 | s.commit() 178 | 179 | # delete products 180 | for product_id in product_ids: 181 | s.delete_entity(project_name, "product", product_id) 182 | s.commit() 183 | assert get_product_by_id(project_name, product_id) is None 184 | 185 | # delete folder 186 | s.delete_entity(project_name, "folder", folder_id) 187 | s.commit() 188 | assert get_folder_by_id(project_name, folder_id) is None 189 | 190 | 191 | @pytest.mark.parametrize( 192 | ( 193 | "folder_name, product_name, version_name, representation_name," 194 | " num_of_versions, num_of_representations" 195 | ), 196 | [ 197 | ("whole_hierarchy", "modelMain", "version", "representation", 2, 3) 198 | ] 199 | ) 200 | def test_whole_hierarchy( 201 | project_entity_fixture, 202 | folder_name, 203 | product_name, 204 | version_name, 205 | representation_name, 206 | num_of_versions, 207 | num_of_representations 208 | ): 209 | """Creates the whole hierarchy (folder, product, version, representation). 
210 | Tries to create versions and representations with duplicated 211 | names and checks if exceptions are raised. 212 | """ 213 | project_name = project_entity_fixture["name"] 214 | s = OperationsSession() 215 | 216 | # create folder 217 | folder = new_folder_entity(folder_name, "Folder") 218 | folder_id = s.create_entity( 219 | project_name, "folder", folder 220 | ).entity_id 221 | s.commit() 222 | 223 | assert list(get_folders( 224 | project_name, 225 | folder_ids=[folder_id] 226 | )) != [] 227 | 228 | # create product 229 | product = new_product_entity(product_name, "model", folder_id) 230 | product_id = s.create_entity( 231 | project_name, "product", product 232 | ).entity_id 233 | s.commit() 234 | 235 | assert list(get_products( 236 | project_name, 237 | product_ids=[product_id], 238 | folder_ids=[folder_id] 239 | )) != [] 240 | 241 | # create versions 242 | my_version_ids = [] 243 | for i in range(num_of_versions): 244 | version = new_version_entity(i, product_id) 245 | version_id = s.create_entity( 246 | project_name, "version", version 247 | ).entity_id 248 | s.commit() 249 | 250 | assert list(get_versions( 251 | project_name, 252 | version_ids=[version_id], 253 | product_ids=[product_id])) != [] 254 | 255 | my_version_ids.append(version_id) 256 | 257 | # test duplicate name 258 | with pytest.raises(FailedOperations): 259 | version = new_version_entity(i, product_id) 260 | s.create_entity( 261 | project_name, "version", version 262 | ) 263 | s.commit() 264 | 265 | # create representations 266 | for i, version_id in enumerate(my_version_ids): 267 | for j in range(num_of_representations): 268 | unique_name = str(i) + "v" + str(j) # unique in this version 269 | representation = new_representation_entity( 270 | unique_name, version_id, [] 271 | ) 272 | representation_id = s.create_entity( 273 | project_name, 274 | "representation", 275 | representation 276 | ).entity_id 277 | s.commit() 278 | 279 | assert list(get_representations( 280 | project_name, 281 | 
representation_ids=[representation_id], 282 | version_ids=[version_id]) 283 | ) != [] 284 | 285 | # not unique under this version 286 | with pytest.raises(FailedOperations): 287 | representation = new_representation_entity( 288 | unique_name, version_id, [] 289 | ) 290 | s.create_entity( 291 | project_name, 292 | "representation", 293 | representation 294 | ) 295 | s.commit() 296 | 297 | # under different version will be created 298 | if i > 0: 299 | representation = new_representation_entity( 300 | unique_name, my_version_ids[i-1], [] 301 | ) 302 | representation_id = s.create_entity( 303 | project_name, 304 | "representation", 305 | representation 306 | ).entity_id 307 | s.commit() 308 | 309 | assert list(get_representations( 310 | project_name, 311 | representation_ids=[representation_id], 312 | version_ids=my_version_ids 313 | )) != [] 314 | 315 | s.delete_entity(project_name, "product", product_id) 316 | s.commit() 317 | 318 | s.delete_entity(project_name, "folder", folder_id) 319 | s.commit() 320 | 321 | 322 | @pytest.mark.parametrize( 323 | "folder_name, product_name", 324 | [ 325 | ("test_folder_with_product001", "modelMain"), 326 | ] 327 | ) 328 | def test_delete_folder_with_product( 329 | project_entity_fixture, 330 | folder_name, 331 | product_name 332 | ): 333 | """Creates product in folder and tries to delete the folder. 334 | Checks if exception was raised. 
335 | """ 336 | project_name = project_entity_fixture["name"] 337 | s = OperationsSession() 338 | 339 | # create parent folder 340 | folder = new_folder_entity(folder_name, "Folder") 341 | folder_id = s.create_entity( 342 | project_name, "folder", folder 343 | ).entity_id 344 | s.commit() 345 | 346 | assert list(get_folders( 347 | project_name, 348 | folder_ids=[folder_id])) != [] 349 | 350 | # create product 351 | product = new_product_entity(product_name, "model", folder_id) 352 | product_id = s.create_entity( 353 | project_name, "product", product 354 | ).entity_id 355 | s.commit() 356 | 357 | assert list(get_products( 358 | project_name, 359 | product_ids=[product_id], 360 | folder_ids=[folder_id])) != [] 361 | 362 | # delete folder with product 363 | with pytest.raises(FailedOperations): 364 | s.delete_entity( 365 | project_name, "folder", folder_id 366 | ) 367 | s.commit() 368 | 369 | # check if wasn't deleted 370 | assert list(get_folders( 371 | project_name, 372 | folder_ids=[folder_id])) != [] 373 | 374 | # delete in the right order 375 | s.delete_entity(project_name, "product", product_id) 376 | s.commit() 377 | 378 | assert list(get_products( 379 | project_name, 380 | product_ids=[product_id], 381 | folder_ids=[folder_id])) == [] 382 | 383 | s.delete_entity(project_name, "folder", folder_id) 384 | s.commit() 385 | 386 | assert list(get_folders( 387 | project_name, 388 | folder_ids=[folder_id])) == [] 389 | 390 | 391 | @pytest.mark.parametrize( 392 | ( 393 | "folder_name, subfolder_name1, subfolder_name2," 394 | " count_level1, count_level2" 395 | ), 396 | [ 397 | ("folder_with_subfolders1", "subfolder", "shot", 2, 3), 398 | ("folder_with_subfolders2", "subfolder", "shot", 3, 4), 399 | ] 400 | ) 401 | def test_subfolder_hierarchy( 402 | project_entity_fixture, 403 | folder_name, 404 | subfolder_name1, 405 | subfolder_name2, 406 | count_level1, 407 | count_level2 408 | ): 409 | """Creates three levels of folder hierarchy and product in the last one. 
410 | Tries creating products with duplicated names and checks raising 411 | exceptions. After creation of every product is checked if the product 412 | was really created. 413 | """ 414 | project_name = project_entity_fixture["name"] 415 | s = OperationsSession() 416 | 417 | # create parent folder 418 | folder = new_folder_entity(folder_name, "Folder") 419 | parent_id = s.create_entity( 420 | project_name, "folder", folder 421 | ).entity_id 422 | s.commit() 423 | 424 | # create subfolder with subfolders in each iteration 425 | folder_with_product = [] 426 | for folder_number in range(count_level1): 427 | folder = new_folder_entity( 428 | f"{subfolder_name1}{folder_number:03}", 429 | "Folder", 430 | parent_id=parent_id 431 | ) 432 | folder_id = s.create_entity( 433 | project_name, "folder", folder 434 | ).entity_id 435 | s.commit() 436 | 437 | assert list(get_folders( 438 | project_name, 439 | folder_ids=[folder_id], 440 | parent_ids=[parent_id]) 441 | ) != [] 442 | 443 | # subfolder with same name 444 | with pytest.raises(FailedOperations): 445 | folder = new_folder_entity( 446 | f"{subfolder_name1}{folder_number:03}", 447 | "Folder", 448 | parent_id=parent_id 449 | ) 450 | _ = s.create_entity(project_name, "folder", folder) 451 | s.commit() 452 | 453 | # subfolder with same name but different type 454 | with pytest.raises(FailedOperations): 455 | folder = new_folder_entity( 456 | f"{subfolder_name1}{folder_number:03}", 457 | "Shot", 458 | parent_id=parent_id 459 | ) 460 | _ = s.create_entity(project_name, "folder", folder) 461 | s.commit() 462 | 463 | # create subfolder with products in each iteration 464 | for subfolder_number in range(count_level2): 465 | folder = new_folder_entity( 466 | f"{subfolder_name2}{subfolder_number:03}", 467 | "Shot", 468 | parent_id=folder_id 469 | ) 470 | subfolder_id = s.create_entity( 471 | project_name, "folder", folder 472 | ).entity_id 473 | s.commit() 474 | folder_with_product.append(subfolder_id) 475 | # 
folder_with_product.append( 476 | # f"{subfolder_name2}{subfolder_number:03}" 477 | # ) 478 | 479 | assert list(get_folders( 480 | project_name, 481 | folder_ids=[subfolder_id], 482 | parent_ids=[folder_id])) != [] 483 | 484 | # subfolder with same name 485 | with pytest.raises(FailedOperations): 486 | folder = new_folder_entity( 487 | f"{subfolder_name2}{subfolder_number:03}", 488 | "Shot", 489 | parent_id=folder_id 490 | ) 491 | s.create_entity(project_name, "folder", folder) 492 | s.commit() 493 | 494 | # products in subfolder 495 | product = new_product_entity( 496 | "modelMain", 497 | "model", 498 | subfolder_id 499 | ) 500 | product_id = s.create_entity( 501 | project_name, "product", product 502 | ).entity_id 503 | s.commit() 504 | 505 | assert list(get_products( 506 | project_name, 507 | product_ids=[product_id], 508 | folder_ids=[subfolder_id])) != [] 509 | 510 | product = new_product_entity( 511 | "modelProxy", "model", subfolder_id 512 | ) 513 | product_id = s.create_entity( 514 | project_name, "product", product 515 | ).entity_id 516 | s.commit() 517 | 518 | assert list(get_products( 519 | project_name, 520 | product_ids=[product_id], 521 | folder_ids=[subfolder_id])) != [] 522 | 523 | # delete folders with products 524 | with pytest.raises(FailedOperations): 525 | s.delete_entity(project_name, "folder", parent_id) 526 | s.commit() 527 | 528 | for f_id in folder_with_product: 529 | with pytest.raises(FailedOperations): 530 | s.delete_entity(project_name, "folder", f_id) 531 | s.commit() 532 | 533 | # delete everything correctly 534 | for folder_id in folder_with_product: 535 | products = list( 536 | get_products( 537 | project_name, 538 | folder_ids=[folder_id] 539 | ) 540 | ) 541 | for product in products: 542 | s.delete_entity(project_name, "product", product["id"]) 543 | 544 | s.delete_entity(project_name, "folder", parent_id) 545 | s.commit() 546 | -------------------------------------------------------------------------------- 
# /tests/test_get_events.py
from datetime import datetime
import pytest

from ayon_api import (
    get_events,
    get_default_fields_for_type,
    exceptions,
    set_timeout,
    get_timeout
)
from .conftest import TestEventFilters


@pytest.mark.parametrize("topics", TestEventFilters.topics[-3:])
@pytest.mark.parametrize(
    "event_ids",
    [None] + [pytest.param(None, marks=pytest.mark.usefixtures("event_ids"))]
)
@pytest.mark.parametrize("project_names", TestEventFilters.project_names[-3:])
@pytest.mark.parametrize("states", TestEventFilters.states[-3:])
@pytest.mark.parametrize("users", TestEventFilters.users[-3:])
@pytest.mark.parametrize("include_logs", TestEventFilters.include_logs[-3:])
@pytest.mark.parametrize("has_children", TestEventFilters.has_children[-3:])
@pytest.mark.parametrize("newer_than", TestEventFilters.newer_than[-2:])
@pytest.mark.parametrize("older_than", TestEventFilters.older_than[-2:])
@pytest.mark.parametrize("fields", TestEventFilters.fields[0:1])
def test_get_events_all_filter_combinations(
    topics,
    event_ids,
    project_names,
    states,
    users,
    include_logs,
    has_children,
    newer_than,
    older_than,
    fields
):
    """Tests all combinations of possible filters for `get_events`.

    Verifies:
        - Calls `get_events` with the provided filter parameters.
        - Ensures each event in the result set matches the specified filters.
        - Checks that the number of returned events matches the expected count
          based on the filters applied.
        - Confirms that each event contains only the specified fields, with
          no extra keys.

    Note:
        - Adjusts the timeout setting if necessary to handle a large number
          of tests and avoid timeout errors.
        - Some combinations of filter parameters may lead to a server timeout
          error. When this occurs, the test will skip instead of failing.
        - Currently, a ServerError due to timeout may occur when
          `has_children` is set to False.

    """
    # Large parameter matrix - make sure a too short timeout does not
    # make the whole run flaky.
    if get_timeout() < 5:
        set_timeout(None)  # default timeout

    try:
        events = list(get_events(
            topics=topics,
            event_ids=event_ids,
            project_names=project_names,
            statuses=states,
            users=users,
            include_logs=include_logs,
            has_children=has_children,
            newer_than=newer_than,
            older_than=older_than,
            fields=fields
        ))
    except exceptions.ServerError as exc:
        # NOTE(review): only the has_children=False combination is known
        # to time out on the server - anything else is a real failure.
        assert has_children is False, (
            f"{exc} even if has_children is {has_children}."
        )
        print("Warning: ServerError encountered, test skipped due to timeout.")
        pytest.skip("Skipping test due to server timeout.")

    # Every returned event must satisfy all requested filters.
    for event in events:
        assert event.get("topic") in topics
        assert event.get("project") in project_names
        assert event.get("user") in users
        assert event.get("status") in states

        created_at = event.get("createdAt")
        assert (newer_than is None) or (
            datetime.fromisoformat(created_at)
            > datetime.fromisoformat(newer_than)
        )
        assert (older_than is None) or (
            datetime.fromisoformat(created_at)
            < datetime.fromisoformat(older_than)
        )

    # Splitting a multi-value filter into single-value queries must
    # produce the same total count of events.
    assert topics is None or len(events) == sum(
        len(list(get_events(
            topics=[topic],
            project_names=project_names,
            statuses=states,
            users=users,
            include_logs=include_logs,
            has_children=has_children,
            newer_than=newer_than,
            older_than=older_than,
            fields=fields
        )) or [])
        for topic in topics
    )

    assert project_names is None or len(events) == sum(
        len(list(get_events(
            topics=topics,
            project_names=[project_name],
            statuses=states,
            users=users,
            include_logs=include_logs,
            has_children=has_children,
            newer_than=newer_than,
            older_than=older_than,
            fields=fields
        )) or [])
        for project_name in project_names
    )

    assert states is None or len(events) == sum(
        len(list(get_events(
            topics=topics,
            project_names=project_names,
            statuses=[state],
            users=users,
            include_logs=include_logs,
            has_children=has_children,
            newer_than=newer_than,
            older_than=older_than,
            fields=fields
        )) or [])
        for state in states
    )

    assert users is None or len(events) == sum(
        len(list(get_events(
            topics=topics,
            project_names=project_names,
            statuses=states,
            users=[user],
            include_logs=include_logs,
            has_children=has_children,
            newer_than=newer_than,
            older_than=older_than,
            fields=fields
        )) or [])
        for user in users
    )

    # An empty field list means "use the defaults for this entity type".
    if fields == []:
        fields = get_default_fields_for_type("event")

    # Each event must carry exactly the requested fields - nothing more.
    assert fields is None or all(
        set(event.keys()) == set(fields)
        for event in events
    )


# /tests/test_graphql_queries.py
import pytest

from ayon_api.graphql import GraphQlQuery
from ayon_api.graphql_queries import (
    project_graphql_query,
    folders_graphql_query,
)

from .conftest import project_name_fixture


@pytest.fixture
def empty_query():
    # Bare query object with no fields and no variables.
    return GraphQlQuery("ProjectQuery")


@pytest.fixture
def folder_query():
    return folders_graphql_query(["name"])


def test_simple_duplicate_add_variable_exception(
    project_name_fixture, empty_query
):
    """Declaring the same variable twice raises a KeyError."""
    key, value_type, value = "projectName", "[String!]", project_name_fixture
    empty_query.add_variable(key, value_type, value)
    with pytest.raises(KeyError):
        empty_query.add_variable(key, value_type)


def test_exception_empty_query(empty_query):
    """A query without any field cannot be calculated."""
    with pytest.raises(ValueError, match="Missing fields to query"):
        _out = empty_query.calculate_query()


def test_simple_project_query():
    """Minimal project query renders the expected GraphQl string."""
    project_query = project_graphql_query(["name"])
    output = project_query.calculate_query()
    expected = "\n".join([
        "query ProjectQuery {",
        " project {",
        " name",
        " }",
        "}"
    ])
    assert output == expected


def make_project_query(keys, values, types):
    """Builds a project query with the given variables set.

    Returns None when an unexpected KeyError is raised while a
    variable is added (i.e. the key was not known beforehand).
    """
    query = project_graphql_query(["name"])

    # "projectName" is pre-declared by project_graphql_query(["name"]).
    inserted = {"projectName"}

    for key, entity_type, value in zip(keys, types, values):
        try:
            query.add_variable(key, entity_type, value)
        except KeyError:
            if key not in inserted:
                return None
            # Already declared variable - only refresh its value.
            query.set_variable_value(key, value)

        inserted.add(key)
    return query


def make_expected_get_variables_values(keys, values):
    """Mapping of variable names to values the query should report."""
    return dict(zip(keys, values))


@pytest.mark.parametrize(
    "keys, values, types",
    [
        (
            ["projectName", "projectId", "numOf"],
            ["my_name", "0x23", 3],
            ["[String!]", "[String!]", "Int"]
        ), (
            ["projectName", "testStrInt"],
            ["my_name", 42],
            ["[String!]", "[String!]"]
        ), (
            ["projectName", "testIntStr"],
            ["my_name", "test_123"],
            ["[String!]", "Int"]
        ),
    ]
)
def test_get_variables_values(keys, values, types):
    """Variables added to a query are reported back with their values."""
    query = make_project_query(keys, values, types)
    # None means: unexpected exception thrown while adding variables
    assert query is not None

    expected = make_expected_get_variables_values(keys, values)
    assert query.get_variables_values() == expected


"""
def test_filtering(empty_query):
    assert empty_query._children == []
    project_name_var = empty_query.add_variable("projectName", "String!")
    project_field = empty_query.add_field("project")
    project_field.set_filter("name", project_name_var)

    for field in empty_query._children:
        print(field.get_filters())

    print(empty_query.calculate_query())


def print_rec_filters(field):
    print(field.get_filters())
    for k in field._children:
        print_rec_filters(k)


def test_folders_graphql_query(folder_query):
    print(folder_query.calculate_query())


def test_filters(folder_query):
    print(folder_query._children[0]._children[0].get_filters())
    folder_query._children[0]._children[0].remove_filter("ids")
    print(folder_query._children[0]._children[0].get_filters())
    print(folder_query.calculate_query())
"""