├── CHANGELOG.md ├── doc ├── requirements.txt └── sphinx_source │ ├── _static │ ├── favicon.ico │ └── logo_white.svg │ ├── collections.md │ ├── entity.md │ ├── query.md │ ├── installation.md │ ├── Makefile │ ├── make.bat │ ├── collections │ ├── overview.md │ ├── emptycollection.md │ └── entitycollection.md │ ├── conf.py │ ├── session.md │ ├── session │ └── examples.md │ ├── query │ ├── examples.md │ └── overview.md │ ├── configuration.md │ ├── index.md │ └── quickstart.md ├── tests ├── resources │ └── session │ │ ├── operations_linux.dbm │ │ ├── operations_windows.dbm.bak │ │ ├── operations_windows.dbm.dir │ │ └── operations_windows.dbm.dat ├── test_collection.py ├── test_session.py ├── conftest.py └── test_authoring.py ├── .gitignore ├── src └── trackteroid │ ├── query │ ├── query.pyi │ ├── __init__.py │ ├── criteria.pyi │ ├── utils.py │ └── criteria.py │ ├── stubs │ ├── __init__.py │ └── stubs.py │ ├── constants.py │ ├── entities │ ├── __init__.py │ ├── declarations.pyi │ ├── schematypes.py │ ├── declarations.py │ └── relationships_parser.py │ ├── __init__.py │ ├── configuration.py │ └── session.py ├── tox.ini ├── requirements.txt ├── AUTHORS.md ├── .github ├── ISSUE_TEMPLATE │ ├── issue--feature-request.md │ └── issue--bug-report.md └── workflows │ ├── tox.yml │ └── python-publish.yml ├── .readthedocs.yaml ├── pyproject.toml ├── LICENSE ├── .graphics └── svg │ ├── logo_white.svg │ └── logo_black.svg └── README.md /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | 3 | -------------------------------------------------------------------------------- /doc/requirements.txt: -------------------------------------------------------------------------------- 1 | myst-parser==1.0.0 2 | sphinx-rtd-theme==1.2.1 3 | sphinx==6.2.1 4 | sphinx-togglebutton==0.3.2 -------------------------------------------------------------------------------- /doc/sphinx_source/_static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TrixterFilm/trackteroid/HEAD/doc/sphinx_source/_static/favicon.ico -------------------------------------------------------------------------------- /tests/resources/session/operations_linux.dbm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TrixterFilm/trackteroid/HEAD/tests/resources/session/operations_linux.dbm -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | .idea 3 | .vscode 4 | venv 5 | build 6 | dist 7 | *.egg-info 8 | .tox 9 | temp 10 | doc/sphinx_source/_build 11 | tests/.pytest_cache 12 | tests/_trial_temp 13 | -------------------------------------------------------------------------------- /tests/resources/session/operations_windows.dbm.bak: -------------------------------------------------------------------------------- 1 | "('User', ['2e3b4242-bd22-479f-aedd-4602f378d1e0'])", (0, 177) 2 | "('User', ['5e1d85f2-97f3-4f66-b0e1-8ce612cf3297'])", (512, 135) 3 | '__operations__', (1024, 898) 4 | -------------------------------------------------------------------------------- /tests/resources/session/operations_windows.dbm.dir: -------------------------------------------------------------------------------- 1 | "('User', ['2e3b4242-bd22-479f-aedd-4602f378d1e0'])", (0, 177) 2 | "('User', ['5e1d85f2-97f3-4f66-b0e1-8ce612cf3297'])", (512, 
135) 3 | '__operations__', (1024, 898) 4 | -------------------------------------------------------------------------------- /doc/sphinx_source/collections.md: -------------------------------------------------------------------------------- 1 | # Collections 2 | 3 | ```{include} collections/overview.md 4 | ``` 5 | 6 | ## EntityCollection 7 | ```{include} collections/entitycollection.md 8 | ``` 9 | 10 | ## EmptyCollection 11 | ```{include} collections/emptycollection.md 12 | ``` -------------------------------------------------------------------------------- /doc/sphinx_source/entity.md: -------------------------------------------------------------------------------- 1 | # Entity 2 | 3 | Entities in Ftrack are represented as classes within the `trackteroid` module, and they serve various purposes, including the following: 4 | 5 | - [Querying](query.md#query) 6 | - [Collection Filtering](collections.md#type-filtering) 7 | - [Type Coercion](collections.md#type-coercion) -------------------------------------------------------------------------------- /src/trackteroid/query/query.pyi: -------------------------------------------------------------------------------- 1 | from typing import * 2 | from ..session import Session 3 | 4 | T = TypeVar('T') 5 | 6 | class Query(Generic[T]): 7 | _known_order: ClassVar[List[str]] = ["ascending", "descending"] 8 | 9 | def __new__(cls: Type[Query[T]], _cls: T, session: Optional[Session], schema: Optional[dict]) -> T: ... -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | requires = 3 | tox>=4 4 | env_list = py{37,38,39,310,311} 5 | skip_missing_interpreters = true 6 | 7 | [testenv] 8 | description = run unit tests 9 | deps = 10 | pytest>=7 11 | pytest-sugar 12 | commands = 13 | pytest {posargs:tests} 14 | passenv = FTRACK_API_KEY,FTRACK_API_USER,FTRACK_SERVER 15 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | appdirs==1.4.4 2 | arrow==0.17.0 3 | certifi==2023.5.7 4 | charset-normalizer==3.1.0 5 | clique==1.6.1 6 | ftrack-python-api==2.5.0 7 | future==0.18.3 8 | idna==3.4 9 | pyparsing==2.4.7 10 | python-dateutil==2.8.2 11 | requests==2.31.0 12 | six==1.16.0 13 | termcolor==1.1.0 14 | urllib3==2.0.3 15 | websocket-client==0.59.0 16 | wrapt==1.15.0 17 | -------------------------------------------------------------------------------- /doc/sphinx_source/query.md: -------------------------------------------------------------------------------- 1 | # Query 2 | ```{include} query/examples.md 3 | :start-after: example minimal start 4 | :end-before: example minimal end 5 | ``` 6 | ```{include} query/overview.md 7 | ``` 8 | 9 | ## Reusing Query Results 10 | 11 | ```{include} query/examples.md 12 | :start-after: example reuse query results start 13 | :end-before: example reuse query results end 14 | ``` -------------------------------------------------------------------------------- /AUTHORS.md: -------------------------------------------------------------------------------- 1 | # Primary Authors 2 | - [Rico Koschmitzy](https://github.com/rkoschmitzky) 3 | - [Dennis Weil](https://github.com/dennisweil) 4 | 5 | # Contributions 6 | - [Salvador Olmos Miralles](https://github.com/salvaom) 7 | - [Alf Kraus](https://github.com/99alfie) 8 | - [Pedro Zuñeda](https://github.com/pedrozuneda) 9 | - [Fridtjof 
Kühn](https://github.com/Fridi-K) 10 | - [Andreas Schuster](https://github.com/smoi23) 11 | 12 | -------------------------------------------------------------------------------- /tests/test_collection.py: -------------------------------------------------------------------------------- 1 | from trackteroid import Query, Asset, AssetVersion 2 | 3 | 4 | def test_attribute_empty_i4(): 5 | """Tests a bug in the Ftrack API where accessing collection attributes before fetching 6 | may result in that attribute being "locked": https://github.com/TrixterFilm/trackteroid/issues/4 7 | """ 8 | asset_collection = Query(Asset).by_id(AssetVersion, "%").get_first() 9 | assert not asset_collection.versions.id 10 | 11 | asset_collection = Query(Asset).by_id(AssetVersion, "%").get_first(projections=["versions"]) 12 | assert asset_collection.versions.id 13 | -------------------------------------------------------------------------------- /doc/sphinx_source/installation.md: -------------------------------------------------------------------------------- 1 | # Installation 2 | 3 | Install and update using [pip](https://pip.pypa.io/en/stable/getting-started/): 4 | 5 | ```shell 6 | pip install trackteroid 7 | ``` 8 | 9 | ## Python Version 10 | 11 | We recommend using the latest version of Python. Trackteroid supports Python 3.7 and newer. 12 | 13 | ## Dependencies 14 | 15 | These distributions will be installed automatically when installing Trackteroid. 16 | 17 | - [ftrack-python-api](https://pypi.org/project/ftrack-python-api/): This is the Python client for the ftrack API 18 | - [wrapt](https://pypi.org/project/wrapt/): Module for decorators, wrappers and monkey patching -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/issue--feature-request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 'Issue: Feature Request' 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | # Goal 11 | A clear and concise description of what you want to happen. 12 | 13 | # Motivation 14 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 15 | 16 | # Considerations 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | # Risks 20 | A breakdown of the potential negative consequences or uncertainties that could accompany the implementation of this new feature. 21 | -------------------------------------------------------------------------------- /doc/sphinx_source/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /doc/sphinx_source/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the OS, Python version and other tools you might need 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.8" 13 | # You can also specify other tool versions: 14 | # nodejs: "19" 15 | # rust: "1.64" 16 | # golang: "1.19" 17 | 18 | # Build documentation in the "docs/" directory with Sphinx 19 | sphinx: 20 | configuration: doc/sphinx_source/conf.py 21 | 22 | # Optionally build your docs in additional formats such as PDF and ePub 23 | # formats: 24 | # - pdf 25 | # - epub 26 | 27 | # Optionally but recommended, declare the Python requirements required 28 | # to build your documentation 29 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 30 | python: 31 | install: 32 | - requirements: doc/requirements.txt -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/issue--bug-report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 'Issue: Bug report' 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | # Problem Statement 11 | A clear and concise description of the problem or issue being encountered. 12 | 13 | # Expected Behavior 14 | A description of the expected outcome or behavior that should occur under normal circumstances. 15 | 16 | # Context 17 | Provide any relevant background information or context that may help in understanding the issue. 18 | 19 | - **OS:** 20 | - **Python version:** 21 | - **Trackteroid version:** 22 | - **Ftrack Python API version:** 23 | - **Ftrack Server version:** 24 | 25 | # Reproducible Steps 26 | Detailed instructions or a sequence of actions required to reproduce the problem. 27 | 1. **Step 1:** [Description of the first step to reproduce the issue] 28 | 2. **Step 2:** ... 29 | 3. **Step 3:** ... 
30 | 31 | # Workaround 32 | If applicable, a suggestion or recommendation for a temporary solution or workaround that can be implemented to address the issue. 33 | -------------------------------------------------------------------------------- /doc/sphinx_source/collections/overview.md: -------------------------------------------------------------------------------- 1 | The result of a terminated _Query_ is a collection, specifically an instance of either EntityCollection or EmptyCollection, depending on the outcome. When printed, the collection is represented as: 2 | 3 | `EntityCollection[<entity type>]{<size>}` or `EmptyCollection[<entity type>]` 4 | 5 | An _EntityCollection_ is a container of wrapped ftrack entity objects with the following characteristics: 6 | - It is an **ordered** container of entity objects. 7 | - It is **immutable**, meaning its entities cannot be added or removed once it is created. 8 | - It is **iterable**, allowing for easy iteration over its entities, whether it contains a single entity or multiple entities. 9 | - It only contains **unique** elements, ensuring there are no duplicate entities. 10 | 11 | An _EmptyCollection_ is a placeholder for an _EntityCollection_ that doesn't contain any entities. 12 | - It is iterable, allowing for iteration even though it doesn't have any entities. 13 | - It allows for any attribute access that you would typically perform on an _EntityCollection_, providing flexibility for operations or checks on the collection itself. 14 | -------------------------------------------------------------------------------- /.github/workflows/tox.yml: -------------------------------------------------------------------------------- 1 | name: Tox 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | branches: [ "main" ] 8 | 9 | jobs: 10 | build: 11 | 12 | runs-on: ubuntu-latest 13 | strategy: 14 | fail-fast: false 15 | # Some tests are creating the same entity in the database. 16 | # Until we have these tests adjusted we have to run the 17 | # tests serially to avoid race conditions. 18 | max-parallel: 1 19 | matrix: 20 | python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] 21 | 22 | steps: 23 | - uses: actions/checkout@v3 24 | - name: Set up Python ${{ matrix.python-version }} 25 | uses: actions/setup-python@v3 26 | with: 27 | python-version: ${{ matrix.python-version }} 28 | - name: Install tox 29 | run: | 30 | python -m pip install --upgrade pip 31 | python -m pip install "tox>=4.6" 32 | - name: Run tox 33 | # Run tox using the version of Python in `PATH` 34 | run: tox -e py 35 | env: 36 | FTRACK_SERVER: ${{ secrets.FTRACK_SERVER }} 37 | FTRACK_API_USER: ${{ secrets.FTRACK_API_USER }} 38 | FTRACK_API_KEY: ${{ secrets.FTRACK_API_KEY }} 39 | -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries 3 | 4 | # This workflow uses actions that are not certified by GitHub. 5 | # They are provided by a third-party and are governed by 6 | # separate terms of service, privacy policy, and support 7 | # documentation. 
8 | 9 | name: Upload Python Package 10 | 11 | on: 12 | release: 13 | types: [published] 14 | 15 | permissions: 16 | contents: read 17 | 18 | jobs: 19 | deploy: 20 | 21 | runs-on: ubuntu-latest 22 | 23 | steps: 24 | - uses: actions/checkout@v3 25 | - name: Set up Python 26 | uses: actions/setup-python@v3 27 | with: 28 | python-version: '3.x' 29 | - name: Install dependencies 30 | run: | 31 | python -m pip install --upgrade pip 32 | pip install build 33 | - name: Build package 34 | run: python -m build 35 | - name: Publish package 36 | uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 37 | with: 38 | user: __token__ 39 | password: ${{ secrets.PYPI_API_TOKEN }} 40 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "trackteroid" 3 | authors = [ 4 | { name = "Rico Koschmitzky" }, 5 | { name = "Dennis Weil" }, 6 | ] 7 | description = "Declarative, object-oriented wrapper for Ftrack queries. Powerful functional-style interactions with resulting collections." 8 | readme = "README.md" 9 | license = { file = "LICENSE" } 10 | requires-python = ">=3.7" 11 | 12 | classifiers = [ 13 | "Programming Language :: Python :: 3 :: Only", 14 | "Programming Language :: Python :: 3", 15 | "Programming Language :: Python :: 3.7", 16 | "Programming Language :: Python :: 3.8", 17 | "Programming Language :: Python :: 3.9", 18 | "Programming Language :: Python :: 3.10", 19 | "Programming Language :: Python :: 3.11", 20 | "License :: OSI Approved :: BSD License", 21 | "Operating System :: OS Independent" 22 | ] 23 | dependencies = [ 24 | "ftrack-python-api", 25 | "wrapt" 26 | ] 27 | dynamic = ["version"] 28 | 29 | [project.urls] 30 | "Homepage" = "https://github.com/TrixterFilm/trackteroid" 31 | "Bug Tracker" = "https://github.com/TrixterFilm/trackteroid/issues" 32 | 33 | [tool.setuptools.dynamic] 34 | version = { attr = "trackteroid.constants.__VERSION__" } 35 | 36 | [build-system] 37 | requires = ["setuptools", "setuptools-scm"] 38 | build-backend = "setuptools.build_meta" 39 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2023, Trixter 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | 1. Redistributions of source code must retain the above copyright notice, this 9 | list of conditions and the following disclaimer. 10 | 11 | 2. Redistributions in binary form must reproduce the above copyright notice, 12 | this list of conditions and the following disclaimer in the documentation 13 | and/or other materials provided with the distribution. 14 | 15 | 3. Neither the name of the copyright holder nor the names of its 16 | contributors may be used to endorse or promote products derived from 17 | this software without specific prior written permission. 18 | 19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | -------------------------------------------------------------------------------- /doc/sphinx_source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # For the full list of built-in configuration values, see the documentation: 4 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 5 | 6 | # -- Project information ----------------------------------------------------- 7 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information 8 | 9 | import time 10 | 11 | project = 'Trackteroid' 12 | copyright = f'2023-{time.strftime("%Y")} Trixter Film GmbH' 13 | author = 'Dennis Weil, Rico Koschmitzky' 14 | release = '0.1.0' 15 | 16 | # -- General configuration --------------------------------------------------- 17 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration 18 | # Add the following lines at the top of the file 19 | import myst_parser 20 | source_suffix = { 21 | '.rst': 'restructuredtext', 22 | '.md': 'markdown', 23 | } 24 | 25 | extensions = [ 26 | 'myst_parser', 27 | 'sphinx_togglebutton', 28 | ] 29 | myst_enable_extensions = [ 30 | 'colon_fence', 31 | ] 32 | 33 | templates_path = ['_templates'] 34 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 35 | 36 | 37 | # -- Options for HTML output ------------------------------------------------- 38 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output 39 | 40 | html_theme = 'sphinx_rtd_theme' 41 | html_static_path = ['_static'] 42 | 43 | html_favicon = "_static/favicon.ico" 44 | html_logo = '_static/logo_white.svg' 45 | 46 | suppress_warnings = [ 47 | 'myst.xref_missing', 48 | 'myst.header' 49 | ] -------------------------------------------------------------------------------- /src/trackteroid/stubs/__init__.py: -------------------------------------------------------------------------------- 1 | # BSD 3-Clause License 2 | # 3 | # Copyright (c) 2023, Trixter GmbH 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are met: 7 | # 8 | # 1. Redistributions of source code must retain the above copyright notice, this 9 | # list of conditions and the following disclaimer. 10 | # 11 | # 2. Redistributions in binary form must reproduce the above copyright notice, 12 | # this list of conditions and the following disclaimer in the documentation 13 | # and/or other materials provided with the distribution. 14 | # 15 | # 3. Neither the name of the copyright holder nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 
18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | # 30 | 31 | -------------------------------------------------------------------------------- /src/trackteroid/constants.py: -------------------------------------------------------------------------------- 1 | # BSD 3-Clause License 2 | # 3 | # Copyright (c) 2023, Trixter GmbH 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are met: 7 | # 8 | # 1. Redistributions of source code must retain the above copyright notice, this 9 | # list of conditions and the following disclaimer. 10 | # 11 | # 2. Redistributions in binary form must reproduce the above copyright notice, 12 | # this list of conditions and the following disclaimer in the documentation 13 | # and/or other materials provided with the distribution. 14 | # 15 | # 3. Neither the name of the copyright holder nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | # 30 | 31 | __VERSION__ = "0.1.0rc5" 32 | -------------------------------------------------------------------------------- /src/trackteroid/entities/__init__.py: -------------------------------------------------------------------------------- 1 | # BSD 3-Clause License 2 | # 3 | # Copyright (c) 2023, Trixter GmbH 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are met: 7 | # 8 | # 1. Redistributions of source code must retain the above copyright notice, this 9 | # list of conditions and the following disclaimer. 10 | # 11 | # 2. 
Redistributions in binary form must reproduce the above copyright notice, 12 | # this list of conditions and the following disclaimer in the documentation 13 | # and/or other materials provided with the distribution. 14 | # 15 | # 3. Neither the name of the copyright holder nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | # 30 | 31 | # TODO(low): only import Entity subclasses 32 | # is it possible to do this conditionally without listing each entity 33 | # individually 34 | from .entities import * 35 | -------------------------------------------------------------------------------- /tests/resources/session/operations_windows.dbm.dat: -------------------------------------------------------------------------------- 1 | {"__entity_type__": "User", "id": "2e3b4242-bd22-479f-aedd-4602f378d1e0", "is_active": false, "resource_type": "user", "username": "test-46f4b64b-851c-4172-927b-f64822000268-0"}{"__entity_type__": "User", "id": "5e1d85f2-97f3-4f66-b0e1-8ce612cf3297", "resource_type": "user", "username": "demo.user@example.com"}"[{\"operation\": \"create\", \"entity_data\": {\"id\": \"5e1d85f2-97f3-4f66-b0e1-8ce612cf3297\", \"username\": \"demo.user@example.com\", \"resource_type\": \"user\"}, \"entity_type\": \"User\", \"entity_key\": {\"id\": \"5e1d85f2-97f3-4f66-b0e1-8ce612cf3297\"}}, {\"operation\": \"update\", \"entity_type\": \"User\", \"entity_key\": {\"id\": \"5e1d85f2-97f3-4f66-b0e1-8ce612cf3297\"}, \"attribute_name\": \"is_active\", \"old_value\": \"NOT SET\", \"new_value\": true}, {\"operation\": \"update\", \"entity_type\": \"User\", \"entity_key\": {\"id\": \"5e1d85f2-97f3-4f66-b0e1-8ce612cf3297\"}, \"attribute_name\": \"first_name\", \"old_value\": \"NOT SET\", \"new_value\": \"demo\"}, {\"operation\": \"update\", \"entity_type\": \"User\", \"entity_key\": {\"id\": \"5e1d85f2-97f3-4f66-b0e1-8ce612cf3297\"}, \"attribute_name\": \"last_name\", \"old_value\": \"NOT SET\", \"new_value\": \"user\"}]" -------------------------------------------------------------------------------- /src/trackteroid/query/__init__.py: -------------------------------------------------------------------------------- 1 | # BSD 3-Clause License 2 | # 3 | # Copyright (c) 2023, Trixter GmbH 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are met: 7 | # 8 | # 1. Redistributions of source code must retain the above copyright notice, this 9 | # list of conditions and the following disclaimer. 10 | # 11 | # 2. 
Redistributions in binary form must reproduce the above copyright notice, 12 | # this list of conditions and the following disclaimer in the documentation 13 | # and/or other materials provided with the distribution. 14 | # 15 | # 3. Neither the name of the copyright holder nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | # 30 | 31 | from .criteria import ( 32 | Criterion, 33 | Criteria 34 | ) 35 | from .query import ( 36 | Query, 37 | SCHEMA 38 | ) 39 | from ..configuration import RELATIONSHIPS_RESOLVER 40 | from ..constants import __VERSION__ -------------------------------------------------------------------------------- /doc/sphinx_source/collections/emptycollection.md: -------------------------------------------------------------------------------- 1 | The concept of the `EmptyCollection` shares similarities with the [optional type](https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html) found in various programming languages. It serves as a mechanism to handle the absence of values or empty results. 2 | 3 | Similar to optional types in other languages, the `EmptyCollection` provides a consistent interface and allows for operations and attribute access without the need for explicit checks for empty or null values. It acts as a container that represents the absence of a value or result. 4 | 5 | By utilizing the `EmptyCollection`, developers can write cleaner and more concise code by treating empty results as a valid state without the need for verbose conditional statements. This promotes a more functional programming style, allowing for seamless chaining and composition of operations even in scenarios where the result might be empty. 6 | 7 | Just as optional types in different programming languages offer methods or functions to check for presence or provide fallback values , the `EmptyCollection` provides a simple fallback functionality to handle cases where the collection is empty as it always evaluates to `False`. 8 | 9 | This demonstrates how you can implement a straightforward fallback mechanism using the or operator when retrieving the final data. 10 | ```{include} collections/examples.md 11 | :start-after: example fallback1 start 12 | :end-before: example fallback1 end 13 | ``` 14 | 15 | This code example showcases how to gracefully handle scenarios where the intermediate steps of querying, filtering, and retrieving data may result in an empty collection. 
By utilizing the or operator and providing an empty list as a fallback, we ensure that the final result is either the desired data or an empty list, mitigating the risk of errors or unexpected behavior. 16 | 17 | ```{include} collections/examples.md 18 | :start-after: example fallback2 start 19 | :end-before: example fallback2 end 20 | ``` 21 | -------------------------------------------------------------------------------- /src/trackteroid/__init__.py: -------------------------------------------------------------------------------- 1 | # BSD 3-Clause License 2 | # 3 | # Copyright (c) 2023, Trixter GmbH 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are met: 7 | # 8 | # 1. Redistributions of source code must retain the above copyright notice, this 9 | # list of conditions and the following disclaimer. 10 | # 11 | # 2. Redistributions in binary form must reproduce the above copyright notice, 12 | # this list of conditions and the following disclaimer in the documentation 13 | # and/or other materials provided with the distribution. 14 | # 15 | # 3. Neither the name of the copyright holder nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | # 30 | 31 | from .query import ( 32 | Query, 33 | SCHEMA 34 | ) 35 | from .session import SESSION 36 | from .entities import * 37 | from .entities.schematypes import ( 38 | CUSTOM_ATTRIBUTE_TYPE_COMPATIBILITY, 39 | OBJECT_TYPES, 40 | ASSET_TYPES, 41 | TASK_TYPES, 42 | PROJECT_SCHEMAS 43 | ) 44 | from .constants import __VERSION__ -------------------------------------------------------------------------------- /src/trackteroid/query/criteria.pyi: -------------------------------------------------------------------------------- 1 | import datetime 2 | import arrow 3 | from typing import Optional, Union, Callable 4 | from collections import namedtuple 5 | from ..entities import Entity 6 | from .query import Query 7 | 8 | optional_time = Optional[Union[datetime.datetime, arrow.Arrow]] 9 | 10 | Criterion: namedtuple 11 | 12 | class Criteria: 13 | def by_id(self, *ids: int) -> Query: ... 14 | def by_location(self, *locations: object) -> Query: ... 15 | def by_path(self, *paths: str) -> Query: ... 16 | def by_version(self, *versions: int) -> Query: ... 17 | def by_name(self, *names: str) -> Query: ... 18 | def by_component(self, *components: str) -> Query: ... 19 | def by_status(self, *status: str) -> Query: ... 20 | def by_shot(self, *shots: str) -> Query: ... 21 | def by_sequence(self, *sequences: str) -> Query: ... 
22 | def by_task(self, *tasks: str) -> Query: ... 23 | def by_type(self, *types: str) -> Query: ... 24 | def by_project(self, *projects: str) -> Query: ... 25 | def by_metadata(self, **dictionaries: str) -> Query: ... 26 | def by_description(self, regex: str) -> Query: ... 27 | def by_comment(self, regex: str) -> Query: ... 28 | def by_publisher(self, *publishers: str) -> Query: ... 29 | def by_assetgroup(self, *assetgroups: str) -> Query: ... 30 | def by_state(self, *states: str) -> Query: ... 31 | def by_list(self, *lists: str) -> Query: ... 32 | def by_publish_time(self, start: optional_time = None, end: optional_time = None) -> Query: ... 33 | def by_status_change_time(self, start: optional_time = None, end: optional_time = None) -> Query: ... 34 | def by_assignee(self, *assignees: str) -> Query: ... 35 | def by_lifespan(self, start: optional_time = None, end: optional_time = None) -> Query: ... 36 | def by_outgoing_link(self, *ids: str) -> Query: ... 37 | def by_incoming_link(self, *ids: str) -> Query: ... 38 | def inject(self, filter: str) -> Query: ... 39 | 40 | @staticmethod 41 | def supported_targets(*supported: Entity) -> Callable: ... -------------------------------------------------------------------------------- /doc/sphinx_source/session.md: -------------------------------------------------------------------------------- 1 | # Session 2 | 3 | Trackteroid's `Session` extends the functionality of the regular [Session](https://ftrack-python-api.readthedocs.io/en/stable/understanding_sessions.html) in the Ftrack Python API by incorporating additional features while preserving all the existing capabilities. 4 | 5 | ## Deferred Operations 6 | 7 | Operations can be tracked and serialized to a [.dbm file](https://docs.python.org/3/library/dbm.html#module-dbm) using the `deferred_operations` context manager. These serialized operations can then be committed at a later time, even in a different process. 8 | 9 | **Process 1:** 10 | ```{include} session/examples.md 11 | :start-after: example deferred operations1 start 12 | :end-before: example deferred operations1 end 13 | ``` 14 | 15 | ```{attention} 16 | To commit previously stored operations, you need to reconnect the session and commit. This is done in a single step to ensure no unwanted operations can be committed to the server. 17 | ``` 18 | 19 | **Process 2:** 20 | ```{include} session/examples.md 21 | :start-after: example deferred operations2 start 22 | :end-before: example deferred operations2 end 23 | ``` 24 | 25 | ## Reusing Query Results 26 | 27 | In certain scenarios where you repeatedly execute the same query, you can utilize the `reusing_query_results` context manager if the data you are querying is expected to remain unchanged. This context manager enables the retrieval of previously obtained results, executing the query only if no results have been retrieved before. 28 | 29 | ```{include} session/examples.md 30 | :start-after: example reuse query results start 31 | :end-before: example reuse query results end 32 | ``` 33 | 34 | ## Multiple Sessions 35 | 36 | Trackteroid utilizes a single session instance called the `SESSION` singleton, which serves as the default session for performing queries. If required, additional sessions can be initialized and passed to queries for performing further operations. 
37 | 38 | Default Query construction: 39 | ```{include} query/examples.md 40 | :start-after: example session start 41 | :end-before: example session end 42 | ``` 43 | 44 | Query construction with different session: 45 | ```{include} query/examples.md 46 | :start-after: example session2 start 47 | :end-before: example session2 end 48 | ``` 49 | 50 | ```{important} 51 | Although it is feasible to work with multiple sessions, there are limitations associated with it. 52 | Since session objects serve as the foundation for collection containers and manage caches and operations, using collections from different sessions may not work seamlessly. In such cases, attempting operations across collections from different sessions will result in an `EntityCollectionOperationError` being raised to prevent unauthorized actions. 53 | 54 | ``` 55 | -------------------------------------------------------------------------------- /doc/sphinx_source/session/examples.md: -------------------------------------------------------------------------------- 1 | example deferred operations1 start 2 | ```python 3 | from trackteroid import ( 4 | Query, 5 | SESSION, 6 | Asset, 7 | AssetVersion, 8 | Component, 9 | ComponentLocation 10 | ) 11 | 12 | operations_file = "/tmp/recorded_operations.dbm" 13 | 14 | with SESSION.deferred_operations(operations_file): 15 | asset_collection = Query(Asset).by_name("bc0040_comp").get_one( 16 | projections=[ 17 | AssetVersion, 18 | AssetVersion.Task, 19 | Component, 20 | ComponentLocation.resource_identifier, 21 | ] 22 | ) 23 | assetversion_collection = asset_collection.AssetVersion.create( 24 | task=asset_collection.AssetVersion.Task 25 | ) 26 | component_collection = assetversion_collection.Component.create( 27 | name="main" 28 | ) 29 | componentlocation_collection = component_collection.ComponentLocation.create( 30 | resource_identifier="/path/to/final/file" 31 | ) 32 | assetversion_collection.is_published = True 33 | ``` 34 | example deferred operations1 end 35 | 36 | example deferred operations2 start 37 | ```python 38 | from trackteroid import SESSION 39 | 40 | operations_file = "/tmp/recorded_operations.dbm" 41 | SESSION.reconnect_and_commit(operations_file) 42 | ``` 43 | example deferred operations2 end 44 | 45 | example reuse query results start 46 | ```python 47 | import logging 48 | import sys 49 | 50 | from trackteroid import ( 51 | Query, 52 | AssetVersion, 53 | Project, 54 | SESSION 55 | ) 56 | 57 | logging.basicConfig(level=logging.INFO, stream=sys.stdout) 58 | 59 | 60 | with SESSION.reusing_query_results(): 61 | # will perform a query and logs: 62 | # INFO:trackteroid.query:Performing query: "select id, asset.name, version from AssetVersion" 63 | all_assetversions = Query(AssetVersion).get_all() 64 | 65 | # will not perform the query again - nothing will be logged 66 | all_assetversions_again = Query(AssetVersion).get_all() 67 | 68 | print( 69 | f"result contains same entities: {all_assetversions == all_assetversions_again}\n", 70 | f"result is same collection: {all_assetversions is all_assetversions_again}" 71 | 72 | ) 73 | # output: 74 | # result contains same entities: True 75 | # result is same collection: True 76 | 77 | # as the resolved query will be different this will not be picked from the cache 78 | # although the result would contain the same entities as before 79 | # will perform a query and logs: 80 | # INFO:trackteroid.query:Performing query: "select id, asset.name, version from AssetVersion where (version like "%")" 81 | all_assetversions_once_more = 
Query(AssetVersion).by_version("%").get_all() 82 | print( 83 | f"result contains same entities: {all_assetversions == all_assetversions_once_more}\n", 84 | f"result is same collection: {all_assetversions is all_assetversions_once_more}" 85 | 86 | ) 87 | # output: 88 | # result contains same entities: True 89 | # result is same collection: False 90 | 91 | ``` 92 | example reuse query results end -------------------------------------------------------------------------------- /.graphics/svg/logo_white.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /.graphics/svg/logo_black.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /tests/test_session.py: -------------------------------------------------------------------------------- 1 | import dbm 2 | import os 3 | import platform 4 | import shutil 5 | import tempfile 6 | 7 | import ftrack_api 8 | import pytest 9 | 10 | 11 | from trackteroid import ( 12 | SESSION, 13 | Query, 14 | AssetVersion, 15 | User 16 | ) 17 | 18 | 19 | @pytest.fixture 20 | def dumped_operations_file(): 21 | yield os.path.join(tempfile.gettempdir(), "operations.dbm") 22 | 23 | 24 | @pytest.fixture 25 | def initial_operations(): 26 | # these contents need to match the operations in the given resource file 27 | test_data = { 28 | "username": "demo.user@example.com", 29 | "first_name": "demo", 30 | "last_name": "user", 31 | "is_active": True 32 | } 33 | yield ( 34 | f"{os.path.dirname(__file__)}/resources/session" 35 | f"/operations_{'windows' if platform.platform().startswith('Windows') else 'linux'}.dbm", 36 | test_data 37 | ) 38 | with ftrack_api.Session(auto_populate=False, auto_connect_event_hub=False) as session: 39 | demo_user = session.query( 40 | f"select first_name, last_name from User where username is \"{test_data['username']}\"" 41 | ).first() 42 | if demo_user: 43 | session.delete(demo_user) 44 | session.commit() 45 | 46 | 47 | def test_deferred_operations(dumped_operations_file): 48 | # case 1. 
clear an existing cache temporarily 49 | query_result = Query(AssetVersion).get_first(projections=["task", "version"]) 50 | operations_count = len(SESSION.recorded_operations) 51 | 52 | # do a query -> cache a result 53 | with SESSION.deferred_operations(dumped_operations_file): 54 | assert 0 == len(SESSION.recorded_operations) 55 | 56 | avs = Query(AssetVersion).by_id(*query_result.id).get_one(projections=["task"]) 57 | avs.version = avs.version[0] + 10 58 | avs2 = avs.create(task=avs.task) # -> create entity, update task, update asset 59 | avs2.version = avs.version[0] + 10 60 | 61 | assert 5 == len(SESSION.recorded_operations) 62 | 63 | assert operations_count == len(SESSION.recorded_operations) 64 | # check the created file database 65 | database = dbm.open(dumped_operations_file, "r") 66 | 67 | def make_keys(entity_collection): 68 | return entity_collection.map( 69 | lambda x: "('{}', ['{}'])".format(x.entity_type.__name__, x.id[0]) 70 | ) 71 | 72 | expected_keys = make_keys(avs.union(avs2)) + make_keys(avs.task) + make_keys(avs.asset) + ["__operations__"] 73 | assert expected_keys, database.keys() 74 | 75 | 76 | def test_reconnect_and_commit(initial_operations): 77 | 78 | SESSION.reconnect_and_commit(initial_operations[0]) 79 | 80 | user = Query(User).by_name(initial_operations[1]["username"]).get_one( 81 | projections=["first_name", "last_name", "is_active"] 82 | ) 83 | assert initial_operations[1]["first_name"] == user.first_name[0] 84 | assert initial_operations[1]["last_name"] == user.last_name[0] 85 | assert initial_operations[1]["is_active"] is True 86 | 87 | 88 | def test_get_cached_collections(initial_operations): 89 | SESSION.reconnect_and_commit(initial_operations[0]) 90 | 91 | users = SESSION.get_cached_collections()[User].fetch_attributes("first_name", "last_name", "is_active") 92 | 93 | users_split = users.partition(lambda u: u.username[0] == initial_operations[1]["username"]) 94 | assert users_split[0].first_name == [initial_operations[1]["first_name"]] 95 | -------------------------------------------------------------------------------- /src/trackteroid/query/utils.py: -------------------------------------------------------------------------------- 1 | # BSD 3-Clause License 2 | # 3 | # Copyright (c) 2023, Trixter GmbH 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are met: 7 | # 8 | # 1. Redistributions of source code must retain the above copyright notice, this 9 | # list of conditions and the following disclaimer. 10 | # 11 | # 2. Redistributions in binary form must reproduce the above copyright notice, 12 | # this list of conditions and the following disclaimer in the documentation 13 | # and/or other materials provided with the distribution. 14 | # 15 | # 3. Neither the name of the copyright holder nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | # DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | # 30 | 31 | # TODO: Move this into query.py 32 | def build_partial_query(target, arguments, attribute): 33 | """ 34 | 35 | Args: 36 | target (TargetRelation): target relation data (relation + collection). 37 | arguments (tuple): attribute values/patterns to match in the query. 38 | attribute (str): attribute chain. 39 | 40 | Returns: 41 | str: partial query expression. 42 | 43 | """ 44 | def _build_for_relation(relation, attribute): 45 | pattern_match_expressions = [] 46 | non_pattern_elements = [] 47 | for element in arguments: 48 | if isinstance(element, str) and "%" in element: 49 | pattern_match_expressions.append( 50 | "{} like \"{}\"".format(attribute, element) 51 | ) 52 | else: 53 | non_pattern_elements.append("\"{}\"".format(element)) 54 | 55 | non_pattern_expressions = [] 56 | 57 | # handle known id shortcuts like parent.id -> parent_id, version.id -> version_id 58 | if attribute == "id" and target.relation and not target.collection: 59 | relation_tokens = relation.split(".") 60 | if len(relation_tokens) > 1: 61 | attribute = "{}_id".format(relation_tokens[-1]) 62 | relation = ".".join(relation_tokens[:-1]) 63 | else: 64 | attribute = "{}_id".format(relation_tokens[0]) 65 | relation = "" 66 | 67 | if len(non_pattern_elements) == 1: 68 | non_pattern_expressions.append( 69 | "{} is {}".format(attribute, non_pattern_elements[0]) 70 | ) 71 | elif non_pattern_elements: 72 | non_pattern_expressions.append( 73 | "{} in ({})".format(attribute, ",".join(non_pattern_elements)) 74 | ) 75 | 76 | # Attribute expression 77 | query = "({})".format(" or ".join(non_pattern_expressions + pattern_match_expressions)) 78 | 79 | if relation: 80 | # Relation expression 81 | query = "{} {} {}".format(relation, "any" if target.collection else "has", query) 82 | 83 | return query 84 | 85 | relations = [target.relation] if not isinstance(target.relation, list) else target.relation 86 | if len(relations) > 1: 87 | start, end = "(", ")" 88 | else: 89 | start, end = "", "" 90 | 91 | return start + " or ".join(_build_for_relation(relation, attribute) for relation in relations) + end 92 | -------------------------------------------------------------------------------- /doc/sphinx_source/_static/logo_white.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /doc/sphinx_source/query/examples.md: -------------------------------------------------------------------------------- 1 | example minimal start 2 | ```python 3 | from trackteroid import ( 4 | Query, 5 | AssetVersion 6 | ) 7 | 8 | version_collection = Query(AssetVersion).get_first() 9 | print(version_collection) 10 | 11 | # output: EntityCollection[AssetVersion]{1} 12 | ``` 13 | example minimal end 14 | 15 | example projections1 start 16 | ```python 17 | print(Query(AssetVersion).get_first().id) 18 | # output: [u'00001180-b7e7-43cf-b0e5-a2df0cefe669'] 19 | 20 | 
print(Query(AssetVersion).get_first().comment) 21 | # output: [Symbol(NOT_SET)] 22 | 23 | print(Query(AssetVersion).get_first(projections=["comment"]).comment) 24 | # output: [u'Hello World'] 25 | ``` 26 | example projections1 end 27 | 28 | example projections shortcut start 29 | ```python 30 | from trackteroid import ( 31 | Query, 32 | AssetVersion, 33 | Project 34 | ) 35 | 36 | version_collection = Query(AssetVersion).get_first(projections=[Project.name]) 37 | # Performing query: "select asset.name, task.project.name, id, version from AssetVersion" 38 | 39 | # The abbreviation is not only working for projections, 40 | # but also via attribute access on the resulting collection 41 | print( 42 | version_collection.task.project.name, 43 | version_collection.Project.name 44 | ) 45 | # output: ([u'DummyProject'], [u'DummyProject']) 46 | ``` 47 | example projections shortcut end 48 | 49 | example criteria1 start 50 | ```python 51 | from trackteroid import ( 52 | Query, 53 | AssetVersion 54 | ) 55 | 56 | print(Query(AssetVersion).by_id("00001180-b7e7-43cf-b0e5-a2df0cefe669").get_all()) 57 | # output: EntityCollection[AssetVersion]{1} 58 | 59 | # while you can technically also do 60 | # `Query(AssetVersion).by_id("00001180-b7e7-43cf-b0e5-a2df0cefe669").by_id("00001fd9-c8b8-4d84-8a8d-2c8fbbed46a0").get_all()` 61 | print( 62 | Query(AssetVersion).by_id( 63 | "00001180-b7e7-43cf-b0e5-a2df0cefe669", 64 | "00001fd9-c8b8-4d84-8a8d-2c8fbbed46a0" 65 | ).get_all() 66 | ) 67 | # output: EntityCollection[AssetVersion]{2} 68 | 69 | # get all AssetVersions with version number 1 or 2 of an Asset called 'SomeAsset' 70 | print(Query(AssetVersion).by_name("SomeAsset").by_version(1, 2).get_all()) 71 | # output: EntityCollection[AssetVersion]{2} 72 | 73 | # get all AssetVersions with version number 1 or 2 of any Asset that is NOT called 'SomeAsset' 74 | print(Query(AssetVersion).not_by_name("SomeAsset").by_version(1, 2).get_all()) 75 | # output: EntityCollection[AssetVersion]{10} 76 | ``` 77 | example criteria1 end 78 | 79 | example criteria like start 80 | ```python 81 | from trackteroid import ( 82 | Query, 83 | Asset 84 | ) 85 | 86 | print(Query(Asset).by_name("%Asset").get_all().name) 87 | # output: [u'SomeAsset', u'SomeAsset', u'SomeAsset'] 88 | 89 | print(Query(Asset).by_name("Some%").get_all().name) 90 | # output: [u'SomeAsset', u'SomeAsset', u'SomeAsset', u'SomeCharacter', u'SomeScene'] 91 | 92 | print(Query(Asset).by_name("%Asset%").get_all().name) 93 | # output: [u'SomeAsset', u'SomeAsset', u'SomeAsset', u'AnAssetClone'] 94 | ``` 95 | example criteria like end 96 | 97 | example criteria target start 98 | ```python 99 | from trackteroid import ( 100 | Query, 101 | Asset, 102 | Project 103 | ) 104 | print(Query(Asset).by_name("SomeAsset").get_all()) 105 | # output: EntityCollection[Asset]{3} 106 | print(Query(Asset).by_name(Project, "DummyProject", "DummyProject2").get_all()) 107 | # output: EntityCollection[Asset]{10} 108 | print(Query(Asset).by_name("SomeAsset").by_name(Project, "DummyProject", "DummyProject2").get_all()) 109 | # output: EntityCollection[Asset]{2} 110 | ``` 111 | example criteria target end 112 | 113 | example limiting start 114 | ```python 115 | from trackteroid import ( 116 | Query, 117 | AssetVersion 118 | ) 119 | 120 | # get all AssetVersions ordered descending by their version number across all Assets 121 | print(Query(AssetVersion).get_all(limit=8, order="descending", order_by="version").version) 122 | # output: [55, 43, 42, 22, 10, 10, 8, 7] 123 | ``` 124 | example limiting end 
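As an additional sketch, the criteria, wildcard, and ordering features shown above can be combined in a single query; the asset name and the printed result below are assumed demo data rather than actual output:

```python
from trackteroid import (
    Query,
    AssetVersion
)

# combine a name criterion, a version wildcard and ordering in one query;
# "SomeAsset" and the printed collection size are assumed demo data
versions = Query(AssetVersion).by_name("SomeAsset").by_version("%").get_all(
    limit=5,
    order="descending",
    order_by="version"
)
print(versions)
# hypothetical output: EntityCollection[AssetVersion]{5}
```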
125 | 126 | example session start 127 | ```python 128 | from trackteroid import ( 129 | AssetVersion, 130 | Query, 131 | SCHEMA, 132 | SESSION, 133 | ) 134 | 135 | # same as Query(AssetVersion) 136 | Query(AssetVersion, session=SESSION, schema=SCHEMA.default) 137 | ``` 138 | example session end 139 | 140 | example session2 start 141 | ```python 142 | from trackteroid import ( 143 | Query, 144 | AssetVersion, 145 | SCHEMA 146 | ) 147 | from trackteroid.session import Session 148 | 149 | session = Session() 150 | 151 | Query(AssetVersion, session=session) 152 | ``` 153 | example session2 end -------------------------------------------------------------------------------- /src/trackteroid/entities/declarations.pyi: -------------------------------------------------------------------------------- 1 | from .base import Entity 2 | from typing import * 3 | 4 | class ForwardDeclaration: ... 5 | 6 | class RelationshipDeclaration: 7 | def __init__(self, parent: Union[Type['Entity'], Type[ForwardDeclareCompare]], child: Union[Type['Entity'], Type[ForwardDeclareCompare], str]): ... 8 | 9 | class ForwardDeclareCompare(type): 10 | def __eq__(self, other: Union[Type['Entity'], Type[ForwardDeclaration]]) -> bool: ... 11 | 12 | class Asset(ForwardDeclaration): ... 13 | class Scope(ForwardDeclaration): ... 14 | class Metadata(ForwardDeclaration): ... 15 | class TaskTemplateItem(ForwardDeclaration): ... 16 | class ReviewSessionInvitee(ForwardDeclaration): ... 17 | class Bid(ForwardDeclaration): ... 18 | class Milestone(ForwardDeclaration): ... 19 | class SequenceComponent(ForwardDeclaration): ... 20 | class ObjectType(ForwardDeclaration): ... 21 | class ReviewSessionObjectStatus(ForwardDeclaration): ... 22 | class Task(ForwardDeclaration): ... 23 | class Project(ForwardDeclaration): ... 24 | class Conversation(ForwardDeclaration): ... 25 | class AssetVersionLink(ForwardDeclaration): ... 26 | class Timelog(ForwardDeclaration): ... 27 | class Shot(ForwardDeclaration): ... 28 | class ListCustomAttributeValue(ForwardDeclaration): ... 29 | class Disk(ForwardDeclaration): ... 30 | class Event(ForwardDeclaration): ... 31 | class Participant(ForwardDeclaration): ... 32 | class JobComponent(ForwardDeclaration): ... 33 | class NoteCategory(ForwardDeclaration): ... 34 | class State(ForwardDeclaration): ... 35 | class Setting(ForwardDeclaration): ... 36 | class Location(ForwardDeclaration): ... 37 | class Episode(ForwardDeclaration): ... 38 | class CalendarEvent(ForwardDeclaration): ... 39 | class Type(ForwardDeclaration): ... 40 | class ContextCustomAttributeValue(ForwardDeclaration): ... 41 | class Job(ForwardDeclaration): ... 42 | class TypedContextLink(ForwardDeclaration): ... 43 | class Chapter(ForwardDeclaration): ... 44 | class AssetType(ForwardDeclaration): ... 45 | class Timer(ForwardDeclaration): ... 46 | class CustomAttributeGroup(ForwardDeclaration): ... 47 | class ReviewSession(ForwardDeclaration): ... 48 | class AssetVersion(ForwardDeclaration): ... 49 | class AssetBuild(ForwardDeclaration): ... 50 | class UserCustomAttributeValue(ForwardDeclaration): ... 51 | class ContainerComponent(ForwardDeclaration): ... 52 | class Message(ForwardDeclaration): ... 53 | class CustomAttributeValue(ForwardDeclaration): ... 54 | class TypedContext(ForwardDeclaration): ... 55 | class Group(ForwardDeclaration): ... 56 | class Note(ForwardDeclaration): ... 57 | class WorkflowSchema(ForwardDeclaration): ... 58 | class DashboardWidget(ForwardDeclaration): ... 59 | class Epic(ForwardDeclaration): ... 
60 | class Status(ForwardDeclaration): ... 61 | class Appointment(ForwardDeclaration): ... 62 | class CalendarEventResource(ForwardDeclaration): ... 63 | class Membership(ForwardDeclaration): ... 64 | class User(ForwardDeclaration): ... 65 | class ProjectSchemaOverride(ForwardDeclaration): ... 66 | class Recipient(ForwardDeclaration): ... 67 | class TypedContextStatusChange(ForwardDeclaration): ... 68 | class AssetVersionCustomAttributeValue(ForwardDeclaration): ... 69 | class ProjectSchema(ForwardDeclaration): ... 70 | class TaskTemplate(ForwardDeclaration): ... 71 | class EntitySetting(ForwardDeclaration): ... 72 | class ListCategory(ForwardDeclaration): ... 73 | class StatusChange(ForwardDeclaration): ... 74 | class Feed(ForwardDeclaration): ... 75 | class Priority(ForwardDeclaration): ... 76 | class AssetCustomAttributeValue(ForwardDeclaration): ... 77 | class NoteComponent(ForwardDeclaration): ... 78 | class TaskTypeSchema(ForwardDeclaration): ... 79 | class TypedContextList(ForwardDeclaration): ... 80 | class AssetVersionStatusChange(ForwardDeclaration): ... 81 | class UserSecurityRole(ForwardDeclaration): ... 82 | class FileComponent(ForwardDeclaration): ... 83 | class AssetGroup(ForwardDeclaration): ... 84 | class Folder(ForwardDeclaration): ... 85 | class Schema(ForwardDeclaration): ... 86 | class CustomAttributeType(ForwardDeclaration): ... 87 | class Component(ForwardDeclaration): ... 88 | class SchemaType(ForwardDeclaration): ... 89 | class SchemaStatus(ForwardDeclaration): ... 90 | class Resource(ForwardDeclaration): ... 91 | class SecurityRole(ForwardDeclaration): ... 92 | class List(ForwardDeclaration): ... 93 | class ReviewSessionObject(ForwardDeclaration): ... 94 | class CustomAttributeConfiguration(ForwardDeclaration): ... 95 | class Context(ForwardDeclaration): ... 96 | class AssetVersionList(ForwardDeclaration): ... 97 | class Sequence(ForwardDeclaration): ... 98 | class ComponentLocation(ForwardDeclaration): ... 99 | class Dashboard(ForwardDeclaration): ... 100 | -------------------------------------------------------------------------------- /src/trackteroid/entities/schematypes.py: -------------------------------------------------------------------------------- 1 | # BSD 3-Clause License 2 | # 3 | # Copyright (c) 2023, Trixter GmbH 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are met: 7 | # 8 | # 1. Redistributions of source code must retain the above copyright notice, this 9 | # list of conditions and the following disclaimer. 10 | # 11 | # 2. Redistributions in binary form must reproduce the above copyright notice, 12 | # this list of conditions and the following disclaimer in the documentation 13 | # and/or other materials provided with the distribution. 14 | # 15 | # 3. Neither the name of the copyright holder nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | # DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | # 30 | 31 | import re 32 | 33 | from arrow.arrow import Arrow 34 | from datetime import datetime 35 | from collections import defaultdict 36 | 37 | from ..session import SESSION 38 | 39 | 40 | class AttributeInfo(object): 41 | def __init__(self, types=tuple(), array=False, mutable=True): 42 | self.types = types 43 | self.array = array 44 | self.mutable = mutable 45 | 46 | 47 | class _CustomAttributeTypeCompatibility(object): 48 | _compatibility_map = { 49 | "number": (int, float), 50 | "date": (Arrow, datetime), 51 | "text": (str), 52 | "boolean": (bool,), 53 | "enumerator": (str), 54 | "dynamic_enumerator": (str), 55 | "expression": (str) 56 | } 57 | 58 | def __init__(self): 59 | self.attributes = {} 60 | 61 | def __getattr__(self, item): 62 | if not self.attributes: 63 | configs = SESSION.query("select key, type.name from CustomAttributeConfiguration").all() 64 | for config in configs: 65 | self.attributes[config["key"]] = AttributeInfo( 66 | types=self._compatibility_map[config["type"]["name"]], 67 | ) 68 | 69 | if item in self.attributes: 70 | return self.attributes[item] 71 | 72 | raise KeyError("Custom Attribute '{}' not found.".format(item)) 73 | 74 | 75 | class TypesBase(object): 76 | projections = ["id", "name"] 77 | entity_type = None 78 | 79 | def __init__(self): 80 | self._types = defaultdict(dict) 81 | 82 | def get(self, item, session=SESSION): 83 | if " " in item: 84 | item = self._to_camel_case(item) 85 | session_id = str(session) 86 | types = self._types[session_id] 87 | if not types: 88 | object_types = session.query( 89 | "select {} from {}".format(", ".join(self.projections), self.entity_type) 90 | ).all() 91 | for _type in object_types: 92 | types[self._to_camel_case(_type["name"])] = _type 93 | 94 | if item in types: 95 | return types[item] 96 | elif item == "types": 97 | return types 98 | 99 | raise KeyError("{} '{}' not found.".format(self.entity_type, item)) 100 | 101 | def __getattr__(self, item): 102 | return self.get(item) 103 | 104 | def __getitem__(self, item): 105 | return self.get(item) 106 | 107 | @staticmethod 108 | def _to_camel_case(string): 109 | return "".join([_.capitalize() for _ in re.findall(r"[a-zA-Z]*\s*?", string)]) 110 | 111 | 112 | class _ObjectTypes(TypesBase): 113 | entity_type = "ObjectType" 114 | 115 | 116 | class _AssetTypes(TypesBase): 117 | entity_type = "AssetType" 118 | 119 | 120 | class _ProjectSchemas(TypesBase): 121 | entity_type = "ProjectSchema" 122 | 123 | 124 | class _TaskTypes(TypesBase): 125 | entity_type = "Type" 126 | 127 | 128 | CUSTOM_ATTRIBUTE_TYPE_COMPATIBILITY = _CustomAttributeTypeCompatibility() 129 | OBJECT_TYPES = _ObjectTypes() 130 | ASSET_TYPES = _AssetTypes() 131 | TASK_TYPES = _TaskTypes() 132 | PROJECT_SCHEMAS = _ProjectSchemas() 133 | -------------------------------------------------------------------------------- /src/trackteroid/configuration.py: -------------------------------------------------------------------------------- 1 | # BSD 3-Clause License 2 | 
# 3 | # Copyright (c) 2023, Trixter GmbH 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are met: 7 | # 8 | # 1. Redistributions of source code must retain the above copyright notice, this 9 | # list of conditions and the following disclaimer. 10 | # 11 | # 2. Redistributions in binary form must reproduce the above copyright notice, 12 | # this list of conditions and the following disclaimer in the documentation 13 | # and/or other materials provided with the distribution. 14 | # 15 | # 3. Neither the name of the copyright holder nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | # 30 | 31 | import importlib 32 | import logging 33 | import os 34 | import traceback 35 | import sys 36 | 37 | import wrapt 38 | 39 | 40 | def _fallback_decorator(fallback_func, attribute): 41 | """ fallback mechanism for the wrapped function on the associated config entry 42 | 43 | Args: 44 | fallback_func callable: function to wrap 45 | attribute: name of the attribute the fallback will be applied to 46 | 47 | Returns: 48 | callable: function wrapper 49 | 50 | """ 51 | @wrapt.decorator 52 | def wrapper(wrapped, instance, args, kwargs): 53 | try: 54 | return wrapped(*args, **kwargs) 55 | except: 56 | _LOG.error( 57 | f"Calling '{attribute}' raised an exception. Falling back to default implementation!", 58 | exc_info=True 59 | ) 60 | return fallback_func(*args, **kwargs) 61 | 62 | return wrapper 63 | 64 | 65 | def _override_configuration(): 66 | """ identifies and parses a user configuration and applies relevant attributes to this module 67 | 68 | """ 69 | _custom_configuration = os.getenv("TRACKTEROID_CONFIGURATION", False) 70 | # try our best to source a custom configuration 71 | if _custom_configuration: 72 | if not os.path.exists(_custom_configuration): 73 | raise OSError(f"Custom configuration `{_custom_configuration}` doesn't exist.") 74 | if not os.path.splitext(_custom_configuration)[1] in [".py"]: 75 | raise AssertionError(f"Custom configuration must `{_custom_configuration}` be a .py file.") 76 | 77 | _LOG.info(f"Custom configuration specified in `{_custom_configuration}`. 
Trying to load...") 78 | try: 79 | spec = importlib.util.spec_from_file_location("custom_configuration", _custom_configuration) 80 | custom_configuration = importlib.util.module_from_spec(spec) 81 | spec.loader.exec_module(custom_configuration) 82 | except: 83 | raise RuntimeError( 84 | "Failed to load custom configuration.\n{}".format( 85 | "\n".join(traceback.format_exception(*sys.exc_info())) 86 | ) 87 | ) 88 | 89 | relevant_attributes = [_ for _ in globals().keys() if _.isupper() and not _.startswith("_")] 90 | for attribute in relevant_attributes: 91 | if hasattr(custom_configuration, attribute): 92 | _LOG.info(f"Adding '{attribute}' from user configuration.") 93 | if attribute in _CALLABLES_REQUIRE_FALLBACK: 94 | globals()[attribute] = _fallback_decorator( 95 | globals()[attribute], 96 | attribute 97 | )(getattr(custom_configuration, attribute)) 98 | else: 99 | globals()[attribute] = getattr(custom_configuration, attribute) 100 | 101 | 102 | # default entries - ALL upper variable names in here can be set in custom user configuration 103 | ############################################################################################ 104 | 105 | # A functions that gets the current api version string and would return a custom entities 106 | # schema used for relationship resolutions. 107 | RELATIONSHIPS_RESOLVER = lambda api_version: {} 108 | 109 | # The primary logger 110 | LOGGING_NAMESPACE = "trackteroid" 111 | 112 | # A function that gets the current session and the type name that it requests a 113 | # deletion for and resolves to True or False. 114 | ALLOWED_FOR_DELETION_RESOLVER = lambda session, type_name: True 115 | 116 | WARN_ON_INJECT = False 117 | ############################################################################################ 118 | 119 | _LOG = logging.getLogger(f"{LOGGING_NAMESPACE}.configuration") 120 | _CALLABLES_REQUIRE_FALLBACK = [ 121 | "RELATIONSHIPS_RESOLVER", 122 | ] 123 | 124 | _override_configuration() 125 | 126 | 127 | -------------------------------------------------------------------------------- /doc/sphinx_source/query/overview.md: -------------------------------------------------------------------------------- 1 | To retrieve data from the Ftrack server, you need to perform queries using the `Query` class. This class serves as the entry point for accessing data and should be initialized with the desired entity type: `Query()`. 2 | 3 | In the previous example, we wanted to retrieve any `AssetVersion` but only fetched the first result from the server. Any `get_` prefixed method on the `Query` instance we do refer to as **terminators**. 4 | These terminators are responsible for executing the resolved query instruction, sending it to the Ftrack server, and fetching the corresponding data. 5 | 6 | The result is returned as a collection, specifically an instance of either `EntityCollection` or `EmptyCollection`, depending on the outcome. When printed, the collection is represented as: 7 | 8 | `EntityCollection[]{}` or `EmptyCollection[]` 9 | 10 | If you wish to preview the resolved query without sending it to the server, you can simply print the collection or call str on it. 11 | 12 | ```python 13 | from trackteroid import AssetVersion 14 | 15 | print(Query(AssetVersion)) 16 | # output: select id, asset.name, version from AssetVersion 17 | ``` 18 | 19 | #### Projections 20 | 21 | When accessing data from the resulting collection, it is important to _project_ (specify) the attributes that you will need to access later. 
22 | A resolved query typically takes the form: 23 | 24 | ```sql 25 | select <projections> from <entity type> where <criteria> 26 | ``` 27 | 28 | Looking back at the previous example, you can observe that the attributes _id_, _asset.name_, and _version_ were included in the resolved query instruction. 29 | This was done because these attributes are commonly accessed and can be predefined for certain entity types. 30 | 31 | By projecting the necessary attributes in the query, you ensure that the resulting collection includes the specific data you require. 32 | 33 | ```python 34 | from trackteroid import AssetVersion 35 | 36 | print(AssetVersion.projections) 37 | # output: ['id', 'asset.name', 'version'] 38 | ``` 39 | In contrast to the Ftrack Python API, the default Session within Trackteroid disables the auto-population feature. This means that the Session will not automatically fetch missing data when accessing attributes on your collections. Instead, the data is fetched only for the attributes that were explicitly projected in the query. 40 | This behavior provides a more controlled and optimized approach to data retrieval. By avoiding unnecessary data fetching, disabled auto-population minimizes server requests and can improve performance significantly. 41 | 42 | When working with Trackteroid, it is important to be aware of this behavior and ensure that you project all the attributes you need in your queries. 43 | 44 | ```{include} query/examples.md 45 | :start-after: example projections1 start 46 | :end-before: example projections1 end 47 | ``` 48 | When attempting to access the _comment_ attribute without projecting it, the output contains `Symbol(NOT_SET)`, indicating that the data for the comment attribute was not fetched. 49 | However, by modifying the query to include the _comment_ attribute in the projections list (`projections=["comment"]`) and accessing it, the output becomes `[u'Hello World']`, providing the retrieved value of the comment. 50 | 51 | ```python 52 | from trackteroid import ( 53 | Query, 54 | AssetVersion 55 | ) 56 | 57 | print( 58 | Query(AssetVersion).get_first( 59 | projections=["comment", "asset.parent.project.name"] 60 | ).asset.parent.project.name 61 | ) 62 | # output: [u'DummyProject'] 63 | ``` 64 | Knowing these relationships and constructing such queries by hand can be challenging and often leads to long and complex query strings. However, Trackteroid provides a shorter and easier alternative for many relationships. 65 | ```{include} query/examples.md 66 | :start-after: example projections shortcut start 67 | :end-before: example projections shortcut end 68 | ``` 69 | 70 | This concise and intuitive approach simplifies querying and attribute retrieval for complex relationships. 71 | 72 | #### Filtering 73 | 74 | To ensure optimal performance and avoid fetching unnecessary data, it is recommended to narrow down the query results directly using `Query` criteria. Criteria methods in Trackteroid follow a `by_` and `not_by_` name prefix convention and can be chained together. While different entity types may have different criteria methods available, many share common ones. 75 | By utilizing criteria methods, you specify filtering conditions directly during query construction, reducing the amount of data retrieved. This approach helps improve code performance and efficiency.
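Before the full examples, here is a minimal hedged sketch of how a single criterion resolves into the underlying query string. The default projections for `Asset` and the exact output are assumptions; the parenthesized `where` clause mirrors the `build_partial_query` helper shown earlier.

```python
from trackteroid import (
    Query,
    Asset
)

# printing the Query previews the resolved instruction without contacting the server
print(Query(Asset).by_name("SomeAsset"))
# assumed output shape: select id, name from Asset where (name is "SomeAsset")
```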
76 | 77 | ```{include} query/examples.md 78 | :start-after: example criteria1 start 79 | :end-before: example criteria1 end 80 | ``` 81 | 82 | Moreover, the query mechanism allows for pattern-based filtering using the % placeholder, which denotes "zero or more of any character". This feature enhances the flexibility and sophistication of your filtering options within queries. 83 | ```{include} query/examples.md 84 | :start-after: example criteria like start 85 | :end-before: example criteria like end 86 | ``` 87 | 88 | Frequently, criteria in the query mechanism involve searching for direct properties of an entity, such as _id_, _name_, or _metadata_. By default, those criteria are associated with the entity type specified in the Query, representing the desired results. However, criteria can also offer the flexibility to define a target, allowing you to specify the entity type for which you want to reference its property instead. 89 | ```{include} query/examples.md 90 | :start-after: example criteria target start 91 | :end-before: example criteria target end 92 | ``` 93 | 94 | For criteria that support target specification, you have the option to provide **exactly one target as the first positional argument**. This target defines the relationship for the property used within the criterion. 95 | 96 | ### Limiting and Ordering 97 | 98 | The `get_all` terminator supports limiting and ordering results. 99 | 100 | ```{include} query/examples.md 101 | :start-after: example limiting start 102 | :end-before: example limiting end 103 | ``` -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | import uuid 3 | from typing import List 4 | 5 | import ftrack_api 6 | import pytest 7 | 8 | from trackteroid import SESSION 9 | 10 | 11 | @dataclasses.dataclass 12 | class TestScenario: 13 | project_id: str 14 | sequence_ids: List[str] = dataclasses.field(default_factory=list) 15 | shot_ids: List[str] = dataclasses.field(default_factory=list) 16 | assetbuild_ids: List[str] = dataclasses.field(default_factory=list) 17 | task_ids: List[str] = dataclasses.field(default_factory=list) 18 | asset_ids: List[str] = dataclasses.field(default_factory=list) 19 | version_ids: List[str] = dataclasses.field(default_factory=list) 20 | component_ids: List[str] = dataclasses.field(default_factory=list) 21 | 22 | def query_project(self, session, projections): 23 | return session.query( 24 | f"select {', '.join(projections)} from Project " 25 | f"where id is {self.project_id}" 26 | ).one() 27 | 28 | def grab(self, session, entity_type, required_fields): 29 | 30 | type_ids = { 31 | "Sequence": self.sequence_ids, 32 | "Shot": self.shot_ids, 33 | "AssetBuild": self.assetbuild_ids, 34 | "Task": self.task_ids, 35 | "Asset": self.asset_ids, 36 | "AssetVersion": self.version_ids, 37 | "Component": self.component_ids, 38 | } 39 | 40 | if entity_type not in type_ids: 41 | raise KeyError(entity_type) 42 | if not type_ids[entity_type]: 43 | raise ValueError(f"No entities of type {entity_type} exists in the scenario") 44 | 45 | query = f"select {', '.join(required_fields)} from {entity_type} where " 46 | for field in required_fields: 47 | query += f"{field} like '%' " 48 | query += f"and id in ({', '.join(type_ids[entity_type])})" 49 | return session.query(query).all() 50 | 51 | 52 | @pytest.fixture(autouse=True) 53 | def reconnect_session(): 54 | return SESSION.reconnect() 55 | 56 | 57 | 
@pytest.fixture() 58 | def ftrack_session(): 59 | with ftrack_api.Session() as session: 60 | yield session 61 | 62 | 63 | @pytest.fixture(scope="session") 64 | def ftrack_project_id(): 65 | session = ftrack_api.Session() 66 | name = "unittests_{0}".format(uuid.uuid1().hex) 67 | 68 | # Naively pick the first project schema. For this example to work the 69 | # schema must contain `Shot` and `Sequence` object types. 70 | required_types = ["Sequence", "Shot"] 71 | project_schema = None 72 | for schema in session.query("ProjectSchema").all(): 73 | types = [x["name"] for x in schema["object_types"]] 74 | if all([x in types for x in required_types]): 75 | project_schema = schema 76 | break 77 | 78 | if not project_schema: 79 | raise ValueError( 80 | f"A project schema with the following types could not be found on {session.server_url}:" 81 | f" {', '.join(required_types)}" 82 | ) 83 | 84 | # Create the project with the chosen schema. 85 | project = session.create( 86 | "Project", 87 | {"name": name, "full_name": name + "_full", "project_schema": project_schema}, 88 | ) 89 | session.commit() 90 | 91 | yield project["id"] 92 | 93 | session.delete(project) 94 | session.commit() 95 | 96 | 97 | @pytest.fixture 98 | def scenario_project(ftrack_project_id) -> TestScenario: 99 | return TestScenario(project_id=ftrack_project_id) 100 | 101 | 102 | @pytest.fixture 103 | def scenario_sequence(ftrack_session, scenario_project) -> TestScenario: 104 | project = scenario_project.query_project(ftrack_session, ["project_schema"]) 105 | 106 | # Create sequences, shots and tasks. 107 | for sequence_number in range(1, 5): 108 | sequence = ftrack_session.create( 109 | "Sequence", 110 | {"name": "seq_{0}".format(uuid.uuid4()), "parent": project}, 111 | ) 112 | 113 | scenario_project.sequence_ids.append(sequence["id"]) 114 | ftrack_session.commit() 115 | return scenario_project 116 | 117 | 118 | @pytest.fixture 119 | def scenario_assetbuild(ftrack_session, scenario_project) -> TestScenario: 120 | project = scenario_project.query_project(ftrack_session, ["project_schema"]) 121 | 122 | for _ in range(4): 123 | sequence = ftrack_session.create( 124 | "AssetBuild", 125 | {"name": "ab_{0}".format(uuid.uuid4()), "parent": project}, 126 | ) 127 | 128 | scenario_project.assetbuild_ids.append(sequence["id"]) 129 | ftrack_session.commit() 130 | return scenario_project 131 | 132 | 133 | @pytest.fixture 134 | def scenario_shot(ftrack_session, scenario_sequence) -> TestScenario: 135 | project = scenario_sequence.query_project(ftrack_session, ["project_schema"]) 136 | project_schema = project["project_schema"] 137 | default_shot_status = project_schema.get_statuses("Shot")[0] 138 | 139 | for sequence_id in scenario_sequence.sequence_ids: 140 | sequence = ftrack_session.get("Sequence", sequence_id) 141 | for shot_number in range(1, 8): 142 | shot = ftrack_session.create( 143 | "Shot", 144 | { 145 | "name": "shot_{}".format(uuid.uuid4()), 146 | "parent": sequence, 147 | "status": default_shot_status, 148 | }, 149 | ) 150 | scenario_sequence.shot_ids.append(shot["id"]) 151 | ftrack_session.commit() 152 | return scenario_sequence 153 | 154 | 155 | def _create_tasks(session, scenario, parent_ids): 156 | for id_ in parent_ids: 157 | parent = session.get("TypedContext", id_) 158 | for task_number in range(1, 5): 159 | task = session.create( 160 | "Task", 161 | { 162 | "name": "task_{0}".format(uuid.uuid4()), 163 | "parent": parent 164 | }, 165 | ) 166 | scenario.task_ids.append(task["id"]) 167 | 168 | session.commit() 169 | return scenario 
170 | 171 | 172 | @pytest.fixture 173 | def scenario_shot_task(ftrack_session, scenario_shot) -> TestScenario: 174 | return _create_tasks(ftrack_session, scenario_shot, scenario_shot.shot_ids) 175 | 176 | 177 | @pytest.fixture 178 | def scenario_assetbuild_task(ftrack_session, scenario_shot) -> TestScenario: 179 | return _create_tasks(ftrack_session, scenario_shot, scenario_shot.assetbuild_ids) 180 | 181 | 182 | def _create_assets(session, scenario, parent_ids): 183 | for id_ in parent_ids: 184 | parent = session.get("TypedContext", id_) 185 | for task_number in range(1, 5): 186 | task = session.create( 187 | "Asset", 188 | { 189 | "name": "asset_{0}".format(uuid.uuid4()), 190 | "parent": parent 191 | }, 192 | ) 193 | scenario.asset_ids.append(task["id"]) 194 | 195 | session.commit() 196 | return scenario 197 | 198 | 199 | @pytest.fixture 200 | def scenario_shot_asset(ftrack_session, scenario_shot) -> TestScenario: 201 | return _create_assets(ftrack_session, scenario_shot, scenario_shot.shot_ids) 202 | 203 | 204 | @pytest.fixture 205 | def scenario_assetbuild_asset(ftrack_session, scenario_assetbuild) -> TestScenario: 206 | return _create_assets(ftrack_session, scenario_assetbuild, scenario_assetbuild.assetbuild_ids) 207 | -------------------------------------------------------------------------------- /src/trackteroid/query/criteria.py: -------------------------------------------------------------------------------- 1 | # BSD 3-Clause License 2 | # 3 | # Copyright (c) 2023, Trixter GmbH 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are met: 7 | # 8 | # 1. Redistributions of source code must retain the above copyright notice, this 9 | # list of conditions and the following disclaimer. 10 | # 11 | # 2. Redistributions in binary form must reproduce the above copyright notice, 12 | # this list of conditions and the following disclaimer in the documentation 13 | # and/or other materials provided with the distribution. 14 | # 15 | # 3. Neither the name of the copyright holder nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
29 | # 30 | 31 | import inspect 32 | import logging 33 | import re 34 | 35 | from collections import namedtuple 36 | 37 | import wrapt 38 | 39 | from ..configuration import LOGGING_NAMESPACE 40 | 41 | LOG = logging.getLogger("{}.criteria".format(LOGGING_NAMESPACE)) 42 | 43 | 44 | class Criterion(object): 45 | """ a helper class to store and resolve a query criterion""" 46 | def __init__(self, name="", args=(), kwargs={}, target=None, filter=lambda *args, **kwargs: "", negate=False): 47 | self.name = name 48 | self.args = args 49 | self.kwargs = kwargs 50 | self.target = target 51 | self.filter = filter 52 | self.negate = negate 53 | 54 | def resolve(self): 55 | partial_query = self.filter(self.target, *self.args, **self.kwargs) 56 | if self.negate: 57 | partial_query = re.sub(r"\s+(in|like)\s+", r" not_\1 ", partial_query) 58 | partial_query = re.sub(r"\s+is\s+", " is_not ", partial_query) 59 | 60 | return partial_query 61 | 62 | # stay compatible - previously we used a namedtuple as Criterion object 63 | def as_dict(self): 64 | return { 65 | "name": self.name, 66 | "args": self.args, 67 | "kwargs": self.kwargs, 68 | "target": self.target, 69 | "filter": self.filter, 70 | "negate": self.negate 71 | } 72 | 73 | 74 | class Criteria: 75 | """ stubs for filtering (aka query criteria) 76 | 77 | This is used to allow proper autocompletion within (some) IDE(s). 78 | """ 79 | 80 | class InvalidArgumentsError(ValueError): 81 | pass 82 | 83 | class CriterionNotImplementedError(NotImplementedError): 84 | pass 85 | 86 | def by_id(self, *ids): 87 | raise NotImplementedError 88 | 89 | def by_location(self, *locations): 90 | raise NotImplementedError 91 | 92 | def by_resource_identifier(self, *resource_identifiers): 93 | raise NotImplementedError 94 | 95 | def by_version(self, *versions): 96 | raise NotImplementedError 97 | 98 | def by_name(self, *names): 99 | raise NotImplementedError 100 | 101 | def by_type(self, *types): 102 | raise NotImplementedError 103 | 104 | def by_metadata(self, **dictionaries): 105 | raise NotImplementedError 106 | 107 | def by_description(self, regex): 108 | raise NotImplementedError 109 | 110 | def by_comment(self, regex): 111 | raise NotImplementedError 112 | 113 | def by_publisher(self, *publishers): 114 | raise NotImplementedError 115 | 116 | def by_status(self, *statuses): 117 | raise NotImplementedError 118 | 119 | def by_state(self, *states): 120 | raise NotImplementedError 121 | 122 | def by_list(self, *lists): 123 | raise NotImplementedError 124 | 125 | def by_publish_time(self, start=None, end=None): 126 | raise NotImplementedError 127 | 128 | def by_status_change_time(self, start=None, end=None): 129 | raise NotImplementedError 130 | 131 | def by_assignee(self, *assignees): 132 | raise NotImplementedError 133 | 134 | def by_lifespan(self, start=None, end=None): 135 | raise NotImplementedError 136 | 137 | def by_outgoing_link(self, *ids): 138 | raise NotImplementedError 139 | 140 | def by_incoming_link(self, *ids): 141 | raise NotImplementedError 142 | 143 | def by_component_location(self, *component_locations): 144 | raise NotImplementedError 145 | 146 | def by_file_type(self, *file_types): 147 | raise NotImplementedError 148 | 149 | def by_system_type(self, *system_types): 150 | raise NotImplementedError 151 | 152 | def by_size(self, *sizes): 153 | raise NotImplementedError 154 | 155 | def by_active_state(self, *states): 156 | raise NotImplementedError 157 | 158 | def by_action(self, *actions): 159 | raise NotImplementedError 160 | 161 | def by_data(self, *datas): 
162 | raise NotImplementedError 163 | 164 | def by_publish_state(self, publish_state): 165 | raise NotImplementedError 166 | 167 | def inject(self, filter): 168 | raise NotImplementedError 169 | 170 | def by_creation_date(self, *dates): 171 | raise NotImplementedError 172 | 173 | def by_finish_date(self, *dates): 174 | raise NotImplementedError 175 | 176 | @staticmethod 177 | def supported_targets(*supported): 178 | """ Wraps the filter methods and validates the filter targets against 179 | the given supported targets. Raises an error if a given target is 180 | not supported. 181 | 182 | Args: 183 | *supported (Entity): arbitrary number of supported entities 184 | 185 | Returns: 186 | func (filter method): original filter method 187 | """ 188 | 189 | @wrapt.decorator 190 | def wrapper(wrapped, instance, args, kwargs): 191 | _supported_class_names = [_.__name__ for _ in supported] 192 | 193 | if args: 194 | if (not [_ for _ in _supported_class_names if _ in ["_EntityBase", "Entity"]]) and inspect.isclass(args[0]): 195 | if not ((args[0] in supported) or bool([_ for _ in supported if args[0].__bases__[0] == _])): 196 | raise Criteria.InvalidArgumentsError( 197 | "You provided an unsupported target {} in `.{}()`. {}.".format( 198 | args[0].__name__, 199 | wrapped.__name__, 200 | "No targets supported" if not supported else "Supported targets: " + ", ".join( 201 | [_.__name__ for _ in supported] 202 | ) 203 | ) 204 | ) 205 | 206 | # Propagate target as TargetRelation object 207 | if args[0]: 208 | args = [instance.relationship[args[0]]] + list(args[1:]) 209 | return wrapped(*args, **kwargs) 210 | 211 | return wrapper 212 | -------------------------------------------------------------------------------- /doc/sphinx_source/configuration.md: -------------------------------------------------------------------------------- 1 | # Configuration 2 | 3 | To tailor Trackteroid to meet your specific requirements, you have the ability to configure it according to your needs. 4 | 5 | Configuration in Trackteroid is accomplished through a Python file, providing a high degree of flexibility and adjustability in a language you are already familiar with. 6 | 7 | To make your configuration file accessible, you can utilize the `TRACKTEROID_CONFIGURATION` environment variable. 8 | 9 | Here's an example for Linux with Bash: 10 | ```shell 11 | export TRACKTEROID_CONFIGURATION=/path/to/my/trackteroid_user_config.py 12 | ``` 13 | 14 | The subsequent section outlines the available configuration options within Trackteroid and explains how to customize them according to your preferences. 15 | Configuration entries are defined in ALL_CAPS style and can store constant values or callables. 16 | 17 | 18 | ## ALLOWED_FOR_DELETION_RESOLVER 19 | 20 | The _ALLOWED_FOR_DELETION_RESOLVER_ is a callable function that provides the ability to implement overrides for controlling the deletion of specific entity types. When invoked, this function receives the current session object and the name of the entity type that is being requested for deletion. 21 | Default Implementation: 22 | ```python 23 | ALLOWED_FOR_DELETION_RESOLVER = lambda session, type_name: True 24 | ``` 25 | 26 | ```{warning} 27 | While this function primarily serves as an **additional** security measure to prevent accidental deletions, it should not be relied upon as a substitute for proper API user access management in FTrack. 28 | ``` 29 | 30 | 31 | ## LOGGING NAMESPACE 32 | 33 | The logging namespace utilized by Trackteroid is set to `trackteroid` by default. 
This namespace serves as the primary identifier for logging purposes within Trackteroid. Modifying this namespace allows for seamless integration of the API into your existing codebase without the need for additional logging configuration setup. 34 | 35 | 36 | ## RELATIONSHIPS_RESOLVER 37 | 38 | The _RELATIONSHIPS_RESOLVER_ is a callable function that plays a crucial role in supplying relationship information that cannot be automatically derived by Trackteroid. 39 | 40 | Default implementation: 41 | ```python 42 | RELATIONSHIPS_RESOLVER = lambda api_version: {} 43 | ``` 44 | 45 | When invoked, the API will pass the version string to the callable, ensuring resilience in the event of changes to the Trackteroid API that necessitate schema modifications. Your resolver function should return a dictionary adhering to a specific schema. 46 | 47 | Let's explore the schema using a comprehensive example that involves reading a JSON file to provide the necessary relationship information. 48 | 49 | Here's an example implementation of a custom resolver in a _trackteroid_user_config.py_ file. 50 | 51 | ```python 52 | def relationships_from_json(**kwargs): 53 | import json 54 | 55 | api_version = kwargs.get("api_version", "0.0.1") 56 | 57 | if api_version.startswith("0.1"): 58 | with open("/path/to/trackteroid_relationships.json") as f: 59 | return json.load(f) 60 | elif api_version.startswith("0.2"): 61 | with open("/path/to/trackteroid_relationships_new.json") as f: 62 | return json.load(f) 63 | 64 | 65 | RELATIONSHIPS_RESOLVER = relationships_from_json 66 | ``` 67 | In this example, the custom resolver function relationships_from_json reads the appropriate JSON file based on the API version. The returned data from the JSON file provides the relationship information needed by Trackteroid. 68 | 69 | Here's an example of the JSON file _trackteroid_relationships.json_ that conforms to the schema: 70 | 71 | ```json 72 | { 73 | "entities":{ 74 | "AssetVersion":{ 75 | "AssetBuild":"asset.parent[AssetBuild]", 76 | "AssetGroup":"asset.parent[AssetBuild].parent[AssetGroup]", 77 | "Shot":"asset.parent[Shot]", 78 | "Sequence":["asset.parent[Shot].parent[Sequence]", "asset.parent[Sequence]"] 79 | }, 80 | "Component":{ 81 | "AssetBuild":"asset.parent[AssetBuild]", 82 | "AssetGroup":"asset.parent[AssetBuild].parent[AssetGroup]", 83 | "Shot":"asset.parent[Shot]", 84 | "Sequence":"asset.parent.parent[Sequence]", 85 | "Project":"asset.parent.parent.project" 86 | } 87 | }, 88 | "overrides":[ 89 | { 90 | "entities":{ 91 | "AssetVersion":{ 92 | "Shot":"asset.parent[Folder].parent[Shot]" 93 | } 94 | }, 95 | "name":"vfx" 96 | }, 97 | { 98 | "entities":{ 99 | "AssetVersion":{ 100 | "Episode":"asset.parent[Shot].parent[Episode]" 101 | } 102 | }, 103 | "inherit":0, 104 | "name":"episodic" 105 | } 106 | ] 107 | } 108 | ``` 109 | 110 | The resulting dictionary schema can be summarized as follows: 111 | 112 | ``` 113 | { 114 | "entities":{ 115 | :{ 116 | :, 117 | :[,] 118 | } 119 | }, 120 | "overrides": [ 121 | { 122 | "entities":{ 123 | :{ 124 | :, 125 | :[,] 126 | } 127 | }, 128 | "name":, 129 | "inherit"(default:True): 130 | } 131 | ] 132 | } 133 | ``` 134 | 135 | The schema at the root level contains the relationships that will be used as the default schema for the Query. These relationships serve as the base schema, which can be reused or overridden for other custom schemas, as demonstrated in the examples for vfx and episodic. 136 | If a target entity entry contains multiple values it presents an OR relationship. 
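The same structure can also be returned directly as a Python dictionary instead of loading it from JSON. The following minimal sketch reuses the entity and relationship strings from the JSON example above; the positional `api_version` argument mirrors the default resolver's signature.

```python
# trackteroid_user_config.py -- minimal sketch of an inline schema resolver
def inline_relationships(api_version):
    return {
        "entities": {
            "AssetVersion": {
                "Shot": "asset.parent[Shot]",
                # a list of values expresses an OR relationship
                "Sequence": [
                    "asset.parent[Shot].parent[Sequence]",
                    "asset.parent[Sequence]"
                ],
            }
        },
        "overrides": [
            {
                # only provides its own entries because inheritance is disabled
                "entities": {
                    "AssetVersion": {
                        "Episode": "asset.parent[Shot].parent[Episode]"
                    }
                },
                "inherit": 0,
                "name": "episodic",
            }
        ],
    }


RELATIONSHIPS_RESOLVER = inline_relationships
```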
137 | 138 | Here's an example to illustrate how the overrides are applied using the SCHEMA object: 139 | ```python 140 | from pprint import pprint 141 | 142 | from trackteroid import SCHEMA 143 | 144 | pprint(SCHEMA.default) 145 | # output: 146 | # {'entities': {'AssetVersion': {'AssetBuild': 'asset.parent[AssetBuild]', 147 | # 'AssetGroup': 'asset.parent[AssetBuild].parent[AssetGroup]', 148 | # 'Sequence': ['asset.parent[Shot].parent[Sequence]', 149 | # 'asset.parent[Sequence]'], 150 | # 'Shot': 'asset.parent[Shot]'}, 151 | # 'Component': {'AssetBuild': 'asset.parent[AssetBuild]', 152 | # 'AssetGroup': 'asset.parent[AssetBuild].parent[AssetGroup]', 153 | # 'Project': 'asset.parent.parent.project', 154 | # 'Sequence': 'asset.parent.parent[Sequence]', 155 | # 'Shot': 'asset.parent[Shot]'}}} 156 | 157 | pprint(SCHEMA.vfx) 158 | # output: 159 | # {'entities': {'AssetVersion': {'AssetBuild': 'asset.parent[AssetBuild]', 160 | # 'AssetGroup': 'asset.parent[AssetBuild].parent[AssetGroup]', 161 | # 'Sequence': ['asset.parent[Shot].parent[Sequence]', 162 | # 'asset.parent[Sequence]'], 163 | # 'Shot': 'asset.parent[Folder].parent[Shot]'}, 164 | # 'Component': {'AssetBuild': 'asset.parent[AssetBuild]', 165 | # 'AssetGroup': 'asset.parent[AssetBuild].parent[AssetGroup]', 166 | # 'Project': 'asset.parent.parent.project', 167 | # 'Sequence': 'asset.parent.parent[Sequence]', 168 | # 'Shot': 'asset.parent[Shot]'}}, 169 | # 'name': 'vfx'} 170 | 171 | pprint(SCHEMA.episodic) 172 | # output: 173 | # {'entities': {'AssetVersion': {'Episode': 'asset.parent[Shot].parent[Episode]'}}, 174 | # 'name': u'episodic'} 175 | ``` 176 | 177 | In the example, `SCHEMA.default` represents the default schema, which serves as the baseline for other schemas. `SCHEMA.vfx` demonstrates an override named "vfx," which inherits all the entries from the default schema but overrides the relationship for `Shot`. On the other hand, `SCHEMA.episodic` demonstrates an override named "episodic" that doesn't inherit from the default schema and only includes the relationships it provides, as it has dis abled inheritance. 178 | ```{warning} 179 | The relationships provided by your resolver take precedence over the relationships automatically derived from the Session/Database schema. 180 | ``` 181 | 182 | 183 | ## WARN_ON_INJECT 184 | 185 | The _WARN_ON_INJECT_ variable can be set to either `True` or `False`, with the default value being `False`. When set to `True`, it enables the emission of a `logging.warning` message when the _inject_ query criterion is used. 186 | 187 | The _inject_ criterion serves as a workaround for situations where certain criteria implementations are missing. Enabling the _WARN_ON_INJECT_ option can be useful in identifying the usage of this criterion in your codebase and exploring potential alternative approaches that may exist. 188 | -------------------------------------------------------------------------------- /doc/sphinx_source/index.md: -------------------------------------------------------------------------------- 1 | # Trackteroid 2 | 3 | 4 | **Declarative, object-oriented wrapper for Ftrack queries. Powerful functional-style interactions with resulting collections.** 5 | 6 | 7 | Welcome to Trackteroid’s documentation. Get started with [Installation](installation.md) and then get an overview with the [Quickstart](quickstart.md). 
8 | 9 | Trackteroid depends on [Ftrack](https://www.ftrack.com/en/) and its [Python API](https://ftrack-python-api.readthedocs.io/) a project management and production tracking software specifically designed for the media and entertainment industry. It is commonly used in the fields of film, television, animation, visual effects, and gaming. 10 | 11 | 12 | ## Motivation 13 | 14 | We have decided to build a wrapping API around the Ftrack Python API to address several limitations and challenges that arise when directly interacting with the Python API. While the Ftrack Python API offers a lot of flexibility, there are certain aspects that can make development and maintenance more cumbersome and less intuitive. By creating a wrapping API, we aim to overcome these challenges and provide a more streamlined and developer-friendly experience. Here are the key reasons for this decision: 15 | 16 | - **Simplifying Querying** 17 | - One of the primary challenges with the Ftrack Python API is the requirement to write queries as SQL-like strings using a custom query language. This approach often leads to complex string formatting and can be hard to read, understand, and debug. This project aims for simplifying the query logic and provides a more expressive and intuitive querying interface. This allows developers to construct queries using a more natural and readable syntax, resulting in improved productivity and reduced errors. 18 | 19 | 20 | - **Optimizing Query Performance** 21 | - When working with the Ftrack Python API, developers need to ensure they write efficient queries to minimize unnecessary database calls and optimize performance. This requires careful consideration of the data retrieval logic and understanding the performance implications of different query constructions. In contrast, our API goes the extra mile by attempting to optimize the queries for performance automatically. By leveraging various optimization techniques, we aim to reduce the number of database queries, optimize data retrieval, and improve overall system performance. This alleviates the burden on developers to manually optimize their queries, allowing them to focus more on building features and functionality. 22 | 23 | 24 | - **Abstracting Database Schema Complexity** 25 | - Constructing queries with the Ftrack Python API requires a good understanding of the underlying database schema and relationships. This can be a barrier for developers who are not familiar with the intricacies of the Ftrack data model. We aim to abstract away the complexities of the database schema and provide a higher-level interface that shields developers from the details. This simplifies the development process and allows developers to focus on the functionality they want to build, rather than getting lost in the intricacies of the database structure. 26 | 27 | 28 | - **Enhancing Resulting Collections** 29 | - The collections returned by the Ftrack Python API after executing queries may not always be convenient enough to interact with directly. They may lack certain methods or properties that would make data manipulation and processing more efficient. We can enrich the resulting collections with additional functionalities and utility methods tailored to the specific needs of our application. This improves the developer experience by providing more convenient and intuitive ways to work with the retrieved data, ultimately enhancing productivity and code maintainability. 
30 | 31 | 32 | - **Improved Field Accessibility** 33 | - The Ftrack Python API does not always present the available fields on entities directly, requiring developers to refer to the documentation or inspect the schema to determine the available properties. We do take steps to enhance field accessibility to some extent. We aim to provide a more intuitive and discoverable way for developers to access entity fields by exposing them directly through the API. This saves developers time and effort by eliminating the need for constant referencing of documentation or inspection of the underlying schema. It enhances productivity and code maintainability by making entity fields more accessible and discoverable within the development workflow. 34 | 35 | 36 | In summary Trackeroid tries to empower developers by providing a more intuitive and efficient way to interact with the Ftrack platform, ultimately accelerating development, improving code quality, and enhancing the overall user experience. 37 | 38 | ## Comparison 39 | ````{admonition} **With **Trackteroid**....** 40 | :class: dropdown 41 | ```python 42 | # Calculate and display the total time duration logged for 43 | # AssetBuild types within specified Shots and Folders. 44 | # 45 | # output: 46 | # Found 9 assetbuilds. 47 | # Found 1 assetbuilds with timelogs. 48 | # {16.0: ['Drone Craft'], 49 | # 'No time tracked yet.': ['Drawer Cabinet 01', 50 | # 'Gothic Commode 01', 51 | # 'Shelf 01', 52 | # 'Side Table 01', 53 | # 'Small Wooden Table 01', 54 | # 'Vintage Wooden Drawer 01', 55 | # 'Wooden Table 01', 56 | # 'Wooden Table 02']} 57 | 58 | from pprint import pprint 59 | 60 | from trackteroid import ( 61 | Query, 62 | AssetBuild, 63 | Folder, 64 | Task, 65 | Project 66 | ) 67 | 68 | assetbuild_collection = Query(AssetBuild).\ 69 | by_name(Project, "sync", "showroom").\ 70 | by_name(Folder, "Asset%", "Delivery 3").\ 71 | get_all( 72 | projections=[ 73 | Task.Timelog, 74 | Task.Timelog.duration 75 | ] 76 | ) 77 | 78 | print( 79 | f"Found {len(assetbuild_collection)} assets.\n" 80 | f"Found {assetbuild_collection.count(lambda ac: ac.Task.Timelog)} assetbuilds with timelogs." 81 | ) 82 | 83 | pprint( 84 | assetbuild_collection.group_and_map( 85 | lambda abc: abc.Task.Timelog.fold( 86 | 0, 87 | lambda current, tc: current + tc.duration[0] / 3600 88 | ) or "No time tracked yet.", 89 | lambda abc: abc.sort( 90 | lambda abc: abc.name 91 | ).name 92 | ) 93 | ) 94 | ``` 95 | ```` 96 | 97 | ````{admonition} ...in contrast to the **Ftrack Python API**. 98 | :class: dropdown 99 | ```python 100 | # Calculate and display the total time duration logged for 101 | # AssetBuild types within specified Shots and Folders. 102 | # 103 | # output: 104 | # Found 9 assetbuilds. 105 | # Found 1 assetbuilds with timelogs. 
106 | # {16.0: ['Drone Craft'], 107 | # 'No time tracked yet.': ['Drawer Cabinet 01', 108 | # 'Gothic Commode 01', 109 | # 'Shelf 01', 110 | # 'Side Table 01', 111 | # 'Small Wooden Table 01', 112 | # 'Vintage Wooden Drawer 01', 113 | # 'Wooden Table 01', 114 | # 'Wooden Table 02']} 115 | 116 | from pprint import pprint 117 | 118 | import ftrack_api 119 | 120 | session = ftrack_api.Session(auto_connect_event_hub=False, auto_populate=False) 121 | project_names = ("sync", "showroom") 122 | folder_specific = "Delivery 3" 123 | folder_unspecific = "Asset%" 124 | 125 | query = ( 126 | f"select id, name, assets.name, parent.name, project.name, " 127 | f"assets.versions.task.timelogs, assets.versions.task.timelogs.duration " 128 | f"from AssetBuild where project has (name in {project_names}) " 129 | f"and parent[Folder] has (name is '{folder_specific}' or name like '{folder_unspecific}')" 130 | ) 131 | 132 | assetbuilds_no_duration = ["No time tracked yet.", []] 133 | assetbuilds_timelog_duration = [0, []] 134 | 135 | assetbuilds = session.query(query).all() 136 | 137 | for assetbuild in assetbuilds: 138 | has_duration = False 139 | for asset in assetbuild["assets"]: 140 | for version in asset["versions"]: 141 | durations = [_["duration"] for _ in version["task"]["timelogs"]] 142 | if any(durations): 143 | for duration in durations: 144 | assetbuilds_timelog_duration[0] += duration / 3600 145 | if not has_duration: 146 | has_duration = True 147 | 148 | if has_duration: 149 | assetbuilds_timelog_duration[1].append(assetbuild["name"]) 150 | else: 151 | assetbuilds_no_duration[1].append(assetbuild["name"]) 152 | 153 | print( 154 | f"Found {len(assetbuilds)} assetbuilds.\n" 155 | f"Found {len(assetbuilds_timelog_duration[1])} assetbuilds with timelogs." 156 | ) 157 | 158 | assetbuilds_no_duration[1].sort() 159 | assetbuilds_timelog_duration[1].sort() 160 | 161 | pprint( 162 | dict( 163 | [assetbuilds_no_duration, assetbuilds_timelog_duration] 164 | ) 165 | ) 166 | ``` 167 | ```` 168 | 169 | ## Overview 170 | ```{toctree} 171 | :maxdepth: 1 172 | 173 | installation.md 174 | quickstart.md 175 | configuration.md 176 | session.md 177 | entity.md 178 | query.md 179 | collections.md 180 | ``` 181 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 | 4 | 5 | 6 |
7 | 8 | # Trackteroid 9 | 10 | [![Documentation Status][documentation-button]][documentation] 11 | [![Python Versions][pyversion-button]][pypi] 12 | [![Downloads][downloads-button]][downloads] 13 | [![BSD License][bsdlicense-button]][bsdlicense] 14 | 15 | [documentation]: https://trackteroid.readthedocs.io 16 | [documentation-button]: https://readthedocs.org/projects/trackteroid/badge/?version=latest 17 | [pyversion-button]: https://img.shields.io/pypi/pyversions/Trackteroid.svg 18 | [pypi]: https://pypi.org/project/trackteroid/ 19 | [bsdlicense]: https://opensource.org/licenses/BSD-3-Clause 20 | [bsdlicense-button]: https://img.shields.io/badge/license-BSD-yellow.svg 21 | [downloads]: https://pepy.tech/project/trackteroid 22 | [downloads-button]: https://static.pepy.tech/badge/trackteroid 23 | 24 | ---- 25 | **Declarative, object-oriented wrapper for Ftrack queries. Powerful functional-style interactions with resulting collections.** 26 | 27 | Trackteroid depends on [Ftrack](https://www.ftrack.com/en/) and its [Python API](https://ftrack-python-api.readthedocs.io/) a project management and production tracking software specifically designed for the media and entertainment industry. It is commonly used in the fields of film, television, animation, visual effects, and gaming. 28 | 29 | ## Installation 30 | 31 | Install and update using [pip](https://pip.pypa.io/en/stable/getting-started/): 32 | 33 | ```shell 34 | pip install trackteroid 35 | ``` 36 | 37 | ## Motivation 38 | 39 | We have decided to build a wrapping API around the Ftrack Python API to address several limitations and challenges that arise when directly interacting with the Python API. While the Ftrack Python API offers a lot of flexibility, there are certain aspects that can make development and maintenance more cumbersome and less intuitive. By creating a wrapping API, we aim to overcome these challenges and provide a more streamlined and developer-friendly experience. Here are the key reasons for this decision: 40 | 41 | - **Simplifying Querying** 42 | - One of the primary challenges with the Ftrack Python API is the requirement to write queries as SQL-like strings using a custom query language. This approach often leads to complex string formatting and can be hard to read, understand, and debug. This project aims for simplifying the query logic and provides a more expressive and intuitive querying interface. This allows developers to construct queries using a more natural and readable syntax, resulting in improved productivity and reduced errors. 43 | 44 | 45 | - **Optimizing Query Performance** 46 | - When working with the Ftrack Python API, developers need to ensure they write efficient queries to minimize unnecessary database calls and optimize performance. This requires careful consideration of the data retrieval logic and understanding the performance implications of different query constructions. In contrast, our API goes the extra mile by attempting to optimize the queries for performance automatically. By leveraging various optimization techniques, we aim to reduce the number of database queries, optimize data retrieval, and improve overall system performance. This alleviates the burden on developers to manually optimize their queries, allowing them to focus more on building features and functionality. 47 | 48 | 49 | - **Abstracting Database Schema Complexity** 50 | - Constructing queries with the Ftrack Python API requires a good understanding of the underlying database schema and relationships. 
This can be a barrier for developers who are not familiar with the intricacies of the Ftrack data model. We aim to abstract away the complexities of the database schema and provide a higher-level interface that shields developers from the details. This simplifies the development process and allows developers to focus on the functionality they want to build, rather than getting lost in the intricacies of the database structure. 51 | 52 | 53 | - **Enhancing Resulting Collections** 54 | - The collections returned by the Ftrack Python API after executing queries may not always be convenient enough to interact with directly. They may lack certain methods or properties that would make data manipulation and processing more efficient. We can enrich the resulting collections with additional functionalities and utility methods tailored to the specific needs of our application. This improves the developer experience by providing more convenient and intuitive ways to work with the retrieved data, ultimately enhancing productivity and code maintainability. 55 | 56 | 57 | - **Improved Field Accessibility** 58 | - The Ftrack Python API does not always present the available fields on entities directly, requiring developers to refer to the documentation or inspect the schema to determine the available properties. We do take steps to enhance field accessibility to some extent. We aim to provide a more intuitive and discoverable way for developers to access entity fields by exposing them directly through the API. This saves developers time and effort by eliminating the need for constant referencing of documentation or inspection of the underlying schema. It enhances productivity and code maintainability by making entity fields more accessible and discoverable within the development workflow. 59 | 60 | In summary Trackeroid tries to empower developers by providing a more intuitive and efficient way to interact with the Ftrack platform, ultimately accelerating development, improving code quality, and enhancing the overall user experience. 61 | 62 | 63 | ### Comparison 64 | With **Trackteroid**... 65 | ```python 66 | # Calculate and display the total time duration logged for 67 | # AssetBuild types within specified Shots and Folders. 68 | # 69 | # output: 70 | # Found 9 assetbuilds. 71 | # Found 1 assetbuilds with timelogs. 72 | # {16.0: ['Drone Craft'], 73 | # 'No time tracked yet.': ['Drawer Cabinet 01', 74 | # 'Gothic Commode 01', 75 | # 'Shelf 01', 76 | # 'Side Table 01', 77 | # 'Small Wooden Table 01', 78 | # 'Vintage Wooden Drawer 01', 79 | # 'Wooden Table 01', 80 | # 'Wooden Table 02']} 81 | 82 | from pprint import pprint 83 | 84 | from trackteroid import ( 85 | Query, 86 | AssetBuild, 87 | Folder, 88 | Task, 89 | Project 90 | ) 91 | 92 | assetbuild_collection = Query(AssetBuild).\ 93 | by_name(Project, "sync", "showroom").\ 94 | by_name(Folder, "Asset%", "Delivery 3").\ 95 | get_all( 96 | projections=[ 97 | Task.Timelog, 98 | Task.Timelog.duration 99 | ] 100 | ) 101 | 102 | print( 103 | f"Found {len(assetbuild_collection)} assets.\n" 104 | f"Found {assetbuild_collection.count(lambda ac: ac.Task.Timelog)} assetbuilds with timelogs." 105 | ) 106 | 107 | pprint( 108 | assetbuild_collection.group_and_map( 109 | lambda abc: abc.Task.Timelog.fold( 110 | 0, 111 | lambda current, tc: current + tc.duration[0] / 3600 112 | ) or "No time tracked yet.", 113 | lambda abc: abc.sort( 114 | lambda abc: abc.name 115 | ).name 116 | ) 117 | ) 118 | ``` 119 | 120 | ...in contrast to the **Ftrack Python API**. 
121 | ```python 122 | # Calculate and display the total time duration logged for 123 | # AssetBuild types within specified Shots and Folders. 124 | # 125 | # output: 126 | # Found 9 assetbuilds. 127 | # Found 1 assetbuilds with timelogs. 128 | # {16.0: ['Drone Craft'], 129 | # 'No time tracked yet.': ['Drawer Cabinet 01', 130 | # 'Gothic Commode 01', 131 | # 'Shelf 01', 132 | # 'Side Table 01', 133 | # 'Small Wooden Table 01', 134 | # 'Vintage Wooden Drawer 01', 135 | # 'Wooden Table 01', 136 | # 'Wooden Table 02']} 137 | 138 | from pprint import pprint 139 | 140 | import ftrack_api 141 | 142 | session = ftrack_api.Session(auto_connect_event_hub=False, auto_populate=False) 143 | project_names = ("sync", "showroom") 144 | folder_specific = "Delivery 3" 145 | folder_unspecific = "Asset%" 146 | 147 | query = ( 148 | f"select id, name, assets.name, parent.name, project.name, " 149 | f"assets.versions.task.timelogs, assets.versions.task.timelogs.duration " 150 | f"from AssetBuild where project has (name in {project_names}) " 151 | f"and parent[Folder] has (name is '{folder_specific}' or name like '{folder_unspecific}')" 152 | ) 153 | 154 | assetbuilds_no_duration = ["No time tracked yet.", []] 155 | assetbuilds_timelog_duration = [0, []] 156 | 157 | assetbuilds = session.query(query).all() 158 | 159 | for assetbuild in assetbuilds: 160 | has_duration = False 161 | for asset in assetbuild["assets"]: 162 | for version in asset["versions"]: 163 | durations = [_["duration"] for _ in version["task"]["timelogs"]] 164 | if any(durations): 165 | for duration in durations: 166 | assetbuilds_timelog_duration[0] += duration / 3600 167 | if not has_duration: 168 | has_duration = True 169 | 170 | if has_duration: 171 | assetbuilds_timelog_duration[1].append(assetbuild["name"]) 172 | else: 173 | assetbuilds_no_duration[1].append(assetbuild["name"]) 174 | 175 | print( 176 | f"Found {len(assetbuilds)} assetbuilds.\n" 177 | f"Found {len(assetbuilds_timelog_duration[1])} assetbuilds with timelogs." 178 | ) 179 | 180 | assetbuilds_no_duration[1].sort() 181 | assetbuilds_timelog_duration[1].sort() 182 | 183 | pprint( 184 | dict( 185 | [assetbuilds_no_duration, assetbuilds_timelog_duration] 186 | ) 187 | ) 188 | ``` 189 | 190 | ## Usage 191 | 192 | See the [latest documentation][documentation] for usage details. -------------------------------------------------------------------------------- /doc/sphinx_source/collections/entitycollection.md: -------------------------------------------------------------------------------- 1 | ### Higher-Order Methods 2 | 3 | The EntityCollection class provides higher-order methods that accept functions as arguments, aligning with the principles of functional programming. 4 | 5 | #### apply 6 | 7 | `apply(predicate, attribute_name=None)` applies a given predicate function to each element in the collection and assigns the generated value to the specified attribute. 8 | If no attribute name is provided, the value is directly assigned to the calling collection. 9 | 10 | **Example 1:** Override status of all tasks associated to an AssetVersion collection. 11 | ```{include} collections/examples.md 12 | :start-after: example apply1 start 13 | :end-before: example apply1 end 14 | ``` 15 | 16 | **Example 1:** Extend the comment field of items within an AssetVersion collection. 
17 | ```{include} collections/examples.md 18 | :start-after: example apply2 start 19 | :end-before: example apply2 end 20 | ``` 21 | 22 | #### count 23 | 24 | `count(predicate)` returns the number of elements for which a given predicate function returns `True`. 25 | 26 | **Example 1:** Count the occurrences of assets that contain "character" and "environment". 27 | ```{include} collections/examples.md 28 | :start-after: example count1 start 29 | :end-before: example count1 end 30 | ``` 31 | 32 | #### filter 33 | 34 | `filter(predicate)` is used to selectively filter a collection based on a given predicate function. 35 | The predicate function is applied to each element in the collection, and its return value, which is expected to be a boolean or coercible to a boolean, determines whether the element is included in the resulting collection. Elements for which the predicate returns `True` are added to the filtered collection, while those for which it returns `False` are excluded. 36 | 37 | **Example 1:** Generate a new collection that only contains elements with "prop" in the name. 38 | ```{include} collections/examples.md 39 | :start-after: example filter1 start 40 | :end-before: example filter1 end 41 | ``` 42 | 43 | **Example 2:** Generate a new collection that only contains the assets which have 10 or more versions associated. 44 | ```{include} collections/examples.md 45 | :start-after: example filter2 start 46 | :end-before: example filter2 end 47 | ``` 48 | 49 | #### fold 50 | 51 | `fold(start_value, predicate)` accumulates the value starting with an initial value and applying an operation from the first to the last element in a collection. 52 | 53 | **Example 1:** Determine the total filesize of all components for one Asset. 54 | ```{include} collections/examples.md 55 | :start-after: example fold1 start 56 | :end-before: example fold1 end 57 | ``` 58 | 59 | #### group 60 | 61 | `group(predicate)` returns a dictionary with keys given by the predicate. All entities from the original collection will be mapped to their corresponding key. 62 | 63 | **Example 1:** Group all AssetVersion collections via name of their asset. 64 | ```{include} collections/examples.md 65 | :start-after: example group1 start 66 | :end-before: example group1 end 67 | ``` 68 | 69 | **Example 2:** Group all AssetVersion collections via state name of their status. 70 | ```{include} collections/examples.md 71 | :start-after: example group2 start 72 | :end-before: example group2 end 73 | ``` 74 | 75 | #### group_and_map 76 | 77 | `group_and_map(group_predicate, map_predicate)` runs a [group](#group) first and then runs the map predicate function on all the collections in the resulting dictionary values. 78 | 79 | **Example1:** Provide a status -> last note overview for versions created by a given user. 80 | ```{include} collections/examples.md 81 | :start-after: example group_and_map1 start 82 | :end-before: example group_and_map1 end 83 | ``` 84 | 85 | #### map 86 | 87 | `map(predicate)` generates a sequence of results by applying a given predicate function to each element in the collection. The predicate function is invoked for each element, and its return value is included in the generated sequence. 88 | 89 | **Example1:** Generate a formatted representation of the AssetVersion combining the version number and the Asset name. 
90 | ```{include} collections/examples.md
91 | :start-after: example map1 start
92 | :end-before: example map1 end
93 | ```
94 | 
95 | #### max
96 | 
97 | `max(predicate)` returns the first element yielding the largest value of the given function.
98 | 
99 | **Example 1:** Get the AssetVersion collection with the largest version number for one asset.
100 | ```{include} collections/examples.md
101 | :start-after: example max1 start
102 | :end-before: example max1 end
103 | ```
104 | 
105 | **Example 2:** Get the Asset collection that holds an AssetVersion with the largest version number.
106 | ```{include} collections/examples.md
107 | :start-after: example max2 start
108 | :end-before: example max2 end
109 | ```
110 | 
111 | ```{attention}
112 | The result of `max` will always be a single-element collection, even if multiple elements yield the same largest value.
113 | If multiple entities have the same max value, we follow Python's max implementation by returning the last occurrence of this value.
114 | ```
115 | 
116 | #### min
117 | 
118 | `min(predicate)` returns the first element yielding the smallest value of the given function.
119 | 
120 | **Example 1:** Get the AssetVersion collection with the smallest version number for one asset.
121 | ```{include} collections/examples.md
122 | :start-after: example min1 start
123 | :end-before: example min1 end
124 | ```
125 | 
126 | **Example 2:** Get the Asset collection that holds an AssetVersion with the smallest version number.
127 | ```{include} collections/examples.md
128 | :start-after: example min2 start
129 | :end-before: example min2 end
130 | ```
131 | 
132 | ```{attention}
133 | The result of `min` will always be a single-element collection, even if multiple elements yield the same smallest value.
134 | If multiple entities have the same min value, we follow Python's min implementation by returning the first occurrence of this value.
135 | ```
136 | 
137 | #### partition
138 | 
139 | `partition(predicate)` splits the collection into a tuple of EntityCollections/EmptyCollections, where the first item is a collection with the elements for which the predicate returned `True` and the second item is a collection with the elements for which the predicate returned `False`.
140 | 
141 | **Example 1:** Split the AssetVersion collection based on a potential "Done" state of the elements.
142 | ```{include} collections/examples.md
143 | :start-after: example partition1 start
144 | :end-before: example partition1 end
145 | ```
146 | 
147 | #### sort
148 | 
149 | `sort(predicate)` returns a collection sorted by the given predicate.
150 | 
151 | **Example 1:** Sort the AssetVersion collection based on the version number of the individual elements.
152 | ```{include} collections/examples.md
153 | :start-after: example sort1 start
154 | :end-before: example sort1 end
155 | ```
156 | 
157 | **Example 2:** Sort the AssetVersion collection based on the asset name of the individual elements.
158 | ```{include} collections/examples.md
159 | :start-after: example sort2 start
160 | :end-before: example sort2 end
161 | ```
162 | 
163 | ### Set Operations
164 | 
165 | #### difference
166 | 
167 | `difference(*collections)` computes the difference between two or more collections.
168 | 
169 | ```{include} collections/examples.md
170 | :start-after: example difference1 start
171 | :end-before: example difference1 end
172 | ```
173 | 
174 | #### intersection
175 | 
176 | `intersection(*collections)` computes the intersection of two or more collections.
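For a quick feel of how this composes with the higher-order methods above, here is a minimal, hedged sketch; it assumes the projected attributes exist on your `AssetVersion` schema and is not meant to replace the included example below.

```python
from trackteroid import Query, AssetVersion

# a hypothetical working set of versions, projecting the attributes we inspect
versions = Query(AssetVersion).get_all(projections=["version", "is_published"])

# two independently filtered subsets ...
published = versions.filter(lambda vc: vc.is_published[0])
beyond_v1 = versions.filter(lambda vc: vc.version[0] > 1)

# ... and a new collection with the versions that satisfy both criteria
published_beyond_v1 = published.intersection(beyond_v1)
```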
177 | 178 | ```{include} collections/examples.md 179 | :start-after: example intersection1 start 180 | :end-before: example intersection1 end 181 | ``` 182 | 183 | #### symmetric_difference 184 | 185 | `symmetric_difference(*collections)` computes the [symmetric difference](https://en.wikipedia.org/wiki/Symmetric_difference) between collections. 186 | 187 | ```{include} collections/examples.md 188 | :start-after: example symmetric difference1 start 189 | :end-before: example symmetric difference1 end 190 | ``` 191 | 192 | #### union 193 | 194 | `union(*collections)` computes the union of two or more collections. 195 | 196 | ```{include} collections/examples.md 197 | :start-after: example union1 start 198 | :end-before: example union1 end 199 | ``` 200 | 201 | ### Type Coercion 202 | 203 | Certain entity types, such as `Component` and `TypedContext` subtypes, can be coerced to their respective base types. This allows for performing [set operations](#set-operations) between multiple types that inherit from the same base type. 204 | Additionally, when creating new entities, the type of the collection determines the type of entity that will be created. 205 | 206 | To perform type coercion, you can use the constructor of the desired entity type. 207 | 208 | ```{include} collections/examples.md 209 | :start-after: example type coercion1 start 210 | :end-before: example type coercion1 end 211 | ``` 212 | 213 | ```{attention} 214 | In certain cases, attributes like _parent_, _ancestors_, _children_, _descendants_, and _components_ will undergo automatic type coercion, as these collections can contain entities of multiple types. 215 | ``` 216 | 217 | `````{important} 218 | Although `Project` is not a subtype of `TypedContext`, accessing the _parent_ or _ancestors_ attributes may include `Project` entities. 219 | The coercion performed on these attributes ensures that `Project` entities are considered, allowing for proper access and attribute fetching in a cohesive manner. 220 | 221 | ````{admonition} See the following example that demonstrates it: 222 | :class: dropdown 223 | 224 | ```{include} collections/examples.md 225 | :start-after: example type filtering1 start 226 | :end-before: example type filtering1 end 227 | ``` 228 | ```` 229 | ````` 230 | 231 | ### Type Filtering 232 | 233 | Contrary to [type coercion](#type-coercion), filtering for subtypes is straightforward using the implemented item getter. 234 | 235 | `````{tip} 236 | ````{admonition} Use the [group_and_map](#group-and-map) to identify exising types. 237 | :class: dropdown 238 | 239 | ```{include} collections/examples.md 240 | :start-after: example type filtering2 start 241 | :end-before: example type filtering2 end 242 | ``` 243 | ```` 244 | ````` -------------------------------------------------------------------------------- /src/trackteroid/entities/declarations.py: -------------------------------------------------------------------------------- 1 | # BSD 3-Clause License 2 | # 3 | # Copyright (c) 2023, Trixter GmbH 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are met: 7 | # 8 | # 1. Redistributions of source code must retain the above copyright notice, this 9 | # list of conditions and the following disclaimer. 10 | # 11 | # 2. 
Redistributions in binary form must reproduce the above copyright notice, 12 | # this list of conditions and the following disclaimer in the documentation 13 | # and/or other materials provided with the distribution. 14 | # 15 | # 3. Neither the name of the copyright holder nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | # 30 | 31 | import itertools 32 | import importlib 33 | import inspect 34 | import logging 35 | import re 36 | 37 | 38 | from ..configuration import LOGGING_NAMESPACE 39 | 40 | 41 | _LOG = logging.getLogger("{}.declarations".format(LOGGING_NAMESPACE)) 42 | 43 | 44 | class ForwardDeclaration: 45 | pass 46 | 47 | 48 | class RelationshipDeclaration: 49 | """ a declaration for a used relationship that needs resolution 50 | 51 | """ 52 | def __init__(self, parent, child): 53 | self._filter_re = re.compile("\[\w+\]") 54 | self._chain = [] 55 | self._entities_module = importlib.import_module("..entities", __name__) 56 | self._chain.extend( 57 | [ 58 | getattr(self._entities_module, parent.__name__, parent), 59 | getattr(self._entities_module, child.__name__ if not isinstance(child, str) else child, child) 60 | ] 61 | ) 62 | 63 | def __getattr__(self, item): 64 | # TODO: what requests `shape`?? 65 | if not item.startswith("__") and item != "shape": 66 | self._chain.append(getattr(self._entities_module, item, item)) 67 | return self 68 | 69 | def resolve_path_for(self, entity_type, session, schema): 70 | relationships = [] 71 | for item in self._chain: 72 | if item: 73 | if not isinstance(item, str): 74 | item.relationship(session=session, schema=schema) 75 | relationship = entity_type.relationship.get(item) 76 | if not relationship: 77 | _LOG.warning( 78 | "Unable to retrieve relationship between target entity type `{}` and entity type `{}`." 
79 | "You should consider providing via the configurable `RELATIONSHIP_RESOLVER`.".format( 80 | entity_type.__class__.__name__, 81 | item.__name__ 82 | ) 83 | ) 84 | continue 85 | if not isinstance(relationship.relation, list): 86 | relationships.append([self._filter_re.sub("", relationship.relation)]) 87 | else: 88 | relationships.append( 89 | [self._filter_re.sub("", _) for _ in relationship.relation] 90 | ) 91 | entity_type = item 92 | else: 93 | relationships.append([item]) 94 | 95 | return [".".join(_) for _ in itertools.product(*relationships)] 96 | 97 | 98 | class ForwardDeclareCompare(type): 99 | def __eq__(self, other): 100 | if other and inspect.isclass(other) and issubclass(other, ForwardDeclaration): 101 | return self.__name__ == other.__name__ 102 | else: 103 | return super(ForwardDeclareCompare, self).__eq__(other) 104 | 105 | def __ne__(self, other): 106 | return not self == other 107 | 108 | def __getattr__(self, item): 109 | return RelationshipDeclaration(parent=self, child=item) 110 | 111 | def __hash__(self): 112 | return hash(self.__class__.__name__) 113 | 114 | 115 | # TODO: we need to auto-regenerate this when the schema changes 116 | class Appointment(ForwardDeclaration): pass 117 | class Asset(ForwardDeclaration): pass 118 | class AssetBuild(ForwardDeclaration): pass 119 | class AssetCustomAttributeLink(ForwardDeclaration): pass 120 | class AssetCustomAttributeLinkFrom(ForwardDeclaration): pass 121 | class AssetCustomAttributeValue(ForwardDeclaration): pass 122 | class AssetGroup(ForwardDeclaration): pass 123 | class AssetType(ForwardDeclaration): pass 124 | class AssetVersion(ForwardDeclaration): pass 125 | class AssetVersionCustomAttributeLink(ForwardDeclaration): pass 126 | class AssetVersionCustomAttributeLinkFrom(ForwardDeclaration): pass 127 | class AssetVersionCustomAttributeValue(ForwardDeclaration): pass 128 | class AssetVersionLink(ForwardDeclaration): pass 129 | class AssetVersionList(ForwardDeclaration): pass 130 | class AssetVersionStatusChange(ForwardDeclaration): pass 131 | class CalendarEvent(ForwardDeclaration): pass 132 | class CalendarEventResource(ForwardDeclaration): pass 133 | class Component(ForwardDeclaration): pass 134 | class ComponentCustomAttributeLink(ForwardDeclaration): pass 135 | class ComponentCustomAttributeLinkFrom(ForwardDeclaration): pass 136 | class ComponentLocation(ForwardDeclaration): pass 137 | class ContainerComponent(ForwardDeclaration): pass 138 | class Context(ForwardDeclaration): pass 139 | class ContextCustomAttributeLink(ForwardDeclaration): pass 140 | class ContextCustomAttributeLinkFrom(ForwardDeclaration): pass 141 | class ContextCustomAttributeValue(ForwardDeclaration): pass 142 | class Conversation(ForwardDeclaration): pass 143 | class CustomAttributeConfiguration(ForwardDeclaration): pass 144 | class CustomAttributeGroup(ForwardDeclaration): pass 145 | class CustomAttributeLink(ForwardDeclaration): pass 146 | class CustomAttributeLinkConfiguration(ForwardDeclaration): pass 147 | class CustomAttributeLinkFrom(ForwardDeclaration): pass 148 | class CustomAttributeType(ForwardDeclaration): pass 149 | class CustomAttributeValue(ForwardDeclaration): pass 150 | class CustomConfigurationBase(ForwardDeclaration): pass 151 | class Dashboard(ForwardDeclaration): pass 152 | class DashboardResource(ForwardDeclaration): pass 153 | class DashboardWidget(ForwardDeclaration): pass 154 | class Disk(ForwardDeclaration): pass 155 | class EntitySetting(ForwardDeclaration): pass 156 | class Epic(ForwardDeclaration): pass 157 | class 
Episode(ForwardDeclaration): pass 158 | class Event(ForwardDeclaration): pass 159 | class Feed(ForwardDeclaration): pass 160 | class FileComponent(ForwardDeclaration): pass 161 | class Floor(ForwardDeclaration): pass 162 | class Folder(ForwardDeclaration): pass 163 | class Group(ForwardDeclaration): pass 164 | class GroupCustomAttributeLink(ForwardDeclaration): pass 165 | class GroupCustomAttributeLinkFrom(ForwardDeclaration): pass 166 | class Hardware(ForwardDeclaration): pass 167 | class Job(ForwardDeclaration): pass 168 | class JobComponent(ForwardDeclaration): pass 169 | class License(ForwardDeclaration): pass 170 | class List(ForwardDeclaration): pass 171 | class ListCategory(ForwardDeclaration): pass 172 | class ListCustomAttributeLink(ForwardDeclaration): pass 173 | class ListCustomAttributeLinkFrom(ForwardDeclaration): pass 174 | class ListCustomAttributeValue(ForwardDeclaration): pass 175 | class ListObject(ForwardDeclaration): pass 176 | class ListObjectCustomAttributeValue(ForwardDeclaration): pass 177 | class Location(ForwardDeclaration): pass 178 | class Manager(ForwardDeclaration): pass 179 | class ManagerType(ForwardDeclaration): pass 180 | class Membership(ForwardDeclaration): pass 181 | class Message(ForwardDeclaration): pass 182 | class Metadata(ForwardDeclaration): pass 183 | class Milestone(ForwardDeclaration): pass 184 | class Note(ForwardDeclaration): pass 185 | class NoteCategory(ForwardDeclaration): pass 186 | class NoteComponent(ForwardDeclaration): pass 187 | class NoteLabel(ForwardDeclaration): pass 188 | class NoteLabelLink(ForwardDeclaration): pass 189 | class ObjectType(ForwardDeclaration): pass 190 | class Office(ForwardDeclaration): pass 191 | class Participant(ForwardDeclaration): pass 192 | class Priority(ForwardDeclaration): pass 193 | class Project(ForwardDeclaration): pass 194 | class ProjectSchema(ForwardDeclaration): pass 195 | class ProjectSchemaObjectType(ForwardDeclaration): pass 196 | class ProjectSchemaOverride(ForwardDeclaration): pass 197 | class Recipient(ForwardDeclaration): pass 198 | class Resource(ForwardDeclaration): pass 199 | class Resources(ForwardDeclaration): pass 200 | class ReviewSession(ForwardDeclaration): pass 201 | class ReviewSessionInvitee(ForwardDeclaration): pass 202 | class ReviewSessionObject(ForwardDeclaration): pass 203 | class ReviewSessionObjectStatus(ForwardDeclaration): pass 204 | class Schema(ForwardDeclaration): pass 205 | class SchemaStatus(ForwardDeclaration): pass 206 | class SchemaType(ForwardDeclaration): pass 207 | class Scope(ForwardDeclaration): pass 208 | class Seat(ForwardDeclaration): pass 209 | class SecurityRole(ForwardDeclaration): pass 210 | class Sequence(ForwardDeclaration): pass 211 | class SequenceComponent(ForwardDeclaration): pass 212 | class Setting(ForwardDeclaration): pass 213 | class SettingComponent(ForwardDeclaration): pass 214 | class Shot(ForwardDeclaration): pass 215 | class Sprint(ForwardDeclaration): pass 216 | class State(ForwardDeclaration): pass 217 | class Status(ForwardDeclaration): pass 218 | class StatusChange(ForwardDeclaration): pass 219 | class Task(ForwardDeclaration): pass 220 | class TaskTemplate(ForwardDeclaration): pass 221 | class TaskTemplateItem(ForwardDeclaration): pass 222 | class TaskTypeSchema(ForwardDeclaration): pass 223 | class TaskTypeSchemaType(ForwardDeclaration): pass 224 | class TestTest(ForwardDeclaration): pass 225 | class Timelog(ForwardDeclaration): pass 226 | class Timer(ForwardDeclaration): pass 227 | class Type(ForwardDeclaration): pass 228 | 
class TypedContext(ForwardDeclaration): pass 229 | class TypedContextLink(ForwardDeclaration): pass 230 | class TypedContextList(ForwardDeclaration): pass 231 | class TypedContextStatusChange(ForwardDeclaration): pass 232 | class User(ForwardDeclaration): pass 233 | class UserApplicationState(ForwardDeclaration): pass 234 | class UserCustomAttributeLink(ForwardDeclaration): pass 235 | class UserCustomAttributeLinkFrom(ForwardDeclaration): pass 236 | class UserCustomAttributeValue(ForwardDeclaration): pass 237 | class UserSecurityRole(ForwardDeclaration): pass 238 | class UserSecurityRoleProject(ForwardDeclaration): pass 239 | class UserType(ForwardDeclaration): pass 240 | class WorkflowSchema(ForwardDeclaration): pass 241 | class WorkflowSchemaStatus(ForwardDeclaration): pass -------------------------------------------------------------------------------- /tests/test_authoring.py: -------------------------------------------------------------------------------- 1 | import random 2 | import uuid 3 | from collections import OrderedDict 4 | 5 | import pytest 6 | 7 | from trackteroid import ( 8 | Query, 9 | Sequence, 10 | Project, 11 | Shot, 12 | Task, 13 | PROJECT_SCHEMAS, 14 | TASK_TYPES, 15 | NoteCategory, 16 | Note, 17 | ) 18 | from trackteroid.entities.base import EntityCollection, Entity 19 | 20 | 21 | def test_link_inputs(scenario_sequence): 22 | sequences = ( 23 | Query(Sequence).by_id(*scenario_sequence.sequence_ids).get_all(order_by="name") 24 | ) 25 | sequence1 = sequences[0] 26 | sequence2 = sequences[1] 27 | sequence3 = sequences[2:4] 28 | 29 | after_link1 = sequence1.link_inputs(sequence2) 30 | after_link2 = sequence2.link_inputs(sequence3) 31 | 32 | assert isinstance(after_link1, EntityCollection) 33 | assert "Sequence" == after_link1._entity.__class__.__name__ 34 | assert isinstance(after_link2, EntityCollection) 35 | assert "Sequence" == after_link2._entity.__class__.__name__ 36 | 37 | sequence1.commit() 38 | 39 | assert ( 40 | Query(Sequence) 41 | .by_id(*sequence1.id) 42 | .get_one(projections=["incoming_links.from_id"]) 43 | .incoming_links.from_id 44 | == sequence2.id 45 | ) 46 | 47 | assert ( 48 | Query(Sequence) 49 | .by_id(*sequence2.id) 50 | .get_one(projections=["incoming_links.from_id"]) 51 | .incoming_links.from_id 52 | == sequence3.id, 53 | ) 54 | 55 | 56 | def test_create(scenario_project, ftrack_session): 57 | project = Query(Project).by_id(scenario_project.project_id).get_one() 58 | 59 | test_entities = [] 60 | 61 | # create a sequence 62 | sequence = project.children[Sequence].create(name="Sequence") 63 | test_entities.append(("Sequence", sequence.id[0])) 64 | 65 | # create some shots 66 | shot1 = sequence.children[Shot].create(name="Shot1") 67 | shot2 = sequence.children[Shot].create(name="Shot2") 68 | test_entities.append(("Shot", shot1.id[0])) 69 | test_entities.append(("Shot", shot2.id[0])) 70 | 71 | # create some tasks["name 72 | shot1_task1, shot1_task2 = shot1.children[Task].create_batch( 73 | {"name": "Modeling", "type": "Modeling"}, 74 | {"name": "Rigging", "type": "Rigging"}, 75 | ) 76 | shot2_tasks = shot2.children[Task].create_batch( 77 | {"name": "Modeling", "type": "Modeling"}, 78 | ) 79 | shot2_tasks2 = shot2_tasks.create_batch( 80 | {"name": "Rigging", "type": "Rigging"}, 81 | {"name": "Animation", "type": "Animation"}, 82 | ) 83 | tasks = [ 84 | shot1_task1, 85 | shot1_task2, 86 | shot2_tasks, 87 | shot2_tasks2, 88 | ] 89 | for task in tasks: 90 | for id in task.id: 91 | test_entities.append(("Task", id)) 92 | 93 | asset_types = 
ftrack_session.query("AssetType").all() 94 | 95 | # create some assets 96 | asset1 = shot1.assets.create(name="Asset1", type=asset_types[0]["name"]) 97 | asset2 = shot1.assets.create(name="Asset2", type=asset_types[1]["name"]) 98 | test_entities.append(("Asset", asset1.id[0])) 99 | test_entities.append(("Asset", asset2.id[0])) 100 | 101 | # create some versions 102 | assetversion1 = asset1.versions.create(task=shot1_task1) 103 | assetversion2 = asset2.versions.create(task=shot1_task2) 104 | test_entities.append(("AssetVersion", assetversion1.id[0])) 105 | test_entities.append(("AssetVersion", assetversion2.id[0])) 106 | 107 | # link some versions 108 | assetversion1.link_outputs(assetversion2) 109 | 110 | # push to server 111 | project.commit() 112 | 113 | retrieved_entities = [] 114 | for entity in test_entities: 115 | retrieved_entities.append(ftrack_session.get(*entity)) 116 | assert all(retrieved_entities), "Some entities were not created" 117 | 118 | 119 | def test_create_project(ftrack_session): 120 | some_project = Query(Project).get_first() 121 | 122 | with pytest.raises(AssertionError): 123 | some_project.create() 124 | 125 | with pytest.raises(AssertionError): 126 | some_project.create(name="Foobar") 127 | 128 | with pytest.raises(AssertionError): 129 | some_project.create(name="Foobar", project_schema="DuDoedl") 130 | 131 | created_project = some_project.create( 132 | name=str(uuid.uuid4()), 133 | project_schema=random.choice(list(PROJECT_SCHEMAS.types.keys())), 134 | ) 135 | 136 | try: 137 | assert isinstance(created_project, EntityCollection) 138 | assert "Project" == created_project._entity.__class__.__name__ 139 | 140 | created_project.commit() 141 | 142 | assert Query(Project).by_id(*created_project.id).get_one() == created_project 143 | finally: 144 | ftrack_session.delete(ftrack_session.get("Project", created_project.id[0])) 145 | ftrack_session.commit() 146 | 147 | 148 | def test_create_sequence(scenario_project): 149 | test_project = Query(Project).by_id(scenario_project.project_id).get_one() 150 | 151 | with pytest.raises(AssertionError): 152 | test_project.children[Sequence].create() 153 | 154 | created_sequence = test_project.children[Sequence].create(name=str(uuid.uuid4())) 155 | 156 | assert isinstance(created_sequence, EntityCollection) 157 | assert "Sequence" == created_sequence._entity.__class__.__name__ 158 | 159 | test_project.commit() 160 | 161 | queried_sequence = Query(Sequence).by_id(*created_sequence.id).get_one() 162 | assert isinstance(queried_sequence, EntityCollection) 163 | assert test_project.id == queried_sequence.parent_id 164 | 165 | 166 | def test_create_shot(scenario_sequence): 167 | test_sequence = ( 168 | Query(Sequence).by_id(Project, scenario_sequence.project_id).get_all() 169 | ) 170 | 171 | with pytest.raises(AssertionError): 172 | test_sequence.children[Shot].create() 173 | 174 | with pytest.raises(AssertionError) as excinfo: 175 | test_sequence.children[Shot].create(name=str(uuid.uuid4())) 176 | 177 | assert "Ambiguous context" in str(excinfo.value) 178 | 179 | test_sequence = ( 180 | Query(Sequence).by_id(Project, scenario_sequence.project_id).get_first() 181 | ) 182 | created_shot = test_sequence.children[Shot].create(name=str(uuid.uuid4())) 183 | 184 | assert isinstance(created_shot, EntityCollection) 185 | assert "Shot" == created_shot._entity.__class__.__name__ 186 | 187 | test_sequence.commit() 188 | 189 | queried_shot = Query(Shot).by_id(*created_shot.id).get_one() 190 | assert queried_shot == created_shot 191 | assert 
test_sequence.id == queried_shot.parent_id 192 | 193 | 194 | def test_create_task(scenario_shot): 195 | test_shot = ( 196 | Query(Shot) 197 | .by_id(Project, scenario_shot.project_id) 198 | .get_all(projections=["project.project_schema._task_type_schema.types.name"]) 199 | ) 200 | task_types = [ 201 | TASK_TYPES._to_camel_case(_) 202 | for _ in test_shot.project.project_schema._task_type_schema.types.name 203 | ] 204 | 205 | with pytest.raises(AssertionError) as context: 206 | test_shot.children[Task].create() 207 | 208 | with pytest.raises(AssertionError) as context: 209 | test_shot.children[Task].create(name=str(uuid.uuid4())) 210 | 211 | with pytest.raises(AssertionError) as context: 212 | test_shot.children[Task].create( 213 | name=str(uuid.uuid4()), type=random.choice(task_types) 214 | ) 215 | assert "Ambiguous context" in str(context.value) 216 | 217 | test_shot = Query(Shot).by_id(Project, scenario_shot.project_id).get_first() 218 | 219 | created_task = test_shot.children[Task].create( 220 | name=str(uuid.uuid4()), type=random.choice(task_types) 221 | ) 222 | 223 | assert isinstance(created_task, EntityCollection) 224 | assert "Task" == created_task._entity.__class__.__name__ 225 | 226 | test_shot.commit() 227 | 228 | queried_task = Query(Task).by_id(*created_task.id).get_one() 229 | 230 | assert queried_task == created_task 231 | assert test_shot.id == queried_task.parent_id 232 | 233 | 234 | def _construct_collection_from_ftrack_entities(ftrack_entities, session): 235 | import trackteroid.entities 236 | assert ftrack_entities 237 | 238 | if not isinstance(ftrack_entities, list): 239 | ftrack_entities = [ftrack_entities] 240 | entities = [] 241 | 242 | for ftrack_entity in ftrack_entities: 243 | entities.append( 244 | ( 245 | ftrack_entity["id"], 246 | Entity( 247 | _cls=getattr(trackteroid.entities, ftrack_entity.entity_type), 248 | ftrack_entity=ftrack_entity, 249 | ), 250 | ) 251 | ) 252 | 253 | collection = EntityCollection( 254 | _cls=entities[0][1].__class__, entities=OrderedDict(entities), session=session 255 | ) 256 | collection.query = Query(entities[0][1].__class__).by_id(*[_["id"] for _ in ftrack_entities]) 257 | return collection 258 | 259 | 260 | def test_create_on_ambigious_context(scenario_shot_asset, scenario_assetbuild_asset, ftrack_session): 261 | shots = scenario_shot_asset.grab(ftrack_session, "Shot", ["assets.id"]) 262 | asset_builds = scenario_assetbuild_asset.grab(ftrack_session, "AssetBuild", ["assets.id"]) 263 | 264 | mix = [shots[0]["assets"][0], asset_builds[0]["assets"][0]] 265 | 266 | collection4 = _construct_collection_from_ftrack_entities(mix, ftrack_session) 267 | 268 | collection5 = _construct_collection_from_ftrack_entities( 269 | shots[1]["assets"][0], ftrack_session 270 | ) 271 | 272 | asset_type = ftrack_session.query("AssetType").first() 273 | with pytest.raises(AssertionError) as context: 274 | collection4.intersection(collection5).create( 275 | name=str(uuid.uuid4()), type=asset_type["name"] 276 | ) 277 | assert "Ambiguous context" in str(context.value) 278 | 279 | 280 | def test_create_note(scenario_shot): 281 | categories = Query(NoteCategory).get_all() 282 | # Run it three times, once on an empty collection, another in a collection 283 | # with a single entity and then two (this last one will test that the parent 284 | # ambiguity error is not raised) 285 | for i in range(3): 286 | chosen_category = random.choice(categories) 287 | # Query every time to ensure the cache is not fooling the tests 288 | test_shot = 
Query(Shot).by_id(*scenario_shot.shot_ids).get_first(projections=["notes.content"]) 289 | 290 | assert ( 291 | len(test_shot.notes) == i 292 | ), "Expected shot to have {} notes, {} found".format(i, len(test_shot.notes)) 293 | 294 | with pytest.raises(AssertionError): 295 | test_shot.notes.create(contents="A note", category="A String") 296 | 297 | note = test_shot.notes.create(content="A note", category=chosen_category) 298 | note.commit() 299 | 300 | # For some unknown reason, note.commit() converts parent_type to "Resource", so we can ignore testing 301 | # the parent_type, as it'll be wrong anyway 302 | assert ( 303 | note.parent_id[0] == test_shot.id[0] 304 | ), "Expected parent id {!r}, got {!r}".format( 305 | test_shot.id[0], note.parent_id[0] 306 | ) 307 | 308 | # To ensure it's on the entity and not only on the session cache, a new query 309 | # should do the trick 310 | notes = Query(Note).inject("parent_id is {}".format(test_shot.id[0])).get_all() 311 | assert len(notes) == (i + 1), "Expected {} notes, {} found".format( 312 | i + 1, len(notes) 313 | ) 314 | assert ( 315 | notes.filter(lambda x: x.id[0] == note.id[0]).category.name[0] 316 | == chosen_category.name[0] 317 | ) 318 | -------------------------------------------------------------------------------- /doc/sphinx_source/quickstart.md: -------------------------------------------------------------------------------- 1 | # Quickstart 2 | 3 | Eager to get started? This page give some introduction to the core concepts of Trackteroid. 4 | Follow [Installation](installation.md) and install Trackteroid first. Ideally you should also have a basic understanding of the [Ftrack Python API](https://ftrack-python-api.readthedocs.io/en/stable/index.html). 5 | 6 | The provided examples assume that you have properly configured the [API access for Ftrack](https://ftrack-python-api.readthedocs.io/en/stable/understanding_sessions.html) accordingly. 7 | 8 | ## Accessing Data From FTrack 9 | 10 | ```{include} query/examples.md 11 | :start-after: example minimal start 12 | :end-before: example minimal end 13 | 14 | ``` 15 | 16 | ### The Query 17 | 18 | ```{include} query/overview.md 19 | ``` 20 | ### Defining Relationships 21 | 22 | One of the main objectives of Trackteroid is to minimize the need for in-depth knowledge of the underlying database structure when working with queries and resulting collections. This goal is accomplished through two distinct approaches. 23 | 24 | Firstly, it automatically derives relationships whenever possible by dynamically inspecting the schema of the current session. This capability allows for seamless handling of relationships without requiring explicit configuration. 25 | 26 | However, Ftrack's dynamic nature means that certain entity types may require configuring relationships to align with specific requirements. Trackteroid provides the flexibility to describe and represent contextual relationships for such cases, enabling customization and adaptation to meet individual needs by implementing a [resolver](configuration.md#relationships-resolver). 27 | 28 | All communication with an Ftrack server is facilitated through a `Session` object. By default, a `Query` is constructed using the [_SESSION_ singleton](session.md#multiple-sessions) and the _default_ schema. 
Here's an example: 29 | 30 | ```{include} query/examples.md 31 | :start-after: example session start 32 | :end-before: example session end 33 | ``` 34 | 35 | However, you also have the flexibility to initialize your own `Session` object and provide a different schema. Here's an example: 36 | 37 | ```python 38 | from trackteroid import ( 39 | Query, 40 | SCHEMA, 41 | AssetVersion 42 | ) 43 | from trackteroid.session import Session 44 | 45 | my_session = Session() 46 | 47 | Query(AssetVersion, session=my_session, schema=SCHEMA.vfx) 48 | ``` 49 | 50 | ## Collections 51 | 52 | ```{include} collections/overview.md 53 | ``` 54 | 55 | `````{admonition} **Iterables all the way down!** 56 | :class: important 57 | 58 | Regardless of the number of entities it contains, whether it's multiple, single, or none at all, a collection remains iterable. 59 | This holds true even when requesting attributes that result in a primitive data type, such as strings. This consistent behavior allows for uniform usage across different scenarios and helps avoid the need for excessive conditional statements. 60 | 61 | ````{admonition} **In practice...** 62 | :class: dropdown 63 | 64 | ```{include} collections/examples.md 65 | :start-after: example practice start 66 | :end-before: example practice end 67 | ``` 68 | 69 | The provided code demonstrates some of the capabilities of Trackteroid in handling complex scenarios without the need for explicit loops or excessive conditional statements. 70 | Although the code may seem complex at first glance, the following explanations will break it down step by step. 71 | 72 | In the first part of the code, a single _TypedContext_ sample is retrieved using the _Query_ class. 73 | The _limit_ parameter is set to 1 to fetch only one sample. The _projections_ parameter is used to specify the desired attributes (_Component.name_ and _ComponentLocation.resource_identifier_) to be included in the result. 74 | 75 | Next, the retrieved _TypedContext_ sample is filtered using the _[Shot]_ filter. This filter selects only the subtypes of _TypedContext_ that match the _Shot_ entity and ensures that only _Shot_ entities are considered in the subsequent operations. 76 | Since we are limiting the result of the _TypedContext_ query to only one entity, there is a possibility that the retrieved entity may not be a _Shot_. It could be of a different entity type, such as _AssetBuild_, _Sequence_, or _Folder_. 77 | Following the filter, the _Shot_ sample is further filtered using the _filter_ method. In this case, the filter condition checks if the _Component_ name of any of the _Shot's_ _AssetVersions_ is equal to "main". 78 | Finally, the _resource_identifier_ attribute of a single _ComponentLocation_ is accessed. As we are anticipating only one result, the value is accessed using the [0] index. If a value is present, it is utilized; otherwise, the fallback string _"Not existing"_ is used. 79 | 80 | The second part of the code follows a similar structure, but this time the limit parameter is set to 10 to retrieve 10 potential _TypedContext_ samples. 81 | 82 | No need to worry if you haven't fully grasped the concepts yet. Subsequent sections will provide further clarification. 83 | ```` 84 | ````` 85 | 86 | ### Transformation, Fetching and Option Handling 87 | 88 | The `EntityCollection` provides you with a lot of convenience for accessing, filtering and transforming containing data. 
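To give a feel for how this looks before diving into the individual topics, here is a minimal, illustrative sketch. The project name and the projected attributes are assumptions for demonstration purposes and may need to be adapted to your own setup; each concept is covered in detail in the sections below.

```python
from trackteroid import (
    Query,
    AssetVersion,
    Project
)

# fetch versions of a hypothetical project called "showroom",
# projecting the attributes we want to work with up front
versions = Query(AssetVersion).by_name(Project, "showroom").get_all(
    projections=[
        "version",
        "asset.name"
    ]
)

# attribute access is uniform, no matter how many entities the collection holds
print(versions.asset.name)  # all associated asset names
print(versions.version)     # all version numbers

# higher-order methods replace explicit loops
print(
    versions.filter(
        lambda vc: vc.version[0] > 1
    ).group(
        lambda vc: vc.asset.name[0]
    )
)
```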
89 | 
90 | 
91 | #### Item and Attribute Access
92 | 
93 | Retrieving items from a collection is straightforward and effortless.
94 | These examples illustrate the versatility of the item getter on an EntityCollection.
95 | ```{include} collections/examples.md
96 | :start-after: example item access1 start
97 | :end-before: example item access1 end
98 | ```
99 | 
100 | Accessing related collections and primitive data is user-friendly.
101 | This example demonstrates the seamless navigation through nested collections and the retrieval of primitive data stored in the _resource_identifier_ attribute of associated _component_locations_.
102 | 
103 | ```{include} collections/examples.md
104 | :start-after: example attribute access1 start
105 | :end-before: example attribute access1 end
106 | ```
107 | 
108 | You can conveniently access individual attributes within the custom_attributes field by utilizing the `custom_` prefix as a shortcut. This allows direct access to specific attributes without the need to explicitly refer to the _custom_attributes_ field and retrieve values by their corresponding keys.
109 | ```{include} collections/examples.md
110 | :start-after: example custom attribute access start
111 | :end-before: example custom attribute access end
112 | ```
113 | 
114 | #### Transformation Methods
115 | 
116 | While iterating with explicit loops is a valid approach, leveraging transformations can be more convenient.
117 | The `EntityCollection` class provides [higher-order methods](collections.md#higher-order-methods) that accept functions as arguments, aligning with the principles of functional programming.
118 | The presented example highlights a subset of the transformation methods available.
119 | 
120 | ```{include} collections/examples.md
121 | :start-after: example transformation methods overview start
122 | :end-before: example transformation methods overview end
123 | ```
124 | 
125 | #### Set Operations
126 | 
127 | Due to the immutability of collections, it is not possible to directly add or remove entities. However, you can utilize the same [set operations](collections.md#set-operations) available in Python's `set` class to obtain new collections.
128 | ```{include} collections/examples.md
129 | :start-after: example set operations overview start
130 | :end-before: example set operations overview end
131 | ```
132 | 
133 | #### Fetching Attributes
134 | 
135 | As Trackteroid's default Sessions disable the _auto_populate_ feature, it is possible to work with unprojected data. In such cases, you may need to fetch missing attributes when required. This can be accomplished using the `fetch_attributes` method on your collection.
136 | ```{include} collections/examples.md
137 | :start-after: example fetch attributes1 start
138 | :end-before: example fetch attributes1 end
139 | ```
140 | 
141 | #### Fallback Concept
142 | 
143 | ```{include} collections/emptycollection.md
144 | ```
145 | 
146 | ## Authoring
147 | 
148 | ### CRUD (Create, Read, Update, Delete)
149 | 
150 | [Collections](collections.md) provide a user interface for performing CRUD operations, which include creating, reading, updating, and deleting data.
151 | The sections below are organized in a logical order to guide you through these operations.
152 | 
153 | #### Read
154 | 
155 | The listed page references will provide you with all the information you need about requesting and accessing data from Ftrack and how that data is exposed on collections.
156 | 157 | - [Querying](#the-query): Learn how to construct queries to retrieve specific data 158 | - [Attribute Access](#item-and-attribute-access): Understand how to access attributes of items in collections. 159 | 160 | #### Update 161 | 162 | ##### Setting Attributes 163 | 164 | Data updates are primarily performed by assigning values using the `=` operator. 165 | 166 | ```{include} collections/examples.md 167 | :start-after: example setattr1 start 168 | :end-before: example setattr1 end 169 | ``` 170 | The provided code example illustrates the process of assigning values to the _resource_identifier_ attribute of `ComponentLocation` entities associated with an `AssetVersion` collection. 171 | 172 | The code demonstrates two scenarios for updating values: single-value and multi-value assignment. In the case of a single-value assignment, a string value is assigned to _ComponentLocation[0].resource_identifier_, assuming that we are dealing with a collection containing a single element. This operation is possible when the collection has only one element. On the other hand, in the list assignment scenario, a list of values is assigned to the _ComponentLocation.resource_identifier_ attribute, with each value corresponding to an element in the collection. It is crucial to ensure that the number of elements in the list matches the number of elements in the collection. 173 | 174 | ```{attention} 175 | It's important to note that updates made to the collection are only stored in the local cache until they are committed. 176 | The `commit()` method can be called on any collection and will commit **all recorded operations** from the underlying session to the Ftrack server. To verify the success of the update in the example, the code reconnects the session and retrieves the updated attribute value by executing a new query. 177 | ``` 178 | 179 | ```{tip} 180 | The [apply](collections.md#apply) method provides a convenient approach when you need to assign a single value or a single-element collection to a collection that has multiple receivers. 181 | ``` 182 | 183 | #### Create 184 | 185 | The `create(**kwargs)` method on a collection enables the creation of new entities, providing a new collection that allows for additional operations on the created entities. The required keyword arguments for this method vary depending on the entity type being created. 186 | 187 | ```{include} collections/examples.md 188 | :start-after: example note creation1 start 189 | :end-before: example note creation1 end 190 | ``` 191 | 192 | The code example showcases the process of adding a new note to an existing collection of notes within an `AssetVersion` collection. Since collections are immutable and do not allow entities to be added or removed from an existing collection directly, the create method returns a new `Note` collection that solely contains the newly created note. 193 | 194 | To preserve the existing notes, the code performs a `union` operation between the original `Note` collection and the newly created collection. This combined collection is then assigned back, ensuring that both the existing notes and the newly created note are included. 195 | 196 | ```{attention} 197 | It's important to note that creation and updates made to the collection are only stored in the local cache until they are committed. 198 | The `commit()` method can be called on any collection and will commit **all recorded operations** from the underlying session to the Ftrack server. 
199 | ```
200 | 
201 | ##### Linking
202 | 
203 | The `AssetVersion` collection offers a convenient way to link entities to each other using the _uses_versions_ and _used_in_versions_ attribute types. Additionally, collections can be easily linked or unlinked from each other by utilizing the following methods:
204 | - `link_inputs(collection)`
205 | - `link_outputs(collection)`
206 | - `unlink_inputs(collection)`
207 | - `unlink_outputs(collection)`
208 | 
209 | ```{attention}
210 | The linking process involves dedicated `*Link` types, and using `link_inputs` and `link_outputs` will **create** new link objects with appropriate assignments. Conversely, `unlink_inputs` and `unlink_outputs` are used to **delete** these link objects.
211 | ```
212 | 
213 | ```{include} collections/examples.md
214 | :start-after: example linking1 start
215 | :end-before: example linking1 end
216 | ```
217 | 
218 | #### Delete
219 | 
220 | The `delete()` method available on a collection provides the capability to delete entities associated with that collection.
221 | 
222 | ```{include} collections/examples.md
223 | :start-after: example delete start
224 | :end-before: example delete end
225 | ```
226 | 
227 | ```{attention}
228 | It's important to note that the deletion of entities in a collection is only stored in the local cache until the changes are committed.
229 | The `commit()` method can be called on any collection and will commit **all recorded operations** from the underlying session to the Ftrack server. To verify the success of the deletion in the example, the code reconnects the session and executes a new query.
230 | ```
231 | 
232 | 
233 | 
--------------------------------------------------------------------------------
/src/trackteroid/entities/relationships_parser.py:
--------------------------------------------------------------------------------
1 | # BSD 3-Clause License
2 | #
3 | # Copyright (c) 2023, Trixter GmbH
4 | #
5 | # Redistribution and use in source and binary forms, with or without
6 | # modification, are permitted provided that the following conditions are met:
7 | #
8 | # 1. Redistributions of source code must retain the above copyright notice, this
9 | #    list of conditions and the following disclaimer.
10 | #
11 | # 2. Redistributions in binary form must reproduce the above copyright notice,
12 | #    this list of conditions and the following disclaimer in the documentation
13 | #    and/or other materials provided with the distribution.
14 | #
15 | # 3. Neither the name of the copyright holder nor the names of its
16 | #    contributors may be used to endorse or promote products derived from
17 | #    this software without specific prior written permission.
18 | #
19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
20 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
21 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
23 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
24 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
25 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
26 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
27 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 | # 30 | 31 | import itertools 32 | 33 | 34 | # TODO: This need class/function docs 35 | class TreeData: 36 | def __init__(self, type=None, parent_attr=None, parent=None): 37 | self.type = type 38 | self.parent_attr = parent_attr 39 | self.parent = parent 40 | 41 | 42 | class TreeItem: 43 | def __init__(self, data=TreeData()): 44 | self.children = [] 45 | self.data = data 46 | 47 | def get_parent(self): 48 | return self.data.parent 49 | 50 | 51 | class RelationshipsParser(object): 52 | 53 | def __init__(self, ftrack_session=None): 54 | self._session = ftrack_session 55 | self._exclude_types = set() 56 | self._tmp_exclude_types = set() 57 | self._ref_key = "refs" 58 | self._entities = {} 59 | self._tmp_relations = {} 60 | self._include_types = set() 61 | self._attributes_blacklist = set() 62 | self._root_item = TreeItem() 63 | self._project_relations = {} 64 | self._relationships = {} 65 | self._all_array_types = {} 66 | 67 | def parse_session_schemas(self): 68 | for ftrack_entity in self._session.schemas: 69 | entity_id = ftrack_entity.get("id") 70 | self._entities[entity_id] = {} 71 | tmp_refs = {} 72 | tmp_array_refs = {} 73 | for prop, prop_data in ftrack_entity["properties"].items(): 74 | items = prop_data.get("items") 75 | ref = prop_data.get("$ref", "") 76 | if ref: 77 | tmp_refs[prop] = ref 78 | if items: 79 | ref = prop_data.get("items").get("$ref", "") 80 | if ref: 81 | tmp_array_refs[prop] = ref 82 | self._entities[entity_id]["refs"] = tmp_refs 83 | self._entities[entity_id]["array_refs"] = tmp_array_refs 84 | 85 | def create_entity_network(self, ftrack_entity): 86 | self._tmp_exclude_types = set(self._exclude_types) 87 | # entity type must be excluded, as finding it in one of its children would lead to an endless recursion 88 | self._tmp_exclude_types.add(ftrack_entity) 89 | return self.recurse_entity_connections(ftrack_entity, ancestors=set()) 90 | 91 | def recurse_entity_connections(self, ftrack_entity, ancestors=None): 92 | # ancestors are the entities, in sequence, linking to the current entity 93 | # the top entity of course has none 94 | if ancestors is None: 95 | ancestors = set() 96 | # refs - reference type attributes on the current entity 97 | # type - current entity's type 98 | network = {"refs": {}, "type": ftrack_entity} 99 | # return otherwise recursion will be endless 100 | if ftrack_entity in ancestors: 101 | return network 102 | 103 | # get all array or non-array reference attributes of the current entity 104 | refs = self._entities.get(ftrack_entity).get(self._ref_key) 105 | if refs: 106 | # first iteration - validate props 107 | valid_props = {} 108 | for prop_name, prop_type in refs.items(): 109 | if prop_name not in self._attributes_blacklist and prop_type not in self._tmp_exclude_types and prop_type != ftrack_entity: 110 | valid_props[prop_name] = prop_type 111 | # add valid prop types to exclude list for deeper recursions 112 | self._tmp_exclude_types.update(valid_props.values()) 113 | 114 | # second iteration, recurse all valid props 115 | for prop_name, prop_type in valid_props.items(): 116 | if prop_type in self._entities: 117 | ancestors.add(ftrack_entity) 118 | network["refs"][prop_name] = self.recurse_entity_connections(prop_type, ancestors) 119 | return network 120 | 121 | def extract_entity_relations(self, network): 122 | self._tmp_relations = {} 123 | self.recurse_entity_relations(network) 124 | return self._tmp_relations 125 | 126 | def recurse_entity_relations(self, network, path=None): 127 | if path is None: 128 | path = [] 129 | if "refs" 
in network: 130 | for attr_name, attr_data in network.get("refs").items(): 131 | prop_type = attr_data.get("type") 132 | if prop_type not in self._tmp_relations: 133 | tmp_path = list(path) 134 | tmp_path.append(attr_name) 135 | self._tmp_relations[prop_type] = tmp_path 136 | self.recurse_entity_relations(attr_data, tmp_path) 137 | 138 | def extract_all_entity_relations(self): 139 | for entity_name in self._entities: 140 | self._relationships[entity_name] = {} 141 | 142 | # create network of non-array references 143 | self.use_array_refs = False 144 | asset_network = self.create_entity_network(entity_name) 145 | 146 | # extract non-collection relations 147 | self._relationships[entity_name]["non_collection"] = self.extract_entity_relations(asset_network) 148 | 149 | # create network of array references 150 | self.use_array_refs = True 151 | asset_network = self.create_entity_network(entity_name) 152 | 153 | # extract collection relations 154 | self._relationships[entity_name]["collection"] = self.extract_entity_relations(asset_network) 155 | 156 | def parse_project_structure(self, project): 157 | root_data = TreeData(project.entity_type, None) 158 | self._root_item = TreeItem(root_data) 159 | self._project_relations = {} 160 | self.recurse_project_structure(self._root_item, project) 161 | 162 | def recurse_project_structure(self, structure_item=TreeItem(), ftrack_entity=None): 163 | entity_type = structure_item.data.type 164 | if entity_type in self._entities: 165 | refs = self._entities.get(entity_type).get("array_refs") 166 | for ref in refs: 167 | if ref in self._attributes_blacklist: 168 | continue 169 | collection = ftrack_entity.get(ref) 170 | if collection and collection.__class__.__name__ == "Collection": 171 | for child in collection: 172 | if child.get("parent") == ftrack_entity: 173 | child_data = TreeData(child.entity_type, "parent", structure_item) 174 | child_item = TreeItem(child_data) 175 | structure_item.children.append(child_item) 176 | self.recurse_project_structure(child_item, child) 177 | elif child.entity_type in self._include_types: 178 | tmp_collection_parent_attr = self._relationships.get(child.entity_type).get("non_collection").get(entity_type) 179 | if tmp_collection_parent_attr: 180 | collection_parent = tmp_collection_parent_attr[-1] 181 | if child.get(collection_parent) == ftrack_entity: 182 | child_data = TreeData(child.entity_type, collection_parent, structure_item) 183 | child_item = TreeItem(child_data) 184 | structure_item.children.append(child_item) 185 | self.recurse_project_structure(child_item, child) 186 | 187 | def print_project_structure(self, tree_item=TreeItem(), offset=""): 188 | print(offset, "Item:", tree_item.data.type, tree_item.data.parent_attr) 189 | offset = offset + " " 190 | for child in tree_item.children: 191 | self.print_project_structure(child, offset) 192 | 193 | def extract_project_relations(self): 194 | self._project_relations = {} 195 | self.walk_tree_down(self._root_item) 196 | for entity_name, entity_data in self._project_relations.items(): 197 | self._relationships[entity_name]["parent_attr"] = entity_data.get("parent_attr") 198 | entity_relations = entity_data.get("parent_paths") 199 | self._relationships[entity_name]["project"] = self.optimize_project_relations(entity_name, entity_relations) 200 | 201 | def walk_tree_down(self, tree_item=TreeItem()): 202 | for child in tree_item.children: 203 | child_data = child.data 204 | if child_data.type not in self._project_relations: 205 | self._project_relations[child_data.type] = 
{} 206 | self._project_relations[child_data.type]["parent_attr"] = child_data.parent_attr 207 | self._project_relations[child_data.type]["parent_paths"] = [] 208 | tmp_paths = [] 209 | self.walk_tree_up(tmp_paths, child) 210 | for path in tmp_paths: 211 | if path not in self._project_relations[child_data.type]["parent_paths"]: 212 | self._project_relations[child_data.type]["parent_paths"].append(path) 213 | self.walk_tree_down(child) 214 | 215 | def walk_tree_up(self, tmp_paths, tree_item=TreeItem(), path=None): 216 | if path is None: 217 | path = [] 218 | # top reached 219 | if tree_item.data.type not in self._project_relations: 220 | return 221 | parent = tree_item.get_parent() 222 | if parent: 223 | path.append({"type": parent.data.type, "attr": tree_item.data.parent_attr}) 224 | tmp_paths.append(list(path)) 225 | self.walk_tree_up(tmp_paths, parent, path) 226 | 227 | def optimize_project_relations(self, entity, relations): 228 | if not relations: 229 | return 230 | optimized_relations = [] 231 | for relation_data in relations: 232 | relation = relation_data[-1].get("type") 233 | if len(relation_data) > 1: # can"t get shorter than 1 234 | original_path_length = len(relation_data) 235 | shortest_relation = [] 236 | shorter_path_found = False 237 | for rel in relation_data[:original_path_length-1]: # now search up the relation to find even shorter ones 238 | non_collection_relation = self._relationships.get(rel.get("type")).get("non_collection") 239 | shortest_relation.append(rel) 240 | if relation in non_collection_relation: 241 | alternative_relation = non_collection_relation.get(relation) 242 | if len(alternative_relation) <= len(shortest_relation) and len(alternative_relation) == 1: 243 | for alt_relation in alternative_relation: 244 | shortest_relation.append({"type": relation, "attr": alt_relation}) 245 | shorter_path_found = True 246 | break 247 | if shorter_path_found: 248 | optimized_relations.append(shortest_relation) 249 | continue 250 | optimized_relations.append(relation_data) 251 | return optimized_relations 252 | 253 | @property 254 | def exclude_types(self): 255 | return self._exclude_types 256 | 257 | @exclude_types.setter 258 | def exclude_types(self, exclude_types): 259 | self._exclude_types = exclude_types 260 | 261 | @property 262 | def use_array_refs(self): 263 | return self._ref_key == "array_refs" 264 | 265 | @use_array_refs.setter 266 | def use_array_refs(self, use_array_refs): 267 | if use_array_refs: 268 | self._ref_key = "array_refs" 269 | else: 270 | self._ref_key = "refs" 271 | 272 | @property 273 | def include_types(self): 274 | return self._include_types 275 | 276 | @include_types.setter 277 | def include_types(self, include_types): 278 | self._include_types = include_types 279 | 280 | @property 281 | def attributes_blacklist(self): 282 | return self._attributes_blacklist 283 | 284 | @attributes_blacklist.setter 285 | def attributes_blacklist(self, attributes_blacklist): 286 | self._attributes_blacklist = attributes_blacklist 287 | 288 | @property 289 | def entities(self): 290 | return self._entities 291 | 292 | @property 293 | def project_relations(self): 294 | return self._project_relations 295 | 296 | @property 297 | def relationships(self): 298 | return self._relationships 299 | 300 | @property 301 | def array_attributes(self): 302 | array_refs = list(set(itertools.chain.from_iterable(_["array_refs"].keys() for _ in self.entities.values()))) 303 | array_refs.sort() 304 | return array_refs 
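# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the module): this mirrors how
# Session.parsed_relationships drives the parser further down in session.py.
# The ftrack server and credentials are assumed to come from the usual
# FTRACK_SERVER / FTRACK_API_USER / FTRACK_API_KEY environment variables.
if __name__ == "__main__":
    import ftrack_api

    _session = ftrack_api.Session(auto_populate=False, auto_connect_event_hub=False)
    parser = RelationshipsParser(ftrack_session=_session)
    # ambiguous base types such as "Context" cannot yield direct relationships
    parser.exclude_types = {"Context"}
    # grouping attributes (flattened hierarchies) are ignored as well
    parser.attributes_blacklist = {"descendants", "ancestors", "status_changes"}
    parser.parse_session_schemas()
    parser.extract_all_entity_relations()
    print(parser.relationships)
    print(parser.array_attributes)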
-------------------------------------------------------------------------------- /src/trackteroid/stubs/stubs.py: -------------------------------------------------------------------------------- 1 | # BSD 3-Clause License 2 | # 3 | # Copyright (c) 2023, Trixter GmbH 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are met: 7 | # 8 | # 1. Redistributions of source code must retain the above copyright notice, this 9 | # list of conditions and the following disclaimer. 10 | # 11 | # 2. Redistributions in binary form must reproduce the above copyright notice, 12 | # this list of conditions and the following disclaimer in the documentation 13 | # and/or other materials provided with the distribution. 14 | # 15 | # 3. Neither the name of the copyright holder nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | # 30 | 31 | import inspect 32 | import logging 33 | import os 34 | import re 35 | import sys 36 | 37 | import ftrack_api 38 | 39 | 40 | DEFAULT_STUBS_PATH = os.path.dirname(__file__) 41 | STUBS_HEADER = "\nimport typing\n" 42 | 43 | LOG = logging.getLogger("trackteroid.stubs") 44 | 45 | SESSION = ftrack_api.Session(auto_populate=True, auto_connect_event_hub=False) 46 | 47 | 48 | def make_file(path, mode=0o777, default_content="", overwrite=False): 49 | """ 50 | Create a file (if it does not yet exist). 51 | Args: 52 | path (str): 53 | mode (octal, optional): file mode 54 | default_content (str): optional - the default content to put into the file if it get created here 55 | overwrite (bool): whether the file should be overwritten, if it already exists 56 | 57 | Returns: 58 | bool - True if file exists or could be created. False if not. 
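    Example (illustrative only; the path is hypothetical):
        >>> make_file("/tmp/stubs/example.pyi", default_content="# stub", overwrite=True)
        True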
59 | """ 60 | os.makedirs(os.path.dirname(path), exist_ok=True) 61 | if os.path.exists(os.path.dirname(path)): 62 | try: 63 | if not os.path.exists(path) or overwrite: 64 | with open(path, 'w') as file_write: 65 | file_write.write(default_content) 66 | os.chmod(path, mode) 67 | return True 68 | except (OSError, AttributeError, TypeError): 69 | LOG.error("Failed to create file '%s'" % path, exc_info=True) 70 | return False 71 | 72 | 73 | class StubClassBuilder(object): 74 | """ A simply and limited helper to build class stubs """ 75 | 76 | CLASS_TEMPLATE = "class {class_name}{class_bases}:\n" 77 | CLASS_MEMBER_TEMPLATE = " {member_name}: {type} \n" 78 | INSTANCE_ATTRIBUTE_TEMPLATE = " self.{attribute_name}: {type} \n" 79 | CLASS_ATTRIBUTE_TEMPLATE = " {attribute_name}: {type} \n" 80 | METHOD_TEMPLATE = " def {method_name}({arguments}{keyword_arguments}) -> {return_type}:{ellipsis} \n" 81 | 82 | TYPE_MAP = { 83 | "array": "{ref} = {ref}", 84 | "string": "str = str()", 85 | "number": "float = float()", 86 | "boolean": "bool = bool()", 87 | "integer": "int = int()", 88 | "mapped_array": "typing.List = [{ref}]", 89 | "variable": "typing.Any = None", 90 | "any": "{ref} = {ref}" 91 | } 92 | 93 | def __init__(self, name, bases=()): 94 | self._content = { 95 | "header": "", 96 | "class_members": [], 97 | "constructor": "", 98 | "attributes": [], 99 | "methods": [] 100 | } 101 | self._name = name 102 | self._bases = bases 103 | 104 | self._set_class(name, bases) 105 | 106 | def __str__(self): 107 | return ( 108 | f"{self._content['header']}" 109 | f"{''.join(self._content['class_members'])}\n" 110 | f"{self._content['constructor']}" 111 | f"{''.join(sorted(self._content['attributes']))}" 112 | f"{''.join(sorted(self._content['methods']))}\n\n" 113 | ) 114 | 115 | def __add__(self, other): 116 | return str(self) + other 117 | 118 | def __radd__(self, other): 119 | return other + str(self) 120 | 121 | def add_member(self, name, type, ref): 122 | _type = (self.TYPE_MAP.get(type) or type).format(ref=ref) 123 | self._content["class_members"].append( 124 | self.CLASS_MEMBER_TEMPLATE.format( 125 | member_name=name, type=_type) 126 | ) 127 | 128 | def add_attribute(self, name, type, ref): 129 | """ add an attribute stub 130 | 131 | Args: 132 | name (str): attribute name 133 | type (str): attribute type 134 | ref (str): attribute type references 135 | 136 | Returns: 137 | 138 | """ 139 | 140 | if not self._content["constructor"]: 141 | raise ValueError( 142 | "No constructor stub was added. Add an ` __init__` stub via add_method() first." 
143 | ) 144 | 145 | _type = (self.TYPE_MAP.get(type) or type).format(ref=ref) 146 | 147 | self._content["attributes"].append( 148 | self.ATTRIBUTE_TEMPLATE.format( 149 | attribute_name=name, type=_type 150 | ) 151 | ) 152 | 153 | def add_method(self, name, arguments, keyword_arguments="", return_type="None"): 154 | """ add a method stub 155 | 156 | Args: 157 | name (str): method name 158 | arguments (str): collapsed argument type hints 159 | keyword_arguments (str): collapses keyword argument type hints 160 | return_type (str): the expected return type 161 | 162 | Returns: 163 | 164 | """ 165 | if keyword_arguments: 166 | arguments += ", " 167 | 168 | _method_str = self.METHOD_TEMPLATE.format( 169 | method_name=name, 170 | arguments=arguments, 171 | keyword_arguments=keyword_arguments, 172 | return_type=return_type, 173 | ellipsis="{ellipsis}" 174 | ) 175 | 176 | # some special treatment 177 | if name == "__init__": 178 | self._content["constructor"] = _method_str.format(ellipsis="") 179 | else: 180 | self._content["methods"].append(_method_str.format(ellipsis="...")) 181 | 182 | def _set_class(self, name, bases): 183 | if bases: 184 | bases = "(" + ", ".join(list(bases)) + ")" 185 | else: 186 | bases = "" 187 | self._content["header"] = self.CLASS_TEMPLATE.format(class_name=name, class_bases=bases) 188 | 189 | def set_class_bases(self, bases): 190 | self._set_class(self.name, bases) 191 | 192 | @property 193 | def name(self): 194 | return self._name 195 | 196 | 197 | def get_stubs_from_schemas(include_custom_attributes=False): 198 | """ generate stubs from available entity schemas 199 | 200 | Returns: 201 | list: list with StubClassBuiler instances 202 | """ 203 | stubs = [] 204 | 205 | for element in SESSION.schemas: 206 | stub = StubClassBuilder(name=element["id"]) 207 | 208 | # ensure we add the __init__ first, because the stub builder is limited 209 | # and expects we have that added, to use the proper indentation for the 210 | # attributes 211 | for name, _ in element["properties"].items(): 212 | if name == "custom_attributes": 213 | # skip custom attributes as we will add them individually later 214 | continue 215 | items = _.get("items") 216 | ref = _.get("$ref", "") 217 | if items: 218 | ref = _["items"].get("$ref", "") 219 | if ref: 220 | ref = "{}".format(ref) 221 | _type = _.get("type") 222 | if _type: 223 | stub.add_member(name=name, type=_type, ref=ref) 224 | else: 225 | stub.add_member(name=name, type="any", ref=ref) 226 | 227 | if include_custom_attributes and "custom_attributes" in element["properties"].keys(): 228 | item = SESSION.query("select custom_attributes from {}".format(stub.name)).first() 229 | if item: 230 | for key in item["custom_attributes"].keys(): 231 | stub.add_member(name="custom_" + key, type=_type, ref=ref, cls_attribute=True) 232 | 233 | stubs.append(stub) 234 | 235 | return stubs 236 | 237 | 238 | def get_extended_entity_stubs(classes, stubs): 239 | """ extend given stubs for our entity implementations 240 | 241 | Args: 242 | classes (list): list of classes that need to match the stub names 243 | stubs (list): list of StubClassBuilder instances 244 | 245 | Returns: 246 | list: list of StubClassBuilder instances 247 | 248 | """ 249 | stubs_map = {_.name: _ for _ in stubs} 250 | class_map = dict(classes) 251 | 252 | stubs = [] 253 | 254 | for stub_name, stub in stubs_map.items(): 255 | _cls = class_map.get(stub_name) 256 | if _cls: 257 | 258 | stub.add_method( 259 | name="__init__", 260 | arguments="self, *args", 261 | keyword_arguments="**kwargs", 262 
| ) 263 | 264 | for method in inspect.getmembers( 265 | _cls, 266 | predicate=lambda x: (inspect.isfunction(x) or inspect.ismethod(x)) and not x.__name__.startswith( 267 | "__")): 268 | 269 | specs = inspect.getfullargspec(method[1]) 270 | varargs = specs.varargs or "" 271 | if varargs: 272 | varargs = ", *" + varargs 273 | 274 | kwargs = [] 275 | args = specs.args or [] 276 | if specs.defaults: 277 | for i, element in enumerate(specs.args[-len(specs.defaults):]): 278 | if isinstance(specs.defaults[i], str): 279 | kwargs.append("{}=\"{}\"".format(element, specs.defaults[i])) 280 | else: 281 | kwargs.append("{}={}".format(element, specs.defaults[i])) 282 | args = specs.args[:-len(specs.defaults)] 283 | 284 | args = ", ".join(args) 285 | kwargs = ", ".join(kwargs) 286 | 287 | # always expect the forwarded Query return type 288 | # in case we are a "by_*" Criteria method 289 | if re.search(r"^by_", method[0]): 290 | return_type = "Query({})".format(stub_name) 291 | elif re.search(r"^inject$", method[0]): 292 | return_type = "Query({})".format(stub_name) 293 | args = "self, filter" 294 | kwargs = "" 295 | varargs = "" 296 | elif method[0] == "get" or method[0].startswith("get_"): 297 | return_type = stub_name 298 | elif method[0] == "create": 299 | return_type = stub_name 300 | elif method[0] == "create_batch": 301 | return_type = stub_name 302 | elif re.match(r"^(un)?link_.*", method[0]): 303 | return_type = stub_name 304 | elif method[0] == "pre_create": 305 | continue 306 | elif method[0] == "delete": 307 | return_type = "None" 308 | else: 309 | return_type = "typing.Any" 310 | 311 | # be stupid and ignore any kind of namespace for base classes 312 | stub.set_class_bases([_.__name__ for _ in _cls.__bases__]) 313 | stub.add_method( 314 | name=method[0], 315 | arguments=args + varargs, 316 | keyword_arguments=kwargs, 317 | return_type=return_type 318 | ) 319 | # if method is a criterion let's add the negated counterpart 320 | if re.search(r"^by_", method[0]): 321 | stub.add_method( 322 | name="not_{}".format(method[0]), 323 | arguments=args + varargs, 324 | keyword_arguments=kwargs, 325 | return_type=return_type 326 | ) 327 | 328 | # TODO: add overload for get, to add relative terminator 329 | 330 | # TODO improve for base types like TypedContext, Component 331 | # that support filtering via Entity 332 | stub.add_method( 333 | name="__getitem__", 334 | arguments="self, item: typing.Union[int, slice, str]", 335 | keyword_arguments="", 336 | return_type=stub_name 337 | ) 338 | stub.add_method( 339 | name="fetch_attributes", 340 | arguments="self, projections: typing.List[typing.Union[str, Entity]]", 341 | keyword_arguments="", 342 | return_type=stub_name 343 | ) 344 | stubs.append(stub) 345 | 346 | return stubs 347 | 348 | 349 | def generate_entitites_stubs(): 350 | """ (re)generate our entities.pyi stub file 351 | 352 | Notes: 353 | This overrides the stub .pyi file at the expected location. Here we will 354 | override trixter.entities.entities.pyi with the freshly inspected data. 355 | Use this whenever the implementation of any Entity subclass changes or 356 | the ftrack entities schema was modified. 357 | 358 | Returns: 359 | 360 | """ 361 | # TODO: The entities lookup should go away once our project is a proper package. 362 | # For now look up the expected package name dynamically based on the location of our script 363 | # and make 'entities' importable. 
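    # (illustrative note) once trackteroid is installed as a regular package, this
    # lookup could presumably be replaced by plain imports along the lines of
    # "from trackteroid import entities" / "from trackteroid.entities import declarations".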
364 | entities_path = os.path.dirname(os.path.dirname(__file__)) 365 | package_name, package_root = os.path.basename(entities_path), os.path.dirname(entities_path) 366 | sys.path.insert(0, package_root) 367 | entities = getattr(__import__(package_name), "entities") 368 | declarations = getattr(__import__(package_name), "declarations") 369 | 370 | classes = inspect.getmembers( 371 | entities, 372 | predicate=lambda entity: 373 | inspect.isclass(entity) 374 | and issubclass(entity, (entities.Entity, declarations.ForwardDeclaration)) 375 | and " " not in entity.__name__ # TODO: why do we get members with whitespace in name? 376 | ) 377 | 378 | stubs = STUBS_HEADER 379 | stubs += "from .base import Entity\n" 380 | stubs += "from ..query import Query\n\n" 381 | 382 | for stub in sorted(get_extended_entity_stubs(classes, get_stubs_from_schemas()), key=lambda x: x.name): 383 | stubs += stub 384 | 385 | path = os.path.join(os.path.dirname(entities.__file__), "entities.pyi") 386 | if os.path.isfile(path): 387 | os.remove(path) 388 | 389 | LOG.info("Updating stubsfile {}...".format(path)) 390 | make_file(path, default_content=stubs) 391 | 392 | 393 | if __name__ == '__main__': 394 | generate_entitites_stubs() 395 | -------------------------------------------------------------------------------- /src/trackteroid/session.py: -------------------------------------------------------------------------------- 1 | # BSD 3-Clause License 2 | # 3 | # Copyright (c) 2023, Trixter GmbH 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are met: 7 | # 8 | # 1. Redistributions of source code must retain the above copyright notice, this 9 | # list of conditions and the following disclaimer. 10 | # 11 | # 2. Redistributions in binary form must reproduce the above copyright notice, 12 | # this list of conditions and the following disclaimer in the documentation 13 | # and/or other materials provided with the distribution. 14 | # 15 | # 3. Neither the name of the copyright holder nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
29 | # 30 | 31 | import dbm 32 | import contextlib 33 | import json 34 | import warnings 35 | import inspect 36 | import importlib 37 | 38 | import ftrack_api 39 | 40 | from ftrack_api.symbol import NOT_SET 41 | 42 | from ftrack_api.cache import ( 43 | SerialisedCache, 44 | FileCache 45 | ) 46 | from ftrack_api.operation import ( 47 | CreateEntityOperation, 48 | UpdateEntityOperation, 49 | Operations 50 | ) 51 | 52 | from .entities.relationships_parser import RelationshipsParser 53 | 54 | 55 | _PARSED_RELATIONSHIPS_CACHE = {} 56 | 57 | 58 | class Session(object): 59 | """ Delegates all attributes and methods to the ftrack session instance 60 | 61 | Allows us to have our own Session interface for enhanced interaction. 62 | """ 63 | 64 | # exclude members that we don't want to delegate 65 | MEMBERS = [ 66 | "_session", 67 | "_session_arguments", 68 | "_terminated_queries", # this is our terminated query cache 69 | "reuse_query_results", 70 | ] 71 | 72 | def __init__(self, auto_populate=False, auto_connect_event_hub=False, **kwargs): 73 | # Collect arguments to hand over to the native ftrack_api.Session 74 | local_args = dict(locals()) 75 | valid_args = [_ for _ in inspect.getfullargspec(ftrack_api.Session.__init__).args if _ not in ('self', 'kwargs')] 76 | session_arguments = {} 77 | for arg_key, arg_val in local_args.items(): 78 | if arg_key in valid_args: 79 | session_arguments[arg_key] = arg_val 80 | session_arguments.update(kwargs) 81 | 82 | self._session = ftrack_api.Session(**session_arguments) 83 | 84 | # monkey patch ftrack session to find the way back from it to here 85 | self._session.delegate = self 86 | self._session_arguments = session_arguments 87 | self._terminated_queries = {} 88 | 89 | self.reuse_query_results = kwargs.get("reuse_query_results", False) 90 | 91 | def __eq__(self, other): 92 | if isinstance(other, ftrack_api.Session): 93 | return other == self._session 94 | elif isinstance(other, Session): 95 | return other._session == self._session 96 | return False 97 | 98 | def __ne__(self, other): 99 | return not self.__eq__(other) 100 | 101 | @property 102 | def parsed_relationships(self): 103 | if not self._session.server_url in _PARSED_RELATIONSHIPS_CACHE: 104 | relationship_parser = RelationshipsParser(ftrack_session=self._session) 105 | # types such as "Context" are ambiguous and cannot be used to extract direct relationships 106 | # example structure - Seq -> Folder -> Shot 107 | # in this case, establishing a relationship from Seq to Shot is impossible as would any other 108 | # that went through "parent" or "children" for example 109 | relationship_parser.exclude_types = {"Context"} 110 | relationship_parser.parse_session_schemas() 111 | # attributes that group things cannot be used to extract relationships 112 | # as an example, "descendants" lists all children and children of children and so on, 113 | # so they are all flattened with their relationships removed 114 | relationship_parser.attributes_blacklist = {"descendants", "ancestors", "status_changes"} 115 | relationship_parser.extract_all_entity_relations() 116 | _PARSED_RELATIONSHIPS_CACHE[self._session.server_url] = relationship_parser 117 | 118 | return _PARSED_RELATIONSHIPS_CACHE[self._session.server_url] 119 | 120 | def get_type_class(self, type_name): 121 | from .entities import entities 122 | return getattr(entities, type_name, None) 123 | 124 | def get_cached_collections(self): 125 | # TODO: Is this function still needed? 
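        # builds a {Entity subclass: EntityCollection} map from whatever ftrack
        # entities currently sit in the local cache, wrapping them in the
        # corresponding trackteroid collection types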
126 | typenames = sorted([str(k) for k, v in self._build_entity_type_classes(self.schemas).items()]) 127 | type_module = importlib.import_module("..entities", __name__) 128 | 129 | type_map = {} 130 | for name in typenames: 131 | collection = getattr(type_module, "EntityCollection") 132 | _type = getattr(type_module, name, None) 133 | if _type and issubclass(_type, getattr(type_module, "Entity")): 134 | cached_entities = [ 135 | _type.from_entity_type(name=_type.__name__, ftrack_entity=_) for _ in self._local_cache.values() 136 | if _.__class__.__name__ == name 137 | ] 138 | entitycollection = collection._make_empty(_type, self) 139 | type_map[_type] = entitycollection.from_entities(cached_entities) 140 | 141 | return type_map 142 | 143 | def reconnect(self, **kwargs): 144 | """ Closes the active session and connects a new session with the 145 | option to update session arguments. 146 | 147 | Args: 148 | kwargs: optional overrides for existing session arguments. 149 | """ 150 | session_attributes = {} 151 | valid_args = [_ for _ in inspect.getfullargspec(ftrack_api.Session.__init__).args if _ not in ('self', 'kwargs')] 152 | for arg in valid_args: 153 | if hasattr(self._session, arg): 154 | session_attributes[arg] = getattr(self._session, arg) 155 | 156 | self._session.close() 157 | self._session_arguments.update(session_attributes) 158 | self._session_arguments.update(kwargs) 159 | self._session = ftrack_api.Session(**self._session_arguments) 160 | # Some of the attributes propagated as arguments are cache-related, therefore we 161 | # need to clear the cache in order to make sure it's empty for the new session. 162 | self._session.cache.clear() 163 | 164 | self._terminated_queries = {} 165 | 166 | def close(self): 167 | """Closes the session""" 168 | self._session.close() 169 | self._terminated_queries = {} 170 | 171 | def reconnect_and_commit(self, initial_operations_path, **kwargs): 172 | """ 173 | Given a path to a dumped database [.dbm] this method will apply 174 | the stored operations to the database. 175 | 176 | The session is reconnected to start from a clean cache and 177 | an empty operations stack. Then the local cache and operations 178 | will be updated with the contents of the dumped cache. 179 | To sync the ftrack database with the local cache a commit is 180 | issued. 181 | 182 | Args: 183 | initial_operations_path (): 184 | kwargs: optional overrides for existing session arguments. 
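        Example (illustrative, not executed; the .dbm path is hypothetical):
            >>> session = Session()
            >>> with session.deferred_operations("/tmp/deferred_ops.dbm"):
            ...     pass  # create/update entities here; they are dumped, not committed
            >>> session.reconnect_and_commit("/tmp/deferred_ops.dbm")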
185 | 186 | Returns: 187 | 188 | """ 189 | self.reconnect(**kwargs) 190 | 191 | operations = Operations() 192 | 193 | try: 194 | _db = dbm.open(initial_operations_path, "r") 195 | _db.close() 196 | except dbm.error: 197 | raise OSError("Unable to open Database `{}`".format(initial_operations_path)) 198 | 199 | initial_cache = SerialisedCache( 200 | FileCache(initial_operations_path), 201 | encode=self._session.encode, 202 | decode=self._session.decode 203 | ) 204 | for key in initial_cache.keys(): 205 | if key == "__operations__": 206 | initial_operations = json.loads(initial_cache.get(key)) 207 | 208 | for op in initial_operations: 209 | if op["operation"] == "create": 210 | del op["operation"] 211 | operation = CreateEntityOperation(**op) 212 | elif op["operation"] == "update": 213 | if op["old_value"] in initial_cache.keys(): 214 | op["old_value"] = initial_cache.get(str(op["old_value"])) 215 | if op["old_value"] == "NOT SET": 216 | op["old_value"] = NOT_SET 217 | if op["new_value"] in initial_cache.keys(): 218 | op["new_value"] = initial_cache.get(str(op["new_value"])) 219 | if op["new_value"] == "NOT SET": 220 | op["new_value"] = NOT_SET 221 | del op["operation"] 222 | operation = UpdateEntityOperation(**op) 223 | else: 224 | raise KeyError("Unknown Operation `{}`.".format(op["operation"])) 225 | 226 | operations.push(operation) 227 | else: 228 | self._local_cache.set(key, initial_cache.get(key)) 229 | 230 | self._session.recorded_operations = operations 231 | self._session.commit() 232 | 233 | @staticmethod 234 | def _serialise_value(value): 235 | if value == NOT_SET: 236 | return "NOT SET" 237 | elif hasattr(value, "entity_type"): 238 | return "('{}', ['{}'])".format( 239 | value.entity_type, 240 | value["id"] 241 | ) 242 | else: 243 | return value 244 | 245 | @contextlib.contextmanager 246 | def reusing_query_results(self): 247 | """Temporarily enable query cache""" 248 | backup = self.reuse_query_results 249 | self.reuse_query_results = True 250 | try: 251 | yield 252 | finally: 253 | self.reuse_query_results = backup 254 | 255 | @contextlib.contextmanager 256 | def deferred_operations(self, filepath, clear=True): 257 | 258 | file_cache = FileCache(filepath) 259 | serialised_cache = SerialisedCache( 260 | file_cache, 261 | encode=self.encode, 262 | decode=self.decode 263 | ) 264 | 265 | if clear: 266 | serialised_cache.clear() 267 | 268 | # add the filecache temporarily 269 | self.cache.caches.append(serialised_cache) 270 | cache_index = len(self.cache.caches) - 1 271 | 272 | # store all previously recorded operations 273 | _previous_operations = self.recorded_operations 274 | 275 | # clear operations temporarily 276 | deferred = Operations() 277 | self.recorded_operations = deferred 278 | 279 | yield 280 | 281 | # sync cache 282 | _cache_records = [] 283 | with file_cache._database() as database: 284 | for key in database.keys(): 285 | value = database[key] 286 | _cache_records.append((key, value)) 287 | 288 | for key, value in _cache_records: 289 | entity_data = json.loads(value) 290 | for attr, attr_value in entity_data.items(): 291 | if isinstance(attr_value, dict): 292 | entity_type = attr_value.get("__entity_type__") 293 | if entity_type: 294 | # if this is an entity we check whether the linked entity is available 295 | # in our filecache 296 | # TODO: eventually we should check for the actual primary key here 297 | # for our current usecase it seems ok to blindly use the id 298 | cache_key = "('{}', ['{}'])".format(entity_type, attr_value["id"]) 299 | if cache_key not in 
serialised_cache.keys(): 300 | serialised_cache.set(cache_key, self._local_cache.get(cache_key)) 301 | 302 | # serialise operations and store them in the database 303 | ops = [] 304 | for op in self.recorded_operations: 305 | if isinstance(op, CreateEntityOperation): 306 | ops.append( 307 | { 308 | "operation": "create", 309 | "entity_data": op.entity_data, 310 | "entity_type": op.entity_type, 311 | "entity_key": op.entity_key 312 | } 313 | ) 314 | elif isinstance(op, UpdateEntityOperation): 315 | ops.append( 316 | { 317 | "operation": "update", 318 | "entity_type": op.entity_type, 319 | "entity_key": op.entity_key, 320 | "attribute_name": op.attribute_name, 321 | "old_value": self._serialise_value(op.old_value), 322 | "new_value": self._serialise_value(op.new_value) 323 | } 324 | ) 325 | 326 | serialised_cache.set("__operations__", json.dumps(ops)) 327 | 328 | # remove temporary filecache 329 | self.cache.caches.pop() 330 | 331 | # load previous operations back in the stack 332 | self.recorded_operations = _previous_operations 333 | 334 | def __getattribute__(self, item): 335 | # delegating access to those attributes is neccessary to support the 336 | # temporary attribute value decorator 337 | if item in ("__dict__", "__slots__"): 338 | return self.__getattr__(item) 339 | else: 340 | return super(Session, self).__getattribute__(item) 341 | 342 | def __getattr__(self, key): 343 | if hasattr(self._session, key): 344 | return getattr(self._session, key) 345 | else: 346 | raise AttributeError("Attribute '{}' does not exist.".format(key)) 347 | 348 | def __setattr__(self, key, value): 349 | if key in self.MEMBERS: 350 | super(Session, self).__setattr__(key, value) 351 | setattr(self._session, key, value) 352 | elif hasattr(self, key): 353 | setattr(self._session, key, value) 354 | else: 355 | raise AttributeError("Attribute '{}' does not exist.".format(key)) 356 | 357 | def __delattr__(self, item): 358 | if item in self.MEMBERS: 359 | super(Session, self).__delattr__(item) 360 | delattr(self._session, item) 361 | elif hasattr(self, item): 362 | delattr(self._session, item) 363 | else: 364 | raise AttributeError("Attribute '{}' does not exist.".format(item)) 365 | 366 | 367 | class _SessionDelegate(Session): 368 | def __init__(self, *args, **kwargs): 369 | warnings.warn('"_SessionDelegate" class got renamed to "Session"', 370 | DeprecationWarning) 371 | super(_SessionDelegate, self).__init__(*args, **kwargs) 372 | 373 | 374 | # main session 375 | # TODO: defer connection 376 | SESSION = Session() 377 | SESSION.auto_populate = False 378 | --------------------------------------------------------------------------------