├── docs ├── .gitignore ├── _extensions │ └── machow │ │ └── interlinks │ │ ├── .gitignore │ │ ├── _extension.yml │ │ ├── objects.txt │ │ ├── test.qmd │ │ └── interlinks.lua ├── img │ └── notdavid.png ├── _sidebar.yml ├── api │ └── _sidebar.yml ├── _quarto.yml ├── style.scss ├── index.qmd └── attributes.qmd ├── .gitignore ├── tests ├── python.py ├── conftest.py ├── run.py ├── test_utils.py ├── test_bob.py ├── test_base_object.py ├── test_obj.py ├── test_vdb.py ├── test_nodes.py ├── test_array_print_methods.py ├── test_collection.py ├── test_geometry_types.py ├── test_attribute.py └── test_arrays.py ├── databpy ├── addon.py ├── nodes │ ├── __init__.py │ ├── utils.py │ ├── generating.py │ └── appending.py ├── material.py ├── __init__.py ├── vdb.py ├── utils.py ├── collection.py └── array.py ├── .github └── workflows │ ├── lint.yml │ ├── test-in-blender.yml │ ├── test-daily.yml │ ├── ci-cd.yml │ ├── test-upstream.yml │ ├── docs.yml │ └── tests.yml ├── pyproject.toml ├── README.qmd └── README.md /docs/.gitignore: -------------------------------------------------------------------------------- 1 | /.quarto/ 2 | 3 | **/*.quarto_ipynb 4 | -------------------------------------------------------------------------------- /docs/_extensions/machow/interlinks/.gitignore: -------------------------------------------------------------------------------- 1 | *.html 2 | *.pdf 3 | *_files/ 4 | -------------------------------------------------------------------------------- /docs/img/notdavid.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BradyAJohnston/databpy/HEAD/docs/img/notdavid.png -------------------------------------------------------------------------------- /docs/_extensions/machow/interlinks/_extension.yml: -------------------------------------------------------------------------------- 1 | title: Interlinks 2 | author: Michael Chow 3 | version: 1.1.0 4 | quarto-required: ">=1.2.0" 5 | contributes: 6 | filters: 7 | - interlinks.lua 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | *.pyc 3 | 4 | /.quarto/ 5 | /_build 6 | /reference 7 | /dist 8 | /api 9 | /databpy.egg-info 10 | /docs/_build 11 | *.ipynb 12 | *.quarto_ipynb 13 | /docs/api 14 | /build 15 | .DS_Store 16 | /docs/_inv 17 | docs/objects.txt 18 | -------------------------------------------------------------------------------- /docs/_extensions/machow/interlinks/objects.txt: -------------------------------------------------------------------------------- 1 | # Sphinx inventory version 2 2 | # Project: quartodoc 3 | # Version: 0.0.9999 4 | # The remainder of this file is compressed using zlib. 
5 | some_func py:function 1 api/some_func.html -
6 | a.b.c py:function 1 api/a.b.c.html -
7 | quartodoc.Auto py:class 1 api/Auto.html -
8 |
--------------------------------------------------------------------------------
/tests/python.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 | import sys
3 | import os
4 |
5 | argv = sys.argv
6 | argv = argv[argv.index("--") + 1 :]
7 |
8 |
9 | def main():
10 |     python = os.path.realpath(sys.executable)
11 |     subprocess.run([python] + argv)
12 |
13 |
14 | if __name__ == "__main__":
15 |     main()
16 |
--------------------------------------------------------------------------------
/databpy/addon.py:
--------------------------------------------------------------------------------
1 | import bpy
2 |
3 |
4 | def register():
5 |     bpy.types.Object.uuid = bpy.props.StringProperty(
6 |         name="UUID",
7 |         description="Unique identifier for the object",
8 |         default="",
9 |         options={"HIDDEN"},
10 |     )
11 |
12 |
13 | def unregister():
14 |     del bpy.types.Object.uuid
15 |
--------------------------------------------------------------------------------
/docs/_sidebar.yml:
--------------------------------------------------------------------------------
1 | website:
2 |   sidebar:
3 |   - contents:
4 |     - api/index.qmd
5 |     - contents:
6 |       - api/attribute.named_attribute.qmd
7 |       - api/attribute.store_named_attribute.qmd
8 |       section: Attribute
9 |     - contents:
10 |       - api/collection.qmd
11 |       - api/BlenderObject.qmd
12 |       section: Objects
13 |     id: api
14 |   - id: dummy-sidebar
15 |
--------------------------------------------------------------------------------
/docs/_extensions/machow/interlinks/test.qmd:
--------------------------------------------------------------------------------
1 | ---
2 | filters:
3 |   - interlinks.lua
4 | interlinks:
5 |   autolink: true
6 |   aliases:
7 |     quartodoc: null
8 | #sources:
9 | #  test:
10 | #    url: https://example.com
11 | ---
12 |
13 | * `some_func`
14 | * `some_func()`
15 | * `some_func(a=1)`
16 | * `some_func()`{.qd-no-link}
17 | * `some_func + some_func`
18 | * `a.b.c`
19 | * `~a.b.c`
20 | * `a.b.c()`
21 | * `quartodoc.Auto()`
22 | * `Auto()`
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | import pytest
3 | import databpy
4 |
5 | databpy.register()
6 |
7 |
8 | @pytest.fixture(autouse=True)
9 | def run_around_tests():
10 |     # Code that will run before each test
11 |
12 |     bpy.ops.wm.read_homefile(app_template="")
13 |     for tree in bpy.data.node_groups:
14 |         bpy.data.node_groups.remove(tree)
15 |
16 |     yield
17 |
18 |     bpy.ops.wm.read_homefile(app_template="")
19 |     # Code that will run after your test, for example:
20 |     # files_after = # ... 
do something to check the existing files 21 | # assert files_before == files_after 22 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Code Lint with Ruff 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | pull_request: 7 | branches: ["*"] 8 | 9 | 10 | jobs: 11 | lint: 12 | name: Check code style 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v4 16 | - uses: actions/setup-python@v5 17 | with: 18 | python-version: "3.11" 19 | - name: Install ruff 20 | run: pip install ruff 21 | - name: Check code formatting 22 | run: ruff format --diff 23 | - name: Lint code base 24 | run: ruff check 25 | -------------------------------------------------------------------------------- /databpy/nodes/__init__.py: -------------------------------------------------------------------------------- 1 | from .appending import ( 2 | cleanup_duplicates, 3 | deduplicate_node_trees, 4 | DuplicatePrevention, 5 | append_from_blend, 6 | ) 7 | from .generating import custom_string_iswitch, new_tree, swap_tree 8 | from .utils import get_input, get_output, MaintainConnections, NodeGroupCreationError 9 | 10 | __all__ = [ 11 | "cleanup_duplicates", 12 | "deduplicate_node_trees", 13 | "DuplicatePrevention", 14 | "append_from_blend", 15 | "custom_string_iswitch", 16 | "new_tree", 17 | "swap_tree", 18 | "get_input", 19 | "get_output", 20 | "MaintainConnections", 21 | "NodeGroupCreationError", 22 | ] 23 | -------------------------------------------------------------------------------- /databpy/material.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import bpy 4 | from bpy.types import Material 5 | 6 | 7 | # TODO: use DuplicatePrevention when adding material node trees 8 | def append_from_blend(name: str, filepath: str) -> Material: 9 | file_path = Path(filepath) 10 | if not file_path.exists(): 11 | raise FileNotFoundError(f"Given file not found: {filepath}") 12 | try: 13 | return bpy.data.materials[name] 14 | except KeyError: 15 | bpy.ops.wm.append( 16 | directory=str(file_path / "Material"), 17 | filename=name, 18 | link=False, 19 | ) 20 | return bpy.data.materials[name] 21 | -------------------------------------------------------------------------------- /tests/run.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import sys 3 | 4 | argv = sys.argv 5 | argv = argv[argv.index("--") + 1 :] 6 | 7 | 8 | # run this script like this: 9 | # /Applications/Blender.app/Contents/MacOS/Blender -b -P tests/run.py -- . -v 10 | # /Applications/Blender.app/Contents/MacOS/Blender -b -P tests/run.py -- . 
-k test_color_lookup_supplied 11 | 12 | 13 | def main(): 14 | # run the test suite, and we have to manually return the result value if non-zero 15 | # value is returned for a failing test 16 | if len(argv) == 0: 17 | result = pytest.main() 18 | else: 19 | result = pytest.main(argv) 20 | if result.value != 0: 21 | sys.exit(result.value) 22 | 23 | 24 | if __name__ == "__main__": 25 | main() 26 | -------------------------------------------------------------------------------- /.github/workflows/test-in-blender.yml: -------------------------------------------------------------------------------- 1 | name: Run Tests in Blender 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | pull_request: 7 | branches: ["*"] 8 | 9 | jobs: 10 | build: 11 | runs-on: ${{ matrix.os }} 12 | strategy: 13 | max-parallel: 4 14 | fail-fast: false 15 | matrix: 16 | version: ["4.5", "5.0", "daily"] 17 | os: [macos-14, ubuntu-latest, windows-latest] 18 | steps: 19 | - uses: actions/checkout@v4 20 | - uses: BradyAJohnston/setup-blender@v5 21 | with: 22 | version: ${{ matrix.version }} 23 | - name: Install and Test 24 | run: | 25 | blender -b -P tests/python.py -- -m pip install -e ".[test]" 26 | blender -b -P tests/run.py -- -vv 27 | -------------------------------------------------------------------------------- /.github/workflows/test-daily.yml: -------------------------------------------------------------------------------- 1 | name: Test Daily Build 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * *' 6 | 7 | jobs: 8 | build: 9 | runs-on: ${{ matrix.os }} 10 | strategy: 11 | max-parallel: 4 12 | fail-fast: false 13 | matrix: 14 | version: ["daily"] 15 | os: [ubuntu-latest, macos-latest, windows-latest] 16 | steps: 17 | - uses: actions/checkout@v4 18 | - uses: BradyAJohnston/setup-blender@v3 19 | with: 20 | version: ${{ matrix.version }} 21 | - name: Install in Blender 22 | run: | 23 | blender -b -P tests/python.py -- -m pip install ".[test]" 24 | - name: Run Tests 25 | run: | 26 | blender -b -P tests/run.py -- -vv tests --cov --cov-report=xml 27 | -------------------------------------------------------------------------------- /docs/api/_sidebar.yml: -------------------------------------------------------------------------------- 1 | website: 2 | sidebar: 3 | - contents: 4 | - api/index.qmd 5 | - contents: 6 | - api/named_attribute.qmd 7 | - api/store_named_attribute.qmd 8 | - api/remove_named_attribute.qmd 9 | - api/AttributeDomains.qmd 10 | - api/AttributeTypes.qmd 11 | - api/AttributeArray.qmd 12 | section: Attribute 13 | - contents: 14 | - api/create_collection.qmd 15 | section: Collections 16 | - contents: 17 | - api/create_object.qmd 18 | - api/create_mesh_object.qmd 19 | - api/create_curves_object.qmd 20 | - api/create_pointcloud_object.qmd 21 | - api/create_bob.qmd 22 | - api/evaluate_object.qmd 23 | - api/BlenderObject.qmd 24 | - api/BlenderObjectAttribute.qmd 25 | - api/BlenderObjectBase.qmd 26 | - api/LinkedObjectError.qmd 27 | section: Objects 28 | id: api 29 | - id: dummy-sidebar 30 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "databpy" 3 | version = "0.6.0" 4 | description = "A data-oriented wrapper library for the Blender Python API" 5 | readme = "README.md" 6 | dependencies = [ 7 | "numpy>=1.24.0,<2.0", 8 | ] 9 | requires-python = "~=3.11.0" 10 | keywords = ["blender", "python", "numpy"] 11 | maintainers = [ 12 | {name="Brady Johnston", 
email="brady.johnston@me.com"}, 13 | ] 14 | 15 | [project.urls] 16 | Homepage = "https://bradyajohnston.github.io/databpy" 17 | Repository = "https://github.com/bradyajohnston/databpy" 18 | Documentation = "https://bradyajohnston.github.io/databpy" 19 | 20 | [project.optional-dependencies] 21 | bpy = ["bpy>=4.5"] 22 | test = ["pytest", "pytest-cov"] 23 | dev = ["fake-bpy-module", "polars"] 24 | docs = ["quartodoc", "jupyter", "polars"] 25 | 26 | [tool.uv.workspace] 27 | members = [ 28 | ".", 29 | ] 30 | 31 | [tool.uv.sources] 32 | databpy = { workspace = true } 33 | 34 | [dependency-groups] 35 | dev = [ 36 | "databpy", 37 | ] 38 | 39 | 40 | -------------------------------------------------------------------------------- /.github/workflows/ci-cd.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | tags: 4 | - 'v*' 5 | 6 | jobs: 7 | pypi-publish: 8 | name: Upload release to PyPI 9 | runs-on: ubuntu-latest 10 | environment: 11 | name: pypi 12 | url: https://pypi.org/p/databpy 13 | permissions: 14 | id-token: write 15 | contents: write 16 | 17 | steps: 18 | - name: Checkout 19 | uses: actions/checkout@v4 20 | 21 | - name: Install uv 22 | uses: astral-sh/setup-uv@v4 23 | with: 24 | version: "latest" 25 | 26 | - name: build 27 | run: | 28 | uv build 29 | 30 | - name: Create GH Release 31 | run: | 32 | # Create release with auto-generated notes 33 | gh release create ${{ github.ref_name }} --generate-notes dist/* 34 | env: 35 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 36 | 37 | - name: Publish to PyPI 38 | uses: pypa/gh-action-pypi-publish@release/v1 -------------------------------------------------------------------------------- /.github/workflows/test-upstream.yml: -------------------------------------------------------------------------------- 1 | name: Test Upstream 2 | 3 | on: 4 | pull_request: 5 | branches: ["main"] 6 | 7 | jobs: 8 | test-in-blender: 9 | runs-on: ${{ matrix.os }} 10 | env: 11 | BRANCH_NAME: ${{ github.head_ref || github.ref_name }} 12 | REPO_NAME: "BradyAJohnston/databpy" 13 | strategy: 14 | max-parallel: 4 15 | fail-fast: false 16 | matrix: 17 | version: ["latest"] 18 | os: [ubuntu-latest] 19 | repo: ["BradyAJohnston/MolecularNodes", "kolibril13/blender_csv_import"] 20 | steps: 21 | - uses: actions/checkout@v5 22 | with: 23 | repository: ${{ matrix.repo }} 24 | 25 | - uses: BradyAJohnston/setup-blender@v5 26 | with: 27 | version: ${{ matrix.version }} 28 | 29 | - name: Install and Test 30 | run: | 31 | blender -b -P tests/python.py -- -m pip install -e ".[test]" 32 | blender -b -P tests/python.py -- -m pip install git+https://github.com/${{ env.REPO_NAME}}.git@${{ env.BRANCH_NAME }} 33 | blender -b -P tests/run.py -- -vv -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: docs-build-deploy 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | branches: ["main"] 7 | pull_request: 8 | branches: ["*"] 9 | 10 | jobs: 11 | build-docs: 12 | runs-on: ubuntu-latest 13 | permissions: write-all 14 | steps: 15 | - uses: actions/checkout@v4 16 | - uses: quarto-dev/quarto-actions/setup@v2 17 | - name: Install the latest version of uv 18 | uses: astral-sh/setup-uv@v4 19 | with: 20 | version: "latest" 21 | 22 | - name: Build Docs 23 | run: | 24 | uv sync --all-extras --dev 25 | uv pip install -e . 
26 |           cd docs
27 |           uv run -m quartodoc build
28 |
29 |       - name: Fetch Interlinks
30 |         run: |
31 |           cd docs
32 |           uv run -m quartodoc interlinks
33 |
34 |       - name: Quarto Render
35 |         run: |
36 |           uv run quarto render docs
37 |
38 |       # Only publish to GitHub Pages on push to main, not on PRs
39 |       - name: Publish to GitHub Pages
40 |         if: github.event_name == 'push' && github.ref == 'refs/heads/main'
41 |         uses: quarto-dev/quarto-actions/publish@v2
42 |         with:
43 |           target: gh-pages
44 |           path: docs
45 |           render: false
46 |         env:
47 |           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
48 |
--------------------------------------------------------------------------------
/.github/workflows/tests.yml:
--------------------------------------------------------------------------------
1 | name: Run Tests
2 |
3 | on:
4 |   push:
5 |     branches: ["main"]
6 |   pull_request:
7 |     branches: ["*"]
8 |
9 | jobs:
10 |   build:
11 |     runs-on: ${{ matrix.os }}
12 |     strategy:
13 |       max-parallel: 4
14 |       fail-fast: false
15 |       matrix:
16 |         version: ['4.5', '5.0', '5.1']
17 |         os: [macos-latest, windows-latest, ubuntu-latest]
18 |     steps:
19 |       - uses: actions/checkout@v4
20 |
21 |       - name: Install the latest version of uv
22 |         uses: astral-sh/setup-uv@v4
23 |         with:
24 |           version: "latest"
25 |
26 |       - name: Install
27 |         run: |
28 |           uv sync --all-extras --dev
29 |           uv pip install "bpy==${{ matrix.version }}.*" --extra-index-url https://bradyajohnston.github.io/dailybpy/ --index-strategy unsafe-best-match --prerelease=allow
30 |
31 |       - name: Run tests
32 |         run: |
33 |           uv run -m pytest -vv --cov=databpy --cov-report=xml
34 |
35 |       - name: Upload coverage reports to Codecov
36 |         uses: codecov/codecov-action@v5
37 |         if: matrix.os == 'macos-latest' && matrix.version == '5.0'
38 |         with:
39 |           token: ${{ secrets.CODECOV_TOKEN }}
40 |           files: coverage.xml
41 |
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pytest
3 | from pathlib import Path
4 | import databpy.utils as utils
5 |
6 |
7 | def test_centre_unweighted():
8 |     positions = np.array([[0, 0, 0], [2, 2, 2]])
9 |     result = utils.centre(positions)
10 |     np.testing.assert_array_equal(result, np.array([1, 1, 1]))
11 |
12 |
13 | def test_centre_weighted():
14 |     positions = np.array([[0, 0, 0], [2, 2, 2]])
15 |     weights = np.array([1, 3])
16 |     result = utils.centre(positions, weights)
17 |     np.testing.assert_array_equal(result, np.array([1.5, 1.5, 1.5]))
18 |
19 |
20 | def test_lerp_scalar():
21 |     result = utils.lerp(0, 10, 0.5)
22 |     assert result == 5.0
23 |
24 |
25 | def test_lerp_array():
26 |     a = np.array([0, 0, 0])
27 |     b = np.array([10, 10, 10])
28 |     result = utils.lerp(a, b, 0.5)
29 |     np.testing.assert_array_equal(result, np.array([5, 5, 5]))
30 |
31 |
32 | def test_lerp_extremes():
33 |     a = np.array([1, 1, 1])
34 |     b = np.array([2, 2, 2])
35 |     result_zero = utils.lerp(a, b, 0.0)
36 |     result_one = utils.lerp(a, b, 1.0)
37 |     np.testing.assert_array_equal(result_zero, a)
38 |     np.testing.assert_array_equal(result_one, b)
39 |
40 |
41 | def test_path_resolve_str():
42 |     result = utils.path_resolve("//test.blend")
43 |     assert isinstance(result, Path)
44 |     assert result.is_absolute()
45 |
46 |
47 | def test_path_resolve_path():
48 |     input_path = Path("//test.blend")
49 |     result = 
utils.path_resolve(input_path) 50 | assert isinstance(result, Path) 51 | assert result.is_absolute() 52 | 53 | 54 | def test_path_resolve_invalid(): 55 | with pytest.raises(ValueError): 56 | utils.path_resolve(123) 57 | -------------------------------------------------------------------------------- /databpy/__init__.py: -------------------------------------------------------------------------------- 1 | from .object import ( 2 | ObjectTracker, 3 | BlenderObjectBase, 4 | BlenderObjectAttribute, 5 | BlenderObject, 6 | BOB, 7 | create_object, 8 | create_bob, 9 | create_mesh_object, 10 | create_curves_object, 11 | create_pointcloud_object, 12 | LinkedObjectError, 13 | bdo, 14 | ) 15 | from .vdb import import_vdb 16 | from . import nodes 17 | from .nodes import utils 18 | from .addon import register, unregister 19 | from .utils import centre, lerp 20 | from .collection import create_collection 21 | from .array import AttributeArray 22 | from .attribute import ( 23 | named_attribute, 24 | store_named_attribute, 25 | remove_named_attribute, 26 | list_attributes, 27 | evaluate_object, 28 | Attribute, 29 | AttributeType, 30 | AttributeTypeInfo, 31 | AttributeTypes, 32 | AttributeDomains, 33 | AttributeDomain, 34 | NamedAttributeError, 35 | AttributeMismatchError, 36 | ) 37 | 38 | __all__ = [ 39 | "ObjectTracker", 40 | "BlenderObjectBase", 41 | "BlenderObjectAttribute", 42 | "BlenderObject", 43 | "BOB", 44 | "create_object", 45 | "create_bob", 46 | "create_mesh_object", 47 | "create_curves_object", 48 | "create_pointcloud_object", 49 | "LinkedObjectError", 50 | "bdo", 51 | "import_vdb", 52 | "nodes", 53 | "utils", 54 | "register", 55 | "unregister", 56 | "centre", 57 | "lerp", 58 | "create_collection", 59 | "AttributeArray", 60 | "named_attribute", 61 | "store_named_attribute", 62 | "remove_named_attribute", 63 | "list_attributes", 64 | "evaluate_object", 65 | "Attribute", 66 | "AttributeType", 67 | "AttributeTypeInfo", 68 | "AttributeTypes", 69 | "AttributeDomains", 70 | "AttributeDomain", 71 | "NamedAttributeError", 72 | "AttributeMismatchError", 73 | ] 74 | -------------------------------------------------------------------------------- /databpy/vdb.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import bpy 4 | 5 | from .collection import create_collection 6 | from .object import ObjectTracker 7 | 8 | 9 | def import_vdb( 10 | file: str | Path, collection: str | bpy.types.Collection | None = None 11 | ) -> bpy.types.Object: 12 | """ 13 | Imports a VDB file as a Blender volume object. 14 | 15 | Parameters 16 | ---------- 17 | file : str | Path 18 | Path to the VDB file. 19 | collection : str | bpy.types.Collection | None, optional 20 | Collection to place the imported volume in. Can be either a collection name, 21 | an existing collection, or None to use the active collection. 22 | 23 | Returns 24 | ------- 25 | bpy.types.Object 26 | A Blender object containing the imported volume data. 27 | 28 | Raises 29 | ------ 30 | RuntimeError 31 | If the VDB file could not be imported (e.g., file not found). 
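    Examples
    --------
    A hypothetical usage sketch; the file path and collection name below are
    placeholders rather than files shipped with databpy:

    ```python
    import databpy as db

    volume = db.import_vdb("//volumes/cloud.vdb", collection="Volumes")
    print(volume.type)  # 'VOLUME'
    ```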
32 | """ 33 | # Check if file exists 34 | file_path = Path(file) 35 | if not file_path.exists(): 36 | raise RuntimeError(f"VDB file not found: {file_path}") 37 | 38 | with ObjectTracker() as tracker: 39 | bpy.ops.object.volume_import(filepath=str(file)) 40 | new_objects = tracker.new_objects() 41 | 42 | # Check if any objects were created 43 | if not new_objects: 44 | raise RuntimeError(f"Failed to import VDB file: {file_path}") 45 | 46 | volume_obj = new_objects[-1] 47 | 48 | if collection is not None: 49 | initial_collection = volume_obj.users_collection[0] 50 | initial_collection.objects.unlink(volume_obj) 51 | 52 | target_collection = collection 53 | if isinstance(collection, str): 54 | target_collection = create_collection(collection) 55 | 56 | target_collection.objects.link(volume_obj) 57 | 58 | return volume_obj 59 | -------------------------------------------------------------------------------- /docs/_quarto.yml: -------------------------------------------------------------------------------- 1 | project: 2 | type: website 3 | output-dir: _build 4 | 5 | filters: 6 | - interlinks 7 | 8 | website: 9 | title: "databpy" 10 | navbar: 11 | left: 12 | - href: index.qmd 13 | text: Home 14 | - href: attributes.qmd 15 | text: Attributes 16 | - href: api/index.qmd 17 | text: API 18 | tools: 19 | - icon: github 20 | href: https://github.com/BradyAJohnston/databpy 21 | 22 | format: 23 | html: 24 | page-layout: full 25 | theme: 26 | - sandstone 27 | - style.scss 28 | toc: true 29 | toc-depth: 2 30 | preview-colour: 31 | code: true 32 | text: true 33 | 34 | # tell quarto to read the generated sidebar 35 | metadata-files: 36 | - api/_sidebar.yml 37 | 38 | interlinks: 39 | fast: true 40 | aliases: 41 | databpy: null # Allow shortened internal references 42 | numpy: np 43 | molecularnodes: mn 44 | sources: 45 | numpy: 46 | url: https://numpy.org/doc/stable/ 47 | python: 48 | url: https://docs.python.org/3/ 49 | bpy: 50 | url: https://docs.blender.org/api/current/ 51 | 52 | quartodoc: 53 | # the name used to import the package you want to create reference docs for 54 | package: databpy 55 | dir: api 56 | 57 | # write sidebar and style data 58 | sidebar: api/_sidebar.yml 59 | css: api/_styles-quartodoc.css 60 | 61 | # enable automatic linking of type annotations 62 | render_interlinks: true 63 | 64 | options: 65 | include_inherited: true 66 | 67 | sections: 68 | - title: Attribute 69 | desc: For interacting with attributes on meshes 70 | contents: 71 | - named_attribute 72 | - store_named_attribute 73 | - remove_named_attribute 74 | - AttributeDomains 75 | - AttributeTypes 76 | - AttributeArray 77 | - title: Collections 78 | desc: Working with collections in Blender 79 | contents: 80 | - create_collection 81 | - title: Objects 82 | contents: 83 | # - object.ObjectTracker 84 | - create_object 85 | - create_mesh_object 86 | - create_curves_object 87 | - create_pointcloud_object 88 | - create_bob 89 | - evaluate_object 90 | - BlenderObject 91 | - BlenderObjectAttribute 92 | - BlenderObjectBase 93 | - LinkedObjectError 94 | -------------------------------------------------------------------------------- /docs/style.scss: -------------------------------------------------------------------------------- 1 | /*-- scss:defaults --*/ 2 | 3 | // Simple legibility improvements on top of sandstone theme 4 | 5 | /*-- scss:rules --*/ 6 | 7 | // ============================================================================ 8 | // Typography & Readability 9 | // 
============================================================================ 10 | 11 | body { 12 | line-height: 1.6; 13 | } 14 | 15 | p { 16 | margin-bottom: 1rem; 17 | } 18 | 19 | h1, h2, h3, h4, h5, h6 { 20 | margin-top: 1.5rem; 21 | margin-bottom: 1rem; 22 | line-height: 1.3; 23 | } 24 | 25 | // ============================================================================ 26 | // Code Blocks - Improved Readability 27 | // ============================================================================ 28 | 29 | code { 30 | padding: 0.2em 0.4em; 31 | font-size: 0.9em; 32 | } 33 | 34 | pre { 35 | padding: 1rem; 36 | border-radius: 0.5rem; 37 | margin: 1rem 0; 38 | line-height: 1.5; 39 | } 40 | 41 | // ============================================================================ 42 | // Tables - Better Spacing 43 | // ============================================================================ 44 | 45 | table { 46 | margin: 1.5rem 0; 47 | 48 | th, td { 49 | padding: 0.75rem; 50 | } 51 | } 52 | 53 | // ============================================================================ 54 | // Sidebar/TOC - Text Wrapping 55 | // ============================================================================ 56 | 57 | .sidebar nav .nav-link, 58 | .toc .nav-link, 59 | #TOC .nav-link, 60 | .sidebar-navigation .nav-link { 61 | white-space: normal; 62 | word-wrap: break-word; 63 | line-height: 1.4; 64 | } 65 | 66 | // ============================================================================ 67 | // Callouts - Improved Spacing 68 | // ============================================================================ 69 | 70 | .callout { 71 | margin: 1.5rem 0; 72 | padding: 1rem; 73 | } 74 | 75 | // ============================================================================ 76 | // Links - Better Contrast 77 | // ============================================================================ 78 | 79 | a { 80 | text-decoration: none; 81 | 82 | &:hover { 83 | text-decoration: underline; 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /databpy/utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from pathlib import Path 3 | import bpy 4 | 5 | 6 | def centre(position: np.ndarray, weight: np.ndarray | None = None) -> np.ndarray: 7 | """Calculate the weighted centroid of the vectors. 8 | 9 | Parameters 10 | ---------- 11 | position : np.ndarray 12 | Array of position vectors. 13 | weight : np.ndarray | None, optional 14 | Array of weights for each position. Default is None. 15 | 16 | Returns 17 | ------- 18 | np.ndarray 19 | The weighted centroid of the input vectors. 20 | """ 21 | if weight is None: 22 | return np.average(position, axis=0) 23 | return np.average(position, weights=weight, axis=0) 24 | 25 | 26 | def lerp(a: np.ndarray, b: np.ndarray, t: float = 0.5) -> np.ndarray: 27 | """Linearly interpolate between two values. 28 | 29 | Parameters 30 | ---------- 31 | a : np.ndarray 32 | The starting value. 33 | b : np.ndarray 34 | The ending value. 35 | t : float, optional 36 | The interpolation parameter. Default is 0.5. 37 | 38 | Returns 39 | ------- 40 | np.ndarray 41 | The interpolated value(s). 42 | 43 | Notes 44 | ----- 45 | This function performs linear interpolation between `a` and `b` using the 46 | interpolation parameter `t` such that the result lies between `a` and `b`. 
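    Equivalently, `lerp(a, b, t) == a + (b - a) * t`, so `t = 0` returns `a` and
    `t = 1` returns `b`; values of `t` outside `[0, 1]` extrapolate, since no
    clamping is applied.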
47 | 48 | Examples 49 | -------- 50 | ```{python} 51 | from databpy.utils import lerp 52 | lerp(1, 2, 0.5) 53 | lerp(3, 7, 0.2) 54 | lerp([1, 2, 3], [4, 5, 6], 0.5) 55 | ``` 56 | """ 57 | return a + (b - a) * t 58 | 59 | 60 | def path_resolve(path: str | Path) -> Path: 61 | """Resolve a path string or Path object to an absolute Path. 62 | 63 | Parameters 64 | ---------- 65 | path : str | Path 66 | The path to resolve, either as a string or Path object. 67 | 68 | Returns 69 | ------- 70 | Path 71 | The resolved absolute Path. 72 | 73 | Raises 74 | ------ 75 | ValueError 76 | If the path cannot be resolved or is of invalid type. 77 | """ 78 | if not isinstance(path, (str, Path)): 79 | raise ValueError(f"Path must be string or Path object, got {type(path)}") 80 | 81 | return Path(bpy.path.abspath(str(path))).resolve() 82 | -------------------------------------------------------------------------------- /databpy/collection.py: -------------------------------------------------------------------------------- 1 | import bpy 2 | from bpy.types import Collection 3 | 4 | 5 | def _get_collection(name: str) -> Collection: 6 | """ 7 | Retrieve a Blender collection by name, if it doesn't exist, create it and link to scene. 8 | 9 | Parameters 10 | ---------- 11 | name : str 12 | The name of the collection to retrieve or create 13 | 14 | Returns 15 | ------- 16 | Collection 17 | The retrieved or created Blender collection 18 | """ 19 | if name in bpy.data.collections: 20 | return bpy.data.collections[name] 21 | 22 | coll = bpy.data.collections.new(name) 23 | if bpy.context.scene: 24 | bpy.context.scene.collection.children.link(coll) 25 | return coll 26 | 27 | 28 | def create_collection( 29 | name: str = "NewCollection", parent: Collection | str | None = None 30 | ) -> Collection: 31 | """ 32 | Create a new Blender collection or retrieve an existing one. 33 | 34 | Parameters 35 | ---------- 36 | name : str, optional 37 | The name of the collection to create or retrieve. Default is "NewCollection". 38 | parent : Collection or str or None, optional 39 | The parent collection to link the new collection to. If a string is provided, 40 | it will be used to find an existing collection by name. If None, the new collection 41 | will be linked to the scene's root collection. Default is None. 42 | 43 | Returns 44 | ------- 45 | Collection 46 | The created or retrieved Blender collection. 47 | 48 | Raises 49 | ------ 50 | TypeError 51 | If the parent parameter is not a Collection, string or None. 52 | KeyError 53 | If the parent collection name provided does not exist in bpy.data.collections. 
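    Examples
    --------
    A small sketch of typical usage; the collection names are illustrative:

    ```python
    import databpy as db

    parent = db.create_collection("MyParent")
    child = db.create_collection("MyChild", parent=parent)

    # requesting an existing name returns the existing collection
    assert db.create_collection("MyChild", parent="MyParent") is child
    ```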
54 | """ 55 | if not isinstance(parent, (Collection, str, type(None))): 56 | raise TypeError("Parent must be a Collection, string or None") 57 | 58 | if isinstance(parent, str): 59 | parent = _get_collection(parent) 60 | 61 | coll = _get_collection(name) 62 | 63 | if parent is None: 64 | return coll 65 | 66 | if coll.name in parent.children: 67 | return coll 68 | 69 | parent.children.link(coll) 70 | if bpy.context.scene and coll.name in bpy.context.scene.collection.children: 71 | bpy.context.scene.collection.children.unlink(coll) 72 | 73 | return coll 74 | -------------------------------------------------------------------------------- /tests/test_bob.py: -------------------------------------------------------------------------------- 1 | import databpy as db 2 | import bpy 3 | import numpy as np 4 | 5 | np.random.seed(11) 6 | 7 | 8 | def test_get_position(): 9 | bpy.ops.wm.read_factory_settings() 10 | 11 | att = db.named_attribute(bpy.data.objects["Cube"], "position") 12 | # Verify basic properties of the position attribute 13 | assert att.shape == (8, 3) # Default cube has 8 vertices 14 | assert att.dtype in (np.float32, np.float64) # Should be float type 15 | # Verify it contains reasonable position data 16 | assert np.all(np.abs(att) <= 10.0) # Positions should be reasonable values 17 | 18 | 19 | def test_set_position(): 20 | bpy.ops.wm.read_factory_settings() 21 | obj = bpy.data.objects["Cube"] 22 | pos_a = db.named_attribute(obj, "position") 23 | 24 | # Store new random positions 25 | new_positions = np.random.randn(len(obj.data.vertices), 3) 26 | db.store_named_attribute(obj, new_positions, "position") 27 | pos_b = db.named_attribute(obj, "position") 28 | 29 | # Verify positions changed 30 | assert not np.allclose(pos_a, pos_b) 31 | # Verify new positions match what we set 32 | assert np.allclose(pos_b, new_positions) 33 | # Verify shapes are correct 34 | assert pos_a.shape == (8, 3) 35 | assert pos_b.shape == (8, 3) 36 | 37 | 38 | def test_bob(): 39 | bpy.ops.wm.read_factory_settings() 40 | bob = db.BlenderObject(bpy.data.objects["Cube"]) 41 | 42 | pos_a = bob.named_attribute("position") 43 | 44 | # Store new random positions 45 | new_positions = np.random.randn(len(bob), 3) 46 | bob.store_named_attribute(new_positions, "position") 47 | pos_b = bob.named_attribute("position") 48 | 49 | # Verify positions changed 50 | assert not np.allclose(pos_a, pos_b) 51 | # Verify new positions match what we set 52 | assert np.allclose(pos_b, new_positions) 53 | # Verify shapes and basic properties 54 | assert pos_a.shape == (8, 3) 55 | assert pos_b.shape == (8, 3) 56 | assert len(bob) == 8 # Default cube has 8 vertices 57 | 58 | 59 | # test that we aren't overwriting an existing UUID on an object, when wrapping it with 60 | # with BlenderObject 61 | def test_bob_mismatch_uuid(): 62 | bob = db.BlenderObject(bpy.data.objects["Cube"]) 63 | obj = bob.object 64 | old_uuid = obj.uuid 65 | bob = db.BlenderObject(obj) 66 | assert old_uuid == bob.uuid 67 | 68 | 69 | def test_register(): 70 | db.unregister() 71 | db.BlenderObject(bpy.data.objects["Cube"]) 72 | -------------------------------------------------------------------------------- /databpy/nodes/utils.py: -------------------------------------------------------------------------------- 1 | import bpy 2 | 3 | 4 | NODE_DUP_SUFFIX = r"\.\d{3}$" 5 | 6 | 7 | class NodeGroupCreationError(Exception): 8 | def __init__(self, message): 9 | self.message = message 10 | super().__init__(self.message) 11 | 12 | 13 | def get_output(group): 14 | return 
group.nodes[ 15 | bpy.app.translations.pgettext_data( 16 | "Group Output", 17 | ) 18 | ] 19 | 20 | 21 | def get_input(group): 22 | return group.nodes[ 23 | bpy.app.translations.pgettext_data( 24 | "Group Input", 25 | ) 26 | ] 27 | 28 | 29 | class MaintainConnections: 30 | # capture input and output links, so we can rebuild the links based on name 31 | # and the sockets they were connected to 32 | # as we collect them, remove the links so they aren't automatically connected 33 | # when we change the node_tree for the group 34 | 35 | def __init__(self, node: bpy.types.GeometryNode) -> None: 36 | self.node = node 37 | self.input_links = [] 38 | self.output_links = [] 39 | 40 | def __enter__(self): 41 | "Store all the connections in and out of this node for rebuilding on exit." 42 | self.node_tree = self.node.id_data 43 | 44 | for input in self.node.inputs: 45 | for input_link in input.links: 46 | self.input_links.append((input_link.from_socket, input.name)) 47 | self.node_tree.links.remove(input_link) 48 | 49 | for output in self.node.outputs: 50 | for output_link in output.links: 51 | self.output_links.append((output.name, output_link.to_socket)) 52 | self.node_tree.links.remove(output_link) 53 | 54 | try: 55 | self.material = self.node.inputs["Material"].default_value 56 | except KeyError: 57 | self.material = None 58 | 59 | def __exit__(self, type, value, traceback): 60 | "Rebuild the connections in and out of this node that were stored on entry." 61 | # rebuild the links based on names of the sockets, not their identifiers 62 | link = self.node_tree.links.new 63 | for input_link in self.input_links: 64 | try: 65 | link(input_link[0], self.node.inputs[input_link[1]]) 66 | except KeyError: 67 | pass 68 | for output_link in self.output_links: 69 | try: 70 | link(self.node.outputs[output_link[0]], output_link[1]) 71 | except KeyError: 72 | pass 73 | 74 | # reset all values to tree defaults 75 | tree = self.node.node_tree 76 | for item in tree.interface.items_tree: 77 | if item.item_type == "PANEL": 78 | continue 79 | if item.in_out == "INPUT": 80 | if hasattr(item, "default_value"): 81 | self.node.inputs[item.identifier].default_value = item.default_value 82 | 83 | if self.material: 84 | try: 85 | self.node.inputs["Material"].default_value = self.material 86 | except KeyError: 87 | # the new node doesn't contain a material slot 88 | pass 89 | -------------------------------------------------------------------------------- /databpy/nodes/generating.py: -------------------------------------------------------------------------------- 1 | import bpy 2 | from .utils import MaintainConnections, NodeGroupCreationError, get_input, get_output 3 | from typing import Iterable 4 | 5 | 6 | def swap_tree(node: bpy.types.GeometryNode, tree: bpy.types.GeometryNodeTree) -> None: 7 | with MaintainConnections(node): 8 | node.node_tree = tree # type: ignore 9 | node.name = tree.name 10 | 11 | 12 | def new_tree( 13 | name: str = "Geometry Nodes", 14 | geometry: bool = True, 15 | input_name: str = "Geometry", 16 | output_name: str = "Geometry", 17 | fallback: bool = True, 18 | ) -> bpy.types.NodeTree: 19 | tree = bpy.data.node_groups.get(name) 20 | # if the group already exists, return it and don't create a new one 21 | if tree and fallback: 22 | return tree 23 | 24 | # create a new group for this particular name and do some initial setup 25 | tree = bpy.data.node_groups.new(name, "GeometryNodeTree") 26 | input_node = tree.nodes.new("NodeGroupInput") 27 | output_node = tree.nodes.new("NodeGroupOutput") 28 | 
input_node.location.x = -200 - input_node.width
29 |     output_node.location.x = 200
30 |     if geometry:
31 |         tree.interface.new_socket(
32 |             input_name, in_out="INPUT", socket_type="NodeSocketGeometry"
33 |         )
34 |         tree.interface.new_socket(
35 |             output_name, in_out="OUTPUT", socket_type="NodeSocketGeometry"
36 |         )
37 |         tree.links.new(output_node.inputs[0], input_node.outputs[0])
38 |     return tree
39 |
40 |
41 | def custom_string_iswitch(
42 |     name: str, values: Iterable[str], attr_name: str = "attr_id"
43 | ) -> bpy.types.NodeTree:
44 |     """
45 |     Creates a node group containing an `Index Switch` node with all the given values.
46 |     """
47 |
48 |     # don't attempt to return an already existing node tree. If a user is requesting a
49 |     # new one they are likely passing in a new list, so we have to create a new one
50 |     # to ensure we are using the new iterable
51 |     tree = new_tree(name=name, geometry=False, fallback=False)
52 |     # the name might originally have been the same, but on creation it might be name.001
53 |     # or something similar, so we just grab the name from the tree
54 |     name = tree.name
55 |     tree.color_tag = "CONVERTER"
56 |
57 |     # try creating the node group; on failure, clean up the created group and
58 |     # report the error
59 |     try:
60 |         link = tree.links.new
61 |         node_input = get_input(tree)
62 |         socket_in = tree.interface.new_socket(
63 |             attr_name, in_out="INPUT", socket_type="NodeSocketInt"
64 |         )
65 |         socket_in.name = attr_name
66 |         node_output = get_output(tree)
67 |         socket_out = tree.interface.new_socket(
68 |             attr_name, in_out="OUTPUT", socket_type="NodeSocketString"
69 |         )
70 |         socket_out.name = "String"
71 |
72 |         node_iswitch: bpy.types.GeometryNodeIndexSwitch = tree.nodes.new(  # type: ignore
73 |             "GeometryNodeIndexSwitch"
74 |         )
75 |         node_iswitch.data_type = "STRING"
76 |         link(node_input.outputs[socket_in.identifier], node_iswitch.inputs["Index"])
77 |
78 |         for i, item in enumerate(values):
79 |             # the node starts with 2 items already, so we only create new items
80 |             # if they are above that
81 |             if i > 1:
82 |                 node_iswitch.index_switch_items.new()
83 |
84 |             node_iswitch.inputs[int(i + 1)].default_value = item
85 |
86 |         link(
87 |             node_iswitch.outputs["Output"],
88 |             node_output.inputs[socket_out.identifier],
89 |         )
90 |
91 |         return tree
92 |
93 |     # if something broke when creating the node group, delete whatever was created
94 |     except Exception as e:
95 |         node_name = tree.name
96 |         bpy.data.node_groups.remove(tree)
97 |         raise NodeGroupCreationError(
98 |             f"Unable to make node group: {node_name}.\nError: {e}"
99 |         )
100 |
--------------------------------------------------------------------------------
/docs/index.qmd:
--------------------------------------------------------------------------------
1 | # This is databpy!
2 |
3 | ::: {.callout-warning}
4 | # Stability
5 | Active development and refinements are still ongoing. We are not yet past the `1.0` milestone and there may be breaking changes before we get there. `databpy` has matured a lot and the primary interfaces of getting and setting attributes should mostly remain stable.
6 | :::
7 |
8 | [![codecov](https://codecov.io/gh/BradyAJohnston/databpy/graph/badge.svg?token=KFuu67hzAz)](https://codecov.io/gh/BradyAJohnston/databpy)
9 | [![pypi](https://img.shields.io/pypi/v/databpy.png)](https://pypi.org/project/databpy/)
10 | ![tests](https://github.com/bradyajohnston/databpy/actions/workflows/tests.yml/badge.svg)
11 | ![deployment](https://github.com/bradyajohnston/databpy/actions/workflows/ci-cd.yml/badge.svg)
12 |
13 | ![](img/notdavid.png)
14 |
15 | This is intended as a cleaner way to work with data and attributes on mesh objects in Blender. The goal is to make importing tabular datasets from outside of Blender, and working with them as objects _inside_ of Blender, a whole lot easier.
16 |
17 |
18 | ::: {.callout-tip}
19 | # New to databpy?
20 | Check out the [**Attributes guide**](attributes.qmd) for a comprehensive overview of the attribute system, including performance tips, best practices, and examples for all attribute types.
21 | :::
22 |
23 | `databpy` originally started as a submodule inside of [Molecular Nodes](https://github.com/BradyAJohnston/MolecularNodes) but has been split off into its own package for use by other projects. Some internal code is still quite specific to the use case of Molecular Nodes, but the majority is more general.
24 |
25 |
26 | ## Hello World
27 | The 'Hello World' example is storing and retrieving data from a mesh object inside of Blender.
28 |
29 | While the `bpy` API is robust, it is a bit too verbose for what we are after, and there are many particulars that you can't intuit (you have to store the `value` for floats, `vector` for 2- and 3-component vectors, but `value` for a Quaternion, which is a 4-component vector).
30 |
31 | See the enums in the API documentation for the different [`AttributeTypes`](`databpy.AttributeTypes`) and [`AttributeDomains`](`databpy.AttributeDomains`).
32 |
33 | ### Regular `bpy` API
34 |
35 | ```{python}
36 | import bpy
37 | import numpy as np
38 |
39 | np.random.seed(6)
40 |
41 | new_float_values = np.random.randn(8 * 3).reshape(-1, 3)
42 |
43 | obj = bpy.data.objects["Cube"]
44 |
45 | # create new attribute, then fill with data from a 1D numpy array
46 | att = obj.data.attributes.new("test_float", "FLOAT_VECTOR", "POINT")
47 | att.data.foreach_set("vector", new_float_values.reshape(-1))
48 |
49 | # initialise empty array to fill, get data and then reshape to correct dimensions
50 | empty_vector = np.zeros(len(att.data) * 3, dtype=float)
51 | att.data.foreach_get("vector", empty_vector)
52 | empty_vector.reshape((-1, 3))
53 | ```
54 |
55 | ### `databpy` API
56 |
57 | We can get and set values with single function calls. Data types are inferred from the numpy array data types, but can be manually specified. The point domain is the default, but the domain can also be specified. See [`AttributeDomains`](`databpy.AttributeDomains`) for which domains can be chosen.
58 |
59 | ```{python}
60 | import databpy as db
61 | db.store_named_attribute(obj, new_float_values, "example_values")
62 | db.named_attribute(obj, "example_values")
63 | ```
64 |
65 |
66 | ## A more friendly Blender Object (bob)
67 |
68 | Doing some common data-oriented operations on objects in Blender can be a bit of a pain, so `databpy` provides a [`BlenderObject`](`databpy.BlenderObject`) class that wraps mesh objects and provides some convenience functions.
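A new mesh object can be created and wrapped in a single call with [`create_bob()`](`databpy.create_bob`). A small sketch; the object name here is arbitrary:

```{python}
verts = np.random.rand(10, 3)
new_bob = db.create_bob(verts, name="ExampleBob")
new_bob.name
```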
69 | 70 | The most useful are the [`store_named_attribute()`](`databpy.BlenderObject.store_named_attribute`) and [`named_attribute()`](`databpy.BlenderObject.named_attribute`) methods that just work on the mesh object, so you can quickly get and set attributes with bob. 71 | 72 | ```{python} 73 | bob = db.BlenderObject(bpy.data.objects["Cube"]) 74 | 75 | bob.store_named_attribute(new_float_values, "example_values") 76 | bob.named_attribute("example_values") 77 | ``` -------------------------------------------------------------------------------- /tests/test_base_object.py: -------------------------------------------------------------------------------- 1 | """Tests for BlenderObjectBase class methods.""" 2 | 3 | import numpy as np 4 | import bpy 5 | import databpy as db 6 | 7 | 8 | class TestBlenderObjectBase: 9 | """Test core BlenderObjectBase functionality.""" 10 | 11 | def test_setitem_syntax_new_attribute(self): 12 | """Test setting a new attribute using dictionary-style syntax.""" 13 | vertices = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0]]) 14 | bob = db.BlenderObject.from_mesh(vertices) 15 | 16 | # Set a new custom attribute 17 | custom_data = np.array([1.0, 2.0, 3.0]) 18 | bob["custom_attr"] = custom_data 19 | 20 | # Verify it was stored 21 | retrieved = bob.named_attribute("custom_attr") 22 | assert np.allclose(retrieved, custom_data) 23 | 24 | def test_setitem_syntax_existing_attribute(self): 25 | """Test updating an existing attribute using dictionary-style syntax.""" 26 | vertices = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0]]) 27 | bob = db.BlenderObject.from_mesh(vertices) 28 | 29 | # Store initial attribute 30 | initial_data = np.array([1.0, 2.0, 3.0]) 31 | bob.store_named_attribute(initial_data, "test_attr") 32 | 33 | # Update using setitem 34 | new_data = np.array([4.0, 5.0, 6.0]) 35 | bob["test_attr"] = new_data 36 | 37 | # Verify it was updated 38 | retrieved = bob.named_attribute("test_attr") 39 | assert np.allclose(retrieved, new_data) 40 | assert not np.allclose(retrieved, initial_data) 41 | 42 | def test_data_property_mesh(self): 43 | """Test data property returns correct data block for mesh.""" 44 | vertices = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0]]) 45 | bob = db.BlenderObject.from_mesh(vertices) 46 | 47 | # Access data property 48 | data = bob.data 49 | assert isinstance(data, bpy.types.Mesh) 50 | assert len(data.vertices) == 3 51 | 52 | def test_data_property_curves(self): 53 | """Test data property returns correct data block for curves.""" 54 | positions = np.random.random((5, 3)).astype(np.float32) 55 | curve_sizes = [5] 56 | bob = db.BlenderObject.from_curves(positions, curve_sizes) 57 | 58 | # Access data property 59 | data = bob.data 60 | assert isinstance(data, bpy.types.Curves) 61 | 62 | def test_data_property_pointcloud(self): 63 | """Test data property returns correct data block for point cloud.""" 64 | positions = np.random.random((10, 3)).astype(np.float32) 65 | bob = db.BlenderObject.from_pointcloud(positions) 66 | 67 | # Access data property 68 | data = bob.data 69 | assert isinstance(data, bpy.types.PointCloud) 70 | assert len(data.points) == 10 71 | 72 | def test_attributes_property(self): 73 | """Test attributes property returns attributes collection.""" 74 | vertices = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0]]) 75 | bob = db.BlenderObject.from_mesh(vertices) 76 | 77 | # Access attributes property 78 | attrs = bob.attributes 79 | # Verify it's an attributes collection by checking interface 80 | assert hasattr(attrs, "__getitem__") 81 | assert 
"position" in attrs 82 | 83 | def test_attributes_property_with_custom_attributes(self): 84 | """Test attributes property includes custom attributes.""" 85 | vertices = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0]]) 86 | bob = db.BlenderObject.from_mesh(vertices) 87 | 88 | # Add custom attribute 89 | custom_data = np.array([1.0, 2.0, 3.0]) 90 | bob.store_named_attribute(custom_data, "custom_attr") 91 | 92 | # Verify it's in attributes collection 93 | attrs = bob.attributes 94 | assert "custom_attr" in attrs 95 | assert attrs["custom_attr"] is not None 96 | 97 | def test_evaluate_method(self): 98 | """Test evaluate method returns evaluated object.""" 99 | vertices = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0]]) 100 | bob = db.BlenderObject.from_mesh(vertices) 101 | 102 | # Call evaluate 103 | evaluated_obj = bob.evaluate() 104 | assert isinstance(evaluated_obj, bpy.types.Object) 105 | assert evaluated_obj.data is not None 106 | -------------------------------------------------------------------------------- /tests/test_obj.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import databpy as db 3 | from databpy import LinkedObjectError, bdo 4 | import bpy 5 | import pytest 6 | 7 | 8 | def test_creat_obj(): 9 | # Create a mesh object named "MyMesh" in the collection "MyCollection" 10 | # with vertex locations and bond edges. 11 | locations = [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0]] 12 | bonds = [(0, 1), (1, 2), (2, 0)] 13 | name = "MyMesh" 14 | my_object = db.create_object(locations, bonds, name=name) 15 | 16 | assert len(my_object.data.vertices) == 3 17 | assert my_object.name == name 18 | assert my_object.name != "name" 19 | 20 | 21 | def test_BlenderObject(): 22 | bob = db.BlenderObject(None) 23 | 24 | with pytest.raises(LinkedObjectError): 25 | bob.object 26 | with pytest.raises(LinkedObjectError): 27 | bob.name 28 | with pytest.raises(LinkedObjectError): 29 | bob.name = "testing" 30 | 31 | bob = db.BlenderObject(bdo["Cube"]) 32 | assert bob.name == "Cube" 33 | bob.name = "NewName" 34 | with pytest.raises(KeyError): 35 | bdo["Cube"] 36 | assert bob.name == "NewName" 37 | 38 | 39 | def test_set_position(): 40 | bob = db.BlenderObject(bdo["Cube"]) 41 | pos_a = bob.position 42 | bob.position += 10 43 | pos_b = bob.position 44 | assert not np.allclose(pos_a, pos_b) 45 | assert np.allclose(pos_a, pos_b - 10, rtol=0.1) 46 | 47 | 48 | def test_centroid(): 49 | bpy.ops.wm.read_factory_settings() 50 | # Create test object with known vertices 51 | verts = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]]) 52 | bob = db.create_bob(verts, name="TestObject") 53 | 54 | # Test unweighted centroid 55 | centroid = bob.centroid() 56 | assert np.allclose(centroid, np.array([1, 1, 1])) 57 | assert np.allclose(db.centre(verts), np.array([1, 1, 1])) 58 | 59 | # Test weighted centroid with float weights 60 | weights = np.array([0.5, 0.3, 0.2]) 61 | weighted_centroid = bob.centroid(weights) 62 | expected = np.average(verts, weights=weights, axis=0) 63 | assert np.allclose(weighted_centroid, expected) 64 | assert np.allclose(db.utils.centre(verts, weight=weights), expected) 65 | 66 | # Test centroid with integer index selection 67 | indices = np.array([0, 1]) 68 | indexed_centroid = bob.centroid(indices) 69 | expected = np.mean(verts[indices], axis=0) 70 | assert np.allclose(indexed_centroid, expected) 71 | 72 | # Test centroid with named attribute weights 73 | db.store_named_attribute(bob.object, weights, "weights") 74 | named_centroid = 
bob.centroid("weights")
75 |     expected = np.average(verts, weights=weights, axis=0)
76 |     assert np.allclose(named_centroid, expected)
77 |
78 |
79 | def test_change_names():
80 |     bob_cube = db.BlenderObject("Cube")
81 |     assert bob_cube.name == "Cube"
82 |     with db.ObjectTracker() as o:
83 |         bpy.ops.mesh.primitive_cylinder_add()
84 |         bob_cyl = db.BlenderObject(o.latest())
85 |
86 |     assert bob_cyl.name == "Cylinder"
87 |     assert len(bob_cube) != len(bob_cyl)
88 |
89 |     # rename the objects, but separately from the linked BlenderObject, so that the
90 |     # reference will have to be rebuilt from the .uuid when the names don't match
91 |     bpy.data.objects["Cylinder"].name = "Cylinder2"
92 |     bpy.data.objects["Cube"].name = "Cylinder"
93 |
94 |     # ensure that the reference to the actual object is updated, so that even if the name
95 |     # has changed the reference is reconnected via the .uuid
96 |     assert len(bob_cube) == 8
97 |     assert bob_cube.name == "Cylinder"
98 |     assert bob_cyl.name == "Cylinder2"
99 |
100 |
101 | def test_matrix_read_write():
102 |     bob = db.create_bob(np.zeros((5, 3)))
103 |     arr = np.random.rand(5, 4, 4)
104 |
105 |     bob.store_named_attribute(
106 |         data=arr, name="test_matrix", atype=db.AttributeTypes.FLOAT4X4
107 |     )
108 |
109 |     assert np.allclose(bob.named_attribute("test_matrix"), arr)
110 |     arr2 = np.random.rand(5, 4, 4)
111 |     bob.store_named_attribute(data=arr2, name="test_matrix2")
112 |     assert (
113 |         bob.data.attributes["test_matrix2"].data_type
114 |         == db.AttributeTypes.FLOAT4X4.value.type_name
115 |     )
116 |     assert not np.allclose(bob.named_attribute("test_matrix2"), arr)
117 |
118 |
119 | def test_newfrompydata():
120 |     values = np.random.rand(8, 3)
121 |     bob = db.BlenderObject()
122 |     with pytest.raises(LinkedObjectError):
123 |         bob.new_from_pydata(values)
124 |
125 |     bob = db.BlenderObject.from_pointcloud(values)
126 |     with pytest.raises(TypeError):
127 |         bob.new_from_pydata(values)
128 |
--------------------------------------------------------------------------------
/README.qmd:
--------------------------------------------------------------------------------
1 | ---
2 | format: gfm
3 | jupyter: python3
4 | title: databpy
5 | ---
6 |
7 | [![codecov](https://codecov.io/gh/BradyAJohnston/databpy/graph/badge.svg?token=KFuu67hzAz)](https://codecov.io/gh/BradyAJohnston/databpy)
8 | [![pypi](https://img.shields.io/pypi/v/databpy)](https://pypi.org/project/databpy/)
9 | ![tests](https://github.com/bradyajohnston/databpy/actions/workflows/tests.yml/badge.svg)
10 | ![deployment](https://github.com/bradyajohnston/databpy/actions/workflows/ci-cd.yml/badge.svg)
11 |
12 | ![](docs/img/notdavid.png)
13 |
14 | A set of data-oriented wrappers around the Python API of Blender.
15 |
16 | This was originally used internally inside of [Molecular Nodes](https://github.com/BradyAJohnston/MolecularNodes) but was broken out into a separate Python module for re-use in other projects.
17 |
18 | ## Installation
19 | Available on PyPI, install with pip:
20 |
21 | ```bash
22 | pip install databpy
23 | ```
24 |
25 | ::: {.callout-caution}
26 |
27 | `bpy` (Blender as a Python module) is listed as an optional dependency, so that if you install `databpy` inside of Blender you won't install a redundant version of `bpy`. 
If you are using this outside of Blender, you will need to specifically request `bpy` with either of these methods:
28 | ```bash
29 | # install with the bpy dependency
30 | pip install 'databpy[bpy]'
31 |
32 | # install both packages
33 | pip install databpy bpy
34 |
35 | # install with all optional dependencies
36 | pip install 'databpy[bpy,test,dev,docs]'
37 | ```
38 | :::
39 |
40 | ## Usage
41 |
42 | The main use cases are creating objects and storing and retrieving attributes from them. The functions are named after the Geometry Nodes nodes `Store Named Attribute` and `Named Attribute`.
43 |
44 | ```python
45 | import databpy as db
46 |
47 | db.store_named_attribute()  # store a named attribute on a mesh object
48 | db.named_attribute()  # retrieve a named attribute from a mesh object
49 | ```
50 |
51 | Here's an example of how to store an attribute:
52 | ```python
53 | import numpy as np
54 | import databpy as db
55 |
56 | coords = np.array([
57 |     [0, 0, 0],
58 |     [0, 5, 0],
59 |     [5, 0, 0],
60 |     [5, 5, 0]
61 | ])
62 |
63 | obj = db.create_object(coords, name="Box")
64 | db.store_named_attribute(obj, np.array([10, 20, 31, 42]), "vals")
65 | ```
66 | ![image](https://github.com/user-attachments/assets/2af6046a-8d73-4881-af63-8ed175fe2136)
67 |
68 | This module is mainly used to create mesh objects and work with their attributes. It is built to store and retrieve data using NumPy arrays:
69 |
70 | ```{python}
71 | #| echo: false
72 | #| output: false
73 | import bpy
74 | bpy.ops.wm.read_homefile(app_template="")
75 | ```
76 |
77 |
78 | ```{python}
79 | import numpy as np
80 | import databpy as db
81 | np.random.seed(6)
82 |
83 | # Create a mesh object
84 | random_verts = np.random.rand(10, 3)
85 |
86 | obj = db.create_object(random_verts, name="RandomMesh")
87 |
88 | obj.name
89 | ```
90 |
91 | Access attributes from the object's mesh.
92 | ```{python}
93 |
94 | db.named_attribute(obj, 'position')
95 |
96 | ```
97 |
98 |
99 | ### `BlenderObject` class (bob)
100 |
101 | This is a convenience class that wraps around the `bpy.types.Object` and provides access to all of the useful functions. We can wrap an existing Object or return one when creating a new object.
102 |
103 | This just gives us access to the `named_attribute()` and `store_named_attribute()` functions on the object class, but also provides a more intuitive way to access the object's attributes.
104 | 105 | ```{python} 106 | bob = db.BlenderObject(obj) # wraps the existing object 107 | bob = db.create_bob(random_verts) # creates a new object and returns it already wrapped 108 | 109 | # these two are identical 110 | bob.named_attribute('position') 111 | bob.position 112 | ``` 113 | 114 | We can clear all of the data from the object and initialise a new mesh underneath: 115 | 116 | ```{python} 117 | bob.new_from_pydata(np.random.randn(5, 3)) 118 | bob.position 119 | ``` 120 | 121 | ## Example with Polars data 122 | 123 | ```{python} 124 | import polars as pl 125 | import databpy as db 126 | from io import StringIO 127 | 128 | json_file = StringIO(""" 129 | { 130 | "Dino": [ 131 | [55.3846, 97.1795, 0.0], 132 | [51.5385, 96.0256, 0.0] 133 | ], 134 | "Star": [ 135 | [58.2136, 91.8819, 0.0], 136 | [58.1961, 92.215, 0.0] 137 | ] 138 | } 139 | """) 140 | 141 | df = pl.read_json(json_file) 142 | columns_to_explode = [col for col in df.columns if df[col].dtype == pl.List(pl.List)] 143 | df = df.explode(columns_to_explode) 144 | 145 | vertices = np.zeros((len(df), 3), dtype=np.float32) 146 | bob = db.create_bob(vertices, name="DinoStar") 147 | 148 | for col in df.columns: 149 | data = np.vstack(df.get_column(col).to_numpy()) 150 | bob.store_named_attribute(data, col) 151 | 152 | bob.named_attribute("Dino") 153 | ``` 154 | 155 | 156 | ```{python} 157 | bob.named_attribute("Star") 158 | ``` -------------------------------------------------------------------------------- /databpy/nodes/appending.py: -------------------------------------------------------------------------------- 1 | import re 2 | import time 3 | import warnings 4 | from pathlib import Path 5 | from typing import List 6 | 7 | import bpy 8 | 9 | from .utils import NODE_DUP_SUFFIX 10 | 11 | 12 | def deduplicate_node_trees(node_trees: List[bpy.types.NodeTree]): 13 | """Deduplicate node trees by remapping duplicates to their originals. 14 | 15 | Identifies node trees with duplicate naming patterns (e.g., "NodeTree.001", 16 | "NodeTree.002") and remaps all references to point to the original node tree 17 | (e.g., "NodeTree"). Uses Blender's `user_remap()` API to automatically update 18 | all references throughout the blend file, then removes the duplicate node trees. 19 | 20 | Parameters 21 | ---------- 22 | node_trees : List[bpy.types.NodeTree] 23 | List of node trees to check for duplicates. Typically obtained from 24 | `bpy.data.node_groups` or a filtered subset thereof. 25 | 26 | Returns 27 | ------- 28 | None 29 | This function modifies the Blender data in-place. 30 | 31 | Notes 32 | ----- 33 | - Duplicate pattern: Matches node trees with names ending in ".###" where 34 | ### is a 3-digit number (e.g., ".001", ".042", ".999") 35 | - Thread-safe: No, modifies global Blender data structures 36 | - Uses a set for O(1) lookup performance when checking already-processed trees 37 | 38 | Examples 39 | -------- 40 | Deduplicate all node groups in the current blend file: 41 | 42 | ```python 43 | import bpy 44 | from databpy.nodes import deduplicate_node_trees 45 | 46 | node_trees = list(bpy.data.node_groups) 47 | deduplicate_node_trees(node_trees) 48 | ``` 49 | 50 | Deduplicate only geometry node trees: 51 | 52 | ```python 53 | geometry_trees = [ 54 | tree for tree in bpy.data.node_groups 55 | if tree.type == 'GEOMETRY' 56 | ] 57 | deduplicate_node_trees(geometry_trees) 58 | ``` 59 | 60 | Deduplicate newly imported node trees: 61 | 62 | ```python 63 | before_import = set(ng.name for ng in bpy.data.node_groups) 64 | # ... 
import operation that may create duplicates ... 65 | new_trees = [ 66 | tree for tree in bpy.data.node_groups 67 | if tree.name not in before_import 68 | ] 69 | deduplicate_node_trees(new_trees) 70 | ``` 71 | 72 | See Also 73 | -------- 74 | cleanup_duplicates : Higher-level function that handles collection and purging 75 | DuplicatePrevention : Context manager for preventing duplicates during import 76 | """ 77 | node_duplicate_pattern = re.compile(NODE_DUP_SUFFIX) 78 | to_remove: set[bpy.types.NodeTree] = set() 79 | 80 | # First pass: identify all duplicates and their replacements 81 | remap_pairs = [] 82 | 83 | for node_tree in node_trees: 84 | # Skip if already marked for removal 85 | if node_tree in to_remove: 86 | continue 87 | 88 | old_name = node_tree.name 89 | 90 | if node_duplicate_pattern.search(old_name): 91 | # Remove the numeric suffix to get the original name 92 | name_sans = old_name.rsplit(".", 1)[0] 93 | replacement = bpy.data.node_groups.get(name_sans) 94 | 95 | # Only remap if replacement exists and isn't also marked for removal 96 | if replacement and replacement not in to_remove: 97 | remap_pairs.append((node_tree, replacement)) 98 | to_remove.add(node_tree) 99 | 100 | # Second pass: perform all remappings 101 | for node_tree, replacement in remap_pairs: 102 | node_tree.user_remap(replacement) 103 | 104 | # Third pass: remove all duplicates 105 | for tree in to_remove: 106 | try: 107 | bpy.data.node_groups.remove(tree) 108 | except ReferenceError: 109 | pass 110 | 111 | 112 | def cleanup_duplicates(purge: bool = False): 113 | # Collect all node trees from node groups, excluding "NodeGroup" named ones 114 | node_trees = [tree for tree in bpy.data.node_groups if "NodeGroup" not in tree.name] 115 | 116 | # Call the deduplication function with the collected node trees 117 | deduplicate_node_trees(node_trees) 118 | 119 | if purge: 120 | # Purge orphan data blocks from the file 121 | bpy.ops.outliner.orphans_purge() 122 | 123 | 124 | class DuplicatePrevention: 125 | "Context manager to cleanup duplicated node trees when appending node groups" 126 | 127 | def __init__(self, timing=False): 128 | self.old_names: List[str] = [] 129 | self.start_time: float = 0.0 130 | self.timing = timing 131 | 132 | def __enter__(self): 133 | self.old_names = [tree.name for tree in bpy.data.node_groups] 134 | if self.timing: 135 | self.start_time = time.time() 136 | 137 | def __exit__(self, type, value, traceback): 138 | deduplicate_node_trees( 139 | [tree for tree in bpy.data.node_groups if tree.name not in self.old_names] 140 | ) 141 | if self.timing: 142 | end_time = time.time() 143 | print(f"De-duplication time: {end_time - self.start_time:.2f} seconds") 144 | 145 | 146 | def append_from_blend( 147 | name: str, filepath: str | Path, link: bool = False 148 | ) -> bpy.types.NodeTree: 149 | "Append a Geometry Nodes node tree from the given .blend file" 150 | # to access the nodes we need to specify the "NodeTree" folder but this isn't a real 151 | # folder, just for accessing when appending. 
Ensure that the filepath ends with "NodeTree" 152 | filepath = str(Path(filepath)).removesuffix("NodeTree") 153 | filepath = str(Path(filepath) / "NodeTree") 154 | try: 155 | return bpy.data.node_groups[name] 156 | except KeyError: 157 | with warnings.catch_warnings(): 158 | warnings.simplefilter("ignore") 159 | with DuplicatePrevention(): 160 | # Append from NodeTree directory inside blend file 161 | bpy.ops.wm.append( 162 | "EXEC_DEFAULT", 163 | directory=filepath, 164 | filename=name, 165 | link=link, 166 | use_recursive=True, 167 | ) 168 | return bpy.data.node_groups[name] 169 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # databpy 2 | 3 | 4 | [![codecov](https://codecov.io/gh/BradyAJohnston/databpy/graph/badge.svg?token=KFuu67hzAz)](https://codecov.io/gh/BradyAJohnston/databpy) 5 | [![pypi](https://img.shields.io/pypi/v/databpy.png)](https://pypi.org/project/databpy/) 6 | ![tests](https://github.com/bradyajohnston/databpy/actions/workflows/tests.yml/badge.svg) 7 | ![deployment](https://github.com/bradyajohnston/databpy/actions/workflows/ci-cd.yml/badge.svg) 8 | 9 | ![](docs/img/notdavid.png) 10 | 11 | A set of data-oriented wrappers around the python API of Blender. 12 | 13 | This was originally used internally inside of [Molecular 14 | Nodes](https://github.com/BradyAJohnston/MolecularNodes) but was broken 15 | out into a separate python module for re-use in other projects. 16 | 17 | ## Installation 18 | 19 | Available on PyPI, install with pip: 20 | 21 | ``` bash 22 | pip install databpy 23 | ``` 24 | 25 | > [!CAUTION] 26 | > 27 | > `bpy` (Blender as a python module) is listed as an optional 28 | > dependency, so that if you install `databpy` inside of Blender you 29 | > won’t install a redundant version of `bpy`. If you are using this 30 | > outside of Blender, you will need to specifically request `bpy` with 31 | > one of the following methods: 32 | > 33 | > ``` bash 34 | > # install with bpy dependency 35 | > pip install 'databpy[bpy]' 36 | > 37 | > # install both packages 38 | > pip install databpy bpy 39 | > 40 | > # install with all optional dependencies 41 | > pip install 'databpy[all]' 42 | > ``` 43 | 44 | ## Usage 45 | 46 | The main use cases are to create objects, and to store and retrieve attributes 47 | from them. The functions are named after the `Store Named Attribute` and 48 | `Named Attribute` nodes in Geometry Nodes. 49 | 50 | ``` python 51 | import databpy as db 52 | 53 | db.store_named_attribute() # store a named attribute on a mesh object 54 | db.named_attribute() # retrieve a named attribute from a mesh object 55 | ``` 56 | 57 | Here’s an example of how to store an attribute: 58 | 59 | ``` python 60 | import numpy as np 61 | import databpy as db 62 | 63 | coords = np.array([ 64 | [0, 0, 0], 65 | [0, 5, 0], 66 | [5, 0, 0], 67 | [5, 5, 0] 68 | ]) 69 | 70 | obj = db.create_object(coords, name="Box") 71 | db.store_named_attribute(obj, np.array([10, 20, 31, 42]), "vals") 72 | ``` 73 | 74 | ![image](https://github.com/user-attachments/assets/2af6046a-8d73-4881-af63-8ed175fe2136.png) 75 | 76 | This module is mainly used to create mesh objects and work with their 77 | attributes.
It is built to store and retrieve data using NumPy arrays: 78 | 79 | ``` python 80 | import numpy as np 81 | import databpy as db 82 | np.random.seed(6) 83 | 84 | # Create a mesh object 85 | random_verts = np.random.rand(10, 3) 86 | 87 | obj = db.create_object(random_verts, name="RandomMesh") 88 | 89 | obj.name 90 | ``` 91 | 92 | 'RandomMesh' 93 | 94 | Access attributes from the object’s mesh. 95 | 96 | ``` python 97 | db.named_attribute(obj, 'position') 98 | ``` 99 | 100 | array([[0.8928602 , 0.3319798 , 0.8212291 ], 101 | [0.04169663, 0.10765668, 0.59505206], 102 | [0.52981734, 0.41880742, 0.33540785], 103 | [0.62251943, 0.43814144, 0.7358821 ], 104 | [0.5180364 , 0.5788586 , 0.6453551 ], 105 | [0.99022424, 0.8198582 , 0.41320094], 106 | [0.8762677 , 0.82375944, 0.05447451], 107 | [0.7186372 , 0.8021706 , 0.7364066 ], 108 | [0.7091318 , 0.5409368 , 0.12482417], 109 | [0.9576473 , 0.4032563 , 0.21695116]], dtype=float32) 110 | 111 | ### `BlenderObject` class (bob) 112 | 113 | This is a convenience class that wraps around the `bpy.types.Object`, 114 | and provides access to all of the useful functions. We can wrap an 115 | existing Object or return one when creating a new object. 116 | 117 | This just gives us access to the `named_attribute()` and 118 | `store_named_attribute()` functions on the object class, but also 119 | provides a more intuitive way to access the object’s attributes. 120 | 121 | ``` python 122 | bob = db.BlenderObject(obj) # wraps the existing object 123 | bob = db.create_bob(random_verts) # creates a new object and returns it already wrapped 124 | 125 | # these two are identical 126 | bob.named_attribute('position') 127 | bob.position 128 | ``` 129 | 130 | AttributeArray(name='position', object='NewObject', mesh='NewObject', domain=POINT, type=FLOAT_VECTOR, shape=(10, 3), dtype=float32) 131 | array([[0.8928602 , 0.3319798 , 0.8212291 ], 132 | [0.04169663, 0.10765668, 0.59505206], 133 | [0.52981734, 0.41880742, 0.33540785], 134 | [0.62251943, 0.43814144, 0.7358821 ], 135 | [0.5180364 , 0.5788586 , 0.6453551 ], 136 | [0.99022424, 0.8198582 , 0.41320094], 137 | [0.8762677 , 0.82375944, 0.05447451], 138 | [0.7186372 , 0.8021706 , 0.7364066 ], 139 | [0.7091318 , 0.5409368 , 0.12482417], 140 | [0.9576473 , 0.4032563 , 0.21695116]], dtype=float32) 141 | 142 | We can clear all of the data from the object and initialise a new mesh 143 | underneath: 144 | 145 | ``` python 146 | bob.new_from_pydata(np.random.randn(5, 3)) 147 | bob.position 148 | ``` 149 | 150 | AttributeArray(name='position', object='NewObject', mesh='NewObject', domain=POINT, type=FLOAT_VECTOR, shape=(5, 3), dtype=float32) 151 | array([[ 0.82465386, -1.1764315 , 1.5644896 ], 152 | [ 0.7127051 , -0.1810066 , 0.53419954], 153 | [-0.58661294, -1.4818532 , 0.8572476 ], 154 | [ 0.94309896, 0.11444143, -0.02195668], 155 | [-2.1271446 , -0.83440745, -0.4655083 ]], dtype=float32) 156 | 157 | ## Example with Polars data 158 | 159 | ``` python 160 | import polars as pl 161 | import databpy as db 162 | from io import StringIO 163 | 164 | json_file = StringIO(""" 165 | { 166 | "Dino": [ 167 | [55.3846, 97.1795, 0.0], 168 | [51.5385, 96.0256, 0.0] 169 | ], 170 | "Star": [ 171 | [58.2136, 91.8819, 0.0], 172 | [58.1961, 92.215, 0.0] 173 | ] 174 | } 175 | """) 176 | 177 | df = pl.read_json(json_file) 178 | columns_to_explode = [col for col in df.columns if df[col].dtype == pl.List(pl.List)] 179 | df = df.explode(columns_to_explode) 180 | 181 | vertices = np.zeros((len(df), 3), dtype=np.float32) 182 | bob = 
db.create_bob(vertices, name="DinoStar") 183 | 184 | for col in df.columns: 185 | data = np.vstack(df.get_column(col).to_numpy()) 186 | bob.store_named_attribute(data, col) 187 | 188 | bob.named_attribute("Dino") 189 | ``` 190 | 191 | array([[55.3846, 97.1795, 0. ], 192 | [51.5385, 96.0256, 0. ]], dtype=float32) 193 | 194 | ``` python 195 | bob.named_attribute("Star") 196 | ``` 197 | 198 | array([[58.2136, 91.8819, 0. ], 199 | [58.1961, 92.215 , 0. ]], dtype=float32) 200 | -------------------------------------------------------------------------------- /tests/test_vdb.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import tempfile 3 | import os 4 | from pathlib import Path 5 | import bpy 6 | 7 | try: 8 | bpy.utils.expose_bundled_modules() 9 | import openvdb as vdb 10 | 11 | HAS_OPENVDB = True 12 | except Exception: 13 | HAS_OPENVDB = False 14 | 15 | from databpy.vdb import import_vdb 16 | from databpy.collection import create_collection 17 | 18 | 19 | def create_simple_vdb(filepath: Path) -> None: 20 | """Create a simple VDB file with a sphere for testing.""" 21 | if not HAS_OPENVDB: 22 | pytest.skip("OpenVDB not available") 23 | 24 | # Create a simple fog volume with actual density data 25 | # This approach is more likely to work with Blender's volume importer 26 | grid = vdb.FloatGrid() 27 | grid.name = "density" 28 | 29 | # Create a simple 3D density field 30 | accessor = grid.getAccessor() 31 | 32 | # Fill a small region with density values 33 | for i in range(-10, 11): 34 | for j in range(-10, 11): 35 | for k in range(-10, 11): 36 | # Create a simple spherical density falloff 37 | distance = (i * i + j * j + k * k) ** 0.5 38 | if distance <= 10.0: 39 | density = max(0.0, 1.0 - distance / 10.0) 40 | if density > 0.01: # Only set non-negligible values 41 | accessor.setValueOn((i, j, k), density) 42 | 43 | # Write the grid to file 44 | vdb.write(str(filepath), [grid]) 45 | 46 | 47 | @pytest.fixture 48 | def temp_vdb_file(): 49 | """Create a temporary VDB file for testing.""" 50 | with tempfile.NamedTemporaryFile(suffix=".vdb", delete=False) as tmp: 51 | filepath = Path(tmp.name) 52 | 53 | try: 54 | create_simple_vdb(filepath) 55 | yield filepath 56 | finally: 57 | if filepath.exists(): 58 | os.unlink(filepath) 59 | 60 | 61 | @pytest.fixture 62 | def clean_scene(): 63 | """Clean up the Blender scene before and after tests.""" 64 | # Clear existing objects 65 | bpy.ops.object.select_all(action="SELECT") 66 | bpy.ops.object.delete(use_global=False) 67 | 68 | # Clear collections except the default one 69 | for collection in bpy.data.collections: 70 | if collection.name != "Collection": 71 | bpy.data.collections.remove(collection) 72 | 73 | yield 74 | 75 | # Clean up after test 76 | bpy.ops.object.select_all(action="SELECT") 77 | bpy.ops.object.delete(use_global=False) 78 | 79 | 80 | @pytest.mark.skipif(not HAS_OPENVDB, reason="OpenVDB not available") 81 | class TestVDBImport: 82 | """Test cases for VDB import functionality.""" 83 | 84 | def test_import_vdb_basic(self, temp_vdb_file, clean_scene): 85 | """Test basic VDB import functionality.""" 86 | # Import the VDB file 87 | volume_obj = import_vdb(temp_vdb_file) 88 | 89 | # Check that an object was created 90 | assert volume_obj is not None 91 | assert isinstance(volume_obj, bpy.types.Object) 92 | 93 | # Check that it's a volume object 94 | assert volume_obj.type == "VOLUME" 95 | 96 | # Check that it has volume data 97 | assert volume_obj.data is not None 98 | assert 
isinstance(volume_obj.data, bpy.types.Volume) 99 | 100 | # Check that the object is in the scene 101 | assert volume_obj.name in bpy.data.objects 102 | 103 | def test_import_vdb_with_string_path(self, temp_vdb_file, clean_scene): 104 | """Test VDB import with string path instead of Path object.""" 105 | # Import using string path 106 | volume_obj = import_vdb(str(temp_vdb_file)) 107 | 108 | assert volume_obj is not None 109 | assert volume_obj.type == "VOLUME" 110 | 111 | def test_import_vdb_to_named_collection(self, temp_vdb_file, clean_scene): 112 | """Test importing VDB to a named collection.""" 113 | collection_name = "TestVDBCollection" 114 | 115 | # Import to named collection 116 | volume_obj = import_vdb(temp_vdb_file, collection=collection_name) 117 | 118 | # Check that the collection was created 119 | assert collection_name in bpy.data.collections 120 | target_collection = bpy.data.collections[collection_name] 121 | 122 | # Check that the object is in the correct collection 123 | assert volume_obj in target_collection.objects.values() 124 | 125 | # Check that it's not in the default collection 126 | default_collection = bpy.context.scene.collection 127 | assert volume_obj not in default_collection.objects.values() 128 | 129 | def test_import_vdb_to_existing_collection(self, temp_vdb_file, clean_scene): 130 | """Test importing VDB to an existing collection object.""" 131 | # Create a collection first 132 | test_collection = create_collection("ExistingCollection") 133 | 134 | # Import to the existing collection 135 | volume_obj = import_vdb(temp_vdb_file, collection=test_collection) 136 | 137 | # Check that the object is in the correct collection 138 | assert volume_obj in test_collection.objects.values() 139 | 140 | def test_import_vdb_default_collection(self, temp_vdb_file, clean_scene): 141 | """Test importing VDB with no collection specified (should use default).""" 142 | # Import without specifying collection 143 | volume_obj = import_vdb(temp_vdb_file, collection=None) 144 | 145 | # Should be in some collection (the default behavior of Blender) 146 | assert len(volume_obj.users_collection) > 0 147 | 148 | def test_import_nonexistent_vdb_file(self, clean_scene): 149 | """Test importing a non-existent VDB file.""" 150 | nonexistent_path = Path("/nonexistent/path/file.vdb") 151 | 152 | # Should raise an exception when trying to import non-existent file 153 | with pytest.raises(RuntimeError): 154 | import_vdb(nonexistent_path) 155 | 156 | def test_import_vdb_volume_data_properties(self, temp_vdb_file, clean_scene): 157 | """Test that the imported VDB has expected volume properties.""" 158 | volume_obj = import_vdb(temp_vdb_file) 159 | 160 | # Check volume data properties 161 | volume_data = volume_obj.data 162 | assert hasattr(volume_data, "grids") 163 | 164 | # Check that the volume data has the expected filepath 165 | assert volume_data.filepath == str(temp_vdb_file) 166 | 167 | # Check that volume data has expected attributes 168 | assert hasattr(volume_data, "display") 169 | assert hasattr(volume_data, "render") 170 | assert hasattr(volume_data, "materials") 171 | 172 | # Note: Due to Blender/OpenVDB version compatibility issues, 173 | # the grids may not always be loaded correctly in all environments. 174 | # This test focuses on verifying the volume object structure rather than 175 | # the specific grid content, which may vary by Blender/OpenVDB version. 
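        # As a rough sketch only (deliberately not asserted, since grid loading
        # varies between Blender/OpenVDB builds): in environments where the
        # grids do load, the "density" grid written by create_simple_vdb()
        # could be checked like so:
        #
        #     grid_names = [grid.name for grid in volume_data.grids]
        #     assert "density" in grid_names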
176 | 177 | 178 | @pytest.mark.skipif(HAS_OPENVDB, reason="Testing OpenVDB not available case") 179 | def test_import_vdb_without_openvdb(): 180 | """Test that we can still run tests when OpenVDB is not available.""" 181 | # This test just ensures our skip logic works correctly 182 | assert not HAS_OPENVDB 183 | -------------------------------------------------------------------------------- /tests/test_nodes.py: -------------------------------------------------------------------------------- 1 | import tempfile 2 | from pathlib import Path 3 | 4 | import bpy 5 | import pytest 6 | 7 | import databpy as db 8 | from databpy.nodes import NodeGroupCreationError, custom_string_iswitch 9 | 10 | 11 | def test_custom_string_iswitch_basic(): 12 | """Test basic creation of string index switch node group""" 13 | 14 | tree = custom_string_iswitch("TestSwitch", ["X", "Y", "Z"]) 15 | 16 | assert tree.name == "TestSwitch" 17 | assert isinstance(tree, bpy.types.NodeTree) 18 | 19 | # Test input/output sockets 20 | assert tree.interface.items_tree["attr_id"].in_out == "INPUT" 21 | assert tree.interface.items_tree["String"].in_out == "OUTPUT" 22 | 23 | # Test node presence and configuration 24 | iswitch = next(n for n in tree.nodes if n.type == "INDEX_SWITCH") 25 | assert iswitch.data_type == "STRING" 26 | assert len(iswitch.index_switch_items) == 3 27 | 28 | 29 | def test_custom_string_iswitch_values(): 30 | """Test that input values are correctly assigned""" 31 | values = ["Chain_A", "Chain_B", "Chain_C", "Chain_D"] 32 | tree = custom_string_iswitch("ValueTest", values, "chain") 33 | 34 | iswitch = next(n for n in tree.nodes if n.type == "INDEX_SWITCH") 35 | 36 | # Check all values are assigned correctly 37 | for i, val in enumerate(values): 38 | assert iswitch.inputs[i + 1].default_value == val 39 | 40 | 41 | def test_custom_string_iswitch_name_duplication(): 42 | """Test that a new node group with a numeric suffix is created when the name already exists""" 43 | tree1 = custom_string_iswitch("ReuseTest", ["A", "B"]) 44 | tree2 = custom_string_iswitch("ReuseTest", ["X", "Y"]) 45 | 46 | assert tree1.name == "ReuseTest" 47 | assert tree1.name + ".001" == tree2.name 48 | 49 | 50 | def test_custom_string_iswitch_minimal(): 51 | """Test creation with default values""" 52 | tree = custom_string_iswitch("MinimalTest", ["A", "B", "C"]) 53 | 54 | iswitch = next(n for n in tree.nodes if n.type == "INDEX_SWITCH") 55 | assert len(iswitch.index_switch_items) == 3 56 | assert iswitch.inputs[1].default_value == "A" 57 | assert iswitch.inputs[2].default_value == "B" 58 | assert iswitch.inputs[3].default_value == "C" 59 | 60 | 61 | def test_long_list(): 62 | """Test that a long list of values is correctly handled""" 63 | tree = custom_string_iswitch( 64 | "LongListTest", [str(x) for x in range(1_000)], "chain" 65 | ) 66 | for i, val in enumerate(range(1_000)): 67 | assert tree.nodes["Index Switch"].inputs[i + 1].default_value == str(val) 68 | 69 | 70 | def test_raises_error(): 71 | """Test that NodeGroupCreationError is raised when the node group cannot be created (non-string values)""" 72 | with pytest.raises(NodeGroupCreationError): 73 | custom_string_iswitch("TestSwitch", range(10)) 74 | 75 | 76 | def test_input_output(): 77 | tree = db.nodes.new_tree() 78 | tree.interface.new_socket("test_int", in_out="INPUT", socket_type="NodeSocketInt") 79 | tree.interface.new_socket( 80 | "test_float", in_out="INPUT", socket_type="NodeSocketFloat" 81 | ) 82 | tree.interface.new_socket("test_int1", in_out="INPUT", socket_type="NodeSocketInt") 83 | 84 | group1 = db.nodes.new_tree("Group1") 85 | 
group1.interface.new_socket( 86 | "test_float", in_out="INPUT", socket_type="NodeSocketFloat" 87 | ) 88 | group1.interface.new_socket("test_int", in_out="INPUT", socket_type="NodeSocketInt") 89 | group1.interface.new_socket( 90 | "test_int1", in_out="INPUT", socket_type="NodeSocketInt" 91 | ) 92 | 93 | group2 = db.nodes.new_tree("Group2") 94 | group2.interface.new_socket( 95 | "test_float", in_out="INPUT", socket_type="NodeSocketFloat" 96 | ) 97 | group2.interface.new_socket("test_int", in_out="INPUT", socket_type="NodeSocketInt") 98 | group2.interface.new_socket( 99 | "test_int2", in_out="INPUT", socket_type="NodeSocketInt" 100 | ) 101 | 102 | node = tree.nodes.new("GeometryNodeGroup") 103 | node.node_tree = group1 104 | tree.links.new( 105 | db.nodes.get_input(tree).outputs["Geometry"], 106 | node.inputs["Geometry"], 107 | ) 108 | tree.links.new( 109 | node.outputs["Geometry"], 110 | db.nodes.get_output(tree).inputs["Geometry"], 111 | ) 112 | for name in ["test_int", "test_float", "test_int1"]: 113 | tree.links.new( 114 | db.nodes.get_input(tree).outputs[name], 115 | node.inputs[name], 116 | ) 117 | 118 | assert "test_int1" in node.inputs 119 | assert node.inputs["test_int1"].is_linked 120 | 121 | with db.nodes.MaintainConnections(node): 122 | node.node_tree = group2 123 | 124 | assert node.inputs["Geometry"].is_linked 125 | assert node.inputs["test_float"].is_linked 126 | assert node.inputs["test_int"].is_linked 127 | assert "test_int1" not in node.inputs 128 | assert not node.inputs["test_int2"].is_linked 129 | 130 | 131 | def test_duplicate_prevention(): 132 | tree = db.nodes.new_tree() 133 | tree.interface.new_socket("test_int", in_out="INPUT", socket_type="NodeSocketInt") 134 | tree.interface.new_socket( 135 | "test_float", in_out="INPUT", socket_type="NodeSocketFloat" 136 | ) 137 | tree.interface.new_socket("test_int1", in_out="INPUT", socket_type="NodeSocketInt") 138 | 139 | group1 = db.nodes.new_tree("Group1") 140 | group1.interface.new_socket( 141 | "test_float", in_out="INPUT", socket_type="NodeSocketFloat" 142 | ) 143 | group1.interface.new_socket("test_int", in_out="INPUT", socket_type="NodeSocketInt") 144 | group1.interface.new_socket( 145 | "test_int1", in_out="INPUT", socket_type="NodeSocketInt" 146 | ) 147 | 148 | group2 = db.nodes.new_tree("Group2") 149 | group2.interface.new_socket( 150 | "test_float", in_out="INPUT", socket_type="NodeSocketFloat" 151 | ) 152 | group2.interface.new_socket("test_int", in_out="INPUT", socket_type="NodeSocketInt") 153 | group2.interface.new_socket( 154 | "test_int2", in_out="INPUT", socket_type="NodeSocketInt" 155 | ) 156 | 157 | node = tree.nodes.new("GeometryNodeGroup") 158 | node.node_tree = group1 159 | tree.links.new( 160 | db.nodes.get_input(tree).outputs["Geometry"], 161 | node.inputs["Geometry"], 162 | ) 163 | tree.links.new( 164 | node.outputs["Geometry"], 165 | db.nodes.get_output(tree).inputs["Geometry"], 166 | ) 167 | for name in ["test_int", "test_float", "test_int1"]: 168 | tree.links.new( 169 | db.nodes.get_input(tree).outputs[name], 170 | node.inputs[name], 171 | ) 172 | assert len(bpy.data.node_groups) == 3 173 | group1.copy() 174 | assert len(bpy.data.node_groups) == 4 175 | with db.nodes.DuplicatePrevention(timing=True): 176 | tree2 = tree.copy() 177 | for _ in range(10): 178 | group = tree2.nodes.new("GeometryNodeGroup") 179 | group.node_tree = group1.copy() 180 | 181 | assert len(bpy.data.node_groups) == 4 182 | 183 | 184 | @pytest.mark.parametrize("suffix", ["NodeTree", ""]) 185 | def test_append_from_blend(suffix): 
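    """Test appending a node group from a saved .blend file, with and without the "NodeTree" path suffix."""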
186 | # we have to use the test node group on an object/modifier, otherwise it will get 187 | # cleaned up by Blender when we save and exit the file 188 | tree = db.nodes.custom_string_iswitch("TestSwitch", ["A", "B", "C", "D"]) 189 | obj = bpy.data.objects["Cube"] 190 | obj.modifiers.new(type="NODES", name="Modifier").node_group = tree 191 | assert bpy.data.node_groups.get("TestSwitch") 192 | # save the blend file in a temp file 193 | with tempfile.NamedTemporaryFile(suffix=".blend") as f: 194 | # save the current working Blender file and reload a fresh one, which doesn't 195 | # contain any node groups 196 | bpy.ops.wm.save_as_mainfile(filepath=f.name) 197 | bpy.ops.wm.read_homefile("EXEC_DEFAULT") 198 | assert not bpy.data.node_groups.get("TestSwitch") 199 | 200 | # test appending the node group from the saved .blend file into the current one 201 | tree2 = db.nodes.append_from_blend("TestSwitch", Path(f.name) / suffix) 202 | assert tree2.name == "TestSwitch" 203 | assert len(tree2.nodes) == 3 204 | assert tree2.nodes["Index Switch"].inputs[1].default_value == "A" 205 | assert tree2.nodes["Index Switch"].inputs[2].default_value == "B" 206 | assert tree2.nodes["Index Switch"].inputs[3].default_value == "C" 207 | assert tree2.nodes["Index Switch"].inputs[4].default_value == "D" 208 | -------------------------------------------------------------------------------- /tests/test_array_print_methods.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import numpy as np 3 | from databpy import create_object 4 | from databpy.array import AttributeArray, Attribute 5 | 6 | 7 | class TestAttributeArrayPrintMethods: 8 | """Test the __str__ and __repr__ methods of AttributeArray.""" 9 | 10 | @pytest.fixture 11 | def blender_object(self): 12 | """Create a real BlenderObject for testing.""" 13 | obj = create_object(np.random.rand(10, 3).astype(np.float32), name="TestCube") 14 | return obj 15 | 16 | @pytest.fixture 17 | def example_attribute(self): 18 | """Create a real Attribute for testing.""" 19 | obj = create_object(np.random.rand(10, 3).astype(np.float32), name="TestCube") 20 | return Attribute(obj.data.attributes["position"]) 21 | 22 | @pytest.fixture 23 | def sample_array(self, blender_object): 24 | """Create a sample AttributeArray for testing.""" 25 | # Create the AttributeArray using the real blender object 26 | return AttributeArray(blender_object, "position") 27 | 28 | def test_str_method_basic_info(self, sample_array): 29 | """Test that __str__ includes basic attribute information.""" 30 | result = str(sample_array) 31 | 32 | # Check that all expected components are present 33 | assert "AttributeArray 'position'" in result 34 | assert "TestCube('TestCube')" in result 35 | assert "domain: POINT" in result 36 | 37 | def test_repr_method_detailed_info(self, sample_array): 38 | """Test that __repr__ includes detailed attribute information.""" 39 | result = repr(sample_array) 40 | 41 | # Check that all expected components are present 42 | assert "AttributeArray(name='position'" in result 43 | assert "object='TestCube', mesh='TestCube" in result 44 | assert "domain=POINT" in result 45 | assert "type=FLOAT_VECTOR" in result 46 | 47 | # Check that array representation is included 48 | assert "array(" in result 49 | 50 | def test_str_method_different_array_shapes(self, blender_object): 51 | """Test __str__ method with different array shapes.""" 52 | test_arrays = [ 53 | np.array([1.0, 2.0, 3.0], dtype=np.float32), # 1D 54 | np.array([[1.0, 2.0, 
3.0]], dtype=np.float32), # 2D single row 55 | np.array([[1.0], [2.0], [3.0]], dtype=np.float32), # 2D single column 56 | np.array( 57 | [ 58 | [1.0, 2.0, 3.0, 4.0], 59 | [5.0, 6.0, 7.0, 8.0], 60 | [9.0, 10.0, 11.0, 12.0], 61 | [13.0, 14.0, 15.0, 16.0], 62 | ], 63 | dtype=np.float32, 64 | ), # 4x4 matrix 65 | ] 66 | 67 | for i, test_array in enumerate(test_arrays): 68 | # Create a new object for each test array with appropriate vertex count 69 | if test_array.ndim == 1: 70 | vertices = np.random.rand(len(test_array), 3).astype(np.float32) 71 | elif test_array.ndim == 2: 72 | vertices = np.random.rand(test_array.shape[0], 3).astype(np.float32) 73 | else: 74 | vertices = np.random.rand(test_array.shape[0], 3).astype(np.float32) 75 | 76 | obj = create_object(vertices, name=f"TestShape{i}") 77 | 78 | # Store the test array as a custom attribute 79 | from databpy.attribute import store_named_attribute 80 | 81 | store_named_attribute( 82 | obj, 83 | test_array, 84 | f"test_attr_{i}", 85 | ) 86 | 87 | # Create AttributeArray for the custom attribute 88 | arr = AttributeArray(obj, f"test_attr_{i}") 89 | 90 | result = str(arr) 91 | # Check that the attribute name appears in the string representation 92 | assert f"test_attr_{i}" in result 93 | 94 | def test_print_integration(self, sample_array, capsys): 95 | """Test that print() works correctly with the __str__ method.""" 96 | print(sample_array) 97 | captured = capsys.readouterr() 98 | 99 | assert "AttributeArray 'position'" in captured.out 100 | assert "TestCube('TestCube')" in captured.out 101 | assert "domain: POINT" in captured.out 102 | 103 | def test_str_method_with_large_array(self): 104 | """Test __str__ method with a large array to ensure it handles numpy's truncation.""" 105 | # Create a large array that numpy will truncate 106 | large_vertices = np.random.rand(1000, 3).astype(np.float32) 107 | obj = create_object(large_vertices, name="LargeTestObject") 108 | 109 | # Get the position attribute array 110 | arr = AttributeArray(obj, "position") 111 | 112 | result = str(arr) 113 | assert "shape: (1000, 3)" in result 114 | 115 | # Should contain numpy's truncation indicator for large arrays 116 | assert "..." in result or len(result.split("\n")) > 1 117 | 118 | def test_repr_always_shows_dtype(self, blender_object): 119 | """Test that __repr__ always explicitly shows dtype for cross-platform consistency. 120 | 121 | This is important for snapshot testing across platforms. On Windows, np.array_repr() 122 | may omit dtype when it's the platform default (e.g., int32), while on macOS/Linux 123 | it's always shown. We ensure dtype is always explicit in our repr output. 
124 | """ 125 | from databpy.attribute import store_named_attribute 126 | 127 | # Get the number of points on the blender object 128 | num_points = len(blender_object.data.vertices) 129 | 130 | # Test with int32 (the type that causes platform-dependent repr on Windows) 131 | int_data = np.ones(num_points, dtype=np.int32) 132 | store_named_attribute(blender_object, int_data, "test_int32") 133 | int_arr = AttributeArray(blender_object, "test_int32") 134 | 135 | int_repr = repr(int_arr) 136 | # Should contain 'dtype=int32' or 'dtype=int32)' somewhere in output 137 | assert "dtype=int32" in int_repr, ( 138 | f"int32 dtype not explicitly shown in repr: {int_repr}" 139 | ) 140 | 141 | # Test with float32 as well 142 | float_data = np.ones(num_points, dtype=np.float32) 143 | store_named_attribute(blender_object, float_data, "test_float32") 144 | float_arr = AttributeArray(blender_object, "test_float32") 145 | 146 | float_repr = repr(float_arr) 147 | assert "dtype=float32" in float_repr, ( 148 | f"float32 dtype not explicitly shown in repr: {float_repr}" 149 | ) 150 | 151 | 152 | class TestColumnSlicePrintMethods: 153 | """Test print behavior of column slice views.""" 154 | 155 | @pytest.fixture 156 | def parent_array_and_data(self): 157 | """Create a real parent AttributeArray with known data.""" 158 | # Create object with known vertex positions 159 | parent_data = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]], dtype=np.float32) 160 | obj = create_object(parent_data, name="ColumnTestObject") 161 | 162 | # Get the position attribute as an AttributeArray 163 | parent_array = AttributeArray(obj, "position") 164 | 165 | return parent_array, parent_data 166 | 167 | def test_column_slice_str_delegation(self, parent_array_and_data): 168 | """Test that a column slice delegates to numpy string formatting.""" 169 | parent_array, parent_data = parent_array_and_data 170 | 171 | # Create column view 172 | col_view = parent_array[:, 1] # Second column 173 | 174 | # The string representation should come from the column data 175 | expected_column = parent_data[:, 1] # [2.0, 5.0] 176 | 177 | # Test that we can convert to string (should use numpy's default) 178 | result = str(np.asarray(col_view)) 179 | expected = str(expected_column) 180 | 181 | assert result == expected 182 | 183 | def test_column_slice_array_conversion(self, parent_array_and_data): 184 | """Test that a column slice converts to array properly for printing.""" 185 | parent_array, parent_data = parent_array_and_data 186 | 187 | col_view = parent_array[:, 0] # First column 188 | 189 | # Convert to array and check it matches expected column 190 | as_array = np.asarray(col_view) 191 | expected_column = parent_data[:, 0] # [1.0, 4.0] 192 | 193 | np.testing.assert_array_equal(as_array, expected_column) 194 | -------------------------------------------------------------------------------- /tests/test_collection.py: -------------------------------------------------------------------------------- 1 | import databpy as db 2 | import bpy 3 | import pytest 4 | 5 | 6 | def test_collection_missing(): 7 | db.collection.create_collection("Collection") 8 | bpy.data.collections.remove(bpy.data.collections["Collection"]) 9 | with pytest.raises(KeyError): 10 | bpy.data.collections["Collection"] 11 | db.collection.create_collection("Collection") 12 | 13 | 14 | def test_collection_spam(): 15 | n_coll = len(list(bpy.data.collections.keys())) 16 | for _ in range(10): 17 | coll = db.collection.create_collection("Collection") 18 | assert coll.name == "Collection" 19 | 
db.create_bob() 20 | assert n_coll == len(list(bpy.data.collections.keys())) 21 | 22 | 23 | def test_collection(): 24 | assert "Collection" in bpy.data.collections 25 | coll = db.collection.create_collection("Example", parent="Collection") 26 | assert "Collection.001" not in bpy.data.collections 27 | assert coll.name == "Example" 28 | assert coll.name in bpy.data.collections 29 | assert coll.name in bpy.data.collections["Collection"].children 30 | 31 | 32 | def test_collection_parent(): 33 | db.collection.create_collection(".MN_data", parent="MolecularNodes") 34 | assert ".MN_data" not in bpy.context.scene.collection.children 35 | 36 | 37 | # New tests to improve coverage 38 | 39 | 40 | def test_get_collection_existing(): 41 | """Test _get_collection with existing collection.""" 42 | # Create a collection first 43 | test_coll = bpy.data.collections.new("TestExisting") 44 | bpy.context.scene.collection.children.link(test_coll) 45 | 46 | # Test that _get_collection returns the existing one 47 | retrieved = db.collection._get_collection("TestExisting") 48 | assert retrieved == test_coll 49 | assert retrieved.name == "TestExisting" 50 | 51 | 52 | def test_get_collection_new(): 53 | """Test _get_collection creates new collection when it doesn't exist.""" 54 | # Ensure collection doesn't exist 55 | if "TestNew" in bpy.data.collections: 56 | bpy.data.collections.remove(bpy.data.collections["TestNew"]) 57 | 58 | # Test that _get_collection creates new collection 59 | new_coll = db.collection._get_collection("TestNew") 60 | assert new_coll.name == "TestNew" 61 | assert "TestNew" in bpy.data.collections 62 | assert new_coll.name in bpy.context.scene.collection.children 63 | 64 | 65 | def test_create_collection_default_name(): 66 | """Test create_collection with default name.""" 67 | coll = db.collection.create_collection() 68 | assert coll.name == "NewCollection" 69 | assert "NewCollection" in bpy.data.collections 70 | 71 | 72 | def test_create_collection_with_collection_parent(): 73 | """Test create_collection with Collection object as parent.""" 74 | # Create parent collection 75 | parent_coll = db.collection.create_collection("ParentCollection") 76 | 77 | # Create child with Collection object as parent 78 | child_coll = db.collection.create_collection("ChildCollection", parent=parent_coll) 79 | 80 | assert child_coll.name == "ChildCollection" 81 | assert child_coll.name in parent_coll.children 82 | # Should be unlinked from scene root 83 | assert child_coll.name not in bpy.context.scene.collection.children 84 | 85 | 86 | def test_create_collection_with_string_parent(): 87 | """Test create_collection with string parent name.""" 88 | # Create parent collection 89 | db.collection.create_collection("StringParent") 90 | 91 | # Create child with string parent name 92 | child_coll = db.collection.create_collection("StringChild", parent="StringParent") 93 | 94 | assert child_coll.name == "StringChild" 95 | assert child_coll.name in bpy.data.collections["StringParent"].children 96 | # Should be unlinked from scene root 97 | assert child_coll.name not in bpy.context.scene.collection.children 98 | 99 | 100 | def test_create_collection_invalid_parent_type(): 101 | """Test create_collection raises TypeError for invalid parent type.""" 102 | with pytest.raises(TypeError, match="Parent must be a Collection, string or None"): 103 | db.collection.create_collection("TestCollection", parent=123) 104 | 105 | with pytest.raises(TypeError, match="Parent must be a Collection, string or None"): 106 | 
db.collection.create_collection("TestCollection", parent=[]) 107 | 108 | 109 | def test_create_collection_nonexistent_parent_string(): 110 | """Test create_collection with non-existent parent string creates parent.""" 111 | # Ensure parent doesn't exist 112 | if "NonExistentParent" in bpy.data.collections: 113 | bpy.data.collections.remove(bpy.data.collections["NonExistentParent"]) 114 | 115 | # This should create both parent and child 116 | child_coll = db.collection.create_collection( 117 | "ChildOfNonExistent", parent="NonExistentParent" 118 | ) 119 | 120 | assert "NonExistentParent" in bpy.data.collections 121 | assert child_coll.name == "ChildOfNonExistent" 122 | assert child_coll.name in bpy.data.collections["NonExistentParent"].children 123 | 124 | 125 | def test_create_collection_already_in_parent(): 126 | """Test create_collection when collection already exists in parent.""" 127 | # Create parent and child 128 | parent_coll = db.collection.create_collection("ExistingParent") 129 | child_coll = db.collection.create_collection("ExistingChild", parent=parent_coll) 130 | 131 | # Try to create the same child again with same parent 132 | child_coll2 = db.collection.create_collection("ExistingChild", parent=parent_coll) 133 | 134 | # Should return the same collection 135 | assert child_coll == child_coll2 136 | assert child_coll.name in parent_coll.children 137 | 138 | # Should only have one instance in parent 139 | child_count = sum(1 for c in parent_coll.children if c.name == "ExistingChild") 140 | assert child_count == 1 141 | 142 | 143 | def test_create_collection_move_from_scene_to_parent(): 144 | """Test that collection is moved from scene root to parent when parent is specified.""" 145 | # Create collection in scene root first 146 | coll = db.collection.create_collection("MoveTest") 147 | assert coll.name in bpy.context.scene.collection.children 148 | 149 | # Create parent 150 | parent_coll = db.collection.create_collection("MoveParent") 151 | 152 | # Move collection to parent 153 | moved_coll = db.collection.create_collection("MoveTest", parent=parent_coll) 154 | 155 | # Should be the same collection 156 | assert moved_coll == coll 157 | # Should be in parent 158 | assert moved_coll.name in parent_coll.children 159 | # Should be removed from scene root 160 | assert moved_coll.name not in bpy.context.scene.collection.children 161 | 162 | 163 | def test_create_collection_none_parent_explicit(): 164 | """Test create_collection with explicit None parent stays in scene.""" 165 | coll = db.collection.create_collection("ExplicitNone", parent=None) 166 | assert coll.name == "ExplicitNone" 167 | assert coll.name in bpy.context.scene.collection.children 168 | 169 | 170 | def test_create_collection_nested_hierarchy(): 171 | """Test creating nested collection hierarchy.""" 172 | # Create grandparent -> parent -> child hierarchy 173 | grandparent = db.collection.create_collection("Grandparent") 174 | parent = db.collection.create_collection("Parent", parent=grandparent) 175 | child = db.collection.create_collection("Child", parent=parent) 176 | 177 | # Verify hierarchy 178 | assert parent.name in grandparent.children 179 | assert child.name in parent.children 180 | assert grandparent.name in bpy.context.scene.collection.children 181 | assert parent.name not in bpy.context.scene.collection.children 182 | assert child.name not in bpy.context.scene.collection.children 183 | 184 | 185 | def test_create_collection_reuse_existing_with_different_parent(): 186 | """Test that existing collection can 
be moved to different parent.""" 187 | # Create initial setup 188 | parent1 = db.collection.create_collection("Parent1") 189 | parent2 = db.collection.create_collection("Parent2") 190 | child = db.collection.create_collection("MovableChild", parent=parent1) 191 | 192 | assert child.name in parent1.children 193 | assert child.name not in parent2.children 194 | 195 | # Move to different parent 196 | moved_child = db.collection.create_collection("MovableChild", parent=parent2) 197 | 198 | # Should be same collection object 199 | assert moved_child == child 200 | # Should be in new parent 201 | assert moved_child.name in parent2.children 202 | # Should be removed from old parent (this tests the unlinking logic) 203 | assert moved_child.name not in bpy.context.scene.collection.children 204 | -------------------------------------------------------------------------------- /databpy/array.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from .attribute import Attribute, store_named_attribute 3 | import bpy 4 | 5 | 6 | class AttributeArray(np.ndarray): 7 | """ 8 | A numpy array subclass that automatically syncs changes back to the Blender object. 9 | 10 | AttributeArray provides an ergonomic interface for working with Blender attributes 11 | using familiar numpy operations. It automatically handles bidirectional syncing: 12 | values are retrieved from Blender as a numpy array, operations are applied, 13 | and results are immediately stored back to Blender. 14 | 15 | This is the high-level interface for attribute manipulation. For low-level control, 16 | see the `Attribute` class which provides manual get/set operations without auto-sync. 17 | 18 | Performance Characteristics 19 | --------------------------- 20 | - Every modification syncs the ENTIRE attribute array to Blender, not just changed values 21 | - This is due to Blender's foreach_set API requiring the complete array 22 | - For large meshes (10K+ elements), consider batching multiple operations 23 | - Example: `pos[:, 2] += 1.0` writes all position data, not just Z coordinates 24 | 25 | Supported Types 26 | --------------- 27 | Works with all Blender attribute types: 28 | - Float types: FLOAT, FLOAT2, FLOAT_VECTOR, FLOAT_COLOR, FLOAT4X4, QUATERNION 29 | - Integer types: INT (int32), INT8, INT32_2D 30 | - Boolean: BOOLEAN 31 | - Color: BYTE_COLOR (uint8) 32 | 33 | Attributes 34 | ---------- 35 | _blender_object : bpy.types.Object 36 | Reference to the Blender object for syncing changes. 37 | _attribute : Attribute 38 | The underlying Attribute instance with type information. 39 | _attr_name : str 40 | Name of the attribute being wrapped. 41 | _root : AttributeArray 42 | Reference to the root array for handling views/slices correctly. 
43 | 44 | Examples 45 | -------- 46 | Basic usage: 47 | 48 | ```{python} 49 | import databpy as db 50 | import numpy as np 51 | 52 | obj = db.create_object(np.random.rand(10, 3), name="test_bob") 53 | pos = db.AttributeArray(obj, "position") 54 | pos[:, 2] += 1.0 # Automatically syncs to Blender 55 | ``` 56 | 57 | Using BlenderObject for convenience: 58 | 59 | ```{python} 60 | import databpy as db 61 | import numpy as np 62 | 63 | bob = db.create_bob(np.random.rand(10, 3), name="test_bob") 64 | print('Initial position:') 65 | print(bob.position) # Returns an AttributeArray 66 | ``` 67 | ```{python} 68 | bob.position[:, 2] += 1.0 69 | print('Updated position:') 70 | print(bob.position) 71 | ``` 72 | ```{python} 73 | # Convert to regular numpy array (no sync) 74 | print('As Array:') 75 | print(np.asarray(bob.position)) 76 | ``` 77 | 78 | Working with integer attributes: 79 | 80 | ```{python} 81 | import databpy as db 82 | import numpy as np 83 | 84 | obj = db.create_object(np.random.rand(10, 3)) 85 | # Store integer attribute 86 | ids = np.arange(10, dtype=np.int32) 87 | db.store_named_attribute(obj, ids, "id", atype="INT") 88 | 89 | # Access as AttributeArray 90 | id_array = db.AttributeArray(obj, "id") 91 | id_array += 100 # Automatically syncs as int32 92 | ``` 93 | 94 | See Also 95 | -------- 96 | Attribute : Low-level attribute interface without auto-sync 97 | store_named_attribute : Function to create/update attributes 98 | named_attribute : Function to read attribute data as regular arrays 99 | """ 100 | 101 | def __new__(cls, obj: bpy.types.Object, name: str) -> "AttributeArray": 102 | """Create a new AttributeArray that wraps a Blender attribute. 103 | 104 | Parameters 105 | ---------- 106 | obj : bpy.types.Object 107 | The Blender object containing the attribute. 108 | name : str 109 | The name of the attribute to wrap. 110 | 111 | Returns 112 | ------- 113 | AttributeArray 114 | A numpy array subclass that syncs changes back to Blender. 115 | """ 116 | attr = Attribute(obj.data.attributes[name]) 117 | arr = np.asarray(attr.as_array()).view(cls) 118 | arr._blender_object = obj 119 | arr._attribute = attr 120 | arr._attr_name = name 121 | # Track the root array so that views can sync the full data 122 | arr._root = arr 123 | return arr 124 | 125 | def __array_finalize__(self, obj): 126 | """Initialize attributes when array is created through operations.""" 127 | if obj is None: 128 | return 129 | 130 | self._blender_object = getattr(obj, "_blender_object", None) 131 | self._attribute = getattr(obj, "_attribute", None) 132 | self._attr_name = getattr(obj, "_attr_name", None) 133 | # Preserve reference to the root array for syncing 134 | self._root = getattr(obj, "_root", self) 135 | 136 | def __setitem__(self, key, value): 137 | """Set item and sync changes back to Blender.""" 138 | super().__setitem__(key, value) 139 | self._sync_to_blender() 140 | 141 | def _get_expected_components(self): 142 | """Get the expected number of components for the attribute type. 143 | 144 | Returns the total number of scalar values per element based on the 145 | attribute's dimensions. For example, FLOAT_VECTOR (3,) returns 3, 146 | FLOAT4X4 (4, 4) returns 16. 147 | """ 148 | dimensions = self._attribute.atype.value.dimensions 149 | return int(np.prod(dimensions)) 150 | 151 | def _ensure_correct_shape(self, data): 152 | """Ensure data has the correct shape for Blender. 
153 | 154 | Handles numpy views that may have lost dimension information and 155 | reshapes 1D arrays to match the expected attribute dimensions. 156 | """ 157 | expected_components = self._get_expected_components() 158 | expected_dims = self._attribute.atype.value.dimensions 159 | 160 | # Reshape 1D to correct dimensionality if needed 161 | if data.ndim == 1 and len(data) % expected_components == 0: 162 | n_elements = len(data) // expected_components 163 | if len(expected_dims) == 1: 164 | # 1D attribute (FLOAT, INT, BOOLEAN, etc.) 165 | return data 166 | else: 167 | # Multi-dimensional attribute 168 | return data.reshape(n_elements, *expected_dims) 169 | 170 | # Handle views that lost shape information (e.g., column slices) 171 | if data.ndim != len(self._attribute.shape): 172 | # Try to get the full array from the root 173 | full_array = np.asarray(self._root).view(np.ndarray).copy() 174 | if full_array.shape == self._attribute.shape: 175 | return full_array 176 | 177 | return data 178 | 179 | def _sync_to_blender(self): 180 | """Sync the current array data back to the Blender object. 181 | 182 | Note: This syncs the ENTIRE array to Blender on every modification, 183 | even for single element changes. This is due to Blender's foreach_set 184 | API requiring the full array. For large meshes, consider batching 185 | multiple modifications before triggering a sync. 186 | """ 187 | if self._blender_object is None: 188 | import warnings 189 | 190 | warnings.warn( 191 | "AttributeArray has lost its Blender object reference. " 192 | "Changes will not be synced back to Blender. This can happen " 193 | "if the array was created from a deleted object or copied incorrectly.", 194 | RuntimeWarning, 195 | stacklevel=3, 196 | ) 197 | return 198 | 199 | # Always sync using the root array to ensure full shape 200 | root = getattr(self, "_root", self) 201 | data_to_sync = np.asarray(root).view(np.ndarray) 202 | data_to_sync = self._ensure_correct_shape(data_to_sync) 203 | 204 | # Use the attribute's actual dtype instead of hardcoding float32 205 | expected_dtype = self._attribute.dtype 206 | if data_to_sync.dtype != expected_dtype: 207 | data_to_sync = data_to_sync.astype(expected_dtype) 208 | 209 | store_named_attribute( 210 | self._blender_object, 211 | data_to_sync, 212 | name=self._attr_name, 213 | atype=self._attribute.atype, 214 | domain=self._attribute.domain.name, 215 | ) 216 | 217 | def _inplace_operation_with_sync(self, operation, other): 218 | """Common method for in-place operations.""" 219 | result = operation(other) 220 | self._sync_to_blender() 221 | return result 222 | 223 | def __iadd__(self, other): 224 | """In-place addition with Blender syncing.""" 225 | return self._inplace_operation_with_sync(super().__iadd__, other) 226 | 227 | def __isub__(self, other): 228 | """In-place subtraction with Blender syncing.""" 229 | return self._inplace_operation_with_sync(super().__isub__, other) 230 | 231 | def __imul__(self, other): 232 | """In-place multiplication with Blender syncing.""" 233 | return self._inplace_operation_with_sync(super().__imul__, other) 234 | 235 | def __itruediv__(self, other): 236 | """In-place division with Blender syncing.""" 237 | return self._inplace_operation_with_sync(super().__itruediv__, other) 238 | 239 | def __str__(self): 240 | """String representation showing attribute info and array data.""" 241 | # Get basic info 242 | attr_name = getattr(self, "_attr_name", "Unknown") 243 | domain = getattr(self._attribute, "domain", None) 244 | domain_name = domain.name 
if domain else "Unknown" 245 | 246 | # Get object info 247 | obj_name = "Unknown" 248 | obj_type = "Unknown" 249 | if self._blender_object: 250 | obj_name = getattr(self._blender_object, "name", "Unknown") 251 | obj_type = getattr(self._blender_object.data, "name", "Unknown") 252 | 253 | # Get array info 254 | array_str = np.array_str(np.asarray(self).view(np.ndarray)) 255 | 256 | return ( 257 | f"AttributeArray '{attr_name}' from {obj_type}('{obj_name}')" 258 | f"(domain: {domain_name}, shape: {self.shape}, dtype: {self.dtype})\n" 259 | f"{array_str}" 260 | ) 261 | 262 | def __repr__(self): 263 | """Detailed representation for debugging.""" 264 | # Get basic info 265 | attr_name = getattr(self, "_attr_name", "Unknown") 266 | domain = getattr(self._attribute, "domain", None) 267 | domain_name = domain.name if domain else "Unknown" 268 | atype = getattr(self._attribute, "atype", "Unknown") 269 | 270 | # Get object info 271 | obj_name = "Unknown" 272 | obj_type = "Unknown" 273 | if self._blender_object: 274 | obj_name = getattr(self._blender_object, "name", "Unknown") 275 | obj_type = getattr(self._blender_object.data, "name", "Unknown") 276 | 277 | # Get array representation with explicit dtype for cross-platform consistency 278 | # np.array_repr() can omit dtype on Windows when it's the platform default 279 | arr = np.asarray(self).view(np.ndarray) 280 | # Use np.array_repr() but then ensure dtype is always appended 281 | array_repr = np.array_repr(arr) 282 | # If dtype isn't already in the repr, add it before the closing parenthesis 283 | if f"dtype={arr.dtype}" not in array_repr: 284 | array_repr = array_repr.rstrip(")") + f", dtype={arr.dtype})" 285 | 286 | return ( 287 | f"AttributeArray(name='{attr_name}', object='{obj_name}', mesh='{obj_type}', " 288 | f"domain={domain_name}, type={atype.value}, shape={self.shape}, dtype={self.dtype})\n" 289 | f"{array_repr}" 290 | ) 291 | -------------------------------------------------------------------------------- /docs/_extensions/machow/interlinks/interlinks.lua: -------------------------------------------------------------------------------- 1 | local inventory = {} -- sphinx inventories 2 | local autolink -- set in Meta 3 | local autolink_ignore_token = "qd-no-link" 4 | 5 | local function _debug_log(text, debug) 6 | if debug then 7 | quarto.log.warning(text) 8 | end 9 | end 10 | 11 | local function read_inv_text(filename) 12 | -- read file 13 | local file = io.open(filename, "r") 14 | if file == nil then 15 | return nil 16 | end 17 | local str = file:read("a") 18 | file:close() 19 | 20 | 21 | local project = str:match("# Project: (%S+)") 22 | local version = str:match("# Version: (%S+)") 23 | 24 | local data = { project = project, version = version, items = {} } 25 | 26 | local ptn_data = 27 | "^" .. 28 | "(.-)%s+" .. -- name 29 | "([%S:]-):" .. -- domain 30 | "([%S]+)%s+" .. -- role 31 | "(%-?%d+)%s+" .. -- priority 32 | "(%S*)%s+" .. -- uri 33 | "(.-)\r?$" -- dispname 34 | 35 | 36 | -- Iterate through each line in the file content 37 | for line in str:gmatch("[^\r\n]+") do 38 | if not line:match("^#") then 39 | -- Match each line against the pattern 40 | local name, domain, role, priority, uri, dispName = line:match(ptn_data) 41 | 42 | -- if name is nil, raise an error 43 | if name == nil then 44 | error("Error parsing line: " .. 
line) 45 | end 46 | 47 | data.items[#data.items + 1] = { 48 | name = name, 49 | domain = domain, 50 | role = role, 51 | priority = priority, 52 | uri = uri, 53 | dispName = dispName 54 | } 55 | end 56 | end 57 | return data 58 | end 59 | 60 | local function read_json(filename) 61 | local file = io.open(filename, "r") 62 | if file == nil then 63 | return nil 64 | end 65 | local str = file:read("a") 66 | file:close() 67 | 68 | local decoded = quarto.json.decode(str) 69 | return decoded 70 | end 71 | 72 | local function read_inv_text_or_json(base_name) 73 | local file = io.open(base_name .. ".txt", "r") 74 | if file then 75 | -- TODO: refactors so we don't just close the file immediately 76 | io.close(file) 77 | json = read_inv_text(base_name .. ".txt") 78 | else 79 | json = read_json(base_name .. ".json") 80 | end 81 | 82 | return json 83 | end 84 | 85 | -- each inventory has entries: project, version, items 86 | local function lookup(search_object, debug) 87 | local results = {} 88 | for _, inv in ipairs(inventory) do 89 | for _, item in ipairs(inv.items) do 90 | -- e.g. :external+:::`` 91 | if item.inv_name and item.inv_name ~= search_object.inv_name then 92 | goto continue 93 | end 94 | 95 | if item.name ~= search_object.name then 96 | goto continue 97 | end 98 | 99 | if search_object.role and item.role ~= search_object.role then 100 | goto continue 101 | end 102 | 103 | if search_object.domain and item.domain ~= search_object.domain then 104 | goto continue 105 | else 106 | if search_object.domain or item.domain == "py" then 107 | table.insert(results, item) 108 | end 109 | 110 | goto continue 111 | end 112 | 113 | ::continue:: 114 | end 115 | end 116 | 117 | if #results == 1 then 118 | return results[1] 119 | end 120 | if #results > 1 then 121 | _debug_log("Found multiple matches for " .. search_object.name .. ", using the first match.", debug) 122 | return results[1] 123 | end 124 | if #results == 0 then 125 | _debug_log("Found no matches for object:\n", debug) 126 | _debug_log(search_object, debug) 127 | end 128 | 129 | return nil 130 | end 131 | 132 | local function mysplit(inputstr, sep) 133 | if sep == nil then 134 | sep = "%s" 135 | end 136 | local t = {} 137 | for str in string.gmatch(inputstr, "([^" .. sep .. "]+)") do 138 | table.insert(t, str) 139 | end 140 | return t 141 | end 142 | 143 | local function normalize_role(role) 144 | if role == "func" then 145 | return "function" 146 | end 147 | return role 148 | end 149 | 150 | local function copy_replace(original, key, new_value) 151 | -- First create a copy of the table 152 | local copy = {} 153 | for k, v in pairs(original) do 154 | copy[k] = v 155 | end 156 | 157 | -- Then replace the specific value 158 | copy[key] = new_value 159 | 160 | return copy 161 | end 162 | 163 | local function contains(list, value) 164 | -- check if list contains a value 165 | for i, v in ipairs(list) do 166 | if v == value then 167 | return true 168 | end 169 | end 170 | return false 171 | end 172 | 173 | local function flatten_alias_list(list) 174 | -- flatten a list of lists into a single list, 175 | -- where each entry has the form {key, subvalue}} 176 | -- e.g. 
177 | -- input: {key1 = {subval1, subval2}, key2 = subval3} 178 | -- output: {{key1, subval1}, {key1, subval2}, {key2, subval3}} 179 | local flat = {} 180 | for key, sublist in pairs(list) do 181 | if type(sublist) == "table" then 182 | for _, subvalue in ipairs(sublist) do 183 | table.insert(flat, { key, subvalue }) 184 | end 185 | else 186 | table.insert(flat, { key, sublist }) 187 | end 188 | end 189 | return flat 190 | end 191 | 192 | local function prepend_aliases(flat_aliases) 193 | -- if str up to first period starts with an alias, then 194 | -- replace it with the full name. 195 | -- For example, suppose we have the alias quartodoc -> qd 196 | -- e.g. qd.Auto -> quartodoc.Auto 197 | -- e.g. qda.Auto -> qda.Auto 198 | 199 | local new_inv = {} 200 | new_inv["project"] = "aliases" 201 | new_inv["version"] = "0.0.9999" -- I have not begun to think about version... 202 | new_inv["items"] = {} 203 | 204 | for _, name_pair in pairs(flat_aliases) do 205 | local full = name_pair[1] 206 | local alias = name_pair[2] 207 | for _, inv in ipairs(inventory) do 208 | for _, item in ipairs(inv.items) do 209 | if string.sub(item.name, 1, string.len(full) + 1) == (full .. ".") then 210 | -- replace full .. "." with alias .. "." 211 | local prefix 212 | if not alias or pandoc.utils.stringify(alias) == "" then 213 | prefix = "" 214 | else 215 | -- TODO: ensure alias doesn't end with period 216 | prefix = pandoc.utils.stringify(alias) .. "." 217 | end 218 | local new_name = prefix .. string.sub(item.name, string.len(full) + 2) 219 | table.insert(new_inv.items, copy_replace(item, "name", new_name)) 220 | end 221 | end 222 | end 223 | end 224 | table.insert(inventory, new_inv) 225 | end 226 | 227 | local function build_search_object(str, debug) 228 | local starts_with_colon = str:sub(1, 1) == ":" 229 | local search = {} 230 | if starts_with_colon then 231 | local t = mysplit(str, ":") 232 | if #t == 2 then 233 | -- e.g. :py:func:`my_func` 234 | search.role = normalize_role(t[1]) 235 | search.name = t[2]:match("%%60(.*)%%60") 236 | elseif #t == 3 then 237 | -- e.g. :py:func:`my_func` 238 | search.domain = t[1] 239 | search.role = normalize_role(t[2]) 240 | search.name = t[3]:match("%%60(.*)%%60") 241 | elseif #t == 4 then 242 | -- e.g. :ext+inv:py:func:`my_func` 243 | search.external = true 244 | 245 | search.inv_name = t[1]:match("external%+(.*)") 246 | search.domain = t[2] 247 | search.role = normalize_role(t[3]) 248 | search.name = t[4]:match("%%60(.*)%%60") 249 | else 250 | _debug_log("couldn't parse this link: " .. str, debug) 251 | return {} 252 | end 253 | else 254 | search.name = str:match("%%60(.*)%%60") 255 | end 256 | 257 | if search.name == nil then 258 | _debug_log("couldn't parse this link: " .. str, debug) 259 | return {} 260 | end 261 | 262 | if search.name:sub(1, 1) == "~" then 263 | search.shortened = true 264 | search.name = search.name:sub(2, -1) 265 | end 266 | return search 267 | end 268 | 269 | local function report_broken_link(link, search_object, replacement) 270 | -- TODO: how to unescape html elements like [? 
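-- Fallback for references that cannot be resolved against any inventory:
-- render the original link text as plain inline code so the document still
-- builds. Note that the `search_object` and `replacement` parameters are
-- currently unused; the only call site passes just the link and the search
-- table.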
271 | return pandoc.Code(pandoc.utils.stringify(link.content)) 272 | end 273 | 274 | function Link(link) 275 | -- do not process regular links ---- 276 | if not link.target:match("%%60") then 277 | return link 278 | end 279 | 280 | -- lookup item ---- 281 | local search = build_search_object(link.target) 282 | local item = lookup(search) 283 | 284 | -- determine replacement, used if no link text specified ---- 285 | local original_text = pandoc.utils.stringify(link.content) 286 | local replacement = search.name 287 | if search.shortened then 288 | local t = mysplit(search.name, ".") 289 | replacement = t[#t] 290 | end 291 | 292 | -- set link text ---- 293 | if original_text == "" and replacement ~= nil then 294 | link.content = pandoc.Code(replacement) 295 | end 296 | 297 | -- report broken links ---- 298 | if item == nil then 299 | return report_broken_link(link, search) 300 | end 301 | link.target = item.uri:gsub("%$$", search.name) 302 | 303 | 304 | return link 305 | end 306 | 307 | function Code(code) 308 | if (not autolink) or contains(code.classes, autolink_ignore_token) then 309 | return code 310 | end 311 | 312 | -- allow text for lookup to be simple function call 313 | -- and also support shortened syntax (~~ prefix) 314 | -- e.g. my_func() -> my_func 315 | -- e.g. a.b.call() -> a.b.call 316 | -- e.g. ~~my_func() -> my_func 317 | local text 318 | 319 | -- detect and remove shortening syntax (~~ prefix) 320 | local is_shortened = code.text:sub(1, 2) == "~~" 321 | local is_short_dot = code.text:sub(1, 3) == "~~." 322 | local unprefixed = code.text:gsub("^~~%.?", "") 323 | if unprefixed:match("%(%s*%)") then 324 | text = unprefixed:gsub("%(%s*%)", "") 325 | else 326 | text = unprefixed 327 | end 328 | 329 | 330 | -- return code.attr 331 | local search = build_search_object("%60" .. text .. "%60") 332 | local item = lookup(search) 333 | 334 | -- determine replacement, used if no link text specified ---- 335 | if item == nil then 336 | code.text = unprefixed 337 | return code 338 | end 339 | 340 | -- shorten text if shortening syntax used 341 | if is_shortened then 342 | -- keep text after last period (.) 343 | local split = mysplit(unprefixed, ".") 344 | if #split > 0 then 345 | local new_name = split[#split] 346 | if is_short_dot then 347 | -- if shortened with dot, keep the dot 348 | new_name = "." .. new_name 349 | end 350 | code.text = new_name 351 | else 352 | code.text = unprefixed 353 | end 354 | end 355 | 356 | 357 | return pandoc.Link(code, item.uri:gsub("%$$", search.name)) 358 | end 359 | 360 | local function fixup_json(json, prefix, attach) 361 | for _, item in ipairs(json.items) do 362 | item.uri = prefix .. item.uri 363 | end 364 | table.insert(inventory, json) 365 | end 366 | 367 | return { 368 | { 369 | Meta = function(meta) 370 | local json 371 | local prefix 372 | local aliases 373 | 374 | -- set globals from config 375 | if meta.interlinks and meta.interlinks.autolink then 376 | autolink = true 377 | else 378 | autolink = false 379 | end 380 | 381 | local aliases 382 | if meta.interlinks and meta.interlinks.aliases then 383 | aliases = meta.interlinks.aliases 384 | else 385 | aliases = {} 386 | end 387 | 388 | -- process sources 389 | if meta.interlinks and meta.interlinks.sources then 390 | for k, v in pairs(meta.interlinks.sources) do 391 | local base_name = quarto.project.offset .. "/_inv/" .. k .. 
"_objects" 392 | json = read_inv_text_or_json(base_name) 393 | prefix = pandoc.utils.stringify(v.url) 394 | if json ~= nil then 395 | fixup_json(json, prefix) 396 | end 397 | end 398 | end 399 | json = read_inv_text_or_json(quarto.project.offset .. "/objects") 400 | if json ~= nil then 401 | fixup_json(json, "/") 402 | end 403 | 404 | prepend_aliases(flatten_alias_list(aliases)) 405 | end 406 | }, 407 | { 408 | Link = Link, 409 | Code = Code 410 | } 411 | } 412 | -------------------------------------------------------------------------------- /tests/test_geometry_types.py: -------------------------------------------------------------------------------- 1 | """Tests for Curves and PointCloud object creation and manipulation.""" 2 | 3 | import numpy as np 4 | import pytest 5 | import bpy 6 | import databpy as db 7 | 8 | 9 | class TestMeshCreation: 10 | """Tests for mesh object creation with new API.""" 11 | 12 | def test_create_mesh_object(self): 13 | """Test create_mesh_object() creates a valid mesh.""" 14 | vertices = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]]) 15 | faces = [(0, 1, 2, 3)] 16 | obj = db.create_mesh_object(vertices, faces=faces, name="TestMesh") 17 | 18 | assert isinstance(obj.data, bpy.types.Mesh) 19 | assert len(obj.data.vertices) == 4 20 | assert obj.name == "TestMesh" 21 | 22 | def test_create_mesh_bob(self): 23 | """Test create_mesh_bob() creates a valid BlenderObject.""" 24 | vertices = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0]]) 25 | bob = db.BlenderObject.from_mesh(vertices, name="TestMeshBob") 26 | 27 | assert isinstance(bob, db.BlenderObject) 28 | assert isinstance(bob.data, bpy.types.Mesh) 29 | assert len(bob) == 3 30 | assert bob.name == "TestMeshBob" 31 | 32 | def test_create_mesh_empty(self): 33 | """Test creating an empty mesh.""" 34 | obj = db.create_mesh_object(name="EmptyMesh") 35 | 36 | assert isinstance(obj.data, bpy.types.Mesh) 37 | assert len(obj.data.vertices) == 0 38 | 39 | def test_create_mesh_with_edges(self): 40 | """Test creating a mesh with edges.""" 41 | vertices = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0]]) 42 | edges = [(0, 1), (1, 2)] 43 | obj = db.create_mesh_object(vertices, edges=edges, name="EdgeMesh") 44 | 45 | assert len(obj.data.vertices) == 3 46 | assert len(obj.data.edges) == 2 47 | 48 | 49 | class TestCurvesCreation: 50 | """Tests for curves object creation.""" 51 | 52 | def test_create_curves_object(self): 53 | """Test create_curves_object() creates valid curves.""" 54 | positions = np.random.random((10, 3)).astype(np.float32) 55 | curve_sizes = [3, 4, 3] 56 | obj = db.create_curves_object(positions, curve_sizes, name="TestCurves") 57 | 58 | assert isinstance(obj.data, bpy.types.Curves) 59 | assert len(obj.data.curves) == 3 60 | assert len(obj.data.points) == 10 61 | assert obj.name == "TestCurves" 62 | 63 | def test_create_curves_bob(self): 64 | """Test create_curves_bob() creates a valid BlenderObject.""" 65 | positions = np.random.random((7, 3)).astype(np.float32) 66 | curve_sizes = [3, 4] 67 | bob = db.BlenderObject.from_curves(positions, curve_sizes, name="TestCurvesBob") 68 | 69 | assert isinstance(bob, db.BlenderObject) 70 | assert isinstance(bob.data, bpy.types.Curves) 71 | assert len(bob) == 7 72 | assert bob.name == "TestCurvesBob" 73 | 74 | def test_create_curves_positions_preserved(self): 75 | """Test that positions are correctly stored in curves.""" 76 | test_positions = np.array( 77 | [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]], dtype=np.float32 78 | ) 79 | curve_sizes = [3] 80 | bob = 
db.BlenderObject.from_curves(test_positions, curve_sizes, name="TestPos") 81 | 82 | retrieved_positions = bob.named_attribute("position") 83 | assert np.allclose(test_positions, retrieved_positions, atol=1e-6) 84 | 85 | def test_create_curves_empty(self): 86 | """Test creating an empty curves object.""" 87 | obj = db.create_curves_object(name="EmptyCurves") 88 | 89 | assert isinstance(obj.data, bpy.types.Curves) 90 | assert len(obj.data.curves) == 0 91 | assert len(obj.data.points) == 0 92 | 93 | def test_create_curves_multiple_curves(self): 94 | """Test creating multiple curves with different sizes.""" 95 | positions = np.random.random((15, 3)).astype(np.float32) 96 | curve_sizes = [2, 5, 3, 5] # 4 curves with different point counts 97 | obj = db.create_curves_object(positions, curve_sizes, name="MultiCurves") 98 | 99 | assert len(obj.data.curves) == 4 100 | assert len(obj.data.points) == 15 101 | 102 | def test_create_curves_size_mismatch_error(self): 103 | """Test that mismatched positions and curve_sizes raises ValueError.""" 104 | positions = np.random.random((10, 3)).astype(np.float32) 105 | curve_sizes = [3, 4, 2] # Sum is 9, not 10 106 | 107 | with pytest.raises(ValueError, match="Total points in curve_sizes"): 108 | db.create_curves_object(positions, curve_sizes) 109 | 110 | 111 | class TestPointCloudCreation: 112 | """Tests for point cloud object creation.""" 113 | 114 | def test_create_pointcloud_object(self): 115 | """Test create_pointcloud_object() creates valid point cloud.""" 116 | positions = np.random.random((50, 3)).astype(np.float32) 117 | obj = db.create_pointcloud_object(positions, name="TestPC") 118 | 119 | assert isinstance(obj.data, bpy.types.PointCloud) 120 | assert len(obj.data.points) == 50 121 | assert obj.name == "TestPC" 122 | 123 | def test_create_pointcloud_bob(self): 124 | """Test create_pointcloud_bob() creates a valid BlenderObject.""" 125 | positions = np.random.random((100, 3)).astype(np.float32) 126 | bob = db.BlenderObject.from_pointcloud(positions, name="TestPCBob") 127 | 128 | assert isinstance(bob, db.BlenderObject) 129 | assert isinstance(bob.data, bpy.types.PointCloud) 130 | assert len(bob) == 100 131 | assert bob.name == "TestPCBob" 132 | 133 | def test_create_pointcloud_positions_preserved(self): 134 | """Test that positions are correctly stored in point cloud.""" 135 | test_positions = np.array( 136 | [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]], 137 | dtype=np.float32, 138 | ) 139 | bob = db.BlenderObject.from_pointcloud(test_positions, name="TestPCPos") 140 | 141 | retrieved_positions = bob.named_attribute("position") 142 | assert np.allclose(test_positions, retrieved_positions, atol=1e-6) 143 | 144 | def test_create_pointcloud_empty(self): 145 | """Test creating an empty point cloud.""" 146 | obj = db.create_pointcloud_object(name="EmptyPC") 147 | 148 | assert isinstance(obj.data, bpy.types.PointCloud) 149 | assert len(obj.data.points) == 0 150 | 151 | def test_create_pointcloud_large(self): 152 | """Test creating a large point cloud.""" 153 | positions = np.random.random((1000, 3)).astype(np.float32) 154 | obj = db.create_pointcloud_object(positions, name="LargePC") 155 | 156 | assert len(obj.data.points) == 1000 157 | 158 | 159 | class TestBlenderObjectLen: 160 | """Tests for __len__ method across all geometry types.""" 161 | 162 | def test_len_mesh(self): 163 | """Test __len__ returns vertex count for mesh objects.""" 164 | vertices = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]]) 165 | bob = 
db.BlenderObject.from_mesh(vertices) 166 | 167 | assert len(bob) == 4 168 | assert len(bob) == len(bob.data.vertices) 169 | 170 | def test_len_curves(self): 171 | """Test __len__ returns point count for curves objects.""" 172 | positions = np.random.random((12, 3)).astype(np.float32) 173 | curve_sizes = [4, 5, 3] 174 | bob = db.BlenderObject.from_curves(positions, curve_sizes) 175 | 176 | assert len(bob) == 12 177 | assert len(bob) == len(bob.data.points) 178 | 179 | def test_len_pointcloud(self): 180 | """Test __len__ returns point count for point cloud objects.""" 181 | positions = np.random.random((75, 3)).astype(np.float32) 182 | bob = db.BlenderObject.from_pointcloud(positions) 183 | 184 | assert len(bob) == 75 185 | assert len(bob) == len(bob.data.points) 186 | 187 | def test_len_empty_objects(self): 188 | """Test __len__ returns 0 for empty objects.""" 189 | mesh_bob = db.BlenderObject.from_mesh() 190 | curves_bob = db.BlenderObject.from_curves() 191 | pc_bob = db.BlenderObject.from_pointcloud() 192 | 193 | assert len(mesh_bob) == 0 194 | assert len(curves_bob) == 0 195 | assert len(pc_bob) == 0 196 | 197 | def test_len_old_curve_type_raises_error(self): 198 | """Test __len__ raises TypeError for unsupported old Curve type.""" 199 | # Create old Curve type 200 | bpy.ops.curve.primitive_bezier_curve_add() 201 | old_curve_obj = bpy.context.active_object 202 | bob = db.BlenderObject(old_curve_obj) 203 | 204 | with pytest.raises(TypeError, match="not supported"): 205 | len(bob) 206 | 207 | 208 | class TestAttributeAccess: 209 | """Tests for attribute access across geometry types.""" 210 | 211 | def test_mesh_attribute_access(self): 212 | """Test attribute access on mesh objects.""" 213 | vertices = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0]]) 214 | bob = db.BlenderObject.from_mesh(vertices) 215 | 216 | positions = bob.named_attribute("position") 217 | assert positions.shape == (3, 3) 218 | assert np.allclose(positions, vertices) 219 | 220 | def test_curves_attribute_access(self): 221 | """Test attribute access on curves objects.""" 222 | positions = np.random.random((5, 3)).astype(np.float32) 223 | curve_sizes = [5] 224 | bob = db.BlenderObject.from_curves(positions, curve_sizes) 225 | 226 | retrieved = bob.named_attribute("position") 227 | assert retrieved.shape == (5, 3) 228 | assert np.allclose(positions, retrieved, atol=1e-6) 229 | 230 | def test_pointcloud_attribute_access(self): 231 | """Test attribute access on point cloud objects.""" 232 | positions = np.random.random((20, 3)).astype(np.float32) 233 | bob = db.BlenderObject.from_pointcloud(positions) 234 | 235 | retrieved = bob.named_attribute("position") 236 | assert retrieved.shape == (20, 3) 237 | assert np.allclose(positions, retrieved, atol=1e-6) 238 | 239 | def test_mesh_getitem_syntax(self): 240 | """Test dictionary-style attribute access on mesh.""" 241 | vertices = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0]]) 242 | bob = db.BlenderObject.from_mesh(vertices) 243 | 244 | positions = bob["position"] 245 | assert isinstance(positions, db.AttributeArray) 246 | assert positions.shape == (3, 3) 247 | 248 | def test_curves_getitem_syntax(self): 249 | """Test dictionary-style attribute access on curves.""" 250 | positions = np.random.random((8, 3)).astype(np.float32) 251 | curve_sizes = [3, 5] 252 | bob = db.BlenderObject.from_curves(positions, curve_sizes) 253 | 254 | retrieved = bob["position"] 255 | assert isinstance(retrieved, db.AttributeArray) 256 | assert retrieved.shape == (8, 3) 257 | 258 | def 
test_pointcloud_getitem_syntax(self): 259 | """Test dictionary-style attribute access on point cloud.""" 260 | positions = np.random.random((30, 3)).astype(np.float32) 261 | bob = db.BlenderObject.from_pointcloud(positions) 262 | 263 | retrieved = bob["position"] 264 | assert isinstance(retrieved, db.AttributeArray) 265 | assert retrieved.shape == (30, 3) 266 | 267 | 268 | class TestDeprecationWarnings: 269 | """Tests for deprecation warnings on old API.""" 270 | 271 | def test_vertices_property_deprecation(self): 272 | """Test BlenderObject.vertices shows deprecation warning.""" 273 | bob = db.BlenderObject.from_mesh([[0, 0, 0], [1, 0, 0]]) 274 | 275 | with pytest.warns(DeprecationWarning, match="vertices is deprecated"): 276 | vertices = bob.vertices 277 | 278 | assert len(vertices) == 2 279 | 280 | def test_edges_property_deprecation(self): 281 | """Test BlenderObject.edges shows deprecation warning.""" 282 | bob = db.BlenderObject.from_mesh([[0, 0, 0], [1, 0, 0]]) 283 | 284 | with pytest.warns(DeprecationWarning, match="edges is deprecated"): 285 | edges = bob.edges 286 | 287 | # Edges might be empty but property should work 288 | assert hasattr(edges, "__len__") 289 | 290 | def test_vertices_on_non_mesh_raises_error(self): 291 | """Test vertices property raises error on non-mesh objects.""" 292 | bob = db.BlenderObject.from_curves( 293 | np.random.random((5, 3)).astype(np.float32), [5] 294 | ) 295 | 296 | with pytest.warns(DeprecationWarning): 297 | with pytest.raises(AttributeError, match="only works with Mesh"): 298 | _ = bob.vertices 299 | 300 | def test_edges_on_non_mesh_raises_error(self): 301 | """Test edges property raises error on non-mesh objects.""" 302 | bob = db.BlenderObject.from_pointcloud( 303 | np.random.random((10, 3)).astype(np.float32) 304 | ) 305 | 306 | with pytest.warns(DeprecationWarning): 307 | with pytest.raises(AttributeError, match="only works with Mesh"): 308 | _ = bob.edges 309 | 310 | 311 | class TestCollectionHandling: 312 | """Tests for collection assignment in creation functions.""" 313 | 314 | def test_mesh_in_custom_collection(self): 315 | """Test creating mesh in custom collection.""" 316 | col = db.create_collection("TestCollection") 317 | obj = db.create_mesh_object([[0, 0, 0]], collection=col) 318 | 319 | assert obj.name in col.objects 320 | 321 | def test_curves_in_custom_collection(self): 322 | """Test creating curves in custom collection.""" 323 | col = db.create_collection("CurvesCollection") 324 | obj = db.create_curves_object( 325 | np.random.random((3, 3)).astype(np.float32), [3], collection=col 326 | ) 327 | 328 | assert obj.name in col.objects 329 | 330 | def test_pointcloud_in_custom_collection(self): 331 | """Test creating point cloud in custom collection.""" 332 | col = db.create_collection("PCCollection") 333 | obj = db.create_pointcloud_object( 334 | np.random.random((5, 3)).astype(np.float32), collection=col 335 | ) 336 | 337 | assert obj.name in col.objects 338 | 339 | 340 | class TestEdgeCases: 341 | """Tests for edge cases and error handling.""" 342 | 343 | def test_curves_with_single_curve(self): 344 | """Test creating curves with a single curve.""" 345 | positions = np.random.random((10, 3)).astype(np.float32) 346 | obj = db.create_curves_object(positions, [10]) 347 | 348 | assert len(obj.data.curves) == 1 349 | assert len(obj.data.points) == 10 350 | 351 | def test_curves_with_single_point_curves(self): 352 | """Test creating multiple curves each with single point.""" 353 | positions = np.random.random((5, 
3)).astype(np.float32) 354 | obj = db.create_curves_object(positions, [1, 1, 1, 1, 1]) 355 | 356 | assert len(obj.data.curves) == 5 357 | assert len(obj.data.points) == 5 358 | 359 | def test_pointcloud_with_single_point(self): 360 | """Test creating point cloud with a single point.""" 361 | positions = np.array([[1.0, 2.0, 3.0]], dtype=np.float32) 362 | obj = db.create_pointcloud_object(positions) 363 | 364 | assert len(obj.data.points) == 1 365 | 366 | def test_mesh_with_2d_positions_raises_error(self): 367 | """Test that 2D positions raise an error.""" 368 | # Blender requires 3D coordinates, 2D should fail 369 | vertices = [[0, 0], [1, 0]] 370 | with pytest.raises(RuntimeError, match="internal error setting the array"): 371 | db.create_mesh_object(vertices) 372 | 373 | def test_pointcloud_from_list_input(self): 374 | """Test creating point cloud from list instead of numpy array.""" 375 | positions = [[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]] 376 | obj = db.create_pointcloud_object(positions) 377 | 378 | assert len(obj.data.points) == 2 379 | 380 | def test_curves_from_list_input(self): 381 | """Test creating curves from list instead of numpy array.""" 382 | positions = [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0]] 383 | curve_sizes = [3] 384 | obj = db.create_curves_object(positions, curve_sizes) 385 | 386 | assert len(obj.data.points) == 3 387 | -------------------------------------------------------------------------------- /tests/test_attribute.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import numpy as np 3 | import bpy 4 | import databpy as db 5 | import itertools 6 | 7 | 8 | def test_attribute_properties(): 9 | # Create test object with known vertices 10 | verts = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]]) 11 | obj = db.create_object(verts, name="TestObject") 12 | att = db.Attribute(obj.data.attributes["position"]) 13 | assert att.name == "position" 14 | assert att.type_name == "FLOAT_VECTOR" 15 | att = db.store_named_attribute( 16 | obj, np.random.rand(3, 3), "test_attr", domain="POINT" 17 | ) 18 | assert att.name == "test_attr" 19 | 20 | 21 | def test_errors(): 22 | # Create test object with known vertices 23 | verts = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]]) 24 | obj = db.create_object(verts, name="TestObject") 25 | db.Attribute(obj.data.attributes["position"]) 26 | with pytest.raises(ValueError): 27 | db.store_named_attribute( 28 | obj, np.random.rand(3, 3), "test_attr", domain="FAKE_DOMAIN" 29 | ) 30 | with pytest.raises(ValueError): 31 | db.store_named_attribute( 32 | obj, np.random.rand(3, 3), "test_attr", atype="FAKE_TYPE" 33 | ) 34 | with pytest.raises(db.NamedAttributeError): 35 | db.remove_named_attribute(obj, "nonexistent_attr") 36 | 37 | 38 | def test_named_attribute_position(): 39 | # Create test object with known vertices 40 | verts = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]]) 41 | obj = db.create_object(verts, name="TestObject") 42 | 43 | # Test retrieving position attribute 44 | result = db.named_attribute(obj, "position") 45 | np.testing.assert_array_equal(result, verts) 46 | 47 | 48 | def test_named_attribute_custom(): 49 | # Create test object 50 | verts = np.array([[0, 0, 0], [1, 1, 1]]) 51 | obj = db.create_object(verts, name="TestObject") 52 | 53 | # Store custom attribute 54 | test_data = np.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]]) 55 | db.store_named_attribute(obj, test_data, "test_attr") 56 | 57 | # Test retrieving custom attribute 58 | result = db.named_attribute(obj, "test_attr") 59
| np.testing.assert_array_equal(result, test_data) 60 | 61 | db.remove_named_attribute(obj, "test_attr") 62 | with pytest.raises(db.NamedAttributeError): 63 | db.named_attribute(obj, "test_attr") 64 | 65 | 66 | def test_named_attribute_nonexistent(): 67 | obj = db.create_object(np.array([[0, 0, 0]]), name="TestObject") 68 | 69 | with pytest.raises(AttributeError): 70 | db.named_attribute(obj, "nonexistent_attr") 71 | 72 | 73 | def test_attribute_mismatch(): 74 | # Create test object 75 | verts = np.array([[0, 0, 0], [1, 1, 1]]) 76 | obj = db.create_object(verts, name="TestObject") 77 | new_data = np.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]]) 78 | db.store_named_attribute(obj, new_data, "test_attr") 79 | 80 | test_data = np.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 1.0, 0.0]]) 81 | 82 | with pytest.raises(db.NamedAttributeError): 83 | db.store_named_attribute(obj, test_data, "test_attr") 84 | 85 | with pytest.raises(db.NamedAttributeError): 86 | db.store_named_attribute(obj, np.repeat(1, 3), "test_attr") 87 | 88 | 89 | def test_attribute_overwrite(): 90 | verts = np.array([[0, 0, 0], [1, 1, 1]]) 91 | obj = db.create_object(verts, name="TestObject") 92 | new_data = np.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]]) 93 | db.store_named_attribute(obj, new_data, "test_attr") 94 | # with overwrite = False, the attribute should not be overwritten and a new one will 95 | # be created with a new name instead 96 | new_values = np.repeat(1, 2) 97 | att = db.store_named_attribute(obj, new_values, "test_attr", overwrite=False) 98 | 99 | assert new_values.shape != db.named_attribute(obj, "test_attr").shape 100 | assert np.allclose(new_values, db.named_attribute(obj, att.name)) 101 | 102 | assert db.named_attribute(obj, "test_attr").shape == (2, 3) 103 | with pytest.raises(db.NamedAttributeError): 104 | db.store_named_attribute(obj, new_values, "test_attr") 105 | 106 | db.remove_named_attribute(obj, "test_attr") 107 | with pytest.raises(db.NamedAttributeError): 108 | db.named_attribute(obj, "test_attr") 109 | db.store_named_attribute(obj, new_values, "test_attr") 110 | assert np.allclose(db.named_attribute(obj, "test_attr"), new_values) 111 | assert db.named_attribute(obj, "test_attr").shape == (2,) 112 | 113 | 114 | def test_named_attribute_evaluate(): 115 | # Create test object with modifier 116 | obj = bpy.data.objects["Cube"] 117 | pos = db.named_attribute(obj, "position") 118 | 119 | # Add a simple modifier (e.g., subdivision surface) 120 | mod = obj.modifiers.new(name="Subsurf", type="SUBSURF") 121 | mod.levels = 1 122 | 123 | # Test with evaluate=True 124 | result = db.named_attribute(obj, "position", evaluate=True) 125 | assert len(result) > len(pos) # Should have more vertices after subdivision 126 | 127 | 128 | def test_obj_type_error(): 129 | with pytest.raises(TypeError): 130 | db.named_attribute(123, "position") 131 | 132 | with pytest.raises(TypeError): 133 | db.named_attribute(bpy.data.objects["Camera"], "position") 134 | 135 | 136 | def test_check_obj(): 137 | db.attribute._check_obj_attributes(bpy.data.objects["Cube"]) 138 | assert pytest.raises( 139 | TypeError, 140 | db.attribute._check_obj_attributes, 141 | bpy.data.objects["Camera"], 142 | ) 143 | assert pytest.raises( 144 | TypeError, 145 | db.attribute._check_obj_attributes, 146 | bpy.data.objects["Light"], 147 | ) 148 | assert pytest.raises( 149 | TypeError, 150 | db.attribute._check_is_mesh, 151 | bpy.data.objects["Light"], 152 | ) 153 | assert pytest.raises( 154 | TypeError, 155 | db.attribute._check_is_mesh, 156 | 
bpy.data.objects["Camera"], 157 | ) 158 | 159 | 160 | def test_guess_attribute_type(): 161 | # Create test object 162 | np.array([[0, 0, 0], [1, 1, 1]]) 163 | assert pytest.raises( 164 | ValueError, 165 | db.attribute.guess_atype_from_array, 166 | ["A", "B", "C"], 167 | ) 168 | 169 | 170 | def test_guess_atype(): 171 | """Test attribute type guessing from array shape and dtype.""" 172 | # Test float-based types 173 | assert db.attribute.AttributeTypes.FLOAT == db.attribute.guess_atype_from_array( 174 | np.zeros(10, dtype=np.float32) 175 | ) 176 | assert db.attribute.AttributeTypes.FLOAT == db.attribute.guess_atype_from_array( 177 | np.zeros(10, dtype=np.float64) 178 | ) 179 | assert db.attribute.AttributeTypes.FLOAT2 == db.attribute.guess_atype_from_array( 180 | np.zeros((10, 2), dtype=np.float32) 181 | ) 182 | assert ( 183 | db.attribute.AttributeTypes.FLOAT_VECTOR 184 | == db.attribute.guess_atype_from_array(np.zeros((10, 3))) 185 | ) 186 | assert ( 187 | db.attribute.AttributeTypes.FLOAT_COLOR 188 | == db.attribute.guess_atype_from_array(np.zeros((10, 4))) 189 | ) 190 | assert db.attribute.AttributeTypes.FLOAT4X4 == db.attribute.guess_atype_from_array( 191 | np.zeros((10, 4, 4)) 192 | ) 193 | 194 | # Test integer-based types 195 | assert db.attribute.AttributeTypes.INT == db.attribute.guess_atype_from_array( 196 | np.zeros(10, dtype=np.int32) 197 | ) 198 | assert db.attribute.AttributeTypes.INT == db.attribute.guess_atype_from_array( 199 | np.zeros(10, dtype=np.int64) 200 | ) 201 | assert db.attribute.AttributeTypes.INT8 == db.attribute.guess_atype_from_array( 202 | np.zeros(10, dtype=np.int8) 203 | ) 204 | assert db.attribute.AttributeTypes.INT8 == db.attribute.guess_atype_from_array( 205 | np.zeros(10, dtype=np.uint8) 206 | ) 207 | assert db.attribute.AttributeTypes.INT32_2D == db.attribute.guess_atype_from_array( 208 | np.zeros((10, 2), dtype=np.int32) 209 | ) 210 | 211 | # Test color types - distinguishes byte vs float based on dtype 212 | assert ( 213 | db.attribute.AttributeTypes.BYTE_COLOR 214 | == db.attribute.guess_atype_from_array(np.zeros((10, 4), dtype=np.uint8)) 215 | ) 216 | assert ( 217 | db.attribute.AttributeTypes.FLOAT_COLOR 218 | == db.attribute.guess_atype_from_array(np.zeros((10, 4), dtype=np.float32)) 219 | ) 220 | 221 | # Test boolean 222 | assert db.attribute.AttributeTypes.BOOLEAN == db.attribute.guess_atype_from_array( 223 | np.zeros(10, dtype=bool) 224 | ) 225 | 226 | 227 | def test_raise_error(): 228 | with pytest.raises(db.NamedAttributeError): 229 | db.store_named_attribute(bpy.data.objects["Cube"], np.zeros((10, 3)), "test") 230 | 231 | with pytest.raises(db.NamedAttributeError): 232 | db.remove_named_attribute(bpy.data.objects["Cube"], "testing") 233 | 234 | 235 | def test_named_attribute_name(): 236 | obj = bpy.data.objects["Cube"] 237 | valid_names = [] 238 | for i in range(150): 239 | name = "a" * i 240 | print(f"{i} letters, name: '{name}'") 241 | data = np.random.rand(len(obj.data.vertices), 3) 242 | if i >= 68 or i == 0: 243 | with pytest.raises(db.NamedAttributeError): 244 | db.store_named_attribute(obj, data, name) 245 | else: 246 | db.store_named_attribute(obj, data, name) 247 | assert name in db.list_attributes(obj) 248 | valid_names.append(name) 249 | 250 | # Verify all valid names were created 251 | attrs = db.list_attributes(obj) 252 | for name in valid_names: 253 | assert name in attrs 254 | 255 | 256 | @pytest.mark.parametrize( 257 | "evaluate, drop_hidden", itertools.product([True, False], repeat=2) 258 | ) 259 | def 
test_list_attributes(evaluate, drop_hidden): 260 | obj = bpy.data.objects["Cube"] 261 | 262 | # Get initial attributes - should include default cube attributes like position 263 | attrs_before = db.list_attributes(obj, evaluate=evaluate, drop_hidden=drop_hidden) 264 | assert "position" in attrs_before # position is always present on mesh objects 265 | assert isinstance(attrs_before, list) 266 | 267 | # 8 attribute names with different lengths and character sets 268 | names = [ 269 | "attr1", 270 | "longer_attribute_name", 271 | "a", 272 | "short", 273 | "medium_length", 274 | "x" * 50, 275 | "attr_with_special_chars!@#$%^&*()", 276 | "数字属性", 277 | ] 278 | 279 | # store a named attribute via geometry nodes as this should only show up 280 | # when evaluate=True 281 | tree = db.nodes.new_tree() 282 | n = tree.nodes.new("GeometryNodeStoreNamedAttribute") 283 | n.inputs["Name"].default_value = "testing" 284 | n.inputs["Value"].default_value = 0.5 285 | tree.links.new(tree.nodes["Group Input"].outputs["Geometry"], n.inputs["Geometry"]) 286 | tree.links.new(n.outputs["Geometry"], tree.nodes["Group Output"].inputs["Geometry"]) 287 | mod = obj.modifiers.new("db_nodes", "NODES") 288 | mod.node_group = tree 289 | 290 | for name in names: 291 | data = np.random.rand(len(obj.data.vertices), 3) 292 | db.store_named_attribute(obj, data, name, domain="POINT", atype="FLOAT_VECTOR") 293 | 294 | attributes = db.list_attributes(obj, evaluate=evaluate, drop_hidden=drop_hidden) 295 | 296 | # Verify BlenderObject wrapper gives same results 297 | assert attributes == db.BlenderObject(obj).list_attributes( 298 | evaluate=evaluate, drop_hidden=drop_hidden 299 | ) 300 | 301 | # Verify all our custom names are present 302 | for name in names: 303 | assert name in attributes, ( 304 | f"Expected attribute '{name}' not found in {attributes}" 305 | ) 306 | 307 | # Test geometry nodes attribute visibility based on evaluate flag 308 | if evaluate: 309 | assert "testing" in db.list_attributes(obj, evaluate=True) 310 | else: 311 | assert "testing" not in db.list_attributes(obj, evaluate=False) 312 | 313 | 314 | def test_ipython_list_attributes(): 315 | values = np.random.rand(8, 3) 316 | bob = db.BlenderObject.from_mesh(values) 317 | bob.store_named_attribute(np.random.randint(0, 128, 8), "test_attribute") 318 | assert bob.list_attributes() == bob._ipython_key_completions_() 319 | 320 | 321 | def test_str_access_attribute(): 322 | # Create test object with known vertices 323 | verts = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]]) 324 | bob = db.create_bob(verts) 325 | 326 | bob.store_named_attribute(np.array(range(9)).reshape((3, 3)), "test_name") 327 | assert isinstance(bob["test_name"], db.array.AttributeArray) 328 | assert bob["test_name"][0][0] == 0.0 329 | bob["test_name"][0] = 1 330 | assert bob["test_name"][0][0] == 1 331 | 332 | with pytest.raises(ValueError): 333 | bob[0] # type: ignore 334 | 335 | values = np.zeros(3, dtype=int) 336 | 337 | bob["another_name"] = values 338 | np.testing.assert_array_equal(bob["another_name"], values) 339 | 340 | bob["another_name"] = values + 10 341 | assert np.array_equal(bob["another_name"], values + 10) 342 | 343 | 344 | def test_int32_dtype(): 345 | """Test that INT attributes return int32 dtype, not int64.""" 346 | verts = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]]) 347 | obj = db.create_object(verts, name="TestIntDtype") 348 | 349 | # Store an INT attribute 350 | int_data = np.array([1, 2, 3], dtype=np.int32) 351 | db.store_named_attribute(obj, int_data, "test_int", atype="INT") 352
| 353 | # Retrieve and verify it's int32, not int64 354 | result = db.named_attribute(obj, "test_int") 355 | assert result.dtype == np.int32, f"Expected int32, got {result.dtype}" 356 | np.testing.assert_array_equal(result, int_data) 357 | 358 | 359 | def test_int32_2d_dtype(): 360 | """Test that INT32_2D attributes return int32 dtype.""" 361 | verts = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]]) 362 | obj = db.create_object(verts, name="TestInt2DDtype") 363 | 364 | # Store an INT32_2D attribute 365 | int_data = np.array([[1, 2], [3, 4], [5, 6]], dtype=np.int32) 366 | db.store_named_attribute(obj, int_data, "test_int2d", atype="INT32_2D") 367 | 368 | # Retrieve and verify it's int32 369 | result = db.named_attribute(obj, "test_int2d") 370 | assert result.dtype == np.int32, f"Expected int32, got {result.dtype}" 371 | np.testing.assert_array_equal(result, int_data) 372 | 373 | 374 | def test_int8_dtype(): 375 | """Test that INT8 attributes return int8 dtype.""" 376 | verts = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]]) 377 | obj = db.create_object(verts, name="TestInt8Dtype") 378 | 379 | # Store an INT8 attribute 380 | int_data = np.array([1, 2, 3], dtype=np.int8) 381 | db.store_named_attribute(obj, int_data, "test_int8", atype="INT8") 382 | 383 | # Retrieve and verify it's int8 384 | result = db.named_attribute(obj, "test_int8") 385 | assert result.dtype == np.int8, f"Expected int8, got {result.dtype}" 386 | np.testing.assert_array_equal(result, int_data) 387 | 388 | 389 | def test_byte_color_dtype(): 390 | """Test that BYTE_COLOR attributes return uint8 dtype.""" 391 | verts = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]]) 392 | obj = db.create_object(verts, name="TestByteColorDtype") 393 | 394 | # Store a BYTE_COLOR attribute (RGBA values as uint8) 395 | # BYTE_COLOR is stored as unsigned char in Blender (MLoopCol) 396 | color_data = np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0]], dtype=np.uint8) 397 | db.store_named_attribute(obj, color_data, "test_byte_color", atype="BYTE_COLOR") 398 | 399 | # Retrieve and verify it's uint8 400 | result = db.named_attribute(obj, "test_byte_color") 401 | assert result.dtype == np.uint8, f"Expected uint8, got {result.dtype}" 402 | assert result.shape == (3, 4), f"Expected shape (3, 4), got {result.shape}" 403 | 404 | 405 | def test_1d_array_reshaping(): 406 | """Test that 1D arrays can be reshaped to match attribute dimensions.""" 407 | verts = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]]) 408 | obj = db.create_object(verts, name="TestReshape") 409 | 410 | # Test with FLOAT_VECTOR (3D) - pass 1D array of 9 elements 411 | flat_data = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9], dtype=np.float32) 412 | db.store_named_attribute( 413 | obj, flat_data, "test_reshape_vector", atype="FLOAT_VECTOR" 414 | ) 415 | 416 | result = db.named_attribute(obj, "test_reshape_vector") 417 | assert result.shape == (3, 3), f"Expected shape (3, 3), got {result.shape}" 418 | np.testing.assert_array_equal(result, flat_data.reshape(3, 3)) 419 | 420 | # Test with FLOAT2 - pass 1D array of 6 elements 421 | flat_data_2d = np.array([1, 2, 3, 4, 5, 6], dtype=np.float32) 422 | db.store_named_attribute(obj, flat_data_2d, "test_reshape_float2", atype="FLOAT2") 423 | 424 | result_2d = db.named_attribute(obj, "test_reshape_float2") 425 | assert result_2d.shape == (3, 2), f"Expected shape (3, 2), got {result_2d.shape}" 426 | np.testing.assert_array_equal(result_2d, flat_data_2d.reshape(3, 2)) 427 | 428 | # Test with FLOAT_COLOR (4D) - pass 1D array of 12 elements 429 | flat_color = 
np.random.rand(12).astype(np.float32) 430 | db.store_named_attribute(obj, flat_color, "test_reshape_color", atype="FLOAT_COLOR") 431 | 432 | result_color = db.named_attribute(obj, "test_reshape_color") 433 | assert result_color.shape == (3, 4), ( 434 | f"Expected shape (3, 4), got {result_color.shape}" 435 | ) 436 | np.testing.assert_array_almost_equal(result_color, flat_color.reshape(3, 4)) 437 | 438 | 439 | def test_1d_array_wrong_size_fails(): 440 | """Test that 1D arrays with wrong total size raise an error.""" 441 | verts = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]]) 442 | obj = db.create_object(verts, name="TestReshapeFail") 443 | 444 | # Try to pass wrong size - should fail 445 | wrong_size = np.array([1, 2, 3, 4, 5], dtype=np.float32) # 5 elements, need 9 446 | with pytest.raises(db.NamedAttributeError): 447 | db.store_named_attribute( 448 | obj, wrong_size, "test_wrong_size", atype="FLOAT_VECTOR" 449 | ) 450 | 451 | 452 | def test_attribute_from_array_reshaping(): 453 | """Test that Attribute.from_array() can handle reshaping.""" 454 | verts = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]]) 455 | obj = db.create_object(verts, name="TestAttrReshape") 456 | 457 | # Create attribute first 458 | initial_data = np.random.rand(3, 3).astype(np.float32) 459 | db.store_named_attribute(obj, initial_data, "test_attr_reshape") 460 | 461 | # Get the Attribute wrapper 462 | attr = db.Attribute(obj.data.attributes["test_attr_reshape"]) 463 | 464 | # Try to set with 1D array 465 | flat_data = np.array([10, 11, 12, 13, 14, 15, 16, 17, 18], dtype=np.float32) 466 | attr.from_array(flat_data) 467 | 468 | # Verify it was reshaped correctly 469 | result = attr.as_array() 470 | assert result.shape == (3, 3) 471 | np.testing.assert_array_equal(result, flat_data.reshape(3, 3)) 472 | -------------------------------------------------------------------------------- /docs/attributes.qmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Working with Attributes" 3 | --- 4 | 5 | # Understanding the Attribute System 6 | 7 | ## What are Attributes? 8 | 9 | In Blender, all data on geometry is stored as **attributes**. An attribute is a named array of values associated with elements of geometry. Every piece of information—vertex positions, edge indices, face normals, UV coordinates, vertex colors—exists as an attribute on a specific **domain**. 10 | 11 | For example: 12 | - Vertex positions are stored as a `FLOAT_VECTOR` attribute named `"position"` on the `POINT` domain 13 | - Face materials are stored as an `INT` attribute on the `FACE` domain 14 | - UV maps are stored as `FLOAT2` attributes on the `CORNER` domain 15 | 16 | `databpy` provides a clean, intuitive interface for working with these attributes using familiar NumPy operations. 17 | 18 | ## The Three-Level API 19 | 20 | `databpy` offers three levels of abstraction for working with attributes, each suited to different use cases: 21 | 22 | ```{mermaid} 23 | flowchart TD 24 | A[BlenderObject] --> B[AttributeArray] 25 | B --> C[Attribute] 26 | C --> D[bpy.types.Attribute] 27 | 28 | A -.->|"Auto-sync, convenience methods"| E[User Level] 29 | B -.->|"NumPy operations with auto-sync"| E 30 | C -.->|"Manual control, one-shot operations"| F[Advanced Use] 31 | D -.->|"Raw Blender API"| G[Low Level] 32 | 33 | style A fill:#90EE90 34 | style B fill:#87CEEB 35 | style C fill:#FFB6C1 36 | style D fill:#FFE4B5 37 | ``` 38 | 39 | ### 1. 
[`BlenderObject`](`databpy.BlenderObject`) - Highest Level (Most Convenient) 40 | 41 | The [`BlenderObject`](`databpy.BlenderObject`) class (nicknamed "bob") provides the most ergonomic interface with dictionary-style access and convenience properties. 42 | 43 | ```{python} 44 | import databpy as db 45 | import numpy as np 46 | 47 | # Create a simple object 48 | bob = db.create_bob(np.random.rand(10, 3)) 49 | 50 | # Access attributes like a dictionary - returns AttributeArray 51 | positions = bob["position"] 52 | positions[:, 2] += 1.0 # Automatically syncs to Blender 53 | 54 | # Or use the convenience property 55 | bob.position[:, 2] += 1.0 # Same thing 56 | 57 | # Store new attributes 58 | bob["my_values"] = np.random.rand(10) 59 | ``` 60 | 61 | **When to use:** Interactive work, scripting, when you want automatic syncing. 62 | 63 | ### 2. [`AttributeArray`](`databpy.AttributeArray`) - Mid Level (Auto-Syncing NumPy) 64 | 65 | [`AttributeArray`](`databpy.AttributeArray`) is a [`numpy.ndarray`](`numpy.ndarray`) subclass that automatically writes changes back to Blender. It provides natural NumPy operations with automatic bidirectional syncing. 66 | 67 | ```{python} 68 | import databpy as db 69 | import bpy 70 | 71 | obj = bpy.data.objects["Cube"] 72 | 73 | # Create an AttributeArray 74 | pos = db.AttributeArray(obj, "position") 75 | 76 | # All NumPy operations work and auto-sync 77 | pos[:, 2] += 1.0 # In-place addition 78 | pos *= 2.0 # Broadcasting 79 | pos[0] = [0, 0, 0] # Item assignment 80 | 81 | # Changes are immediately reflected in Blender 82 | ``` 83 | 84 | ::: {.callout-note} 85 | ## Performance Note 86 | `AttributeArray` syncs the **entire** attribute array on every modification, not just changed values. This is due to Blender's `foreach_set` API. For large meshes (10K+ vertices), consider batching operations or using the lower-level `Attribute` class. 87 | ::: 88 | 89 | **When to use:** When you need NumPy-like operations with automatic syncing, working with position/color/custom data interactively. 90 | 91 | ### 3. [`Attribute`](`databpy.Attribute`) - Low Level (Manual Control) 92 | 93 | The [`Attribute`](`databpy.Attribute`) class provides direct, stateless access with explicit control over when data is read or written. 94 | 95 | ```{python} 96 | import databpy as db 97 | import numpy as np 98 | import bpy 99 | 100 | obj = bpy.data.objects["Cube"] 101 | 102 | # Get the attribute wrapper 103 | attr = db.Attribute(obj.data.attributes["position"]) 104 | 105 | # Manually read data 106 | positions = attr.as_array() 107 | 108 | # Perform operations (no auto-sync) 109 | positions[:, 2] += 1.0 110 | positions *= 2.0 111 | 112 | # Manually write back (single write operation) 113 | attr.from_array(positions) 114 | ``` 115 | 116 | **When to use:** 117 | - One-shot read or write operations 118 | - Performance-critical code where you want control over sync timing 119 | - Batch processing where you make many changes before writing back 120 | - When you need to inspect attribute metadata without reading data 121 | 122 | ## Attribute Types 123 | 124 | Blender supports various attribute data types. 
`databpy` works with all of them through the [`AttributeTypes`](`databpy.AttributeTypes`) enum: 125 | 126 | ### Float-Based Types 127 | 128 | ```{python} 129 | import databpy as db 130 | import numpy as np 131 | 132 | bob = db.create_bob(np.random.rand(10, 3)) 133 | 134 | # FLOAT - Single float per element 135 | temperatures = np.random.rand(10).astype(np.float32) 136 | bob["temperature"] = temperatures 137 | 138 | # FLOAT2 - 2D vectors 139 | uv_coords = np.random.rand(10, 2).astype(np.float32) 140 | db.store_named_attribute(bob.object, uv_coords, "uv", atype="FLOAT2") 141 | 142 | # FLOAT_VECTOR - 3D vectors (most common) 143 | velocities = np.random.rand(10, 3).astype(np.float32) 144 | bob["velocity"] = velocities 145 | 146 | # FLOAT_COLOR - RGBA colors (4 components) 147 | colors = np.random.rand(10, 4).astype(np.float32) 148 | db.store_named_attribute(bob.object, colors, "color", atype="FLOAT_COLOR") 149 | 150 | # QUATERNION - Rotations (4 components: w, x, y, z) 151 | rotations = np.random.rand(10, 4).astype(np.float32) 152 | db.store_named_attribute(bob.object, rotations, "rotation", atype="QUATERNION") 153 | 154 | # FLOAT4X4 - 4x4 transformation matrices 155 | matrices = np.random.rand(10, 4, 4).astype(np.float32) 156 | db.store_named_attribute(bob.object, matrices, "transform", atype="FLOAT4X4") 157 | ``` 158 | 159 | ### Integer-Based Types 160 | 161 | ```{python} 162 | # INT - 32-bit signed integers 163 | ids = np.arange(10, dtype=np.int32) 164 | bob["id"] = ids 165 | 166 | # INT8 - 8-bit signed integers (memory efficient) 167 | small_values = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], dtype=np.int8) 168 | db.store_named_attribute(bob.object, small_values, "category", atype="INT8") 169 | 170 | # INT32_2D - 2D integer vectors 171 | pairs = np.random.randint(0, 100, size=(10, 2), dtype=np.int32) 172 | db.store_named_attribute(bob.object, pairs, "edge_ids", atype="INT32_2D") 173 | ``` 174 | 175 | ### Color Types 176 | 177 | ```{python} 178 | # BYTE_COLOR - RGBA as unsigned 8-bit (0-255) 179 | byte_colors = np.random.randint(0, 255, size=(10, 4), dtype=np.uint8) 180 | db.store_named_attribute(bob.object, byte_colors, "vertex_color", atype="BYTE_COLOR") 181 | ``` 182 | 183 | ### Boolean Type 184 | 185 | ```{python} 186 | # BOOLEAN - True/False flags 187 | selection = np.random.rand(10) > 0.5 188 | bob["selected"] = selection 189 | ``` 190 | 191 | ::: {.callout-tip} 192 | ## Type Inference 193 | If you don't specify an `atype`, `databpy` will infer it from your array's dtype and shape using [`guess_atype_from_array()`](`databpy.guess_atype_from_array`). 
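For example, a minimal sketch of the inference rules (these mappings match the behavior exercised in `tests/test_attribute.py`):

```{python}
import numpy as np
import databpy as db

# dtype and per-element shape decide the attribute type
print(db.guess_atype_from_array(np.zeros(10, dtype=bool)))           # BOOLEAN
print(db.guess_atype_from_array(np.zeros((10, 3))))                  # FLOAT_VECTOR
print(db.guess_atype_from_array(np.zeros((10, 4), dtype=np.uint8)))  # BYTE_COLOR
```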
194 | ::: 195 | 196 | ## Attribute Domains 197 | 198 | Attributes exist on different **domains** of the geometry, specified by the [`AttributeDomains`](`databpy.AttributeDomains`) enum: 199 | 200 | | Domain | Description | Example Use Cases | 201 | |--------|-------------|-------------------| 202 | | `POINT` | Vertices, curve control points | Positions, vertex colors, temperature | 203 | | `EDGE` | Mesh edges | Edge weights, crease values | 204 | | `FACE` | Mesh faces/polygons | Material indices, face areas | 205 | | `CORNER` | Face corners (vertex-face pairs) | UV coordinates, split normals | 206 | | `CURVE` | Individual curve splines | Per-spline properties | 207 | | `INSTANCE` | Geometry instances | Instance transforms, IDs | 208 | 209 | ```{python} 210 | import databpy as db 211 | import numpy as np 212 | import bpy 213 | 214 | obj = bpy.data.objects["Cube"] 215 | 216 | # POINT domain (default) - one value per vertex 217 | vertex_data = np.random.rand(len(obj.data.vertices), 3) 218 | db.store_named_attribute(obj, vertex_data, "vertex_attr", domain="POINT") 219 | 220 | # FACE domain - one value per face 221 | face_data = np.random.rand(len(obj.data.polygons)) 222 | db.store_named_attribute(obj, face_data, "face_attr", domain="FACE") 223 | 224 | # EDGE domain - one value per edge 225 | edge_data = np.random.rand(len(obj.data.edges)) 226 | db.store_named_attribute(obj, edge_data, "edge_attr", domain="EDGE") 227 | ``` 228 | 229 | ::: {.callout-important} 230 | ## Domain Size Matching 231 | The length of your data array must match the number of elements in the target domain. A cube has 8 vertices, 12 edges, and 6 faces, so your arrays must have corresponding lengths. 232 | ::: 233 | 234 | ## Common Workflows 235 | 236 | ### Reading Attributes 237 | 238 | ```{python} 239 | import databpy as db 240 | import bpy 241 | 242 | obj = bpy.data.objects["Cube"] 243 | 244 | # Method 1: Simple function call (returns regular numpy array) 245 | positions = db.named_attribute(obj, "position") # see named_attribute() 246 | 247 | # Method 2: Via BlenderObject (returns AttributeArray) 248 | bob = db.BlenderObject(obj) # see BlenderObject 249 | positions = bob["position"] # or bob.position 250 | 251 | # Method 3: List available attributes 252 | attrs = db.list_attributes(obj) # see list_attributes() 253 | print(attrs) 254 | 255 | # Method 4: With modifier evaluation 256 | evaluated_positions = db.named_attribute(obj, "position", evaluate=True) 257 | ``` 258 | 259 | ### Writing Attributes 260 | 261 | ```{python} 262 | import databpy as db 263 | import numpy as np 264 | 265 | bob = db.create_bob(np.random.rand(10, 3)) 266 | 267 | # Method 1: Dictionary-style (most convenient) 268 | bob["my_data"] = np.random.rand(10, 3) 269 | 270 | # Method 2: Function call (more options) 271 | db.store_named_attribute( 272 | bob.object, 273 | data=np.random.rand(10), 274 | name="custom_attr", 275 | atype="FLOAT", 276 | domain="POINT", 277 | overwrite=True 278 | ) 279 | 280 | # Method 3: Via BlenderObject method 281 | bob.store_named_attribute( 282 | np.random.rand(10, 3), 283 | name="another_attr", 284 | domain="POINT" 285 | ) 286 | ``` 287 | 288 | ### Modifying Attributes In-Place 289 | 290 | ```{python} 291 | import databpy as db 292 | import numpy as np 293 | 294 | bob = db.create_bob(np.random.rand(100, 3)) 295 | 296 | # Using AttributeArray for interactive modifications 297 | pos = bob.position 298 | 299 | # Simple operations 300 | pos[:, 2] += 1.0 # Move all points up 301 | pos *= 2.0 # Scale positions 302 | pos[pos < 0] 
= 0 # Clamp negative values 303 | 304 | # Boolean indexing 305 | selection = pos[:, 2] > 0.5 306 | pos[selection, 2] = 1.0 307 | 308 | # Column operations 309 | pos[:, 0] = np.linspace(0, 1, len(pos)) # Linear ramp on X 310 | ``` 311 | 312 | ### Batch Processing (Performance) 313 | 314 | For large datasets, use the `Attribute` class to batch operations: 315 | 316 | ```{python} 317 | import databpy as db 318 | import numpy as np 319 | import bpy 320 | 321 | obj = db.create_object(np.random.rand(int(1e5), 3)) 322 | attr = db.Attribute(obj.data.attributes["position"]) 323 | 324 | # Single read 325 | positions = attr.as_array() 326 | 327 | # Many operations without syncing 328 | positions[:, 2] += 1.0 329 | positions *= 2.0 330 | positions = np.clip(positions, -10, 10) 331 | 332 | # Single write 333 | attr.from_array(positions) 334 | ``` 335 | 336 | ::: {.callout-tip} 337 | ## Performance Comparison 338 | - `AttributeArray`: ~N writes for N operations (auto-sync each time) 339 | - `Attribute`: 1 read + 1 write for N operations (manual control) 340 | 341 | For 100K vertices with 10 operations, `Attribute` can be 10x faster. 342 | ::: 343 | 344 | ### Working with Integer Attributes 345 | 346 | ```{python} 347 | import databpy as db 348 | import numpy as np 349 | 350 | bob = db.create_bob(np.random.rand(10, 3)) 351 | 352 | # Store integer IDs 353 | ids = np.arange(10, dtype=np.int32) 354 | bob["id"] = ids 355 | 356 | # Retrieve as AttributeArray 357 | id_array = bob["id"] 358 | 359 | # Modify (automatically maintains int32 dtype) 360 | id_array += 100 361 | 362 | # Verify it's still integers 363 | print(id_array.dtype) # int32 364 | ``` 365 | 366 | ### Working with Boolean Attributes 367 | 368 | ```{python} 369 | import databpy as db 370 | import numpy as np 371 | 372 | bob = db.create_bob(np.random.rand(20, 3)) 373 | 374 | # Create selection based on position 375 | selection = bob.position[:, 2] > 0.5 376 | bob["selected"] = selection 377 | 378 | # Use boolean attribute for filtering 379 | selected_mask = bob["selected"] 380 | bob.position[selected_mask, 2] = 1.0 381 | ``` 382 | 383 | ## Error Handling 384 | 385 | `databpy` uses a consistent exception hierarchy for attribute-related errors: 386 | 387 | ### Exception Hierarchy 388 | 389 | - **`db.NamedAttributeError`** (base class, inherits from `AttributeError`) 390 | - Raised when attribute operations fail 391 | - Used for: non-existent attributes, invalid names, domain size mismatches 392 | - **`db.AttributeMismatchError`** (inherits from `NamedAttributeError`) 393 | - Raised when data doesn't match attribute expectations 394 | - Used for: shape mismatches, type incompatibilities 395 | 396 | ### Common Error Scenarios 397 | 398 | ```{python} 399 | import databpy as db 400 | import numpy as np 401 | import bpy 402 | 403 | obj = bpy.data.objects["Cube"] 404 | 405 | try: 406 | # Trying to access non-existent attribute 407 | data = db.named_attribute(obj, "nonexistent") 408 | except db.NamedAttributeError as e: 409 | print(f"Attribute not found: {e}") 410 | 411 | try: 412 | # Wrong data size for domain 413 | db.store_named_attribute(obj, np.random.rand(100, 3), "test") 414 | except db.NamedAttributeError as e: 415 | print(f"Size mismatch: {e}") 416 | 417 | try: 418 | # Shape mismatch when using Attribute class 419 | attr = db.Attribute(obj.data.attributes["position"]) 420 | wrong_shape = np.random.rand(8, 4) # Should be (8, 3) 421 | attr.from_array(wrong_shape) 422 | except db.AttributeMismatchError as e: 423 | print(f"Shape error: {e}") 424 | ``` 
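Because the exceptions form a hierarchy, the scenarios above can also share a single handler (see the tip below). A minimal sketch, where the `safe_read` helper is illustrative rather than part of `databpy`:

```{python}
def safe_read(obj, name):
    """Read an attribute, returning None instead of raising on attribute errors."""
    try:
        return db.named_attribute(obj, name)
    except db.NamedAttributeError as e:  # also catches AttributeMismatchError
        print(f"Could not read '{name}': {e}")
        return None

safe_read(obj, "position")     # returns the position array
safe_read(obj, "nonexistent")  # prints a message and returns None
```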

::: {.callout-tip}
## Catching All Attribute Errors
Since `AttributeMismatchError` inherits from `NamedAttributeError`, you can catch all attribute-related errors with a single `except db.NamedAttributeError:` clause.
:::

## Best Practices

### 1. Choose the Right Abstraction Level

- **Interactive work, scripting**: Use `BlenderObject` and `AttributeArray`
- **Performance-critical code**: Use `Attribute` with manual read/write
- **Quick one-off reads**: Use the `named_attribute()` function

### 2. Be Mindful of Data Types

```{python}
import numpy as np

# Good: Explicit dtype matching Blender's storage
data = np.random.rand(10, 3).astype(np.float32)

# Less good: float64 will be converted to float32 anyway
data = np.random.rand(10, 3)  # defaults to float64
```

### 3. Batch Operations When Possible

```{python}
# Not great: Multiple syncs
pos = bob.position
for i in range(len(pos)):
    pos[i, 2] += 1.0  # Syncs every iteration!

# Better: Vectorized operation (single sync)
pos[:, 2] += 1.0
```

### 4. Use Named Constants for Domains and Types

```{python}
from databpy import AttributeDomains, AttributeTypes

# More readable and type-safe
db.store_named_attribute(
    obj,
    np.random.rand(len(obj.data.vertices), 3),
    "my_attr",
    atype=AttributeTypes.FLOAT_VECTOR,
    domain=AttributeDomains.POINT,
)
db.named_attribute(obj, "my_attr")
```

### 5. Clean Up Temporary Attributes

```{python}
# Remove attributes you no longer need
db.remove_named_attribute(obj, "my_attr")
try:
    db.named_attribute(obj, "my_attr")
except db.NamedAttributeError as e:
    print(e)
```

## Architecture Summary

Understanding the relationship between the classes helps you choose the right tool:

```
┌─────────────────────────────────────────────────────┐
│ BlenderObject (bob)                                 │
│ - High-level convenience wrapper                    │
│ - Dictionary access: bob["attr"]                    │
│ - Property access: bob.position                     │
│ - Returns: AttributeArray                           │
└─────────────────┬───────────────────────────────────┘
                  │
                  │ creates/returns
                  ▼
┌─────────────────────────────────────────────────────┐
│ AttributeArray                                      │
│ - NumPy subclass with auto-sync                     │
│ - All NumPy operations work                         │
│ - Syncs entire array on every modification          │
│ - References: Attribute (for metadata)              │
└─────────────────┬───────────────────────────────────┘
                  │
                  │ uses
                  ▼
┌─────────────────────────────────────────────────────┐
│ Attribute                                           │
│ - Low-level wrapper, manual control                 │
│ - Methods: as_array(), from_array()                 │
│ - Properties: atype, domain, shape, dtype           │
│ - One-shot reads/writes                             │
└─────────────────┬───────────────────────────────────┘
                  │
                  │ wraps
                  ▼
┌─────────────────────────────────────────────────────┐
│ bpy.types.Attribute                                 │
│ - Raw Blender API                                   │
│ - foreach_get/foreach_set                           │
└─────────────────────────────────────────────────────┘
```
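
To make the layering concrete, the sketch below pushes a single attribute through every level of the stack, using only calls already shown on this page (the object name `LayerDemo` is arbitrary):

```{python}
import databpy as db
import numpy as np
import bpy

bob = db.create_bob(np.random.rand(4, 3), name="LayerDemo")

# BlenderObject: convenience access returns an AttributeArray
pos = bob.position

# AttributeArray: numpy-style edits sync back to Blender automatically
pos[:, 2] += 1.0

# Attribute: explicit one-shot read and write, no auto-sync in between
attr = db.Attribute(bob.object.data.attributes["position"])
arr = attr.as_array()
attr.from_array(arr * 2.0)

# bpy.types.Attribute: the raw Blender layer underneath it all
print(type(bob.object.data.attributes["position"]))
```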

## See Also

### Core Classes
- [`BlenderObject`](`databpy.BlenderObject`) - High-level object wrapper with convenience methods
- [`AttributeArray`](`databpy.AttributeArray`) - Auto-syncing NumPy array subclass
- [`Attribute`](`databpy.Attribute`) - Low-level attribute wrapper with manual control

### Functions
- [`named_attribute()`](`databpy.named_attribute`) - Read attribute data as a NumPy array
- [`store_named_attribute()`](`databpy.store_named_attribute`) - Write attribute data to an object
- [`remove_named_attribute()`](`databpy.remove_named_attribute`) - Delete an attribute
- [`list_attributes()`](`databpy.list_attributes`) - List all attributes on an object
- [`create_bob()`](`databpy.create_bob`) - Create a new BlenderObject
- [`create_object()`](`databpy.create_object`) - Create a new Blender object

### Type Enums
- [`AttributeTypes`](`databpy.AttributeTypes`) - Enum of all available attribute data types
- [`AttributeDomains`](`databpy.AttributeDomains`) - Enum of all available geometry domains

### Exceptions
- [`NamedAttributeError`](`databpy.NamedAttributeError`) - Base exception for attribute operations
- [`AttributeMismatchError`](`databpy.AttributeMismatchError`) - Exception for data/type mismatches

### External References
- [`numpy.ndarray`](`numpy.ndarray`) - NumPy array documentation
- [`bpy.types.Object`](`bpy.types.Object`) - Blender Object documentation
- [`bpy.types.Attribute`](`bpy.types.Attribute`) - Blender Attribute documentation
--------------------------------------------------------------------------------
/tests/test_arrays.py:
--------------------------------------------------------------------------------
import numpy as np
import unittest
import pytest
import databpy as db
from databpy.object import AttributeArray, create_bob

np.random.seed(11)


class TestAttributeArray(unittest.TestCase):
    """Test the AttributeArray numpy subclass functionality."""

    def setup_method(self, method=None):
        """Set up test fixtures before each test method."""
        # Create test vertices
        self.test_vertices = np.array(
            [
                [0.0, 0.0, 0.0],
                [1.0, 0.0, 0.0],
                [1.0, 1.0, 0.0],
                [0.0, 1.0, 0.0],
                [0.5, 0.5, 1.0],
            ]
        )
        self.bob = create_bob(vertices=self.test_vertices, name="TestPositionArray")

    def test_position_array_creation(self):
        """Test that an AttributeArray is created correctly."""
        pos = self.bob.position

        assert isinstance(pos, AttributeArray)
        assert isinstance(pos, np.ndarray)
        assert pos.shape == (5, 3)
        np.testing.assert_array_equal(pos, self.test_vertices)

    def test_position_array_has_blender_reference(self):
        """Test that an AttributeArray maintains a reference to the BlenderObject."""
        pos = self.bob.position

        assert hasattr(pos, "_blender_object")
        assert pos._blender_object is self.bob.object

    def test_numpy_array_properties(self):
        """Test that an AttributeArray inherits numpy array properties."""
        pos = self.bob.position

        assert pos.shape == (5, 3)
        assert pos.dtype in (np.float32, np.float64)
        assert pos.ndim == 2
        assert len(pos) == 5

    def test_numpy_array_methods(self):
        """Test that an AttributeArray supports numpy array methods."""
        pos = self.bob.position

        # Test read-only operations
        mean_pos = pos.mean(axis=0)
        assert mean_pos.shape == (3,)

        max_pos = pos.max(axis=0)
        assert max_pos.shape == (3,)

        # Test slicing returns an AttributeArray or regular array as appropriate
        slice_pos = pos[:3]
        assert isinstance(slice_pos, np.ndarray)

    def test_indexed_assignment(self):
        """Test that indexed assignment works and syncs to Blender."""
        pos = self.bob.position
        _ = pos[0, 2]  # reading a single element should not raise or sync

        # Modify a single element
        pos[0, 2] = 5.0

        # Check that the change is reflected in the array
        assert pos[0, 2] == 5.0

        # Check that it synced back to Blender
        updated_pos = self.bob.named_attribute("position")
        assert updated_pos[0, 2] == 5.0

    def test_slice_assignment(self):
        """Test that slice assignment works and syncs to Blender."""
        pos = self.bob.position

        # Modify a column (all Z coordinates)
        pos[:, 2] = 2.0

        # Check that all Z coordinates are updated
        np.testing.assert_array_equal(pos[:, 2], [2.0, 2.0, 2.0, 2.0, 2.0])

        # Check that it synced back to Blender
        updated_pos = self.bob.named_attribute("position")
        np.testing.assert_array_equal(updated_pos[:, 2], [2.0, 2.0, 2.0, 2.0, 2.0])

    def test_inplace_addition(self):
        """Test that in-place addition works and syncs to Blender."""
        pos = self.bob.position
        original_pos = pos.copy()

        # Add 1 to all Z coordinates
        pos[:, 2] += 1.0

        # Check the change
        expected = original_pos.copy()
        expected[:, 2] += 1.0
        np.testing.assert_array_almost_equal(pos, expected)

        # Check sync to Blender
        updated_pos = self.bob.named_attribute("position")
        np.testing.assert_array_almost_equal(updated_pos, expected)

    def test_inplace_subtraction(self):
        """Test that in-place subtraction works and syncs to Blender."""
        pos = self.bob.position
        original_pos = pos.copy()

        pos[:, 1] -= 0.5

        expected = original_pos.copy()
        expected[:, 1] -= 0.5
        np.testing.assert_array_almost_equal(pos, expected)

        # Check sync to Blender
        updated_pos = self.bob.named_attribute("position")
        np.testing.assert_array_almost_equal(updated_pos, expected)

    def test_inplace_multiplication(self):
        """Test that in-place multiplication works and syncs to Blender."""
        pos = self.bob.position
        original_pos = pos.copy()

        pos *= 2.0

        expected = original_pos * 2.0
        np.testing.assert_array_almost_equal(pos, expected)

        # Check sync to Blender
        updated_pos = self.bob.named_attribute("position")
        np.testing.assert_array_almost_equal(updated_pos, expected)

    def test_inplace_division(self):
        """Test that in-place division works and syncs to Blender."""
        pos = self.bob.position
        # Set to non-zero values to avoid division issues
        pos[:] = [
            [2.0, 4.0, 6.0],
            [8.0, 10.0, 12.0],
            [14.0, 16.0, 18.0],
            [20.0, 22.0, 24.0],
            [26.0, 28.0, 30.0],
        ]
        original_pos = pos.copy()

        pos /= 2.0

        expected = original_pos / 2.0
        np.testing.assert_array_almost_equal(pos, expected)

        # Check sync to Blender
        updated_pos = self.bob.named_attribute("position")
        np.testing.assert_array_almost_equal(updated_pos, expected)

    def test_complex_indexing_operations(self):
        """Test complex indexing operations like the original use case."""
        pos = self.bob.position

        # The original problematic operation
        pos[:, 2] += 1

        # Check that all Z coordinates increased by 1
        expected_z = self.test_vertices[:, 2] + 1
        np.testing.assert_array_almost_equal(pos[:, 2], expected_z)

        # Check sync to Blender
        updated_pos = self.bob.named_attribute("position")
        np.testing.assert_array_almost_equal(updated_pos[:, 2], expected_z)

    def test_multiple_operations(self):
        """Test multiple consecutive operations."""
        pos = self.bob.position

        # Chain multiple operations
        pos[:, 0] += 1.0
        pos[:, 1] *= 2.0
        pos[0, 2] = 10.0

        # Check final state
        expected = self.test_vertices.copy()
        expected[:, 0] += 1.0
        expected[:, 1] *= 2.0
        expected[0, 2] = 10.0

        np.testing.assert_array_almost_equal(pos, expected)

        # Check sync to Blender
        updated_pos = self.bob.named_attribute("position")
        np.testing.assert_array_almost_equal(updated_pos, expected)

    def test_position_setter_still_works(self):
        """Test that the position setter still works with regular arrays."""
        new_positions = np.array(
            [
                [10.0, 10.0, 10.0],
                [20.0, 20.0, 20.0],
                [30.0, 30.0, 30.0],
                [40.0, 40.0, 40.0],
                [50.0, 50.0, 50.0],
            ]
        )

        # Set using the setter
        self.bob.position = new_positions

        # Check that it worked
        pos = self.bob.position
        np.testing.assert_array_equal(pos, new_positions)

        # Check that it's still an AttributeArray
        assert isinstance(pos, AttributeArray)

    def test_array_finalize_preserves_reference(self):
        """Test that array operations preserve the Blender object reference."""
        pos = self.bob.position

        # Operations that might trigger __array_finalize__
        _ = pos[:3]

        # The slice might not be an AttributeArray, but the original should still work
        pos[0, 0] = 999.0

        # Check that the reference is still intact
        assert hasattr(pos, "_blender_object")
        assert pos._blender_object is self.bob.object

        # Check that the change synced
        updated_pos = self.bob.named_attribute("position")
        assert updated_pos[0, 0] == 999.0

    def test_column_slice_array_operations(self):
        """Test that column slices support array operations with syncing."""
        pos = self.bob.position

        # Set initial test values
        initial_values = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
        pos[:, 2] = initial_values

        # Get column view for z coordinates
        z_column = pos[:, 2]

        # Test basic array operations that are known to work
        z_column += 2.0

        # Check that the operation applied correctly
        expected = initial_values + 2.0
        np.testing.assert_array_almost_equal(np.asarray(z_column), expected)

        # Check that it synced back to Blender
        updated_pos = self.bob.named_attribute("position")
        np.testing.assert_array_almost_equal(updated_pos[:, 2], expected)

        # Test another operation
        z_column *= 3.0

        # Check the new result
        expected = (initial_values + 2.0) * 3.0
        np.testing.assert_array_almost_equal(np.asarray(z_column), expected)

        # Verify sync again
        updated_pos = self.bob.named_attribute("position")
        np.testing.assert_array_almost_equal(updated_pos[:, 2], expected)

    def test_column_slice_attribute_delegation(self):
        """Test that column views support numpy methods and attribute access."""
        pos = self.bob.position

        # Set initial values
        pos[:, 1] = np.array([1.0, 2.0, 3.0, 4.0, 5.0])

        # Get a column view
        y_column = pos[:, 1]

        # Test attribute delegation
        assert y_column.mean() == 3.0
        assert y_column.sum() == 15.0
        assert y_column.max() == 5.0
        assert y_column.min() == 1.0

        # Test attribute error for non-existent attribute
        with pytest.raises(AttributeError):
            _ = y_column.nonexistent_attribute

    def test_column_slice_array_conversion(self):
        """Test that column slices convert to arrays with optional dtype."""
        pos = self.bob.position

        # Set test values
        pos[:, 0] = np.array([1.0, 2.0, 3.0, 4.0, 5.0])

        # Get a column view
        x_column = pos[:, 0]

        # Convert to array with default dtype
        arr1 = np.asarray(x_column)
        np.testing.assert_array_equal(arr1, [1.0, 2.0, 3.0, 4.0, 5.0])
        assert arr1.dtype == pos.dtype

        # Convert to array with specified dtype
        arr2 = np.asarray(x_column, dtype=np.int32)
        np.testing.assert_array_equal(arr2, [1, 2, 3, 4, 5])
        assert arr2.dtype == np.int32

        # Test equality comparison
        assert np.array_equal(x_column, np.array([1.0, 2.0, 3.0, 4.0, 5.0]))
        assert not np.array_equal(x_column, np.array([5.0, 4.0, 3.0, 2.0, 1.0]))

    def test_float_color_attribute_handling(self):
        """Test handling of FLOAT_COLOR attributes (4 components)."""
        from databpy.attribute import AttributeTypes

        # Create a color attribute (RGBA)
        color_data = np.random.rand(5, 4).astype(np.float32)
        self.bob.store_named_attribute(
            color_data, name="color", atype=AttributeTypes.FLOAT_COLOR, domain="POINT"
        )

        # Get as AttributeArray
        colors = AttributeArray(self.bob.object, "color")

        # Verify shape and components
        assert colors.shape == (5, 4)
        assert colors._get_expected_components() == 4

        # Modify and verify sync
        colors[:, 3] = 0.5  # Set alpha to 0.5

        updated_colors = self.bob.named_attribute("color")
        np.testing.assert_array_almost_equal(
            updated_colors[:, 3], [0.5, 0.5, 0.5, 0.5, 0.5]
        )

    def test_equality_comparison(self):
        """Test equality comparisons with different input types."""
        pos = self.bob.position

        # Set known values
        test_data = np.array(
            [
                [1.0, 2.0, 3.0],
                [4.0, 5.0, 6.0],
                [7.0, 8.0, 9.0],
                [10.0, 11.0, 12.0],
                [13.0, 14.0, 15.0],
            ]
        )
        pos[:] = test_data

        # Compare with numpy array using __eq__ method
        assert np.array_equal(pos, test_data)

        # For inequality, use np.array_equal with a not operator
        modified_data = test_data + 1.0
        assert not np.array_equal(np.asarray(pos), modified_data)

        # Compare with a column
        column_data = np.array([1.0, 4.0, 7.0, 10.0, 13.0])
        assert np.array_equal(pos[:, 0], column_data)

        # Compare with another AttributeArray
        other_bob = create_bob(vertices=test_data, name="OtherTest")
        other_pos = other_bob.position
        assert np.array_equal(pos, other_pos)

        # Test that non-equality works correctly
        other_bob2 = create_bob(vertices=test_data + 2.0, name="DifferentTest")
        different_pos = other_bob2.position
        assert not np.array_equal(np.asarray(pos), np.asarray(different_pos))

        # Test comparison with array of different shape but matching column
        column_data = np.array([3.0, 6.0, 9.0, 12.0, 15.0])
        # This should match the 3rd column (index 2)
        assert np.array_equal(pos[:, 2], column_data)

    def test_column_slice_array_wrapping_and_ufuncs(self):
        """Test column slice handling of numpy ops and array wrapping."""
        pos = self.bob.position

        # Set initial values
        pos[:, 0] = np.array([1.0, 2.0, 3.0, 4.0, 5.0])

        # Get a column view (1D AttributeArray)
        x_column = pos[:, 0]

        # Test standard operations instead of direct ufuncs
        x_column += 10.0

        # Check that the operation updated values and synced
        np.testing.assert_array_equal(pos[:, 0], [11.0, 12.0, 13.0, 14.0, 15.0])
        updated_pos = self.bob.named_attribute("position")
        np.testing.assert_array_almost_equal(
            updated_pos[:, 0], [11.0, 12.0, 13.0, 14.0, 15.0]
        )

        # Save current values
        original = np.array(pos[:, 0])

        # Apply sqrt using standard assignment
        pos[:, 0] = np.sqrt(pos[:, 0])

        # Check results
        expected = np.sqrt(original)
        np.testing.assert_array_almost_equal(pos[:, 0], expected)
        updated_pos = self.bob.named_attribute("position")
        np.testing.assert_array_almost_equal(updated_pos[:, 0], expected)

        # Square the values to get back to original (approximately)
        pos[:, 0] = np.square(pos[:, 0])
        np.testing.assert_array_almost_equal(pos[:, 0], original)

    def test_error_handling_for_invalid_operations(self):
        """Test error handling for invalid operations on AttributeArray and column slices."""
        pos = self.bob.position

        # Test incompatible shape for assignment
        with pytest.raises(ValueError):
            pos[:] = np.random.rand(10, 5)  # Wrong number of columns

        # Test incompatible shape for column assignment
        with pytest.raises(ValueError):
            pos[:, 0] = np.random.rand(10)  # Wrong number of rows

        # Test invalid column index
        with pytest.raises(IndexError):
            pos[:, 5] = 1.0  # Column index out of bounds

        # Test invalid item assignment
        with pytest.raises(IndexError):
            pos[10, 0] = 1.0  # Row index out of bounds

        # Test invalid operation on column view
        column = pos[:, 0]
        # Use pytest for all assertions for consistency
        with pytest.raises((TypeError, ValueError)):
            column + "string"  # Incompatible type for operation

    def test_mixed_type_operations(self):
        """Test operations with mixed data types on AttributeArray and column slices."""
        pos = self.bob.position

        # Initialize with float values
        pos[:] = np.ones((5, 3), dtype=np.float32)

        # Test mixed-type addition (integer)
        pos[:, 0] += 1
        np.testing.assert_array_equal(pos[:, 0], [2.0, 2.0, 2.0, 2.0, 2.0])

        # Test mixed-type multiplication (integer)
        pos[:, 1] *= 2
        np.testing.assert_array_equal(pos[:, 1], [2.0, 2.0, 2.0, 2.0, 2.0])

        # Test with boolean array
        mask = np.array([True, False, True, False, True])
        pos[mask, 2] = 3.0
        expected = np.array([3.0, 1.0, 3.0, 1.0, 3.0])
        np.testing.assert_array_equal(pos[:, 2], expected)

        # Test operations with numpy int64/float64 scalars
        pos[:, 0] += np.int64(3)
        np.testing.assert_array_equal(pos[:, 0], [5.0, 5.0, 5.0, 5.0, 5.0])

        pos[:, 1] *= np.float64(1.5)
        np.testing.assert_array_equal(pos[:, 1], [3.0, 3.0, 3.0, 3.0, 3.0])

        # Check sync to Blender
        updated_pos = self.bob.named_attribute("position")
        np.testing.assert_array_almost_equal(updated_pos, pos)

    def test_multiple_column_operations(self):
        """Test operations on multiple columns simultaneously."""
        pos = self.bob.position

        # Initialize with known values
        pos[:] = np.ones((5, 3), dtype=np.float32)

        # Test modifying multiple columns in a single operation
        pos[:, [0, 2]] = np.array(
            [[2.0, 3.0], [2.0, 3.0], [2.0, 3.0], [2.0, 3.0], [2.0, 3.0]]
        )

        # Check results
        np.testing.assert_array_equal(pos[:, 0], [2.0, 2.0, 2.0, 2.0, 2.0])
        np.testing.assert_array_equal(pos[:, 1], [1.0, 1.0, 1.0, 1.0, 1.0])
        np.testing.assert_array_equal(pos[:, 2], [3.0, 3.0, 3.0, 3.0, 3.0])

        # Test boolean indexing for multiple columns
        mask = np.array([True, False, True, False, True])
        pos[mask, :] = np.array([[5.0, 5.0, 5.0], [6.0, 6.0, 6.0], [7.0, 7.0, 7.0]])

        # Check results
        expected = np.array(
            [
                [5.0, 5.0, 5.0],
                [2.0, 1.0, 3.0],
                [6.0, 6.0, 6.0],
                [2.0, 1.0, 3.0],
                [7.0, 7.0, 7.0],
            ]
        )
        np.testing.assert_array_equal(pos, expected)

        # Check sync to Blender
        updated_pos = self.bob.named_attribute("position")
        np.testing.assert_array_almost_equal(updated_pos, expected)

        # Test range slicing
        pos[1:4, 0:2] = 9.0
        expected[1:4, 0:2] = 9.0
        np.testing.assert_array_equal(pos, expected)

        # Check sync to Blender
        updated_pos = self.bob.named_attribute("position")
        np.testing.assert_array_almost_equal(updated_pos, expected)


def test_position_array_integration():
    """Integration test with the broader databpy ecosystem."""
    # Create an object using create_bob
    vertices = np.random.rand(10, 3)
    bob = create_bob(vertices=vertices, name="IntegrationTest")

    # Test that position returns an AttributeArray
    pos = bob.position
    assert isinstance(pos, AttributeArray)

    # Test the original use case that was broken
    pos[:, 2] += 1.0

    # Verify the change
    expected_z = vertices[:, 2] + 1.0
    np.testing.assert_array_almost_equal(pos[:, 2], expected_z)

    # Test with the ObjectTracker context manager
    from databpy.object import ObjectTracker

    with ObjectTracker() as tracker:
        create_bob(vertices=np.random.rand(5, 3), name="TrackedObject")

    tracked_objects = tracker.new_objects()
    assert len(tracked_objects) == 1

    # Test the position array on the tracked object
    tracked_bob = db.BlenderObject(tracked_objects[0])
    tracked_pos = tracked_bob.position
    assert isinstance(tracked_pos, AttributeArray)

    # Test modification
    tracked_pos += 0.5
    updated = tracked_bob.named_attribute("position")
    assert np.all(updated >= 0.5)
--------------------------------------------------------------------------------