├── python ├── pyosl │ ├── __init__.py │ ├── .gitignore │ ├── ply │ │ └── __init__.py │ ├── README.md │ ├── LICENSE │ ├── osl2glsl.py │ ├── osllex.py │ ├── oslast.py │ └── oslparse.py ├── pypng │ ├── __init__.py │ └── README ├── iniparse │ ├── configparser.py │ ├── __init__.py │ ├── utils.py │ ├── config.py │ └── compat.py ├── pcpp │ ├── ply │ │ └── ply │ │ │ ├── __init__.py │ │ │ ├── ygen.py │ │ │ └── ctokens.py │ ├── __init__.py │ ├── evaluator.py │ └── pcmd.py ├── pluginUtils │ ├── log.py │ ├── __init__.py │ ├── path.py │ ├── manager.py │ ├── convert.py │ └── gltf.py └── sysfont.py ├── .gitignore ├── fonts ├── bfont.woff └── LICENSE-bfont.ttf.txt ├── blender_manifest.toml ├── README.md ├── node_material_wrapper.py ├── manual_map.py ├── reexport.py ├── gltf2_export.py ├── __init__.py ├── gltf2_get.py ├── gltf2_filter.py └── utils.py /python/pyosl/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | *.pyc 3 | -------------------------------------------------------------------------------- /python/pypng/__init__.py: -------------------------------------------------------------------------------- 1 | # empty 2 | -------------------------------------------------------------------------------- /python/pyosl/.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | __pycache__/ 3 | -------------------------------------------------------------------------------- /fonts/bfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Soft8Soft/verge3d-blender-addon/HEAD/fonts/bfont.woff -------------------------------------------------------------------------------- /python/iniparse/configparser.py: 
-------------------------------------------------------------------------------- 1 | from configparser import * 2 | from configparser import Error, InterpolationMissingOptionError 3 | -------------------------------------------------------------------------------- /python/pyosl/ply/__init__.py: -------------------------------------------------------------------------------- 1 | # PLY package 2 | # Author: David Beazley (dave@dabeaz.com) 3 | 4 | __version__ = '3.11' 5 | __all__ = ['lex','yacc'] 6 | -------------------------------------------------------------------------------- /python/pcpp/ply/ply/__init__.py: -------------------------------------------------------------------------------- 1 | # PLY package 2 | # Author: David Beazley (dave@dabeaz.com) 3 | 4 | __version__ = '3.11' 5 | __all__ = ['lex','yacc'] 6 | -------------------------------------------------------------------------------- /python/pcpp/__init__.py: -------------------------------------------------------------------------------- 1 | from .pcmd import main, version, CmdPreprocessor 2 | from .preprocessor import Preprocessor, OutputDirective, Action 3 | __version__ = version 4 | -------------------------------------------------------------------------------- /python/pypng/README: -------------------------------------------------------------------------------- 1 | Generated from pip version of pypng 0.20220715.0 2 | 3 | pip install pypng --target pypng 4 | 5 | Modifications: 6 | 1) removed bin folder, cache, and dist info 7 | 2) created package with empty __init__.py 8 | -------------------------------------------------------------------------------- /python/pluginUtils/log.py: -------------------------------------------------------------------------------- 1 | import logging, sys 2 | 3 | LOG_LEVEL = logging.INFO 4 | 5 | def getLogger(name): 6 | log = logging.getLogger(name) 7 | log.setLevel(LOG_LEVEL) 8 | if not log.hasHandlers(): 9 | logH = logging.StreamHandler(sys.stdout) 10 | 
logH.setFormatter(logging.Formatter('%(name)s-%(levelname)s: %(message)s')) 11 | log.addHandler(logH) 12 | return log 13 | -------------------------------------------------------------------------------- /blender_manifest.toml: -------------------------------------------------------------------------------- 1 | schema_version = "1.0.0" 2 | 3 | id = "verge3d" 4 | name = "Verge3D" 5 | version = "4.11.0" 6 | type = "add-on" 7 | blender_version_min = "4.2.0" 8 | maintainer = "Soft8Soft" 9 | license = ["SPDX:GPL-2.0-or-later"] 10 | tagline = "Artist-friendly toolkit for creating 3D web experiences" 11 | 12 | permissions = ["files", "network"] 13 | copyright = ["2017-2025 Soft8Soft"] 14 | website = "https://www.soft8soft.com/docs/manual/en/index.html" 15 | tags = ["Import-Export", "Render"] 16 | -------------------------------------------------------------------------------- /python/pluginUtils/__init__.py: -------------------------------------------------------------------------------- 1 | #__all__ = [''] 2 | 3 | from . import convert, gltf, log, manager, path, rawdata 4 | 5 | debug = True 6 | copyrightLine = 'Soft8Soft, LLC' 7 | 8 | def clamp(val, minval, maxval): 9 | return max(minval, min(maxval, val)) 10 | 11 | def srgbToLinear(x): 12 | if x <= 0.0: 13 | return 0.0 14 | elif x >= 1: 15 | return 1.0 16 | elif x < 0.04045: 17 | return x / 12.92 18 | else: 19 | return ((x + 0.055) / 1.055) ** 2.4 20 | 21 | def colorToLuminosity(color): 22 | return color[0] * 0.21 + color[1] * 0.72 + color[2] * 0.07 23 | 24 | def isPowerOfTwo(val): 25 | return (val != 0 and (not(val & (val - 1)))) 26 | -------------------------------------------------------------------------------- /python/pyosl/README.md: -------------------------------------------------------------------------------- 1 | # OSL to GLSL converter 2 | 3 | OSL to GLSL converter written in Python. 4 | 5 | This library is an open-sourced component of [Verge3D](https://www.soft8soft.com/verge3d/) toolkit. 
6 | 7 | ## Usage 8 | 9 | pyosl includes a command line utility called osl2glsl.py. The command 10 | 11 | ``` 12 | python osl2glsl your_shader.osl 13 | ``` 14 | 15 | will print converted GLSL shader to stdout. Use the: 16 | 17 | ``` 18 | python osl2glsl your_shader.osl > your_shader.glsl 19 | ``` 20 | 21 | command to write GLSL shader to the file named `your_shader.glsl`. 22 | 23 | ## OSL standard library functions 24 | 25 | Converter will try to use the built-in GLSL functions where possible. In all other cases it will rename OSL functions as follows: 26 | 27 | * noise -> oslNoise 28 | * transform -> oslTransform 29 | 30 | It's up to you to write implementations of such oslNAME methods. 31 | 32 | ## Support 33 | Got questions/found bugs? Ask on the [Verge3D Forums](https://www.soft8soft.com/forums/). 34 | 35 | ## License 36 | This tool is licensed under the terms of the MIT license. 37 | -------------------------------------------------------------------------------- /python/pyosl/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Soft8Soft LLC 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /python/iniparse/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2001, 2002, 2003 Python Software Foundation 2 | # Copyright (c) 2004-2008 Paramjit Oberoi 3 | # Copyright (c) 2007 Tim Lauridsen 4 | # All Rights Reserved. See LICENSE-PSF & LICENSE for details. 5 | 6 | from .ini import INIConfig, change_comment_syntax 7 | from .config import BasicConfig, ConfigNamespace 8 | from .compat import RawConfigParser, ConfigParser, SafeConfigParser 9 | from .utils import tidy 10 | 11 | from .configparser import DuplicateSectionError, \ 12 | NoSectionError, NoOptionError, \ 13 | InterpolationMissingOptionError, \ 14 | InterpolationDepthError, \ 15 | InterpolationSyntaxError, \ 16 | DEFAULTSECT, MAX_INTERPOLATION_DEPTH 17 | 18 | __all__ = [ 19 | 'BasicConfig', 'ConfigNamespace', 20 | 'INIConfig', 'tidy', 'change_comment_syntax', 21 | 'RawConfigParser', 'ConfigParser', 'SafeConfigParser', 22 | 'DuplicateSectionError', 'NoSectionError', 'NoOptionError', 23 | 'InterpolationMissingOptionError', 'InterpolationDepthError', 24 | 'InterpolationSyntaxError', 'DEFAULTSECT', 'MAX_INTERPOLATION_DEPTH', 25 | ] 26 | -------------------------------------------------------------------------------- /python/pyosl/osl2glsl.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import getopt, os, sys 4 | 5 | sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')) 6 | 7 | import pyosl.glslgen, pyosl.oslparse 8 | 9 | def usage(): 10 | print('Usage: osl2glsl [-ognh] shader.ost') 11 | 12 
| if __name__ == "__main__": 13 | 14 | try: 15 | opts, args = getopt.getopt(sys.argv[1:], 'ognh') 16 | except getopt.GetoptError as err: 17 | usage() 18 | sys.exit(1) 19 | 20 | print_osl_ast = False 21 | print_glsl_ast = False 22 | no_glsl = False 23 | 24 | for o, _ in opts: 25 | if o == '-o': 26 | print_osl_ast = True 27 | elif o == '-g': 28 | print_glsl_ast = True 29 | elif o == '-n': 30 | no_glsl = True 31 | elif o == '-h': 32 | usage() 33 | sys.exit() 34 | 35 | try: 36 | filename = args[0] 37 | with open(filename) as f: 38 | data = f.read() 39 | except IndexError: 40 | sys.stdout.write('Reading from standard input (type EOF to end):\n') 41 | data = sys.stdin.read() 42 | 43 | ast_osl = pyosl.oslparse.get_ast(data) 44 | if print_osl_ast: 45 | ast_osl.print_tree() 46 | 47 | ast_glsl = pyosl.glslgen.osl_to_glsl(ast_osl) 48 | if print_glsl_ast: 49 | ast_glsl.print_tree() 50 | 51 | if not no_glsl: 52 | print(pyosl.glslgen.generate(ast_glsl)) 53 | -------------------------------------------------------------------------------- /python/pluginUtils/path.py: -------------------------------------------------------------------------------- 1 | import os, pathlib, platform 2 | 3 | PORTS = { 4 | 'BLENDER': 8668, 5 | 'MAX': 8669, 6 | 'MAYA': 8670 7 | } 8 | 9 | REEXPORT_ONLY = True 10 | 11 | def getRoot(usePathLib=False): 12 | baseDir = os.path.dirname(os.path.abspath(__file__)) 13 | if usePathLib: 14 | return (pathlib.Path(baseDir) / '..' 
/ '..').resolve() 15 | else: 16 | return os.path.join(baseDir, '..', '..') 17 | 18 | def getAppManagerHost(modPackage, includeScheme=True): 19 | if includeScheme: 20 | return 'http://localhost:{}/'.format(PORTS[modPackage]) 21 | else: 22 | # HACK: fixes slowdowns in WSL 23 | return '127.0.0.1:{}'.format(PORTS[modPackage]) 24 | 25 | def findExportedAssetPath(srcPath): 26 | dirname, basename = os.path.split(srcPath) 27 | 28 | for ext in ['.gltf', '.glb']: 29 | 30 | gltfname = os.path.splitext(basename)[0] + ext 31 | 32 | for path in [os.path.join(dirname, gltfname), 33 | os.path.join(dirname, 'export', gltfname), 34 | os.path.join(dirname, 'exports', gltfname)]: 35 | 36 | if os.path.exists(path): 37 | return path 38 | 39 | if not REEXPORT_ONLY: 40 | return os.path.splitext(srcPath)[0] + '.gltf' 41 | 42 | return None 43 | 44 | def getPlatformBinDirName(): 45 | """ 46 | linux_x86_64, windows_amd64, darwin_arm64, etc... 47 | """ 48 | return platform.system().lower() + '_' + platform.machine().lower() 49 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![Industrial Robot](https://www.soft8soft.com/wp-content/uploads/2018/07/industrial-robot-gallery.jpg) 2 | 3 | # Verge3D for Blender add-on 4 | 5 | Verge3D is the most artist-friendly toolkit for creating interactive WebGL experiences. It features a comprehensive integration with Blender, a physically-based rendering pipeline, support for native materials and animation. Interactivity is enabled with Puzzles, an intuitive and easy-to-learn visual scripting environment. 6 | 7 | This add-on is an open-sourced part of complete Verge3D for Blender toolkit. 
8 | 9 | On its own it can create glTF 2.0 compliant assets (.gltf/.bin + .glb) to be used in various software and services such as: 10 | 11 | * Blender 12 | * Microsoft Paint 3D 13 | * Sketchfab 14 | * Modo 15 | * Marmoset Toolbag 16 | * Microsoft Remix 3D 17 | 18 | Need more? Download Verge3D for Blender from [here](https://www.soft8soft.com/get-verge3d/). 19 | 20 | ## Installation 21 | 22 | Just clone and copy the repo to one the following directories (considering you have Blender 4.0 installed): 23 | 24 | ### Linux 25 | ``` 26 | /home/$user/.config/blender/4.0/scripts/addons/ 27 | ``` 28 | 29 | ### Windows 10+ 30 | ``` 31 | C:\Users\%username%\AppData\Roaming\Blender Foundation\Blender\4.0\scripts\addons\ 32 | ``` 33 | 34 | ### macOS 35 | ``` 36 | /Users/$USER/Library/Application Support/Blender/4.0/scripts/addons/ 37 | ``` 38 | 39 | ## Support 40 | 41 | Please report any issues found in this add-on on [Verge3D forums](https://www.soft8soft.com/forums/). 42 | 43 | ## License 44 | 45 | This add-on is licensed under GNU General Public License Version 3. 46 | -------------------------------------------------------------------------------- /python/iniparse/utils.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING, List 2 | 3 | from . import compat 4 | from .ini import EmptyLine, LineContainer 5 | 6 | if TYPE_CHECKING: 7 | from .ini import LineType 8 | 9 | 10 | def tidy(cfg: compat.RawConfigParser): 11 | """Clean up blank lines. 12 | 13 | This functions makes the configuration look clean and 14 | handwritten - consecutive empty lines and empty lines at 15 | the start of the file are removed, and one is guaranteed 16 | to be at the end of the file. 
17 | """ 18 | 19 | if isinstance(cfg, compat.RawConfigParser): 20 | cfg = cfg.data 21 | cont = cfg._data.contents 22 | i = 1 23 | while i < len(cont): 24 | if isinstance(cont[i], LineContainer): 25 | tidy_section(cont[i]) 26 | i += 1 27 | elif (isinstance(cont[i-1], EmptyLine) and 28 | isinstance(cont[i], EmptyLine)): 29 | del cont[i] 30 | else: 31 | i += 1 32 | 33 | # Remove empty first line 34 | if cont and isinstance(cont[0], EmptyLine): 35 | del cont[0] 36 | 37 | # Ensure a last line 38 | if cont and not isinstance(cont[-1], EmptyLine): 39 | cont.append(EmptyLine()) 40 | 41 | 42 | def tidy_section(lc: "LineContainer"): 43 | cont: List[LineType] = lc.contents 44 | i: int = 1 45 | while i < len(cont): 46 | if isinstance(cont[i-1], EmptyLine) and isinstance(cont[i], EmptyLine): 47 | del cont[i] 48 | else: 49 | i += 1 50 | 51 | # Remove empty first line 52 | if len(cont) > 1 and isinstance(cont[1], EmptyLine): 53 | del cont[1] 54 | -------------------------------------------------------------------------------- /node_material_wrapper.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017-2025 Soft8Soft, LLC. All rights reserved. 2 | # 3 | # This program is free software: you can redistribute it and/or modify 4 | # it under the terms of the GNU General Public License as published by 5 | # the Free Software Foundation, either version 3 of the License, or 6 | # (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 11 | # GNU General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program. If not, see . 15 | class NodeMaterialWrapper(): 16 | """ 17 | Wrapper for a node material, capable of overwriting the "node_tree" property. 
18 | Doesn't correspond to the actual node material though. 19 | """ 20 | 21 | def __init__(self, material): 22 | super().__setattr__('_material', material) 23 | self.node_tree = None 24 | 25 | def __getattr__(self, attr): 26 | return getattr(self._material, attr) 27 | 28 | def __setattr__(self, attr, value): 29 | if attr == 'node_tree': 30 | super().__setattr__(attr, value) 31 | else: 32 | setattr(self._material, attr, value) 33 | 34 | def __getitem__(self, key): 35 | return self._material[key] 36 | 37 | def __setitem__(self, key, value): 38 | if key == 'node_tree': 39 | super()[key] = value 40 | else: 41 | self._material[key] = value 42 | -------------------------------------------------------------------------------- /python/pcpp/ply/ply/ygen.py: -------------------------------------------------------------------------------- 1 | # ply: ygen.py 2 | # 3 | # This is a support program that auto-generates different versions of the YACC parsing 4 | # function with different features removed for the purposes of performance. 5 | # 6 | # Users should edit the method LRParser.parsedebug() in yacc.py. The source code 7 | # for that method is then used to create the other methods. See the comments in 8 | # yacc.py for further details. 9 | 10 | import os.path 11 | import shutil 12 | 13 | def get_source_range(lines, tag): 14 | srclines = enumerate(lines) 15 | start_tag = '#--! %s-start' % tag 16 | end_tag = '#--! %s-end' % tag 17 | 18 | for start_index, line in srclines: 19 | if line.strip().startswith(start_tag): 20 | break 21 | 22 | for end_index, line in srclines: 23 | if line.strip().endswith(end_tag): 24 | break 25 | 26 | return (start_index + 1, end_index) 27 | 28 | def filter_section(lines, tag): 29 | filtered_lines = [] 30 | include = True 31 | tag_text = '#--! 
%s' % tag 32 | for line in lines: 33 | if line.strip().startswith(tag_text): 34 | include = not include 35 | elif include: 36 | filtered_lines.append(line) 37 | return filtered_lines 38 | 39 | def main(): 40 | dirname = os.path.dirname(__file__) 41 | shutil.copy2(os.path.join(dirname, 'yacc.py'), os.path.join(dirname, 'yacc.py.bak')) 42 | with open(os.path.join(dirname, 'yacc.py'), 'r') as f: 43 | lines = f.readlines() 44 | 45 | parse_start, parse_end = get_source_range(lines, 'parsedebug') 46 | parseopt_start, parseopt_end = get_source_range(lines, 'parseopt') 47 | parseopt_notrack_start, parseopt_notrack_end = get_source_range(lines, 'parseopt-notrack') 48 | 49 | # Get the original source 50 | orig_lines = lines[parse_start:parse_end] 51 | 52 | # Filter the DEBUG sections out 53 | parseopt_lines = filter_section(orig_lines, 'DEBUG') 54 | 55 | # Filter the TRACKING sections out 56 | parseopt_notrack_lines = filter_section(parseopt_lines, 'TRACKING') 57 | 58 | # Replace the parser source sections with updated versions 59 | lines[parseopt_notrack_start:parseopt_notrack_end] = parseopt_notrack_lines 60 | lines[parseopt_start:parseopt_end] = parseopt_lines 61 | 62 | lines = [line.rstrip()+'\n' for line in lines] 63 | with open(os.path.join(dirname, 'yacc.py'), 'w') as f: 64 | f.writelines(lines) 65 | 66 | print('Updated yacc.py') 67 | 68 | if __name__ == '__main__': 69 | main() 70 | -------------------------------------------------------------------------------- /python/pcpp/ply/ply/ctokens.py: -------------------------------------------------------------------------------- 1 | # ---------------------------------------------------------------------- 2 | # ctokens.py 3 | # 4 | # Token specifications for symbols in ANSI C and C++. This file is 5 | # meant to be used as a library in other tokenizers. 
6 | # ---------------------------------------------------------------------- 7 | 8 | # Reserved words 9 | 10 | tokens = [ 11 | # Literals (identifier, integer constant, float constant, string constant, char const) 12 | 'ID', 'TYPEID', 'INTEGER', 'FLOAT', 'STRING', 'CHARACTER', 13 | 14 | # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=) 15 | 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MODULO', 16 | 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT', 17 | 'LOR', 'LAND', 'LNOT', 18 | 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE', 19 | 20 | # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=) 21 | 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL', 22 | 'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL', 23 | 24 | # Increment/decrement (++,--) 25 | 'INCREMENT', 'DECREMENT', 26 | 27 | # Structure dereference (->) 28 | 'ARROW', 29 | 30 | # Ternary operator (?) 31 | 'TERNARY', 32 | 33 | # Delimeters ( ) [ ] { } , . ; : 34 | 'LPAREN', 'RPAREN', 35 | 'LBRACKET', 'RBRACKET', 36 | 'LBRACE', 'RBRACE', 37 | 'COMMA', 'PERIOD', 'SEMI', 'COLON', 38 | 39 | # Ellipsis (...) 40 | 'ELLIPSIS', 41 | ] 42 | 43 | # Operators 44 | t_PLUS = r'\+' 45 | t_MINUS = r'-' 46 | t_TIMES = r'\*' 47 | t_DIVIDE = r'/' 48 | t_MODULO = r'%' 49 | t_OR = r'\|' 50 | t_AND = r'&' 51 | t_NOT = r'~' 52 | t_XOR = r'\^' 53 | t_LSHIFT = r'<<' 54 | t_RSHIFT = r'>>' 55 | t_LOR = r'\|\|' 56 | t_LAND = r'&&' 57 | t_LNOT = r'!' 58 | t_LT = r'<' 59 | t_GT = r'>' 60 | t_LE = r'<=' 61 | t_GE = r'>=' 62 | t_EQ = r'==' 63 | t_NE = r'!=' 64 | 65 | # Assignment operators 66 | 67 | t_EQUALS = r'=' 68 | t_TIMESEQUAL = r'\*=' 69 | t_DIVEQUAL = r'/=' 70 | t_MODEQUAL = r'%=' 71 | t_PLUSEQUAL = r'\+=' 72 | t_MINUSEQUAL = r'-=' 73 | t_LSHIFTEQUAL = r'<<=' 74 | t_RSHIFTEQUAL = r'>>=' 75 | t_ANDEQUAL = r'&=' 76 | t_OREQUAL = r'\|=' 77 | t_XOREQUAL = r'\^=' 78 | 79 | # Increment/decrement 80 | t_INCREMENT = r'\+\+' 81 | t_DECREMENT = r'--' 82 | 83 | # -> 84 | t_ARROW = r'->' 85 | 86 | # ? 
87 | t_TERNARY = r'\?' 88 | 89 | # Delimeters 90 | t_LPAREN = r'\(' 91 | t_RPAREN = r'\)' 92 | t_LBRACKET = r'\[' 93 | t_RBRACKET = r'\]' 94 | t_LBRACE = r'\{' 95 | t_RBRACE = r'\}' 96 | t_COMMA = r',' 97 | t_PERIOD = r'\.' 98 | t_SEMI = r';' 99 | t_COLON = r':' 100 | t_ELLIPSIS = r'\.\.\.' 101 | 102 | # Identifiers 103 | t_ID = r'[A-Za-z_][A-Za-z0-9_]*' 104 | 105 | # Integer literal 106 | t_INTEGER = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?' 107 | 108 | # Floating literal 109 | t_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?' 110 | 111 | # String literal 112 | t_STRING = r'\"([^\\\n]|(\\.))*?\"' 113 | 114 | # Character constant 'c' or L'c' 115 | t_CHARACTER = r'(L)?\'([^\\\n]|(\\.))*?\'' 116 | 117 | # Comment (C-Style) 118 | def t_COMMENT(t): 119 | r'/\*(.|\n)*?\*/' 120 | t.lexer.lineno += t.value.count('\n') 121 | return t 122 | 123 | # Comment (C++-Style) 124 | def t_CPPCOMMENT(t): 125 | r'//.*\n' 126 | t.lexer.lineno += 1 127 | return t 128 | -------------------------------------------------------------------------------- /python/pyosl/osllex.py: -------------------------------------------------------------------------------- 1 | # OSL Lexer 2 | 3 | import os, sys 4 | 5 | import ply.lex as lex 6 | 7 | reserved = ( 8 | 'AND', 'BREAK', 'CLOSURE', 'COLOR', 'CONTINUE', 'DO', 'ELSE', 'EMIT', 'FLOAT', 'FOR', 'IF', 'ILLUMINANCE', 9 | 'ILLUMINATE', 'INT', 'MATRIX', 'NORMAL', 'NOT', 'OR', 'OUTPUT', 'POINT', 'PUBLIC', 'RETURN', 'STRING', 10 | 'STRUCT', 'VECTOR', 'VOID', 'WHILE', 11 | 12 | # shader types 13 | 'DISPLACEMENT', 'SHADER', 'SURFACE', 'VOLUME', 14 | 15 | #'BOOL', 'CASE', 'CATCH', 'CHAR', 'CLASS', 'CONST', 'DELETE', 'DEFAULT', 'DOUBLE', 'ENUM', 'EXTERN', 16 | #'FALSE', 'FRIEND', 'GOTO', 'INLINE', 'LONG', 'NEW', 'OPERATOR', 'PRIVATE', 'PROTECTED', 'SHORT', 17 | #'SIGNED', 'SIZEOF', 'STATIC', 'SWITCH', 'TEMPLATE', 'THIS', 'THROW', 'TRUE', 'TRY', 'TYPEDEF', 'UNIFORM', 18 | #'UNION', 'UNSIGNED', 'VARYING', 'VIRTUAL', 'VOLATILE' 19 | ) 20 | 21 
| tokens = reserved + ( 22 | # literals (identifier, integer constant, float constant, string constant) 23 | 'ID', 'ICONST', 'FCONST', 'SCONST', 24 | 25 | # operators (+,-,*,/,%, |,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=) 26 | 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD', 27 | 'BITOR', 'BITAND', 'BITNOT', 'XOR', 'LSHIFT', 'RSHIFT', 28 | 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE', 29 | 30 | # assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=) 31 | 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL', 32 | 'LSHIFTEQUAL', 'RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL', 33 | 34 | # increment/decrement (++,--) 35 | 'PLUSPLUS', 'MINUSMINUS', 36 | 37 | # structure dereference (->) 38 | 'ARROW', 39 | 40 | # conditional operator (?) 41 | 'CONDOP', 42 | 43 | # delimeters ( ) [ ] { } , . ; : 44 | 'LPAREN', 'RPAREN', 45 | 'LBRACKET', 'RBRACKET', 46 | 'LBRACE', 'RBRACE', 47 | 'COMMA', 'PERIOD', 'SEMI', 'COLON', 48 | 49 | 'METABEGIN' 50 | 51 | ) 52 | 53 | # completely ignored characters 54 | t_ignore = ' \t\x0c' 55 | 56 | # newlines 57 | 58 | def t_NEWLINE(t): 59 | r'[\r\n]+' 60 | t.lexer.lineno += t.value.count('\n') 61 | 62 | # operators 63 | 64 | t_PLUS = r'\+' 65 | t_MINUS = r'-' 66 | t_TIMES = r'\*' 67 | t_DIVIDE = r'/' 68 | t_MOD = r'%' 69 | t_BITOR = r'\|' 70 | t_BITAND = r'&' 71 | t_BITNOT = r'~' 72 | t_XOR = r'\^' 73 | t_LSHIFT = r'<<' 74 | t_RSHIFT = r'>>' 75 | t_OR = r'(or|\|\|)' 76 | t_AND = r'(and|&&)' 77 | t_NOT = r'(not|!)' 78 | t_LT = r'<' 79 | t_GT = r'>' 80 | t_LE = r'<=' 81 | t_GE = r'>=' 82 | t_EQ = r'==' 83 | 84 | # special case, make it appear before not|! 
regexp 85 | def t_NE(t): 86 | r'!=' 87 | return t 88 | 89 | # assignment operators 90 | 91 | t_EQUALS = r'=' 92 | t_TIMESEQUAL = r'\*=' 93 | t_DIVEQUAL = r'/=' 94 | t_MODEQUAL = r'%=' 95 | t_PLUSEQUAL = r'\+=' 96 | t_MINUSEQUAL = r'-=' 97 | t_LSHIFTEQUAL = r'<<=' 98 | t_RSHIFTEQUAL = r'>>=' 99 | t_ANDEQUAL = r'&=' 100 | t_OREQUAL = r'\|=' 101 | t_XOREQUAL = r'\^=' 102 | 103 | # increment/decrement 104 | t_PLUSPLUS = r'\+\+' 105 | t_MINUSMINUS = r'--' 106 | 107 | # -> 108 | t_ARROW = r'->' 109 | 110 | # ? 111 | t_CONDOP = r'\?' 112 | 113 | # delimeters 114 | t_LPAREN = r'\(' 115 | t_RPAREN = r'\)' 116 | t_LBRACKET = r'\[' 117 | t_RBRACKET = r'\]' 118 | t_LBRACE = r'\{' 119 | t_RBRACE = r'\}' 120 | t_COMMA = r',' 121 | t_PERIOD = r'\.' 122 | t_SEMI = r';' 123 | t_COLON = r':' 124 | t_METABEGIN = r'\[\[' 125 | 126 | # identifiers and reserved words 127 | 128 | reserved_map = {} 129 | for r in reserved: 130 | reserved_map[r.lower()] = r 131 | 132 | 133 | def t_ID(t): 134 | r'[A-Za-z_][\w_]*' 135 | t.type = reserved_map.get(t.value, 'ID') 136 | return t 137 | 138 | # integer literal 139 | t_ICONST = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?' 140 | 141 | # floating literal 142 | t_FCONST = r'((\d*)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?' 
143 | 144 | # string literal 145 | t_SCONST = r'\"([^\\\n]|(\\.))*?\"' 146 | 147 | 148 | # comments 149 | 150 | def t_comment(t): 151 | r'/\*(.|\n)*?\*/' 152 | t.lexer.lineno += t.value.count('\n') 153 | 154 | def t_comment2(t): 155 | r'//(.)*?\n' 156 | t.lexer.lineno += 1 157 | 158 | # preprocessor directive (ignored) 159 | 160 | def t_preprocessor(t): 161 | r'\#(.)*?\n' 162 | t.lexer.lineno += 1 163 | 164 | def t_error(t): 165 | print('Illegal character {}'.format(repr(t.value[0]))) 166 | t.lexer.skip(1) 167 | 168 | lexer = lex.lex() 169 | 170 | if __name__ == "__main__": 171 | lex.runmain(lexer) 172 | -------------------------------------------------------------------------------- /python/pluginUtils/manager.py: -------------------------------------------------------------------------------- 1 | import os, pathlib, platform, shutil, subprocess, sys, time 2 | 3 | from .log import getLogger 4 | from .path import getRoot, getAppManagerHost 5 | 6 | log = getLogger('V3D-PU') 7 | 8 | from http.client import HTTPConnection 9 | 10 | join = os.path.join 11 | 12 | APP_MANAGER_FORCE_ALL = True 13 | 14 | MANUAL_URL_DEFAULT = 'https://www.soft8soft.com/docs/manual/en/index.html' 15 | 16 | 17 | class AppManagerConn(): 18 | root = None 19 | modPackage = None 20 | isThreaded = False 21 | 22 | servers = [] # for threaded only 23 | subThreads = [] 24 | 25 | @classmethod 26 | def init(cls, root, modPackage): 27 | cls.root = root 28 | cls.modPackage = modPackage 29 | 30 | @classmethod 31 | def isAvailable(cls): 32 | if os.path.isfile(join(cls.root, 'manager', 'server.py')): 33 | return True 34 | else: 35 | return False 36 | 37 | @classmethod 38 | def ping(cls): 39 | conn = HTTPConnection(getAppManagerHost(cls.modPackage, False)) 40 | 41 | try: 42 | conn.request('GET', '/ping') 43 | except ConnectionRefusedError: 44 | return False 45 | 46 | response = conn.getresponse() 47 | 48 | if response.status == 200: 49 | return True 50 | else: 51 | return False 52 | 53 | @classmethod 54 | def 
getPreviewDir(cls, cleanup=False): 55 | conn = HTTPConnection(getAppManagerHost(cls.modPackage, False)) 56 | 57 | try: 58 | conn.request('GET', '/get_preview_dir') 59 | except ConnectionRefusedError: 60 | log.warning('App Manager connection error, wait a bit') 61 | time.sleep(0.3) 62 | # NOTE: repeated error will cause crash 63 | conn = HTTPConnection(getAppManagerHost(cls.modPackage, False)) 64 | conn.request('GET', '/get_preview_dir') 65 | 66 | response = conn.getresponse() 67 | 68 | if response.status != 200: 69 | log.error('App Manager connection error: ' + response.reason) 70 | return None 71 | 72 | path = response.read().decode('utf-8') 73 | 74 | if cleanup: 75 | shutil.rmtree(path, ignore_errors=True) 76 | os.makedirs(path, exist_ok=True) 77 | 78 | log.info('Performing export to preview dir: {}'.format(path)) 79 | 80 | return path 81 | 82 | @classmethod 83 | def getEnginePath(cls): 84 | conn = HTTPConnection(getAppManagerHost(cls.modPackage, False)) 85 | 86 | # decent fallback in case of connection errors 87 | enginePathDefault = getRoot(True) / 'build' / 'v3d.js' 88 | 89 | try: 90 | conn.request('GET', '/get_engine_path') 91 | except ConnectionRefusedError: 92 | log.error('App Manager connection refused, using fallback engine path') 93 | return enginePathDefault 94 | 95 | response = conn.getresponse() 96 | 97 | if response.status != 200: 98 | log.error('App Manager connection error, using fallback engine path: ' + response.reason) 99 | return enginePathDefault 100 | 101 | return pathlib.Path(response.read().decode('utf-8')) 102 | 103 | @classmethod 104 | def getManualURL(cls): 105 | conn = HTTPConnection(getAppManagerHost(cls.modPackage, False)) 106 | 107 | try: 108 | conn.request('GET', '/settings/get_manual_url') 109 | except ConnectionRefusedError: 110 | log.warning('App Manager connection refused') 111 | return MANUAL_URL_DEFAULT 112 | 113 | response = conn.getresponse() 114 | 115 | if response.status != 200: 116 | log.warning('App Manager connection 
error: ' + response.reason) 117 | return MANUAL_URL_DEFAULT 118 | 119 | manualURL = response.read().decode('utf-8') 120 | return manualURL 121 | 122 | @classmethod 123 | def start(cls): 124 | system = platform.system() 125 | 126 | if system == 'Windows': 127 | pythonPath = join(cls.root, 'python', 'windows', 'pythonw.exe') 128 | elif system == 'Darwin': 129 | pythonPath = join(cls.root, 'python', 'macos', 'bin', 'python') 130 | else: 131 | pythonPath = 'python3' 132 | 133 | modPackage = 'ALL' if APP_MANAGER_FORCE_ALL else cls.modPackage 134 | args = [pythonPath, join(cls.root, 'manager', 'server.py'), modPackage] 135 | 136 | if system == 'Linux': 137 | # HACK: remove env variables that fail to start the App Manager 138 | # reproduced in Maya 2023 / Python 3.9 139 | env = {k: v for k, v in os.environ.items() if k not in {'PYTHONPATH', 'PYTHONHOME', 'LD_LIBRARY_PATH'}} 140 | subprocess.Popen(args, env=env) 141 | else: 142 | subprocess.Popen(args) 143 | 144 | @classmethod 145 | def stop(cls): 146 | conn = HTTPConnection(getAppManagerHost(cls.modPackage, False)) 147 | conn.request('GET', '/stop') 148 | response = conn.getresponse() 149 | if response.status != 200 and response.status != 302: 150 | log.error('App Manager connection error: ' + response.reason) 151 | 152 | -------------------------------------------------------------------------------- /fonts/LICENSE-bfont.ttf.txt: -------------------------------------------------------------------------------- 1 | Fonts are (c) Bitstream (see below). DejaVu changes are in public domain. Glyphs imported from Arev fonts are (c) Tavmjung Bah (see below) 2 | 3 | 'DeJaVu-Lite' changes (removing characters for lighter file size) are in public domain. Source file is accompanied in this directory, BZip2 compressed, DeJaVuSans-Lite.sfd.bz2 . 4 | 5 | 6 | Bitstream Vera Fonts Copyright 7 | ------------------------------ 8 | 9 | Copyright (c) 2003 by Bitstream, Inc. All Rights Reserved. 
Bitstream Vera is 10 | a trademark of Bitstream, Inc. 11 | 12 | Permission is hereby granted, free of charge, to any person obtaining a copy 13 | of the fonts accompanying this license ("Fonts") and associated 14 | documentation files (the "Font Software"), to reproduce and distribute the 15 | Font Software, including without limitation the rights to use, copy, merge, 16 | publish, distribute, and/or sell copies of the Font Software, and to permit 17 | persons to whom the Font Software is furnished to do so, subject to the 18 | following conditions: 19 | 20 | The above copyright and trademark notices and this permission notice shall 21 | be included in all copies of one or more of the Font Software typefaces. 22 | 23 | The Font Software may be modified, altered, or added to, and in particular 24 | the designs of glyphs or characters in the Fonts may be modified and 25 | additional glyphs or characters may be added to the Fonts, only if the fonts 26 | are renamed to names not containing either the words "Bitstream" or the word 27 | "Vera". 28 | 29 | This License becomes null and void to the extent applicable to Fonts or Font 30 | Software that has been modified and is distributed under the "Bitstream 31 | Vera" names. 32 | 33 | The Font Software may be sold as part of a larger software package but no 34 | copy of one or more of the Font Software typefaces may be sold by itself. 35 | 36 | THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS 37 | OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY, 38 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT, 39 | TRADEMARK, OR OTHER RIGHT. 
IN NO EVENT SHALL BITSTREAM OR THE GNOME 40 | FOUNDATION BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING 41 | ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, 42 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF 43 | THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE 44 | FONT SOFTWARE. 45 | 46 | Except as contained in this notice, the names of Gnome, the Gnome 47 | Foundation, and Bitstream Inc., shall not be used in advertising or 48 | otherwise to promote the sale, use or other dealings in this Font Software 49 | without prior written authorization from the Gnome Foundation or Bitstream 50 | Inc., respectively. For further information, contact: fonts at gnome dot 51 | org. 52 | 53 | Arev Fonts Copyright 54 | ------------------------------ 55 | 56 | Copyright (c) 2006 by Tavmjong Bah. All Rights Reserved. 57 | 58 | Permission is hereby granted, free of charge, to any person obtaining 59 | a copy of the fonts accompanying this license ("Fonts") and 60 | associated documentation files (the "Font Software"), to reproduce 61 | and distribute the modifications to the Bitstream Vera Font Software, 62 | including without limitation the rights to use, copy, merge, publish, 63 | distribute, and/or sell copies of the Font Software, and to permit 64 | persons to whom the Font Software is furnished to do so, subject to 65 | the following conditions: 66 | 67 | The above copyright and trademark notices and this permission notice 68 | shall be included in all copies of one or more of the Font Software 69 | typefaces. 70 | 71 | The Font Software may be modified, altered, or added to, and in 72 | particular the designs of glyphs or characters in the Fonts may be 73 | modified and additional glyphs or characters may be added to the 74 | Fonts, only if the fonts are renamed to names not containing either 75 | the words "Tavmjong Bah" or the word "Arev". 
76 | 77 | This License becomes null and void to the extent applicable to Fonts 78 | or Font Software that has been modified and is distributed under the 79 | "Tavmjong Bah Arev" names. 80 | 81 | The Font Software may be sold as part of a larger software package but 82 | no copy of one or more of the Font Software typefaces may be sold by 83 | itself. 84 | 85 | THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 86 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF 87 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT 88 | OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL 89 | TAVMJONG BAH BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 90 | INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL 91 | DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 92 | FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM 93 | OTHER DEALINGS IN THE FONT SOFTWARE. 94 | 95 | Except as contained in this notice, the name of Tavmjong Bah shall not 96 | be used in advertising or otherwise to promote the sale, use or other 97 | dealings in this Font Software without prior written authorization 98 | from Tavmjong Bah. For further information, contact: tavmjong @ free 99 | . fr. 
100 | 101 | -------------------------------------------------------------------------------- /python/pcpp/evaluator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # Python C99 conforming preprocessor expression evaluator 3 | # (C) 2019 Niall Douglas http://www.nedproductions.biz/ 4 | # Started: Apr 2019 5 | 6 | from __future__ import generators, print_function, absolute_import 7 | 8 | import os, sys 9 | if __name__ == '__main__' and __package__ is None: 10 | sys.path.append( os.path.dirname( os.path.dirname( os.path.abspath(__file__) ) ) ) 11 | from pcpp.preprocessor import STRING_TYPES 12 | 13 | # The width of signed integer which this evaluator will use 14 | INTMAXBITS = 64 15 | 16 | # Some Python 3 compatibility shims 17 | if sys.version_info.major < 3: 18 | INTBASETYPE = long 19 | else: 20 | INTBASETYPE = int 21 | 22 | class Int(INTBASETYPE): 23 | """A signed integer within a preprocessor expression, bounded 24 | to within INT_MIN and INT_MAX. Overflow is handled like a CPU, 25 | despite being UB, as that's what GCC and clang do. 
26 | 27 | >>> Int(5) 28 | Int(5) 29 | >>> Int(5) * Int(2) 30 | Int(10) 31 | >>> Int(5) * 2 32 | Int(10) 33 | >>> Int(50) % 8 34 | Int(2) 35 | >>> -Int(5) 36 | Int(-5) 37 | >>> +Int(-5) 38 | Int(-5) 39 | >>> ~Int(5) 40 | Int(-6) 41 | >>> Int(6) & 2 42 | Int(2) 43 | >>> Int(4) | 2 44 | Int(6) 45 | >>> Int(6) ^ 2 46 | Int(4) 47 | >>> Int(2) << 2 48 | Int(8) 49 | >>> Int(8) >> 2 50 | Int(2) 51 | >>> Int(9223372036854775808) 52 | Int(-9223372036854775808) 53 | >>> Int(-9223372036854775809) 54 | Int(9223372036854775807) 55 | >>> Int(18446744073709551615) 56 | Int(-1) 57 | """ 58 | INT_MIN = -(1 << (INTMAXBITS - 1)) 59 | INT_MAX = (1 << (INTMAXBITS - 1)) - 1 60 | INT_MASK = (1 << INTMAXBITS) - 1 61 | @classmethod 62 | def __clamp(cls, value): 63 | return ((value - cls.INT_MIN) & cls.INT_MASK) + cls.INT_MIN 64 | def __new__(cls, value, *args, **kwargs): 65 | return super(Int, cls).__new__(cls, cls.__clamp(value)) 66 | def __add__(self, other): 67 | return self.__class__(self.__clamp(super(Int, self).__add__(other))) 68 | def __sub__(self, other): 69 | return self.__class__(self.__clamp(super(Int, self).__sub__(other))) 70 | def __mul__(self, other): 71 | return self.__class__(self.__clamp(super(Int, self).__mul__(other))) 72 | def __div__(self, other): 73 | return self.__class__(self.__clamp(super(Int, self).__div__(other))) 74 | def __mod__(self, other): 75 | return self.__class__(self.__clamp(super(Int, self).__mod__(other))) 76 | def __neg__(self): 77 | return self.__class__(self.__clamp(super(Int, self).__neg__())) 78 | def __invert__(self): 79 | return self.__class__(self.__clamp(super(Int, self).__invert__())) 80 | def __and__(self, other): 81 | return self.__class__(self.__clamp(super(Int, self).__and__(other))) 82 | def __or__(self, other): 83 | return self.__class__(self.__clamp(super(Int, self).__or__(other))) 84 | def __pos__(self): 85 | return self.__class__(self.__clamp(super(Int, self).__pos__())) 86 | def __pow__(self, other): 87 | return 
self.__class__(self.__clamp(super(Int, self).__pow__(other))) 88 | def __lshift__(self, other): 89 | return self.__class__(self.__clamp(super(Int, self).__lshift__(other))) 90 | def __rshift__(self, other): 91 | return self.__class__(self.__clamp(super(Int, self).__rshift__(other))) 92 | def __xor__(self, other): 93 | return self.__class__(self.__clamp(super(Int, self).__xor__(other))) 94 | def __repr__(self): 95 | return "Int(%d)" % INTBASETYPE(self) 96 | 97 | #def execute_expr(token): 98 | # """Execute a fully macro expanded set of tokens representing an expression, 99 | # returning the result of the evaluation. 100 | # 101 | # >>> execute_expr('5') 102 | # 5 103 | # """ 104 | # if isinstance(tokens,STRING_TYPES): 105 | # tokens = self.tokenize(tokens) 106 | # return 107 | 108 | # 18446744073709551615 == -1 109 | # -9223372036854775809 == 9223372036854775807 110 | # ( ( 0L && _CRT_DECLARE_NONSTDC_NAMES) || (!0L && !__STDC__ ) ) 111 | # 0L 112 | # (((1)?2:3) == 2) 113 | # L'\0' == 0 114 | # 12 == 12 115 | # 12L == 12 116 | # -1 >= 0U 117 | # (1<<2) == 4 118 | # (-!+!9) == -1 119 | # (2 || 3) == 1 120 | # 1L * 3 != 3 121 | # (!1L != 0) || (-1L != -1) 122 | # 0177777 != 65535 123 | # 0Xffff != 65535 || 0XFfFf != 65535 124 | # 0L != 0 || 0l != 0 125 | # 1U != 1 || 1u != 1 126 | # 0 <= -1 127 | # 1 << 2 != 4 || 8 >> 1 != 4 128 | # (3 ^ 5) != 6 || (3 | 5) != 7 || (3 & 5) != 1 129 | # (0 ? 1 : 2) != 2 130 | # -1 << 3U > 0 131 | # 0 && 10 / 0 132 | # not_defined && 10 / not_defined 133 | # 0 && 10 / 0 > 1 134 | # (0) ? 
10 / 0 : 0 135 | # 0 == 0 || 10 / 0 > 1 136 | # (15 >> 2 >> 1 != 1) || (3 << 2 << 1 != 24) 137 | # (1 | 2) == 3 && 4 != 5 || 0 138 | # 1 > 0 139 | # '\123' != 83 140 | # '\x1b' != '\033' 141 | # 0 + (1 - (2 + (3 - (4 + (5 - (6 + (7 - (8 + (9 - (10 + (11 - (12 + (13 - (14 + (15 - (16 + (17 - (18 + (19 - (20 + (21 - (22 + (23 - (24 + (25 - (26 + (27 - (28 + (29 - (30 + (31 - (32 + 0)))))))))) )))))))))))))))))))))) == 0 142 | 143 | 144 | if __name__ == "__main__": 145 | import doctest 146 | doctest.testmod() 147 | 148 | -------------------------------------------------------------------------------- /manual_map.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017-2025 Soft8Soft, LLC. All rights reserved. 2 | # 3 | # This program is free software: you can redistribute it and/or modify 4 | # it under the terms of the GNU General Public License as published by 5 | # the Free Software Foundation, either version 3 of the License, or 6 | # (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 11 | # GNU General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program. If not, see . 
15 | import bpy 16 | import re 17 | from pluginUtils.manager import AppManagerConn 18 | 19 | def add_verge3d_manual_map(): 20 | 21 | manualURL = AppManagerConn.getManualURL() 22 | 23 | url_manual_prefix = re.sub('index.html$', '', manualURL) 24 | url_manual_mapping = ( 25 | ('bpy.ops.view3d.v3d_sneak_peek', 'blender/Beginners-Guide.html#Sneak_Peek'), 26 | ('bpy.ops.view3d.v3d_app_manager', 'blender/Beginners-Guide.html#App_Manager'), 27 | ('bpy.ops.export_scene.v3d_gltf', 'blender/Beginners-Guide.html#Export'), 28 | ('bpy.ops.export_scene.v3d_glb', 'blender/Beginners-Guide.html#Export'), 29 | ('bpy.types.v3dexportsettings.lzma_enabled', 'introduction/Asset-compression.html'), 30 | ('bpy.types.v3dexportsettings.compress_textures', 'introduction/Texture-Compression.html'), 31 | ('bpy.types.v3dexportsettings.export_animations', 'blender/Animation.html'), 32 | ('bpy.types.v3dexportsettings.export_frame_range', 'blender/Animation.html'), 33 | ('bpy.types.v3dexportsettings.export_move_keyframes', 'blender/Animation.html'), 34 | ('bpy.types.v3dexportsettings.bake_armature_actions', 'blender/Animation.html'), 35 | ('bpy.types.v3dexportsettings.use_shadows', 'blender/Shadows.html#global_settings'), 36 | ('bpy.types.v3dexportsettings.shadow*', 'blender/Shadows.html#global_settings'), 37 | ('bpy.types.v3dexportsettings.esm*', 'blender/Shadows.html#global_settings'), 38 | ('bpy.types.v3dexportsettings.use_oit', 'blender/Transparency.html#oit_rendering'), 39 | ('bpy.types.v3dscenesettings.use_gtao*', 'blender/Lighting-and-Rendering.html#ambient_occlusion'), 40 | ('bpy.types.v3dscenesettings.gtao*', 'blender/Lighting-and-Rendering.html#ambient_occlusion'), 41 | ('bpy.types.sceneeevee.use_gtao*', 'blender/Lighting-and-Rendering.html#ambient_occlusion'), 42 | ('bpy.types.sceneeevee.gtao*', 'blender/Lighting-and-Rendering.html#ambient_occlusion'), 43 | ('bpy.types.v3doutlinesettings*', 'blender/Lighting-and-Rendering.html#outline_rendering'), 44 | ('bpy.types.v3dexportsettings*', 
'blender/Lighting-and-Rendering.html#global_rendering_properties_verge3d'), 45 | ('bpy.types.v3dworldsettings*', 'blender/Lighting-and-Rendering.html#world_rendering_properties'), 46 | ('bpy.types.v3dobjectsettings.anim*', 'blender/Animation.html#per_object_settings'), 47 | ('bpy.types.v3dobjectsettings.render_order', 'blender/Lighting-and-Rendering.html#per_object_rendering_properties'), 48 | ('bpy.types.v3dobjectsettings.render_order', 'blender/Lighting-and-Rendering.html#per_object_rendering_properties'), 49 | ('bpy.types.v3dobjectsettings.frustum_culling', 'blender/Lighting-and-Rendering.html#per_object_rendering_properties'), 50 | ('bpy.types.v3dobjectsettings.use_shadows', 'blender/Shadows.html#per_object_material_settings'), 51 | ('bpy.types.v3dobjectsettings.hidpi_compositing', 'blender/Lighting-and-Rendering.html#hidpi_rendering'), 52 | ('bpy.types.v3dobjectsettings.canvas_break*', 'blender/Lighting-and-Rendering.html#visibility_breakpoints'), 53 | ('bpy.types.v3dobjectsettings.canvas_fit*', 'blender/Camera.html#fit_to_camera_edge'), 54 | ('bpy.types.v3dobjectsettings.fix_ortho_zoom', 'blender/Camera.html'), 55 | ('bpy.types.v3dcamerasettings.orbit*', 'blender/Camera.html#orbit_camera_settings'), 56 | ('bpy.types.v3dcamerasettings.fps*', 'blender/Camera.html#firstperson_camera_settings'), 57 | ('bpy.types.v3dcamerasettings.enable_pointer_lock', 'blender/Camera.html#firstperson_camera_settings'), 58 | ('bpy.types.v3dcamerasettings*', 'blender/Camera.html#camera_settings'), 59 | ('bpy.ops.camera.v3d_orbit_camera_target_from_cursor', 'blender/Camera.html#orbit_camera_settings'), 60 | ('bpy.ops.camera.v3d_orbit_camera_update_view', 'blender/Camera.html#orbit_camera_settings'), 61 | ('bpy.types.v3dlinerenderingsettings*', 'blender/Lighting-and-Rendering.html#line_rendering'), 62 | ('bpy.types.v3dmaterialsettings.blend_method', 'blender/Transparency.html#setting_up_in_blender'), 63 | ('bpy.types.material.alpha_threshold', 
'blender/Transparency.html#setting_up_in_blender'), 64 | ('bpy.types.v3dmaterialsettings.transparency_hack', 'blender/Transparency.html#setting_up_in_blender'), 65 | ('bpy.types.v3dmaterialsettings.depth_write', 'blender/Transparency.html#alpha_add'), 66 | ('bpy.types.v3dmaterialsettings.gltf_compat', 'blender/GLTF-Materials.html'), 67 | ('bpy.types.v3dmaterialsettings*', 'blender/Lighting-and-Rendering.html#material_verge3d_panel'), 68 | ('bpy.types.v3dshadowsettings*', 'blender/Shadows.html#per_light_settings'), 69 | ('bpy.types.v3dtexturesettings.anisotropy', 'blender/Shader-Nodes-Reference.html#Image_Texture'), 70 | ('bpy.types.v3dimagesettings.compression_method', 'introduction/Texture-Compression.html#tweaking_compression'), 71 | ('bpy.types.v3dtexturenoisesettings*', 'blender/Shader-Nodes-Reference.html#Noise_Texture'), 72 | ('bpy.types.v3dlightprobesettings*', 'blender/Lighting-and-Rendering.html#light_probes'), 73 | ('bpy.types.lightprobe.visibility_collection', 'blender/Lighting-and-Rendering.html#light_probes'), 74 | ('bpy.types.v3dobjectsettings.clipping_plane*', 'blender/Lighting-and-Rendering.html#clipping_planes'), 75 | ('bpy.ops.object.add_clipping_plane', 'blender/Lighting-and-Rendering.html#clipping_planes'), 76 | ) 77 | return url_manual_prefix, url_manual_mapping 78 | 79 | def register(): 80 | bpy.utils.register_manual_map(add_verge3d_manual_map) 81 | 82 | def unregister(): 83 | bpy.utils.unregister_manual_map(add_verge3d_manual_map) 84 | -------------------------------------------------------------------------------- /python/pluginUtils/convert.py: -------------------------------------------------------------------------------- 1 | import base64, lzma, os, platform, subprocess, sys, tempfile 2 | 3 | from .path import getRoot, getPlatformBinDirName 4 | from .log import getLogger 5 | 6 | log = getLogger('V3D-PU') 7 | 8 | COMPRESSION_THRESHOLD = 3 9 | 10 | from subprocess import CompletedProcess 11 | 12 | def runCMD(params): 13 | if 
platform.system().lower() == 'windows': 14 | # disable popup console window 15 | si = subprocess.STARTUPINFO() 16 | si.dwFlags = subprocess.STARTF_USESHOWWINDOW 17 | si.wShowWindow = subprocess.SW_HIDE 18 | app = subprocess.run(params, capture_output=True, startupinfo=si) 19 | else: 20 | app = subprocess.run(params, capture_output=True) 21 | 22 | return app 23 | 24 | 25 | class CompressionFailed(Exception): 26 | pass 27 | 28 | def compressLZMA(srcPath, dstPath=None): 29 | 30 | dstPath = dstPath if dstPath else srcPath + '.xz' 31 | 32 | log.info('Compressing {} to LZMA'.format(os.path.basename(srcPath))) 33 | 34 | with open(srcPath, 'rb') as fin: 35 | data = fin.read() 36 | with lzma.open(dstPath, 'wb') as fout: 37 | fout.write(data) 38 | 39 | def removeICCChunk(srcPath): 40 | import pypng.png 41 | 42 | def removeChunksGen(chunks, delete): 43 | for type, v in chunks: 44 | if type.decode('ascii') in delete: 45 | continue 46 | yield type, v 47 | 48 | try: 49 | tmpImg = tempfile.NamedTemporaryFile(delete=False) 50 | 51 | reader = pypng.png.Reader(srcPath) 52 | chunks = removeChunksGen(reader.chunks(), ['iCCP']) 53 | pypng.png.write_chunks(tmpImg, chunks) 54 | 55 | tmpImg.close() 56 | dstPath = tmpImg.name 57 | 58 | return dstPath 59 | 60 | except Exception as e: 61 | log.warning('ICC chunk removal failed\n' + str(e)) 62 | return None 63 | 64 | def compressKTX2(srcPath='', srcData=None, dstPath='-', method='AUTO'): 65 | """ 66 | srcPath/srcData are mutually exclusive 67 | """ 68 | 69 | if srcData: 70 | # NOTE: toktx does not support stdin at the moment 71 | tmpImg = tempfile.NamedTemporaryFile(delete=False) 72 | tmpImg.write(srcData) 73 | tmpImg.close() 74 | srcPath = tmpImg.name 75 | 76 | platformBinDir = getPlatformBinDirName() 77 | # HACK: workaround for missing Windows ARM converter 78 | # TODO: support Windows ARM 79 | if platformBinDir == 'windows_arm64': 80 | platformBinDir = 'windows_amd64' 81 | params = [os.path.join(getRoot(), 'ktx', platformBinDir, 'toktx')] 
82 | 83 | params.append('--encode') 84 | if method == 'UASTC' or method == 'AUTO': 85 | params.append('uastc') 86 | params.append('--zcmp') 87 | else: 88 | params.append('etc1s') 89 | params.append('--clevel') 90 | params.append('2') 91 | params.append('--qlevel') 92 | params.append('255') 93 | 94 | params.append('--genmipmap') 95 | params.append(dstPath) 96 | params.append(srcPath) 97 | 98 | log.info('Compressing {0} to {1}'.format(os.path.basename(srcPath), params[2].upper())) 99 | 100 | app = runCMD(params) 101 | 102 | if app.stderr: 103 | msg = app.stderr.decode('utf-8').strip() 104 | 105 | if 'PNG file has an ICC profile chunk' in msg: 106 | log.warning('PNG with ICC profile chunk detected, stripping the chunk') 107 | 108 | srcPathRemICC = removeICCChunk(srcPath) 109 | 110 | if srcPathRemICC is not None: 111 | # replace src path and run compression again 112 | params[-1] = srcPathRemICC 113 | app = runCMD(params) 114 | 115 | if app.stderr: 116 | msg = app.stderr.decode('utf-8').strip() 117 | else: 118 | msg = 'Successfully compressed PNG with ICC profile chunk removed' 119 | 120 | os.unlink(srcPathRemICC) 121 | 122 | log.warning(msg) 123 | 124 | # allow non-critical warnings 125 | if app.returncode > 0: 126 | if srcData: 127 | os.unlink(srcPath) 128 | raise CompressionFailed 129 | 130 | if srcData: 131 | os.unlink(srcPath) 132 | 133 | if method == 'AUTO': 134 | if srcData: 135 | srcSize = len(srcData) 136 | else: 137 | srcSize = os.path.getsize(srcPath) 138 | 139 | if dstPath == '-': 140 | dstSize = len(app.stdout) 141 | else: 142 | dstSize = os.path.getsize(dstPath) 143 | 144 | if dstSize > COMPRESSION_THRESHOLD * srcSize: 145 | log.warning('Compressed image is too large, keeping original file as is') 146 | 147 | if dstPath != '-': 148 | os.unlink(dstPath) 149 | 150 | raise CompressionFailed 151 | 152 | return app.stdout 153 | 154 | def fileToDataURI(path, mime): 155 | with open(path, 'rb') as file: 156 | content = file.read() 157 | return 'data:' + mime + 
';base64,' + base64.b64encode(content).decode('utf-8') 158 | 159 | 160 | def composeSingleHTML(htmlPath, glbPath, title): 161 | # NOTE: fixes crash with missing class state in Maya 162 | from .manager import AppManagerConn 163 | 164 | glb = fileToDataURI(glbPath, 'model/gltf-binary') 165 | 166 | v3d = '' 167 | with open(AppManagerConn.getEnginePath()) as v3dFile: 168 | v3d = v3dFile.read() 169 | 170 | html = '' 171 | with open(getRoot(True) / 'player' / 'embed.html') as htmlFile: 172 | html = htmlFile.read() 173 | 174 | css = '' 175 | with open(getRoot(True) / 'player' / 'player.css') as cssFile: 176 | css = cssFile.read() 177 | 178 | svgOpen = fileToDataURI(getRoot(True) / 'player' / 'media' / 'fullscreen_open.svg', 'image/svg+xml') 179 | svgClose = fileToDataURI(getRoot(True) / 'player' / 'media' / 'fullscreen_close.svg', 'image/svg+xml') 180 | 181 | css = css.replace('media/fullscreen_open.svg', svgOpen) 182 | css = css.replace('media/fullscreen_close.svg', svgClose) 183 | 184 | js = '' 185 | with open(getRoot(True) / 'player' / 'player.js') as jsFile: 186 | js = jsFile.read() 187 | js = js.replace('params.load', 'params.load || \'{}\''.format(glb)) 188 | 189 | favicon = fileToDataURI(getRoot(True) / 'player' / 'media' / 'favicon-48x48.png', 'image/png') 190 | 191 | html = html.replace('%TITLE%', title) 192 | html = html.replace('%FAVICON%', favicon) 193 | html = html.replace('%V3D%', v3d) 194 | html = html.replace('%CSS%', css) 195 | html = html.replace('%JS%', js) 196 | 197 | with open(htmlPath, 'w') as htmlFile: 198 | htmlFile.write(html) 199 | -------------------------------------------------------------------------------- /reexport.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017-2025 Soft8Soft, LLC. All rights reserved. 
2 | # 3 | # This program is free software: you can redistribute it and/or modify 4 | # it under the terms of the GNU General Public License as published by 5 | # the Free Software Foundation, either version 3 of the License, or 6 | # (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 11 | # GNU General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program. If not, see . 15 | import bpy 16 | from bpy.app.handlers import persistent 17 | 18 | import fnmatch, re, os, sys 19 | import subprocess 20 | 21 | import pluginUtils 22 | from pluginUtils.path import getRoot, findExportedAssetPath 23 | 24 | from .utils import extractMaterialNodeTrees 25 | 26 | log = pluginUtils.log.getLogger('V3D-BL') 27 | 28 | join = os.path.join 29 | norm = os.path.normpath 30 | 31 | loadHandlerInfo = { 32 | 'currBlend' : None, 33 | 'currGLTF' : None, 34 | 'resaveBlend': False, 35 | 'updateCopyright': False, 36 | 'applySceneFixes': False 37 | } 38 | 39 | def applySceneFixes(): 40 | for mat in bpy.data.materials: 41 | if mat.use_nodes and mat.node_tree: 42 | for nodeTree in extractMaterialNodeTrees(mat.node_tree): 43 | for node in nodeTree.nodes: 44 | if node.type == 'BSDF_PRINCIPLED': 45 | node.distribution = 'GGX' 46 | 47 | for world in bpy.data.worlds: 48 | world.sun_threshold = 0 49 | 50 | @persistent 51 | def loadHandler(dummy): 52 | def delayReexport(): 53 | if loadHandlerInfo['updateCopyright']: 54 | for scene in bpy.data.scenes: 55 | scene.v3d_export.copyright = pluginUtils.copyrightLine 56 | 57 | if loadHandlerInfo['applySceneFixes']: 58 | applySceneFixes() 59 | 60 | exported = loadHandlerInfo['currGLTF'] 61 | if exported: 62 | log.info('Reexporting ' + loadHandlerInfo['currBlend']) 63 | 64 | if 
os.path.splitext(exported)[1] == '.gltf': 65 | bpy.ops.export_scene.v3d_gltf(filepath=exported) 66 | elif os.path.splitext(exported)[1] == '.glb': 67 | bpy.ops.export_scene.v3d_glb(filepath=exported) 68 | else: 69 | log.error('Invalid exported extension') 70 | 71 | if loadHandlerInfo['resaveBlend']: 72 | log.info('Resaving ' + loadHandlerInfo['currBlend']) 73 | bpy.ops.wm.save_mainfile() 74 | 75 | V3D_OT_reexport_all.reexportNext() 76 | 77 | bpy.app.timers.register(delayReexport, first_interval=0.001) 78 | 79 | class V3D_OT_reexport_all(bpy.types.Operator): 80 | bl_idname = 'wm.v3d_reexport_all' 81 | bl_label = 'Reexport all Verge3D assets' 82 | bl_description = 'Reexport all glTF files inside Verge3D SDK' 83 | 84 | exported = [] 85 | 86 | folder: bpy.props.StringProperty( 87 | name = 'Folder', 88 | description = 'Folder to reexport (relative to Verge3D root folder)', 89 | default = 'applications' 90 | ) 91 | resaveBlend: bpy.props.BoolProperty(name='Resave .blend files') 92 | updateCopyright: bpy.props.BoolProperty(name='Update copyright') 93 | applySceneFixes: bpy.props.BoolProperty(name='Apply scene fixes') 94 | forceGLB: bpy.props.BoolProperty(name='Force GLB export') 95 | 96 | @classmethod 97 | def reexportNext(cls): 98 | if len(cls.exported): 99 | currBlend, currGLTF = cls.exported.pop(0) 100 | 101 | if loadHandler not in bpy.app.handlers.load_post: 102 | bpy.app.handlers.load_post.append(loadHandler) 103 | 104 | loadHandlerInfo['currBlend'] = currBlend 105 | loadHandlerInfo['currGLTF'] = currGLTF 106 | 107 | bpy.ops.wm.open_mainfile(filepath=currBlend) 108 | 109 | else: 110 | if loadHandler in bpy.app.handlers.load_post: 111 | bpy.app.handlers.load_post.remove(loadHandler) 112 | 113 | def execute(self, context): 114 | apps = join(getRoot(), self.folder) 115 | 116 | loadHandlerInfo['resaveBlend'] = self.resaveBlend 117 | loadHandlerInfo['updateCopyright'] = self.updateCopyright 118 | loadHandlerInfo['applySceneFixes'] = self.applySceneFixes 119 | 120 | 
sys.setrecursionlimit(10000) 121 | 122 | for root, dirs, files in os.walk(apps): 123 | for name in files: 124 | if fnmatch.fnmatch(name, '*.blend'): 125 | blendpath = norm(join(root, name)) 126 | 127 | if sys.platform.startswith('linux'): 128 | fileinfo = subprocess.check_output(['file', '--uncompress', blendpath]).decode() 129 | verStr = re.search('\d\.\d\d', fileinfo).group(0) 130 | ver = tuple([int(n) for n in verStr.split('.')]) + (0,) 131 | 132 | if ver < (2, 80, 0) or ver > bpy.app.version: 133 | blendRel = os.path.relpath(blendpath, apps) 134 | log.warning(f'Ignoring {blendRel}, saved in Blender {ver[0]}.{ver[1]}') 135 | continue 136 | 137 | IGNORE = [] 138 | 139 | ignore = False 140 | for pattern in IGNORE: 141 | if fnmatch.fnmatch(name, pattern): 142 | ignore = True 143 | if ignore: 144 | continue 145 | 146 | gltfpath = findExportedAssetPath(blendpath) 147 | if gltfpath: 148 | if self.forceGLB: 149 | gltfpath = os.path.splitext(gltfpath)[0] + '.glb' 150 | self.__class__.exported.append((blendpath, gltfpath)) 151 | elif self.resaveBlend: 152 | self.__class__.exported.append((blendpath, None)) 153 | 154 | self.__class__.exported.sort() 155 | self.__class__.reexportNext() 156 | 157 | return {'FINISHED'} 158 | 159 | def invoke(self, context, event): 160 | wm = context.window_manager 161 | return wm.invoke_props_dialog(self) 162 | 163 | def menuReexportAll(self, context): 164 | self.layout.separator() 165 | self.layout.operator(V3D_OT_reexport_all.bl_idname, icon='TOOL_SETTINGS') 166 | 167 | def register(): 168 | bpy.utils.register_class(V3D_OT_reexport_all) 169 | if pluginUtils.debug: 170 | bpy.types.TOPBAR_MT_render.append(menuReexportAll) 171 | 172 | def unregister(): 173 | if pluginUtils.debug: 174 | bpy.types.TOPBAR_MT_render.remove(menuReexportAll) 175 | bpy.utils.unregister_class(V3D_OT_reexport_all) 176 | -------------------------------------------------------------------------------- /python/pyosl/oslast.py: 
# OSL Abstract Syntax Tree

import os, sys

class Node:
    """A node of the OSL abstract syntax tree.

    Each node carries a string ``type`` and a list of ``children``; a child
    is either another Node or a plain value (typically an identifier or
    literal string).  ``tmp`` is scratch storage used by
    assign_tmp_parents() to hold the parent link during ancestor lookups.
    """

    def __init__(self, type, *children):
        self.type = type
        self.children = list(children)
        # scratch slot: holds the parent Node after assign_tmp_parents()
        self.tmp = None

    def append(self, child):
        """Append a child at the end of the children list."""
        self.children.append(child)

    def insert(self, num, child):
        """Insert a child at position num."""
        self.children.insert(num, child)

    def num_childs(self):
        """Return the number of direct children."""
        return len(self.children)

    def get_child(self, type_or_idx):
        """Return a direct child.

        If type_or_idx is a string, return the first child Node whose type
        matches it, or None when there is no such child.  Otherwise
        type_or_idx is treated as an index into the children list.
        """
        # the 'unicode' branch can only evaluate under Python 2; the
        # short-circuit keeps it from raising NameError on Python 3
        if isinstance(type_or_idx, str) or (sys.version_info[0] == 2 and isinstance(type_or_idx, unicode)):
            for c in self.children:
                if isinstance(c, Node) and c.type == type_or_idx:
                    return c
        else:
            return self.children[type_or_idx]

        return None

    def set_child(self, index, node):
        """Replace the child at the given index."""
        self.children[index] = node

    def traverse_nodes(self, callback, node=None):
        '''DFS: invoke callback(node) for this node and every Node descendant.'''
        if node is None:
            node = self

        callback(node)

        for c in node.children:
            if isinstance(c, Node):
                self.traverse_nodes(callback, c)

    def find_nodes(self, *types):
        '''Find all descendant nodes (including self) of the given type(s).'''

        out = []

        def cb(node):
            # NOTE: not working in python2 (would need 'nonlocal out');
            # appending to the closed-over list works in both versions
            for type in types:
                if node.type == type:
                    out.append(node)

        self.traverse_nodes(cb)

        return out

    def find_node(self, type):
        '''Find the first descendant node of the given type, or None.'''

        nodes = self.find_nodes(type)
        if len(nodes):
            return nodes[0]
        else:
            return None

    def get_ancestor(self, ast):
        '''Get the nearest parent node within the tree rooted at ast.'''

        ast.assign_tmp_parents()
        return self.tmp

    def find_ancestor_node(self, ast, type):
        '''Find the closest ancestor node of the given type, or None.'''

        ast.assign_tmp_parents()

        node = self

        while True:
            parent = node.tmp

            if parent:
                if parent.type == type:
                    return parent
                else:
                    node = parent
            else:
                return None

    def assign_tmp_parents(self):
        '''For internal use: store each node's parent in its tmp slot.'''

        # root node has no parent
        self.tmp = None

        def cb(node):
            for c in node.children:
                if isinstance(c, Node):
                    # record parent link
                    c.tmp = node

        self.traverse_nodes(cb)


    def get_shader_name(self):
        """Return the declared shader name (child 1 of shader-declaration)."""
        return self.find_node('shader-declaration').get_child(1)

    def get_variables(self):
        """Return a dict mapping every declared variable name to its type.

        Covers variable declarations as well as function and shader formal
        parameters anywhere in the tree.
        """
        variables = {}

        decl_nodes = self.find_nodes('variable-declaration',
                                     'function-formal-param',
                                     'shader-formal-param')

        for dn in decl_nodes:

            var_type = dn.get_child('typespec').get_typespec_type()

            if dn.type == 'variable-declaration':
                # one declaration may define several variables
                for expr in dn.find_nodes('def-expression'):
                    name = expr.get_child(0)
                    variables[name] = var_type
            else:
                name = dn.get_child(2)
                variables[name] = var_type

        return variables

    def get_typespec_type(self):
        """Return the concrete type string of a 'typespec' node."""
        assert self.type == 'typespec'

        if self.get_child('simple-typename'):
            return self.get_child('simple-typename').get_child(0)
        else:
            return self.get_child(0)

    def get_functions(self):
        """Return a dict mapping function name -> (return_type, param_types...)."""
        functions = {}

        decl_nodes = self.find_nodes('function-declaration')

        for dn in decl_nodes:
            fun_type = dn.get_child('typespec').get_typespec_type()
            name = dn.get_child(1)
            params = dn.get_child(2)

            spec = [fun_type]

            if params:
                for param in params.children:
                    spec.append(param.get_child('typespec').get_typespec_type())

            functions[name] = tuple(spec)

        return functions

    def get_shader_params(self):
        """Collect shader formal parameters.

        Returns (inputs, outputs), each a list of (type, name, init_ast)
        triples.  init_ast is a generated one-parameter initializer shader,
        kept only when the parameter has an initializer that reads one of
        the global variables P/I/N/u/v; otherwise it is None.
        """

        inputs = []
        outputs = []

        shader_name = self.get_shader_name()

        decl_nodes = self.find_nodes('shader-formal-param')

        for dn in decl_nodes:
            # an empty outputspec marks an input parameter
            is_in = dn.get_child('outputspec').get_child(0) is None
            param_type = dn.get_child('typespec').get_child('simple-typename').get_child(0)
            name = dn.get_child(2)

            if is_in:
                init_ast = None
                init_node = dn.get_child('initializer')
                if init_node:
                    # wrap the initializer into a tiny standalone shader
                    # which assigns it to the (now output) parameter
                    param = dn.clone()
                    param.get_child('outputspec').set_child(0, 'output')
                    init_ast = self.create_shader(shader_name + '_init_' + str(decl_nodes.index(dn)),
                                                  [param],
                                                  [Node('statement-semi', Node('def-expression', name, init_node.clone()))])

                uses_gl_var = False
                # BUGFIX: the original dereferenced init_ast here even when the
                # parameter has no initializer (init_ast is None) -> AttributeError
                if init_ast is not None:
                    for gl_var_name in ['P', 'I', 'N', 'u', 'v']:
                        if init_ast.uses_variable(gl_var_name):
                            uses_gl_var = True

                if init_ast and uses_gl_var:
                    inputs.append((param_type, name, init_ast))
                else:
                    inputs.append((param_type, name, None))
            else:
                outputs.append((param_type, name, None))

        return inputs, outputs

    def uses_variable(self, name):
        """Return True if any variable reference in the tree targets name."""
        var_nodes = self.find_nodes('variable-ref')

        for vn in var_nodes:
            if vn.find_node('variable-lvalue').get_child(0) == name:
                return True

        return False

    def create_shader(self, name, params, statements):
        """Build a complete shader-file AST from parameter and statement lists."""
        params = Node('shader-formal-params', *params)
        statements = Node('statement-list', *statements)
        return Node('shader-file', Node('shader-declaration', 'shader', name, None, params, statements))

    def clone(self):
        """Return a deep copy of this subtree (non-Node children are shared)."""
        cloned_children = [c.clone() if isinstance(c, Node) else c for c in self.children]
        return Node(self.type, *cloned_children)

    def print_tree(self, level=0, indent=2, node=None):
        """Pretty-print the subtree, one node or leaf value per line."""
        if node is None:
            node = self

        print(' ' * indent * level + node.type)

        for c in node.children:
            if isinstance(c, Node):
                self.print_tree(level+1, indent, c)
            else:
                print(' ' * indent * (level + 1) + str(c))
# Copyright (c) 2017-2025 Soft8Soft, LLC. All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import bpy
import json, struct, os, tempfile

import pluginUtils
from pluginUtils.manager import AppManagerConn

log = pluginUtils.log.getLogger('V3D-BL')

from .gltf2_filter import *
from .gltf2_generate import *

def prepare(exportSettings):
    """
    Store the current state of Blender and prepare for export, depending on
    the current export settings.
    """
    # exporting requires object mode
    if bpy.context.active_object is not None and bpy.context.active_object.mode != 'OBJECT':
        bpy.ops.object.mode_set(mode='OBJECT')

    filterApply(exportSettings)

    exportSettings['originalFrame'] = bpy.context.scene.frame_current
    exportSettings['jointCache'] = {}

    if exportSettings['exportAnimations']:
        bpy.context.scene.frame_set(0)

def finish(exportSettings):
    """
    Bring Blender back into its original state before export and clean up
    temporary objects.
    """
    if exportSettings['temporaryMeshes'] is not None:
        for tempMesh in exportSettings['temporaryMeshes']:
            bpy.data.meshes.remove(tempMesh)

    if exportSettings['temporaryMaterials'] is not None:
        for tempMat in exportSettings['temporaryMaterials']:
            bpy.data.materials.remove(tempMat)

    # drop the per-image flag set during texture compression
    for blImage in exportSettings['filteredImages']:
        del blImage['compression_error_status']

    del exportSettings['uriCache']['uri'][:]
    del exportSettings['uriCache']['blDatablocks'][:]

    # NOTE(review): self-assignment of material slots presumably forces
    # Blender to refresh slots touched during export — TODO confirm
    for obj in bpy.data.objects:
        if obj.data is not None and hasattr(obj.data, 'materials'):
            mats = obj.data.materials
            for i in range(len(mats)):
                mats[i] = mats[i]

    bpy.context.scene.frame_set(exportSettings['originalFrame'])

def compressLZMA(path, settings):
    """Compress the file at path with LZMA unless disabled or sneak-peeking."""

    if settings['sneakPeek']:
        return

    if not settings['lzmaEnabled']:
        return

    pluginUtils.convert.compressLZMA(path)

def _writeGLB(file, glTF_data, binary):
    """
    Write a GLB (binary glTF) container to an open binary file:
    12-byte header, JSON chunk, optional BIN chunk, each padded to a
    4-byte boundary as required by the glTF 2.0 specification.
    """
    length_gltf = len(glTF_data)
    spaces_gltf = (4 - (length_gltf & 3)) & 3
    length_gltf += spaces_gltf

    length_bin = len(binary)
    zeros_bin = (4 - (length_bin & 3)) & 3
    length_bin += zeros_bin

    # total = 12-byte header + 8-byte JSON chunk header + padded JSON payload
    length = 12 + 8 + length_gltf
    if length_bin > 0:
        length += 8 + length_bin

    # glTF mandates little-endian; use explicit '<I' instead of the
    # platform-native 'I' the original code relied on
    file.write(b'glTF')
    file.write(struct.pack('<I', 2))
    file.write(struct.pack('<I', length))

    file.write(struct.pack('<I', length_gltf))
    file.write(b'JSON')
    file.write(glTF_data)
    file.write(b' ' * spaces_gltf)

    if length_bin > 0:
        file.write(struct.pack('<I', length_bin))
        file.write(b'BIN\0')
        file.write(binary)
        file.write(b'\0' * zeros_bin)

def save(operator, context, exportSettings):
    """
    Start the glTF 2.0 export and save the content to a .gltf, .glb or
    .html file.  Returns {'FINISHED'} as a Blender operator result.
    """

    log.info('Starting glTF 2.0 export')
    bpy.context.window_manager.progress_begin(0, 100)
    bpy.context.window_manager.progress_update(1)

    prepare(exportSettings)

    glTF = {}

    generateGLTF(operator, context, exportSettings, glTF)

    cleanupDataKeys(glTF)

    jsonStrip = exportSettings['strip'] and not exportSettings['sneakPeek']
    exportFormat = exportSettings['format']

    # human-readable JSON only for non-stripped ASCII exports
    if exportFormat == 'ASCII' and not jsonStrip:
        indent = 4
        separators = (', ', ' : ')
    else:
        indent = None
        separators = (',', ':')

    glTF_encoded = json.dumps(glTF, indent=indent, separators=separators,
                              sort_keys=True, ensure_ascii=False)

    if exportFormat == 'ASCII':
        with open(exportSettings['filepath'], 'w', encoding='utf8', newline='\n') as f:
            f.write(glTF_encoded)
            f.write('\n')

        binary = exportSettings['binary']
        binPath = exportSettings['filedirectory'] + exportSettings['binaryfilename']

        if len(binary) > 0:
            with open(binPath, 'wb') as f:
                f.write(binary)

        compressLZMA(exportSettings['filepath'], exportSettings)

        if os.path.isfile(binPath):
            compressLZMA(binPath, exportSettings)

    else:
        if exportFormat == 'BINARY':
            file = open(exportSettings['filepath'], 'wb')
        else: # HTML: write the GLB into a temp file, then embed it into the page
            file = tempfile.NamedTemporaryFile(delete=False)

        try:
            _writeGLB(file, glTF_encoded.encode(), exportSettings['binary'])
        finally:
            file.close()

        if exportFormat == 'BINARY':
            compressLZMA(exportSettings['filepath'], exportSettings)
        else: # HTML
            blendname = os.path.splitext(bpy.path.basename(bpy.context.blend_data.filepath))[0]
            title = blendname.replace('_', ' ').title() or 'Blender scene exported to HTML'
            if exportSettings['copyright']:
                title += ' (Copyright {})'.format(exportSettings['copyright'])
            pluginUtils.convert.composeSingleHTML(exportSettings['filepath'], file.name, title)
            os.unlink(file.name)

    finish(exportSettings)

    log.info('Finished glTF 2.0 export')
    bpy.context.window_manager.progress_end()

    return {'FINISHED'}

def cleanupDataKeys(glTF):
    """
    Remove "id" keys used in the exporter to assign entity indices.

    BUGFIX: the original elif-chain cleaned at most ONE of the Soft8Soft
    extensions per dict; now every extension present is processed.
    """
    for key, val in glTF.items():
        if type(val) == list:
            for entity in val:
                if 'id' in entity:
                    del entity['id']
        elif key == 'extensions':
            for extName in ('S8S_v3d_lights', 'S8S_v3d_light_probes',
                            'S8S_v3d_clipping_planes', 'S8S_v3d_curves'):
                if extName in val:
                    cleanupDataKeys(val[extName])
-------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017-2025 Soft8Soft, LLC. All rights reserved. 2 | # 3 | # This program is free software: you can redistribute it and/or modify 4 | # it under the terms of the GNU General Public License as published by 5 | # the Free Software Foundation, either version 3 of the License, or 6 | # (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 11 | # GNU General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program. If not, see . 15 | import bpy 16 | import os 17 | import sys 18 | 19 | join = os.path.join 20 | 21 | ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) 22 | sys.path.append(join(ROOT_DIR, 'python')) 23 | 24 | ADDON_DISABLE_DELAY = 2 25 | 26 | if 'bpy' in locals(): 27 | import importlib 28 | if 'gltf2_animate' in locals(): 29 | importlib.reload(gltf2_animate) 30 | if 'gltf2_export' in locals(): 31 | importlib.reload(gltf2_export) 32 | if 'gltf2_extract' in locals(): 33 | importlib.reload(gltf2_extract) 34 | if 'gltf2_filter' in locals(): 35 | importlib.reload(gltf2_filter) 36 | if 'gltf2_generate' in locals(): 37 | importlib.reload(gltf2_generate) 38 | if 'gltf2_get' in locals(): 39 | importlib.reload(gltf2_get) 40 | 41 | if 'node_material_wrapper' in locals(): 42 | importlib.reload(node_material_wrapper) 43 | if 'utils' in locals(): 44 | importlib.reload(utils) 45 | 46 | import pluginUtils 47 | from pluginUtils.manager import AppManagerConn 48 | from pluginUtils.path import getRoot 49 | 50 | log = pluginUtils.log.getLogger('V3D-BL') 51 | 52 | from bpy.props import (CollectionProperty, 53 | 
StringProperty, 54 | BoolProperty, 55 | EnumProperty, 56 | FloatProperty) 57 | 58 | from bpy_extras.io_utils import (ExportHelper) 59 | 60 | class V3D_AddonPreferences(bpy.types.AddonPreferences): 61 | bl_idname = __package__ 62 | 63 | disable_builtin_gltf_addon: BoolProperty( 64 | default = True, 65 | description = 'Disable built-in glTF 2.0 exporter (io_scene_gltf2)' 66 | ) 67 | 68 | def draw(self, context): 69 | layout = self.layout 70 | row = layout.row() 71 | row.prop(self, 'disable_builtin_gltf_addon', text='Disable Built-in glTF Add-on') 72 | 73 | class V3D_OT_export(): 74 | 75 | export_sneak_peek: BoolProperty( 76 | name='Sneak Peek Mode', 77 | description='', 78 | default=False 79 | ) 80 | 81 | def execute(self, context): 82 | from . import gltf2_export 83 | 84 | scene0 = bpy.data.scenes[0] 85 | v3d_export = scene0.v3d_export 86 | 87 | exportSettings = {} 88 | 89 | exportSettings['filepath'] = bpy.path.ensure_ext(self.filepath, self.filename_ext) 90 | exportSettings['filedirectory'] = os.path.dirname(exportSettings['filepath']) + '/' 91 | 92 | exportSettings['format'] = self.export_format 93 | exportSettings['copyright'] = v3d_export.copyright 94 | exportSettings['useShadows'] = scene0.eevee.use_shadows 95 | exportSettings['shadowMapType'] = v3d_export.shadow_map_type 96 | exportSettings['shadowMapSide'] = v3d_export.shadow_map_side 97 | exportSettings['esmDistanceScale'] = v3d_export.esm_distance_scale 98 | exportSettings['shadowCubeSize'] = v3d_export.shadow_cube_size 99 | exportSettings['shadowCascadeSize'] = v3d_export.shadow_cascade_size 100 | exportSettings['iblEnvironmentMode'] = v3d_export.ibl_environment_mode 101 | exportSettings['bakeModifiers'] = v3d_export.bake_modifiers 102 | exportSettings['bakeArmatureActions'] = v3d_export.bake_armature_actions 103 | exportSettings['bakeText'] = v3d_export.bake_text if self.export_format != 'HTML' else True 104 | exportSettings['exportConstraints'] = v3d_export.export_constraints 105 | 
exportSettings['exportCustomProps'] = v3d_export.export_custom_props 106 | exportSettings['lzmaEnabled'] = v3d_export.lzma_enabled 107 | exportSettings['compressTextures'] = v3d_export.compress_textures if self.export_format != 'HTML' else False 108 | exportSettings['optimizeAttrs'] = v3d_export.optimize_attrs 109 | exportSettings['aaMethod'] = v3d_export.aa_method 110 | exportSettings['useHDR'] = v3d_export.use_hdr 111 | exportSettings['useOIT'] = v3d_export.use_oit 112 | exportSettings['exportAnimations'] = v3d_export.export_animations 113 | if v3d_export.export_animations: 114 | exportSettings['exportFrameRange'] = v3d_export.export_frame_range 115 | exportSettings['moveKeyframes'] = v3d_export.export_move_keyframes 116 | else: 117 | exportSettings['exportFrameRange'] = False 118 | exportSettings['moveKeyframes'] = False 119 | 120 | exportSettings['uriCache'] = { 'uri': [], 'blDatablocks': [] } 121 | exportSettings['binary'] = bytearray() 122 | exportSettings['binaryfilename'] = os.path.splitext(os.path.basename(self.filepath))[0] + '.bin' 123 | 124 | exportSettings['sneakPeek'] = self.export_sneak_peek 125 | 126 | exportSettings['temporaryMeshes'] = None 127 | exportSettings['temporaryMaterials'] = None 128 | 129 | exportSettings['strip'] = True 130 | 131 | exportSettings['indices'] = 'UNSIGNED_INT' 132 | exportSettings['forceIndices'] = False 133 | 134 | exportSettings['forceSampling'] = False 135 | exportSettings['skins'] = True 136 | exportSettings['morph'] = True 137 | exportSettings['morphNormal'] = True 138 | exportSettings['morphTangent'] = True 139 | 140 | return gltf2_export.save(self, context, exportSettings) 141 | 142 | def draw(self, context): 143 | pass 144 | 145 | class V3D_OT_export_html(bpy.types.Operator, ExportHelper, V3D_OT_export): 146 | """Export scene to HTML""" 147 | bl_idname = 'export_scene.v3d_html' 148 | bl_label = 'Export HTML' 149 | 150 | filename_ext = '.html' 151 | filter_glob: StringProperty(default='*.html', options={'HIDDEN'}) 
152 | 153 | export_format = 'HTML' 154 | 155 | class V3D_OT_export_gltf(bpy.types.Operator, ExportHelper, V3D_OT_export): 156 | """Export scene to glTF 2.0 format""" 157 | bl_idname = 'export_scene.v3d_gltf' 158 | bl_label = 'Export Verge3D glTF' 159 | 160 | filename_ext = '.gltf' 161 | filter_glob: StringProperty(default='*.gltf', options={'HIDDEN'}) 162 | 163 | export_format = 'ASCII' 164 | 165 | class V3D_OT_export_glb(bpy.types.Operator, ExportHelper, V3D_OT_export): 166 | """Export scene to glTF 2.0 binary format""" 167 | bl_idname = 'export_scene.v3d_glb' 168 | bl_label = 'Export Verge3D glTF Binary' 169 | 170 | filename_ext = '.glb' 171 | filter_glob: StringProperty(default='*.glb', options={'HIDDEN'}) 172 | 173 | export_format = 'BINARY' 174 | 175 | def menuExportHTML(self, context): 176 | self.layout.operator(V3D_OT_export_html.bl_idname, text='HTML (.html)') 177 | 178 | def menuExportGLTF(self, context): 179 | self.layout.operator(V3D_OT_export_gltf.bl_idname, text='Verge3D glTF (.gltf)') 180 | 181 | def menuExportGLB(self, context): 182 | self.layout.operator(V3D_OT_export_glb.bl_idname, text='Verge3D glTF Binary (.glb)') 183 | 184 | def disableBuiltInGLTFAddon(): 185 | 186 | import addon_utils 187 | 188 | is_enabled, is_loaded = addon_utils.check('io_scene_gltf2') 189 | 190 | if is_enabled: 191 | import io_scene_gltf2 192 | bpy.types.TOPBAR_MT_file_export.remove(io_scene_gltf2.menu_func_export) 193 | 194 | def register(): 195 | from . 
import custom_props, custom_ui, reexport, manual_map 196 | 197 | AppManagerConn.init(getRoot(), 'BLENDER') 198 | 199 | bpy.utils.register_class(V3D_AddonPreferences) 200 | 201 | if AppManagerConn.isAvailable(): 202 | bpy.utils.register_class(V3D_OT_export_html) 203 | bpy.types.TOPBAR_MT_file_export.append(menuExportHTML) 204 | bpy.utils.register_class(V3D_OT_export_gltf) 205 | bpy.utils.register_class(V3D_OT_export_glb) 206 | 207 | custom_props.register() 208 | custom_ui.register() 209 | reexport.register() 210 | manual_map.register() 211 | 212 | bpy.types.TOPBAR_MT_file_export.append(menuExportGLTF) 213 | bpy.types.TOPBAR_MT_file_export.append(menuExportGLB) 214 | 215 | if AppManagerConn.isAvailable(): 216 | if not AppManagerConn.ping(): 217 | AppManagerConn.start() 218 | else: 219 | log.warning('App Manager is not available!') 220 | 221 | if bpy.context.preferences.addons[__package__].preferences.disable_builtin_gltf_addon: 222 | bpy.app.timers.register(disableBuiltInGLTFAddon, first_interval=ADDON_DISABLE_DELAY, persistent=True) 223 | 224 | def unregister(): 225 | from . 
import custom_props, custom_ui, reexport, manual_map 226 | 227 | bpy.utils.unregister_class(V3D_AddonPreferences) 228 | 229 | if AppManagerConn.isAvailable(): 230 | bpy.utils.unregister_class(V3D_OT_export_html) 231 | bpy.types.TOPBAR_MT_file_export.remove(menuExportHTML) 232 | bpy.utils.unregister_class(V3D_OT_export_gltf) 233 | bpy.utils.unregister_class(V3D_OT_export_glb) 234 | 235 | custom_props.unregister() 236 | custom_ui.unregister() 237 | reexport.unregister() 238 | manual_map.unregister() 239 | 240 | bpy.types.TOPBAR_MT_file_export.remove(menuExportGLTF) 241 | bpy.types.TOPBAR_MT_file_export.remove(menuExportGLB) 242 | -------------------------------------------------------------------------------- /python/iniparse/config.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Iterable, List, TextIO, Union, TYPE_CHECKING 2 | 3 | if TYPE_CHECKING: 4 | from .ini import INIConfig, INISection 5 | 6 | 7 | class ConfigNamespace: 8 | """Abstract class representing the interface of Config objects. 9 | 10 | A ConfigNamespace is a collection of names mapped to values, where 11 | the values may be nested namespaces. Values can be accessed via 12 | container notation - obj[key] - or via dotted notation - obj.key. 13 | Both these access methods are equivalent. 14 | 15 | To minimize name conflicts between namespace keys and class members, 16 | the number of class members should be minimized, and the names of 17 | all class members should start with an underscore. 18 | 19 | Subclasses must implement the methods for container-like access, 20 | and this class will automatically provide dotted access. 
21 | """ 22 | 23 | # Methods that must be implemented by subclasses 24 | 25 | def _getitem(self, key: str) -> object: 26 | return NotImplementedError(key) 27 | 28 | def __setitem__(self, key: str, value: object): 29 | raise NotImplementedError(key, value) 30 | 31 | def __delitem__(self, key: str) -> None: 32 | raise NotImplementedError(key) 33 | 34 | def __iter__(self) -> Iterable[str]: 35 | # FIXME Raise instead return 36 | return NotImplementedError() 37 | 38 | def _new_namespace(self, name: str) -> "ConfigNamespace": 39 | raise NotImplementedError(name) 40 | 41 | def __contains__(self, key: str) -> bool: 42 | try: 43 | self._getitem(key) 44 | except KeyError: 45 | return False 46 | return True 47 | 48 | # Machinery for converting dotted access into container access, 49 | # and automatically creating new sections/namespaces. 50 | # 51 | # To distinguish between accesses of class members and namespace 52 | # keys, we first call object.__getattribute__(). If that succeeds, 53 | # the name is assumed to be a class member. Otherwise, it is 54 | # treated as a namespace key. 55 | # 56 | # Therefore, member variables should be defined in the class, 57 | # not just in the __init__() function. See BasicNamespace for 58 | # an example. 
59 | 60 | def __getitem__(self, key: str) -> Union[object, "Undefined"]: 61 | try: 62 | return self._getitem(key) 63 | except KeyError: 64 | return Undefined(key, self) 65 | 66 | def __getattr__(self, name: str) -> Union[object, "Undefined"]: 67 | try: 68 | return self._getitem(name) 69 | except KeyError: 70 | if name.startswith('__') and name.endswith('__'): 71 | raise AttributeError 72 | return Undefined(name, self) 73 | 74 | def __setattr__(self, name: str, value: object) -> None: 75 | try: 76 | object.__getattribute__(self, name) 77 | object.__setattr__(self, name, value) 78 | except AttributeError: 79 | self.__setitem__(name, value) 80 | 81 | def __delattr__(self, name: str) -> None: 82 | try: 83 | object.__getattribute__(self, name) 84 | object.__delattr__(self, name) 85 | except AttributeError: 86 | self.__delitem__(name) 87 | 88 | # During unpickling, Python checks if the class has a __setstate__ 89 | # method. But, the data dicts have not been initialised yet, which 90 | # leads to _getitem and hence __getattr__ raising an exception. So 91 | # we explicitly implement default __setstate__ behavior. 92 | def __setstate__(self, state: dict) -> None: 93 | self.__dict__.update(state) 94 | 95 | 96 | class Undefined: 97 | """Helper class used to hold undefined names until assignment. 98 | 99 | This class helps create any undefined subsections when an 100 | assignment is made to a nested value. For example, if the 101 | statement is "cfg.a.b.c = 42", but "cfg.a.b" does not exist yet. 102 | """ 103 | 104 | def __init__(self, name: str, namespace: ConfigNamespace): 105 | # FIXME These assignments into `object` feel very strange. 106 | # What's the reason for it? 
107 | object.__setattr__(self, 'name', name) 108 | object.__setattr__(self, 'namespace', namespace) 109 | 110 | def __setattr__(self, name: str, value: object) -> None: 111 | obj = self.namespace._new_namespace(self.name) 112 | obj[name] = value 113 | 114 | def __setitem__(self, name, value) -> None: 115 | obj = self.namespace._new_namespace(self.name) 116 | obj[name] = value 117 | 118 | 119 | # ---- Basic implementation of a ConfigNamespace 120 | 121 | class BasicConfig(ConfigNamespace): 122 | """Represents a hierarchical collection of named values. 123 | 124 | Values are added using dotted notation: 125 | 126 | >>> n = BasicConfig() 127 | >>> n.x = 7 128 | >>> n.name.first = 'paramjit' 129 | >>> n.name.last = 'oberoi' 130 | 131 | ...and accessed the same way, or with [...]: 132 | 133 | >>> n.x 134 | 7 135 | >>> n.name.first 136 | 'paramjit' 137 | >>> n.name.last 138 | 'oberoi' 139 | >>> n['x'] 140 | 7 141 | >>> n['name']['first'] 142 | 'paramjit' 143 | 144 | Iterating over the namespace object returns the keys: 145 | 146 | >>> l = list(n) 147 | >>> l.sort() 148 | >>> l 149 | ['name', 'x'] 150 | 151 | Values can be deleted using 'del' and printed using 'print'. 152 | 153 | >>> n.aaa = 42 154 | >>> del n.x 155 | >>> print(n) 156 | aaa = 42 157 | name.first = paramjit 158 | name.last = oberoi 159 | 160 | Nested namespaces are also namespaces: 161 | 162 | >>> isinstance(n.name, ConfigNamespace) 163 | True 164 | >>> print(n.name) 165 | first = paramjit 166 | last = oberoi 167 | >>> sorted(list(n.name)) 168 | ['first', 'last'] 169 | 170 | Finally, values can be read from a file as follows: 171 | 172 | >>> from io import StringIO 173 | >>> sio = StringIO(''' 174 | ... # comment 175 | ... ui.height = 100 176 | ... ui.width = 150 177 | ... complexity = medium 178 | ... have_python 179 | ... data.secret.password = goodness=gracious me 180 | ... 
''') 181 | >>> n = BasicConfig() 182 | >>> n._readfp(sio) 183 | >>> print(n) 184 | complexity = medium 185 | data.secret.password = goodness=gracious me 186 | have_python 187 | ui.height = 100 188 | ui.width = 150 189 | """ 190 | 191 | # this makes sure that __setattr__ knows this is not a namespace key 192 | _data: Dict[str, str] = None 193 | 194 | def __init__(self): 195 | self._data = {} 196 | 197 | def _getitem(self, key: str) -> str: 198 | return self._data[key] 199 | 200 | def __setitem__(self, key: str, value: object) -> None: 201 | # FIXME We can add any object as 'value', but when an integer is read 202 | # from a file, it will be a string. Should we explicitly convert 203 | # this 'value' to string, to ensure consistency? 204 | # It will stay the original type until it is written to a file. 205 | self._data[key] = value 206 | 207 | def __delitem__(self, key: str) -> None: 208 | del self._data[key] 209 | 210 | def __iter__(self) -> Iterable[str]: 211 | return iter(self._data) 212 | 213 | def __str__(self, prefix: str = '') -> str: 214 | lines: List[str] = [] 215 | keys: List[str] = list(self._data.keys()) 216 | keys.sort() 217 | for name in keys: 218 | value: object = self._data[name] 219 | if isinstance(value, ConfigNamespace): 220 | lines.append(value.__str__(prefix='%s%s.' 
% (prefix,name))) 221 | else: 222 | if value is None: 223 | lines.append('%s%s' % (prefix, name)) 224 | else: 225 | lines.append('%s%s = %s' % (prefix, name, value)) 226 | return '\n'.join(lines) 227 | 228 | def _new_namespace(self, name: str) -> "BasicConfig": 229 | obj = BasicConfig() 230 | self._data[name] = obj 231 | return obj 232 | 233 | def _readfp(self, fp: TextIO) -> None: 234 | while True: 235 | line: str = fp.readline() 236 | if not line: 237 | break 238 | 239 | line = line.strip() 240 | if not line: continue 241 | if line[0] == '#': continue 242 | data: List[str] = line.split('=', 1) 243 | if len(data) == 1: 244 | name = line 245 | value = None 246 | else: 247 | name = data[0].strip() 248 | value = data[1].strip() 249 | name_components = name.split('.') 250 | ns: ConfigNamespace = self 251 | for n in name_components[:-1]: 252 | if n in ns: 253 | maybe_ns: object = ns[n] 254 | if not isinstance(maybe_ns, ConfigNamespace): 255 | raise TypeError('value-namespace conflict', n) 256 | ns = maybe_ns 257 | else: 258 | ns = ns._new_namespace(n) 259 | ns[name_components[-1]] = value 260 | 261 | 262 | # ---- Utility functions 263 | 264 | def update_config(target: ConfigNamespace, source: ConfigNamespace): 265 | """Imports values from source into target. 266 | 267 | Recursively walks the ConfigNamespace and inserts values 268 | into the ConfigNamespace. 
For example: 269 | 270 | >>> n = BasicConfig() 271 | >>> n.playlist.expand_playlist = True 272 | >>> n.ui.display_clock = True 273 | >>> n.ui.display_qlength = True 274 | >>> n.ui.width = 150 275 | >>> print(n) 276 | playlist.expand_playlist = True 277 | ui.display_clock = True 278 | ui.display_qlength = True 279 | ui.width = 150 280 | 281 | >>> from iniparse import ini 282 | >>> i = ini.INIConfig() 283 | >>> update_config(i, n) 284 | >>> print(i) 285 | [playlist] 286 | expand_playlist = True 287 | 288 | [ui] 289 | display_clock = True 290 | display_qlength = True 291 | width = 150 292 | """ 293 | for name in sorted(source): 294 | value: object = source[name] 295 | if isinstance(value, ConfigNamespace): 296 | if name in target: 297 | maybe_myns: object = target[name] 298 | if not isinstance(maybe_myns, ConfigNamespace): 299 | raise TypeError('value-namespace conflict') 300 | myns = maybe_myns 301 | else: 302 | myns = target._new_namespace(name) 303 | update_config(myns, value) 304 | else: 305 | target[name] = value 306 | -------------------------------------------------------------------------------- /python/pcpp/pcmd.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, print_function 2 | import sys, argparse, traceback, os, copy 3 | if __name__ == '__main__' and __package__ is None: 4 | sys.path.append( os.path.dirname( os.path.dirname( os.path.abspath(__file__) ) ) ) 5 | from pcpp.preprocessor import Preprocessor, OutputDirective, Action 6 | 7 | version='1.21' 8 | 9 | __all__ = [] 10 | 11 | class FileAction(argparse.Action): 12 | def __init__(self, option_strings, dest, **kwargs): 13 | super(FileAction, self).__init__(option_strings, dest, **kwargs) 14 | 15 | def __call__(self, parser, namespace, values, option_string=None): 16 | if getattr(namespace, self.dest)[0] == sys.stdin: 17 | items = [] 18 | else: 19 | items = copy.copy(getattr(namespace, self.dest)) 20 | items += 
            [argparse.FileType('rt')(value) for value in values]
        setattr(namespace, self.dest, items)

class CmdPreprocessor(Preprocessor):
    """Command-line driver for pcpp.

    Parses ``argv``, configures the base :class:`Preprocessor` from the
    flags, preprocesses the input file(s), writes the result to the chosen
    output and optionally prints timing/CSV reports.
    """

    def __init__(self, argv):
        # With no arguments at all, behave as if --help had been passed.
        if len(argv) < 2:
            argv = [argv[0], '--help']
        argp = argparse.ArgumentParser(prog='pcpp',
            description=
'''A pure universal Python C (pre-)preprocessor implementation very useful for
pre-preprocessing header only C++ libraries into single file includes and
other such build or packaging stage malarky.''',
            epilog=
'''Note that so pcpp can stand in for other preprocessor tooling, it
ignores any arguments it does not understand.''')
        argp.add_argument('inputs', metavar = 'input', default = [sys.stdin], nargs = '*', action = FileAction, help = 'Files to preprocess (use \'-\' for stdin)')
        argp.add_argument('-o', dest = 'output', metavar = 'path', type = argparse.FileType('wt'), default=sys.stdout, nargs = '?', help = 'Output to a file instead of stdout')
        argp.add_argument('-D', dest = 'defines', metavar = 'macro[=val]', nargs = 1, action = 'append', help = 'Predefine name as a macro [with value]')
        argp.add_argument('-U', dest = 'undefines', metavar = 'macro', nargs = 1, action = 'append', help = 'Pre-undefine name as a macro')
        argp.add_argument('-N', dest = 'nevers', metavar = 'macro', nargs = 1, action = 'append', help = 'Never define name as a macro, even if defined during the preprocessing.')
        argp.add_argument('-I', dest = 'includes', metavar = 'path', nargs = 1, action = 'append', help = "Path to search for unfound #include's")
        #argp.add_argument('--passthru', dest = 'passthru', action = 'store_true', help = 'Pass through everything unexecuted except for #include and include guards (which need to be the first thing in an include file')
        argp.add_argument('--passthru-defines', dest = 'passthru_defines', action = 'store_true', help = 'Pass through but still execute #defines and #undefs if not always removed by preprocessor logic')
        argp.add_argument('--passthru-unfound-includes', dest = 'passthru_unfound_includes', action = 'store_true', help = 'Pass through #includes not found without execution')
        argp.add_argument('--passthru-unknown-exprs', dest = 'passthru_undefined_exprs', action = 'store_true', help = 'Unknown macros in expressions cause preprocessor logic to be passed through instead of executed by treating unknown macros as 0L')
        argp.add_argument('--passthru-comments', dest = 'passthru_comments', action = 'store_true', help = 'Pass through comments unmodified')
        argp.add_argument('--passthru-magic-macros', dest = 'passthru_magic_macros', action = 'store_true', help = 'Pass through double underscore magic macros unmodified')
        argp.add_argument('--disable-auto-pragma-once', dest = 'auto_pragma_once_disabled', action = 'store_true', default = False, help = 'Disable the heuristics which auto apply #pragma once to #include files wholly wrapped in an obvious include guard macro')
        argp.add_argument('--line-directive', dest = 'line_directive', metavar = 'form', default = '#line', nargs = '?', help = "Form of line directive to use, defaults to #line, specify nothing to disable output of line directives")
        argp.add_argument('--debug', dest = 'debug', action = 'store_true', help = 'Generate a pcpp_debug.log file logging execution')
        argp.add_argument('--time', dest = 'time', action = 'store_true', help = 'Print the time it took to #include each file')
        argp.add_argument('--filetimes', dest = 'filetimes', metavar = 'path', type = argparse.FileType('wt'), default=None, nargs = '?', help = 'Write CSV file with time spent inside each included file, inclusive and exclusive')
        argp.add_argument('--compress', dest = 'compress', action = 'store_true', help = 'Make output as small as possible')
        argp.add_argument('--version', action='version', version='pcpp ' + version)
        # parse_known_args (not parse_args) so unrecognised flags are
        # collected into args[1] and merely warned about — see the epilog.
        args = argp.parse_known_args(argv[1:])
        #print(args)

        for arg in args[1]:
            print("NOTE: Argument %s not known, ignoring!" % arg, file = sys.stderr)

        self.args = args[0]
        super(CmdPreprocessor, self).__init__()

        # Override Preprocessor instance variables
        self.define("__PCPP_VERSION__ " + version)
        self.define("__PCPP_ALWAYS_FALSE__ 0")
        self.define("__PCPP_ALWAYS_TRUE__ 1")
        if self.args.debug:
            # NOTE(review): this log file handle is never explicitly closed.
            self.debugout = open("pcpp_debug.log", "wt")
        self.auto_pragma_once_enabled = not self.args.auto_pragma_once_disabled
        self.line_directive = self.args.line_directive
        self.compress = 2 if self.args.compress else 0
        if self.args.passthru_magic_macros:
            self.undef('__DATE__')
            self.undef('__TIME__')
            self.expand_linemacro = False
            self.expand_filemacro = False
            self.expand_countermacro = False

        # My own instance variables
        self.bypass_ifpassthru = False
        self.potential_include_guard = None

        # -D accepts "macro" or "macro=value"; the Preprocessor.define API
        # wants "macro value", so normalise before defining.
        if self.args.defines:
            self.args.defines = [x[0] for x in self.args.defines]
            for d in self.args.defines:
                if '=' not in d:
                    d += '=1'
                d = d.replace('=', ' ', 1)
                self.define(d)
        if self.args.undefines:
            self.args.undefines = [x[0] for x in self.args.undefines]
            for d in self.args.undefines:
                self.undef(d)
        if self.args.nevers:
            self.args.nevers = [x[0] for x in self.args.nevers]
        if self.args.includes:
            self.args.includes = [x[0] for x in self.args.includes]
            for d in self.args.includes:
                self.add_path(d)

        try:
            if len(self.args.inputs) == 1:
                self.parse(self.args.inputs[0])
            else:
                # Multiple inputs: synthesise a virtual top-level file that
                # #includes each of them in order.
                input = ''
                for i in self.args.inputs:
                    input += '#include "' + i.name + '"\n'
                self.parse(input)
            self.write(self.args.output)
        except:
            # NOTE(review): bare except also traps KeyboardInterrupt and
            # SystemExit; `except Exception:` would be safer.
            # NOTE(review): traceback.print_exc() prints to stderr itself and
            # returns None, so this print() also emits a literal "None".
            print(traceback.print_exc(10), file = sys.stderr)
            print("\nINTERNAL PREPROCESSOR ERROR AT AROUND %s:%d, FATALLY EXITING NOW\n"
                % (self.lastdirective.source, self.lastdirective.lineno), file = sys.stderr)
            sys.exit(-99)
        finally:
            for i in self.args.inputs:
                i.close()
            # NOTE(review): when -o is absent, output is sys.stdout and is
            # closed here, so the --time report prints below would hit a
            # closed stream; upstream pcpp guards this with
            # `if self.args.output != sys.stdout:` — confirm.
            self.args.output.close()

        if self.args.time:
            print("\nTime report:")
            print("============")
            for n in range(0, len(self.include_times)):
                if n == 0:
                    print("top level: %f seconds" % self.include_times[n].elapsed)
                elif self.include_times[n].depth == 1:
                    print("\n %s: %f seconds (%f%%)" % (self.include_times[n].included_path, self.include_times[n].elapsed, 100 * self.include_times[n].elapsed / self.include_times[0].elapsed))
                else:
                    print("%s%s: %f seconds" % (' ' * self.include_times[n].depth, self.include_times[n].included_path, self.include_times[n].elapsed))
            print("\nPragma once files (including heuristically applied):")
            print("====================================================")
            for i in self.include_once:
                print(" ", i)
            print()
        if self.args.filetimes:
            print('"Total seconds","Self seconds","File size","File path"', file = self.args.filetimes)
            # filetimes maps abspath -> [inclusive seconds, exclusive seconds];
            # currentfiles is the current #include stack reconstructed from
            # the recorded depths.
            filetimes = {}
            currentfiles = []
            for n in range(0, len(self.include_times)):
                while self.include_times[n].depth < len(currentfiles):
                    currentfiles.pop()
                if self.include_times[n].depth > len(currentfiles) - 1:
                    currentfiles.append(self.include_times[n].included_abspath)
                #print()
                #for path in currentfiles:
                #    print("currentfiles =", path)
                path = currentfiles[-1]
                if path in filetimes:
                    filetimes[path][0] += self.include_times[n].elapsed
                    filetimes[path][1] += self.include_times[n].elapsed
                else:
                    filetimes[path] = [self.include_times[n].elapsed, self.include_times[n].elapsed]
                # Subtract a child's time from its parent's exclusive total.
                if self.include_times[n].elapsed > 0 and len(currentfiles) > 1:
                    #print("Removing child %f from parent %s = %f" % (self.include_times[n].elapsed, currentfiles[-2], filetimes[currentfiles[-2]]))
                    filetimes[currentfiles[-2]][1] -= self.include_times[n].elapsed
            filetimes = [(v[0],v[1],k) for k,v in filetimes.items()]
            filetimes.sort(reverse=True)
            for t,s,p in filetimes:
                print(('%f,%f,%d,"%s"' % (t, s, os.stat(p).st_size, p)), file = self.args.filetimes)

    def on_include_not_found(self,is_system_include,curdir,includepath):
        # Pass unfound #includes through verbatim when requested.
        if self.args.passthru_unfound_includes:
            raise OutputDirective(Action.IgnoreAndPassThrough)
        return super(CmdPreprocessor, self).on_include_not_found(is_system_include,curdir,includepath)

    def on_unknown_macro_in_defined_expr(self,tok):
        # Explicitly -U'd macros evaluate as not-defined; otherwise optionally
        # pass the expression through unexecuted.
        if self.args.undefines:
            if tok.value in self.args.undefines:
                return False
        if self.args.passthru_undefined_exprs:
            return None # Pass through as expanded as possible
        return super(CmdPreprocessor, self).on_unknown_macro_in_defined_expr(tok)

    def on_unknown_macro_in_expr(self,tok):
        if self.args.undefines:
            if tok.value in self.args.undefines:
                return super(CmdPreprocessor, self).on_unknown_macro_in_expr(tok)
        if self.args.passthru_undefined_exprs:
            return None # Pass through as expanded as possible
        return super(CmdPreprocessor, self).on_unknown_macro_in_expr(tok)

    def on_directive_handle(self,directive,toks,ifpassthru,precedingtoks):
        if ifpassthru:
            # NOTE(review): `directive == 'else'` compares the PLY token
            # object with a string (unlike the `.value` comparisons around
            # it) and so is likely never true — confirm against upstream.
            if directive.value == 'if' or directive.value == 'elif' or directive == 'else' or directive.value == 'endif':
                self.bypass_ifpassthru = len([tok for tok in toks if tok.value == '__PCPP_ALWAYS_FALSE__' or tok.value == '__PCPP_ALWAYS_TRUE__']) > 0
            if not self.bypass_ifpassthru and (directive.value == 'define' or directive.value == 'undef'):
                if toks[0].value != self.potential_include_guard:
                    raise OutputDirective(Action.IgnoreAndPassThrough) # Don't execute anything with effects when inside an #if expr with undefined macro
        if (directive.value == 'define' or directive.value == 'undef') and self.args.nevers:
            if toks[0].value in self.args.nevers:
                raise OutputDirective(Action.IgnoreAndPassThrough)
        if self.args.passthru_defines:
            super(CmdPreprocessor, self).on_directive_handle(directive,toks,ifpassthru,precedingtoks)
            return None # Pass through where possible
        return super(CmdPreprocessor, self).on_directive_handle(directive,toks,ifpassthru,precedingtoks)

    def on_directive_unknown(self,directive,toks,ifpassthru,precedingtoks):
        if ifpassthru:
            return None # Pass through
        return super(CmdPreprocessor, self).on_directive_unknown(directive,toks,ifpassthru,precedingtoks)

    def on_potential_include_guard(self,macro):
        # Remember the candidate include-guard macro so on_directive_handle
        # can let its #define/#undef execute even in passthru mode.
        self.potential_include_guard = macro
        return super(CmdPreprocessor, self).on_potential_include_guard(macro)

    def on_comment(self,tok):
        if self.args.passthru_comments:
            return True # Pass through
        return super(CmdPreprocessor, self).on_comment(tok)

def main():
    """Console entry point: run the preprocessor over sys.argv."""
    p = CmdPreprocessor(sys.argv)
    sys.exit(p.return_code)

if __name__ == "__main__":
    p = CmdPreprocessor(sys.argv)
    sys.exit(p.return_code)
--------------------------------------------------------------------------------
/python/sysfont.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# pygame - Python Game Library
# Copyright (C) 2000-2003 Pete Shinners
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the Free
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Pete Shinners
# pete@shinners.org
"""sysfont, used in the font module to find system fonts"""

import os
import sys
import warnings
from os.path import basename, dirname, exists, join, splitext

if sys.platform != "emscripten":
    if os.name == "nt":
        import winreg as _winreg
    import subprocess


OpenType_extensions = frozenset((".ttf", ".ttc", ".otf"))
Sysfonts = {}   # simplified-name -> {(bold, italic): filepath}
Sysalias = {}   # alias simplified-name -> same style dict as the real font

# set once initsysfonts() has populated the tables above
is_init = False


def _simplename(name):
    """create simple version of the font name"""
    # return alphanumeric characters of a string (converted to lowercase)
    return "".join(c.lower() for c in name if c.isalnum())


def _addfont(name, bold, italic, font, fontdict):
    """insert a font and style into the font dictionary"""
    if name not in fontdict:
        fontdict[name] = {}
    fontdict[name][bold, italic] = font


def initsysfonts_win32():
    """initialize fonts dictionary on Windows"""

    fontdir = join(os.environ.get("WINDIR", "C:\\Windows"), "Fonts")
    fonts = {}

    # add fonts entered in the registry
    microsoft_font_dirs = [
        "SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Fonts",
        "SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Fonts",
    ]

    for domain in [_winreg.HKEY_LOCAL_MACHINE, _winreg.HKEY_CURRENT_USER]:
        for font_dir in microsoft_font_dirs:
            try:
                key = _winreg.OpenKey(domain, font_dir)
            except FileNotFoundError:
                continue

            for i in range(_winreg.QueryInfoKey(key)[1]):
                try:
                    # name is the font's name e.g. Times New Roman (TrueType)
                    # font is the font's filename e.g. times.ttf
                    name, font, _ = _winreg.EnumValue(key, i)
                except OSError:
                    break

                if splitext(font)[1].lower() not in OpenType_extensions:
                    continue
                # Registry values may hold a bare filename; resolve it
                # relative to the system Fonts directory.
                if not dirname(font):
                    font = join(fontdir, font)

                # Some are named A & B, both names should be processed separately
                # Ex: the main Cambria file is marked as "Cambria & Cambria Math"
                for name in name.split("&"):
                    _parse_font_entry_win(name, font, fonts)

    return fonts


def _parse_font_entry_win(name, font, fonts):
    """
    Parse out a simpler name and the font style from the initial file name.

    :param name: The font name
    :param font: The font file path
    :param fonts: The pygame font dictionary
    """
    true_type_suffix = "(TrueType)"
    mods = ("demibold", "narrow", "light", "unicode", "bt", "mt")
    if name.endswith(true_type_suffix):
        # Cut off the "(TrueType)" suffix exactly. The previous code used
        # name.rstrip(true_type_suffix), but str.rstrip treats its argument
        # as a *character set*, so it could also eat trailing letters of the
        # font name itself (e.g. "Barr(TrueType)" -> "Ba").
        name = name[: -len(true_type_suffix)].rstrip()
    name = name.lower().split()
    bold = italic = False
    # Drop style modifiers so "arial narrow" and "arial" collapse together.
    for mod in mods:
        if mod in name:
            name.remove(mod)
    if "bold" in name:
        name.remove("bold")
        bold = True
    if "italic" in name:
        name.remove("italic")
        italic = True
    name = "".join(name)
    name = _simplename(name)

    _addfont(name, bold, italic, font, fonts)


def _parse_font_entry_darwin(name, filepath, fonts):
    """
    Parses a font entry for macOS

    :param name: The filepath without extensions or directories
    :param filepath: The full path to the font
    :param fonts: The pygame font dictionary to add the parsed font data to.
    """

    name = _simplename(name)

    mods = ("regular",)

    for mod in mods:
        if mod in name:
            name = name.replace(mod, "")

    bold = italic = False
    if "bold" in name:
        name = name.replace("bold", "")
        bold = True
    if "italic" in name:
        name = name.replace("italic", "")
        italic = True

    _addfont(name, bold, italic, filepath, fonts)


def _font_finder_darwin():
    """Scan the standard macOS font directories for OpenType files."""
    locations = [
        "/Library/Fonts",
        "/Network/Library/Fonts",
        "/System/Library/Fonts",
        "/System/Library/Fonts/Supplemental",
    ]

    username = os.getenv("USER")
    if username:
        locations.append(f"/Users/{username}/Library/Fonts")

    strange_root = "/System/Library/Assets/com_apple_MobileAsset_Font3"
    if exists(strange_root):
        locations += [f"{strange_root}/{loc}" for loc in os.listdir(strange_root)]

    fonts = {}

    for location in locations:
        if not exists(location):
            continue

        files = os.listdir(location)
        for file in files:
            name, extension = splitext(file)
            if extension in OpenType_extensions:
                _parse_font_entry_darwin(name, join(location, file), fonts)

    return fonts


def initsysfonts_darwin():
    """Read the fonts on MacOS, and OS X."""
    # fc-list is not likely to be there on pre 10.4.x, or MacOS 10.10+
    fonts = {}

    fclist_locations = [
        "/usr/X11/bin/fc-list",  # apple x11
        "/usr/X11R6/bin/fc-list",  # apple x11
    ]
    for bin_location in fclist_locations:
        if exists(bin_location):
            fonts = initsysfonts_unix(bin_location)
            break

    # Fall back to scanning the font directories directly.
    if len(fonts) == 0:
        fonts = _font_finder_darwin()

    return fonts


# read the fonts on unix
def initsysfonts_unix(path="fc-list"):
    """use the fc-list from fontconfig to get a list of fonts"""
    fonts = {}

    if sys.platform == "emscripten":
        return fonts

    try:
        proc = subprocess.run(
            [path, ":", "file", "family", "style"],
            stdout=subprocess.PIPE,  # capture stdout
            stderr=subprocess.PIPE,  # capture stderr
            check=True,  # so that errors raise python exception which is handled below
            timeout=1,  # so that we don't hang the program waiting
        )

    except FileNotFoundError:
        warnings.warn(
            f"'{path}' is missing, system fonts cannot be loaded on your platform"
        )

    except subprocess.TimeoutExpired:
        warnings.warn(
            f"Process running '{path}' timed-out! System fonts cannot be loaded on "
            "your platform"
        )

    except subprocess.CalledProcessError as e:
        warnings.warn(
            f"'{path}' failed with error code {e.returncode}! System fonts cannot be "
            f"loaded on your platform. Error log is:\n{e.stderr}"
        )

    else:
        # One fc-list entry per line: "file:family:style".
        for entry in proc.stdout.decode("ascii", "ignore").splitlines():
            try:
                _parse_font_entry_unix(entry, fonts)
            except ValueError:  # noqa: PERF203
                # try the next one.
                pass

    return fonts


def _parse_font_entry_unix(entry, fonts):
    """
    Parses an entry in the unix font data to add to the pygame font
    dictionary.

    :param entry: A entry from the unix font list.
    :param fonts: The pygame font dictionary to add the parsed font data to.

    :raises ValueError: if the entry does not split into the three
        colon-separated fields (caller skips such entries).
    """
    filename, family, style = entry.split(":", 2)
    if splitext(filename)[1].lower() in OpenType_extensions:
        bold = "Bold" in style
        italic = "Italic" in style
        oblique = "Oblique" in style
        # Use the first non-empty family name; fall back to the file's
        # base name when fc-list reported no family at all.
        for name in family.strip().split(","):
            if name:
                break
        else:
            name = splitext(basename(filename))[0]

        _addfont(_simplename(name), bold, italic or oblique, filename, fonts)


def create_aliases():
    """Map common fonts that are absent from the system to similar fonts
    that are installed in the system
    """
    # Each group lists interchangeable names; the first installed member
    # becomes the target for every missing member of the group.
    alias_groups = (
        (
            "monospace",
            "misc-fixed",
            "courier",
            "couriernew",
            "console",
            "fixed",
            "mono",
            "freemono",
            "bitstreamverasansmono",
            "verasansmono",
            "monotype",
            "lucidaconsole",
            "consolas",
            "dejavusansmono",
            "liberationmono",
        ),
        (
            "sans",
            "arial",
            "helvetica",
            "swiss",
            "freesans",
            "bitstreamverasans",
            "verasans",
            "verdana",
            "tahoma",
            "calibri",
            "gillsans",
            "segoeui",
            "trebuchetms",
            "ubuntu",
            "dejavusans",
            "liberationsans",
        ),
        (
            "serif",
            "times",
            "freeserif",
            "bitstreamveraserif",
            "roman",
            "timesroman",
            "timesnewroman",
            "dutch",
            "veraserif",
            "georgia",
            "cambria",
            "constantia",
            "dejavuserif",
            "liberationserif",
        ),
        ("wingdings", "wingbats"),
        ("comicsansms", "comicsans"),
    )
    for alias_set in alias_groups:
        # for/else: `found` is only set when some member is installed.
        for name in alias_set:
            if name in Sysfonts:
                found = Sysfonts[name]
                break
        else:
            continue
        for name in alias_set:
            if name not in Sysfonts:
                Sysalias[name] = found


def initsysfonts():
    """
    Initialise the sysfont module, called once. Locates the installed fonts
    and creates some aliases for common font categories.

    Has different initialisation functions for different platforms.
    """
    global is_init
    if is_init:
        # no need to re-init
        return

    if sys.platform == "win32":
        fonts = initsysfonts_win32()
    elif sys.platform == "darwin":
        fonts = initsysfonts_darwin()
    else:
        fonts = initsysfonts_unix()

    Sysfonts.update(fonts)
    create_aliases()
    is_init = True


# the exported functions


def get_fonts():
    """pygame.font.get_fonts() -> list
    get a list of system font names

    Returns the list of all found system fonts. Note that
    the names of the fonts will be all lowercase with spaces
    removed. This is how pygame internally stores the font
    names for matching.
    """
    initsysfonts()
    return list(Sysfonts)


def match_font(name, bold=False, italic=False):
    """pygame.font.match_font(name, bold=0, italic=0) -> name
    find the filename for the named system font

    This performs the same font search as the SysFont()
    function, only it returns the path to the TTF file
    that would be loaded. The font name can also be an
    iterable of font names or a string/bytes of comma-separated
    font names to try.

    If no match is found, None is returned.
    """
    initsysfonts()

    fontname = None
    if isinstance(name, (str, bytes)):
        name = name.split(b"," if isinstance(name, bytes) else ",")

    for single_name in name:
        if isinstance(single_name, bytes):
            single_name = single_name.decode()

        single_name = _simplename(single_name)
        styles = Sysfonts.get(single_name)
        if not styles:
            styles = Sysalias.get(single_name)
        if styles:
            # Relax the requested style step by step (drop italic, then
            # bold) and finally fall back to any style of this font.
            while not fontname:
                fontname = styles.get((bold, italic))
                if italic:
                    italic = 0
                elif bold:
                    bold = 0
                elif not fontname:
                    fontname = list(styles.values())[0]

        if fontname:
            break

    return fontname

if __name__ == '__main__':
    # CLI helper: sysfont.py NAME [BOLD] [ITALIC] prints the matched path.
    name = sys.argv[1]
    bold = bool(int(sys.argv[2])) if len(sys.argv) > 2 else False
    italic = bool(int(sys.argv[3])) if len(sys.argv) > 3 else False
    sys.stdout.write(str(match_font(name, bold, italic)))
--------------------------------------------------------------------------------
/python/iniparse/compat.py:
--------------------------------------------------------------------------------
# Copyright (c) 2001, 2002, 2003 Python Software Foundation
# Copyright (c) 2004-2008 Paramjit Oberoi
# All Rights Reserved.  See LICENSE-PSF & LICENSE for details.

"""Compatibility interfaces for ConfigParser

Interfaces of ConfigParser, RawConfigParser and SafeConfigParser
should be completely identical to the Python standard library
versions.
Tested with the unit tests included with Python-2.3.4

The underlying INIConfig object can be accessed as cfg.data
"""

import re
from typing import Dict, List, TextIO, Optional, Type, Union, Tuple

from .configparser import DuplicateSectionError, \
                   NoSectionError, NoOptionError, \
                   InterpolationMissingOptionError, \
                   InterpolationDepthError, \
                   InterpolationSyntaxError, \
                   DEFAULTSECT, MAX_INTERPOLATION_DEPTH

# These are imported only for compatibility.
# The code below does not reference them directly.
from .configparser import Error, InterpolationError, \
                   MissingSectionHeaderError, ParsingError

from . import ini


class RawConfigParser:
    """Drop-in replacement for the stdlib RawConfigParser backed by an
    order/comment-preserving ini.INIConfig (exposed as ``self.data``)."""

    def __init__(self, defaults: Optional[Dict[str, str]] = None, dict_type: Union[Type[Dict], str] = dict):
        # dict_type is accepted only for stdlib signature compatibility.
        if dict_type != dict:
            raise ValueError('Custom dict types not supported')
        self.data = ini.INIConfig(defaults=defaults, optionxformsource=self)

    def optionxform(self, optionstr: str) -> str:
        """Normalise an option name (stdlib default: lowercase)."""
        return optionstr.lower()

    def defaults(self) -> Dict[str, str]:
        d: Dict[str, str] = {}
        secobj: ini.INISection = self.data._defaults
        name: str
        for name in secobj._options:
            d[name] = secobj._compat_get(name)
        return d

    def sections(self) -> List[str]:
        """Return a list of section names, excluding [DEFAULT]"""
        return list(self.data)

    def add_section(self, section: str) -> None:
        """Create a new section in the configuration.

        Raise DuplicateSectionError if a section by the specified name
        already exists. Raise ValueError if name is DEFAULT or any of
        its case-insensitive variants.
        """
        # The default section is the only one that gets the case-insensitive
        # treatment - so it is special-cased here.
        if section.lower() == "default":
            raise ValueError('Invalid section name: %s' % section)

        if self.has_section(section):
            raise DuplicateSectionError(section)
        else:
            self.data._new_namespace(section)

    def has_section(self, section: str) -> bool:
        """Indicate whether the named section is present in the configuration.

        The DEFAULT section is not acknowledged.
        """
        return section in self.data

    def options(self, section: str) -> List[str]:
        """Return a list of option names for the given section name."""
        if section in self.data:
            return list(self.data[section])
        else:
            raise NoSectionError(section)

    def read(self, filenames: Union[List[str], str]) -> List[str]:
        """Read and parse a filename or a list of filenames.

        Files that cannot be opened are silently ignored; this is
        designed so that you can specify a list of potential
        configuration file locations (e.g. current directory, user's
        home directory, systemwide directory), and all existing
        configuration files in the list will be read. A single
        filename may also be given.

        Returns the list of files that were read.
        """
        files_read = []
        if isinstance(filenames, str):
            filenames = [filenames]
        for filename in filenames:
            try:
                fp = open(filename)
            except IOError:
                continue
            files_read.append(filename)
            self.data._readfp(fp)
            fp.close()
        return files_read

    def readfp(self, fp: TextIO, filename: Optional[str] = None) -> None:
        """Like read() but the argument must be a file-like object.

        The `fp' argument must have a `readline' method. Optional
        second argument is the `filename', which if not given, is
        taken from fp.name. If fp has no `name' attribute, `' is
        used.
        """
        self.data._readfp(fp)

    def get(self, section: str, option: str, vars: Optional[dict] = None) -> str:
        # `vars` is accepted for signature compatibility with ConfigParser.get
        # but is not consulted here.
        if not self.has_section(section):
            raise NoSectionError(section)

        sec: ini.INISection = self.data[section]
        if option in sec:
            return sec._compat_get(option)
        else:
            raise NoOptionError(option, section)

    def items(self, section: str) -> List[Tuple[str, str]]:
        """Return (option, value) pairs for the given section."""
        if section in self.data:
            ans = []
            opt: str
            for opt in self.data[section]:
                ans.append((opt, self.get(section, opt)))
            return ans
        else:
            raise NoSectionError(section)

    def getint(self, section: str, option: str) -> int:
        return int(self.get(section, option))

    def getfloat(self, section: str, option: str) -> float:
        return float(self.get(section, option))

    # recognised spellings for getboolean(), as in the stdlib
    _boolean_states = {
        '1': True, 'yes': True, 'true': True, 'on': True,
        '0': False, 'no': False, 'false': False, 'off': False,
    }

    def getboolean(self, section: str, option: str) -> bool:
        v = self.get(section, option)
        if v.lower() not in self._boolean_states:
            raise ValueError('Not a boolean: %s' % v)
        return self._boolean_states[v.lower()]

    def has_option(self, section: str, option: str) -> bool:
        """Check for the existence of a given option in a given section."""
        if section in self.data:
            sec = self.data[section]
        else:
            raise NoSectionError(section)
        return (option in sec)

    def set(self, section: str, option: str, value: str) -> None:
        """Set an option."""
        if section in self.data:
            self.data[section][option] = value
        else:
            raise NoSectionError(section)

    def write(self, fp: TextIO) -> None:
        """Write an .ini-format representation of the configuration state."""
        fp.write(str(self.data))

    # FIXME Return a boolean instead of integer
    def remove_option(self, section: str, option: str) -> int:
        """Remove an option."""
        if section in self.data:
            sec = self.data[section]
        else:
            raise NoSectionError(section)
        if option in sec:
            del sec[option]
            return 1
        else:
            return 0

    def remove_section(self, section: str) -> bool:
        """Remove a file section."""
        if not self.has_section(section):
            return False
        del self.data[section]
        return True


class ConfigDict:
    """Present a dict interface to an ini section."""

    def __init__(self, cfg: RawConfigParser, section: str, vars: dict):
        self.cfg: RawConfigParser = cfg
        self.section: str = section
        self.vars: dict = vars

    def __getitem__(self, key: str) -> Union[str, List[Union[int, str]]]:
        # Used as the mapping for "%(key)s" interpolation lookups.
        try:
            return RawConfigParser.get(self.cfg, self.section, key, self.vars)
        except (NoOptionError, NoSectionError):
            raise KeyError(key)


class ConfigParser(RawConfigParser):

    def get(
        self,
        section: str,
        option: str,
        raw: bool = False,
        vars: Optional[dict] = None,
    ) -> object:
        """Get an option value for a given section.

        All % interpolations are expanded in the return values, based on the
        defaults passed into the constructor, unless the optional argument
        `raw' is true.  Additional substitutions may be provided using the
        `vars' argument, which must be a dictionary whose contents overrides
        any pre-existing defaults.

        The section DEFAULT is special.
        """
        if section != DEFAULTSECT and not self.has_section(section):
            raise NoSectionError(section)

        option = self.optionxform(option)
        value = RawConfigParser.get(self, section, option, vars)

        if raw:
            return value
        else:
            d = ConfigDict(self, section, vars)
            return self._interpolate(section, option, value, d)

    def _interpolate(self, section: str, option: str, rawval: object, vars: "ConfigDict"):
        # do the string interpolation
        # NOTE(review): rawval is assumed to be a str here (`"%(" in value`,
        # value % vars); a non-string stored value would raise TypeError.
        value = rawval
        depth = MAX_INTERPOLATION_DEPTH
        while depth:                    # Loop through this until it's done
            depth -= 1
            if "%(" in value:
                try:
                    value = value % vars
                except KeyError as e:
                    raise InterpolationMissingOptionError(
                        option, section, rawval, e.args[0])
            else:
                break
        # Still unexpanded after MAX_INTERPOLATION_DEPTH passes -> recursion.
        if value.find("%(") != -1:
            raise InterpolationDepthError(option, section, rawval)
        return value

    def items(self, section: str, raw: bool = False, vars: Optional[dict] = None):
        """Return a list of tuples with (name, value) for each option
        in the section.

        All % interpolations are expanded in the return values, based on the
        defaults passed into the constructor, unless the optional argument
        `raw' is true.  Additional substitutions may be provided using the
        `vars' argument, which must be a dictionary whose contents overrides
        any pre-existing defaults.

        The section DEFAULT is special.
        """
        if section != DEFAULTSECT and not self.has_section(section):
            raise NoSectionError(section)
        if vars is None:
            options = list(self.data[section])
        else:
            options = []
            for x in self.data[section]:
                if x not in vars:
                    options.append(x)
            options.extend(vars.keys())

        if "__name__" in options:
            options.remove("__name__")

        d = ConfigDict(self, section, vars)
        if raw:
            return [(option, d[option])
                    for option in options]
        else:
            return [(option, self._interpolate(section, option, d[option], d))
                    for option in options]


class SafeConfigParser(ConfigParser):
    # "good" interpolation references, e.g. "%(name)s"
    _interpvar_re = re.compile(r"%\(([^)]+)\)s")
    # a "%" not followed by another "%" (or a trailing "%")
    _badpercent_re = re.compile(r"%[^%]|%$")

    def set(self, section: str, option: str, value: object) -> None:
        """Set an option, validating interpolation syntax first."""
        if not isinstance(value, str):
            raise TypeError("option values must be strings")
        # check for bad percent signs:
        # first, replace all "good" interpolations
        tmp_value = self._interpvar_re.sub('', value)
        # then, check if there's a lone percent sign left
        m = self._badpercent_re.search(tmp_value)
        if m:
            raise ValueError("invalid interpolation syntax in %r at "
                             "position %d" % (value, m.start()))

        ConfigParser.set(self, section, option, value)

    def _interpolate(self, section: str, option: str, rawval: str, vars: ConfigDict):
        # do the string interpolation
        L = []
        self._interpolate_some(option, L, rawval, section, vars, 1)
        return ''.join(L)

    _interpvar_match = re.compile(r"%\(([^)]+)\)s").match

    def _interpolate_some(
        self,
        option: str,
        accum: List[str],
        rest: str,
        section: str,
        map: ConfigDict,
        depth: int
    ) -> None:
        """Recursively expand %-references in `rest`, appending to `accum`."""
        if depth > MAX_INTERPOLATION_DEPTH:
            raise InterpolationDepthError(option, section, rest)
        while rest:
            p = rest.find("%")
            if p < 0:
                accum.append(rest)
                return
            if p > 0:
                accum.append(rest[:p])
                rest = rest[p:]
            # p is no longer used
            c = rest[1:2]
            if c == "%":
                # "%%" is an escaped literal percent
                accum.append("%")
                rest = rest[2:]
            elif c == "(":
                m = self._interpvar_match(rest)
                if m is None:
                    raise InterpolationSyntaxError(option, section, "bad interpolation variable reference %r" % rest)
                var = m.group(1)
                rest = rest[m.end():]
                try:
                    v = map[var]
                except KeyError:
                    raise InterpolationMissingOptionError(
                        option, section, rest, var)
                if "%" in v:
                    # the substituted value itself contains references
                    self._interpolate_some(option, accum, v,
                                           section, map, depth + 1)
                else:
                    accum.append(v)
            else:
                raise InterpolationSyntaxError(
                    option, section,
                    "'%' must be followed by '%' or '(', found: " + repr(rest))
--------------------------------------------------------------------------------
/gltf2_get.py:
--------------------------------------------------------------------------------
# Copyright (c) 2017-2025 Soft8Soft, LLC. All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
import bpy
import os

import pluginUtils.gltf as gltf

from .utils import *

def getUsedMaterials():
    """
    Gathers and returns all unfiltered, valid Blender materials.
    """
    return [bl_mat for bl_mat in bpy.data.materials]

def getImageIndex(exportSettings, uri):
    """
    Return the image index in the glTF array, or -1 if not cached.
    """
    cache = exportSettings['uriCache']
    if cache is None:
        return -1
    uris = cache['uri']
    if uri in uris:
        return uris.index(uri)
    return -1

def getTextureIndexByTexture(exportSettings, glTF, bl_texture):
    """
    Return the texture index in the glTF array by a given texture. Safer than
    "getTextureIndex" in case of different textures with the same image or
    linked textures with the same name but with different images.
    """
    if (exportSettings['uriCache'] is None or glTF.get('textures') is None
            or bl_texture is None):
        return -1

    bl_image = getTexImage(bl_texture)
    if bl_image is None or bl_image.filepath is None:
        return -1

    uri = getImageExportedURI(exportSettings, bl_image)
    cached_uris = exportSettings['uriCache']['uri']
    tex_name = getTextureName(bl_texture)

    # Match on both the exported image URI and the texture name.
    for idx, texture in enumerate(glTF['textures']):
        source = getTextureSource(texture)
        if source is not None and 'name' in texture:
            if cached_uris[source] == uri and texture['name'] == tex_name:
                return idx

    return -1

def getTextureSource(tex):
    """Texture source index; may also live in the KHR_texture_basisu extension."""
    if 'source' in tex:
        return tex['source']
    ext = gltf.getAssetExtension(tex, 'KHR_texture_basisu')
    return ext['source'] if ext else None
def getTextureIndexNode(exportSettings, glTF, name, shaderNode):
    """
    Return the glTF texture index for the image texture feeding the given
    shader-node input, or -1 if the input is not driven by a usable image.
    """
    if shaderNode is None:
        return -1

    if not isinstance(shaderNode, (bpy.types.ShaderNodeBsdfPrincipled,
                                   bpy.types.ShaderNodeMixShader,
                                   bpy.types.ShaderNodeGroup)):
        return -1

    socket = shaderNode.inputs.get(name)
    if socket is None or len(socket.links) == 0:
        return -1

    fromNode = socket.links[0].from_node

    # Look through an intermediate normal-map node.
    if isinstance(fromNode, bpy.types.ShaderNodeNormalMap):
        colorLinks = fromNode.inputs['Color'].links
        if not colorLinks:
            return -1
        fromNode = colorLinks[0].from_node

    # Look through a separate-color node.
    if isinstance(fromNode, bpy.types.ShaderNodeSeparateColor):
        colorLinks = fromNode.inputs['Color'].links
        if not colorLinks:
            return -1
        fromNode = colorLinks[0].from_node

    # Look through a multiply mix node; either input may carry the texture.
    if (isinstance(fromNode, bpy.types.ShaderNodeMix)
            and fromNode.data_type == 'RGBA' and fromNode.blend_type == 'MULTIPLY'):
        if fromNode.inputs['A'].links:
            fromNode = fromNode.inputs['A'].links[0].from_node
        elif fromNode.inputs['B'].links:
            fromNode = fromNode.inputs['B'].links[0].from_node
        else:
            return -1

    if not isinstance(fromNode, bpy.types.ShaderNodeTexImage):
        return -1

    img = getTexImage(fromNode)
    if img is None or img.size[0] == 0 or img.size[1] == 0:
        return -1

    return getTextureIndexByTexture(exportSettings, glTF, fromNode)
142 | """ 143 | 144 | if shaderNode is None: 145 | return 0 146 | 147 | if not isinstance(shaderNode, (bpy.types.ShaderNodeBsdfPrincipled, 148 | bpy.types.ShaderNodeMixShader, 149 | bpy.types.ShaderNodeGroup)): 150 | return 0 151 | 152 | if shaderNode.inputs.get(name) is None: 153 | return 0 154 | 155 | if len(shaderNode.inputs[name].links) == 0: 156 | return 0 157 | 158 | fromNode = shaderNode.inputs[name].links[0].from_node 159 | 160 | if isinstance(fromNode, bpy.types.ShaderNodeNormalMap): 161 | fromNode = fromNode.inputs['Color'].links[0].from_node 162 | 163 | if isinstance(fromNode, bpy.types.ShaderNodeSeparateColor): 164 | fromNode = fromNode.inputs['Color'].links[0].from_node 165 | 166 | if isinstance(fromNode, bpy.types.ShaderNodeMix) and fromNode.data_type == 'RGBA' and fromNode.blend_type == 'MULTIPLY': 167 | if len(fromNode.inputs['A'].links) > 0: 168 | fromNode = fromNode.inputs['A'].links[0].from_node 169 | elif len(fromNode.inputs['B'].links) > 0: 170 | fromNode = fromNode.inputs['B'].links[0].from_node 171 | 172 | if not isinstance(fromNode, bpy.types.ShaderNodeTexImage): 173 | return 0 174 | 175 | if len(fromNode.inputs['Vector'].links) == 0: 176 | return 0 177 | 178 | inputNode = fromNode.inputs['Vector'].links[0].from_node 179 | 180 | if not isinstance(inputNode, bpy.types.ShaderNodeUVMap): 181 | return 0 182 | 183 | if inputNode.uv_map == '': 184 | return 0 185 | 186 | for bl_mesh in bpy.data.meshes: 187 | texCoordIndex = bl_mesh.uv_layers.find(inputNode.uv_map) 188 | if texCoordIndex >= 0: 189 | return texCoordIndex 190 | 191 | return 0 192 | 193 | def getMaterialType(bl_mat): 194 | """ 195 | get blender material type: PBR, EEVEE, BASIC 196 | """ 197 | 198 | if not bl_mat.use_nodes or bl_mat.node_tree == None: 199 | return 'BASIC' 200 | 201 | if bl_mat.v3d.gltf_compat: 202 | return 'PBR' 203 | 204 | return 'EEVEE' 205 | 206 | def getSkinIndex(glTF, name, index_offset): 207 | """ 208 | Return the skin index in the glTF array. 
209 | """ 210 | 211 | if glTF.get('skins') is None: 212 | return -1 213 | 214 | skeleton = gltf.getNodeIndex(glTF, name) 215 | 216 | index = 0 217 | for skin in glTF['skins']: 218 | if skin['skeleton'] == skeleton: 219 | return index + index_offset 220 | 221 | index += 1 222 | 223 | return -1 224 | 225 | def getCameraIndex(glTF, name): 226 | """ 227 | Return the camera index in the glTF array. 228 | """ 229 | 230 | if glTF.get('cameras') is None: 231 | return -1 232 | 233 | index = 0 234 | for camera in glTF['cameras']: 235 | if camera['name'] == name: 236 | return index 237 | 238 | index += 1 239 | 240 | return -1 241 | 242 | def getCurveIndex(glTF, name): 243 | """ 244 | Return the curve index in the glTF array. 245 | """ 246 | 247 | v3dExt = gltf.getAssetExtension(glTF, 'S8S_v3d_curves') 248 | 249 | if v3dExt == None: 250 | return -1 251 | 252 | if v3dExt.get('curves') == None: 253 | return -1 254 | 255 | curves = v3dExt['curves'] 256 | 257 | index = 0 258 | for curve in curves: 259 | if curve['name'] == name: 260 | return index 261 | 262 | index += 1 263 | 264 | return -1 265 | 266 | def getNodeGraphIndex(glTF, name): 267 | """ 268 | Return the node graph index in the glTF array. 269 | """ 270 | 271 | v3dExt = gltf.getAssetExtension(glTF, 'S8S_v3d_materials') 272 | 273 | if v3dExt == None: 274 | return -1 275 | 276 | if v3dExt.get('nodeGraphs') == None: 277 | return -1 278 | 279 | index = 0 280 | for graph in v3dExt['nodeGraphs']: 281 | if graph['name'] == name: 282 | return index 283 | 284 | index += 1 285 | 286 | return -1 287 | 288 | def getImageExportedURI(exportSettings, bl_image): 289 | """ 290 | Return exported URI for a blender image. 
291 | """ 292 | name, ext = os.path.splitext(bpy.path.basename(bl_image.filepath)) 293 | 294 | name = name if name != '' else 'v3d_exported_image_' + bl_image.name.lower().replace(' ', '_') 295 | 296 | if imgNeedsCompression(bl_image, exportSettings): 297 | if bl_image.file_format == 'HDR': 298 | ext = '.hdr.xz' 299 | else: 300 | ext = '.ktx2' 301 | elif (bl_image.file_format == 'JPEG' 302 | or bl_image.file_format == 'WEBP' 303 | or bl_image.file_format == 'BMP' 304 | or bl_image.file_format == 'HDR' 305 | or bl_image.file_format == 'PNG'): 306 | if ext == '': 307 | ext = '.' + bl_image.file_format.lower() 308 | else: 309 | ext = '.png' 310 | 311 | uniqueURI = name + ext 312 | uriCache = exportSettings['uriCache'] 313 | 314 | i = 0 315 | while uniqueURI in uriCache['uri']: 316 | index = uriCache['uri'].index(uniqueURI) 317 | if uriCache['blDatablocks'][index] == bl_image: 318 | break 319 | 320 | i += 1 321 | uniqueURI = name + '_' + integerToBlSuffix(i) + ext 322 | 323 | return uniqueURI 324 | 325 | def getImageExportedMimeType(bl_image, exportSettings): 326 | 327 | if imgNeedsCompression(bl_image, exportSettings): 328 | if bl_image.file_format == 'HDR': 329 | return 'application/x-xz' 330 | else: 331 | return 'image/ktx2' 332 | elif bl_image.file_format == 'JPEG': 333 | return 'image/jpeg' 334 | elif bl_image.file_format == 'WEBP': 335 | return 'image/webp' 336 | elif bl_image.file_format == 'BMP': 337 | return 'image/bmp' 338 | elif bl_image.file_format == 'HDR': 339 | return 'image/vnd.radiance' 340 | else: 341 | return 'image/png' 342 | 343 | def getScalar(default_value, init_value = 0.0): 344 | """ 345 | Return scalar with a given default/fallback value. 
346 | """ 347 | 348 | return_value = init_value 349 | 350 | if default_value is None: 351 | return return_value 352 | 353 | return_value = default_value 354 | 355 | return return_value 356 | 357 | def getVec3(default_value, init_value = [0.0, 0.0, 0.0]): 358 | """ 359 | Return vec3 with a given default/fallback value. 360 | """ 361 | 362 | return_value = init_value.copy() 363 | 364 | if default_value is None or len(default_value) < 3: 365 | return return_value 366 | 367 | index = 0 368 | for number in default_value: 369 | return_value[index] = number 370 | 371 | index += 1 372 | if index == 3: 373 | return return_value 374 | 375 | return return_value 376 | 377 | def getVec4(default_value, init_value = [0.0, 0.0, 0.0, 1.0]): 378 | """ 379 | Return vec4 with a given default/fallback value. 380 | """ 381 | 382 | return_value = init_value.copy() 383 | 384 | if default_value is None or len(default_value) < 4: 385 | return return_value 386 | 387 | index = 0 388 | for number in default_value: 389 | return_value[index] = number 390 | 391 | index += 1 392 | if index == 4: 393 | return return_value 394 | 395 | return return_value 396 | 397 | def getIndex(list, name): 398 | """ 399 | Return index of a glTF element by a given name. 400 | """ 401 | 402 | if list is None or name is None: 403 | return -1 404 | 405 | index = 0 406 | for element in list: 407 | if element.get('name') is None: 408 | continue 409 | 410 | if element['name'] == name: 411 | return index 412 | 413 | index += 1 414 | 415 | return -1 416 | 417 | def getByName(list, name): 418 | """ 419 | Return element by a given name. 
420 | """ 421 | 422 | if list is None or name is None: 423 | return None 424 | 425 | for element in list: 426 | if element.get('name') is None: 427 | continue 428 | 429 | if element['name'] == name: 430 | return element 431 | 432 | return None 433 | 434 | def getOrCreateDefaultMatIndex(glTF): 435 | def_idx = gltf.getMaterialIndex(glTF, DEFAULT_MAT_NAME) 436 | 437 | if def_idx == -1: 438 | if 'materials' not in glTF: 439 | glTF['materials'] = [] 440 | 441 | glTF['materials'].append(createDefaultMaterial()) 442 | 443 | def_idx = len(glTF['materials']) - 1 444 | 445 | return def_idx 446 | 447 | def createDefaultMaterial(): 448 | return { 449 | "extensions" : { 450 | "S8S_v3d_materials" : { 451 | "nodeGraph" : { 452 | "edges" : [ 453 | { 454 | "fromNode" : 1, 455 | "fromOutput" : 0, 456 | "toInput" : 0, 457 | "toNode" : 0 458 | } 459 | ], 460 | "nodes" : [ 461 | { 462 | "inputs" : [ 463 | [ 0, 0, 0, 0 ], 464 | [ 0, 0, 0, 0 ], 465 | [ 0, 0, 0 ], 466 | 0 467 | ], 468 | "is_active_output" : True, 469 | "name" : "Material Output", 470 | "outputs" : [], 471 | "type" : "OUTPUT_MATERIAL_BL" 472 | }, 473 | { 474 | "inputs" : [ 475 | [ 0.800000011920929, 0.800000011920929, 0.800000011920929, 1.0 ], 476 | 0.0, 477 | [ 0.0, 0.0, 0.0 ] 478 | ], 479 | "is_active_output" : False, 480 | "name" : "Diffuse BSDF", 481 | "outputs" : [ 482 | [ 0, 0, 0, 0 ] 483 | ], 484 | "type" : "BSDF_DIFFUSE_BL" 485 | } 486 | ] 487 | }, 488 | "useCastShadows" : True, 489 | "useShadows" : True 490 | } 491 | }, 492 | "name" : DEFAULT_MAT_NAME 493 | } 494 | 495 | def getFontPath(bl_font): 496 | 497 | path = bl_font.filepath 498 | abspath = bpy.path.abspath(bl_font.filepath) 499 | 500 | if path == '' or not os.path.isfile(abspath): 501 | return os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fonts', 'bfont.woff') 502 | else: 503 | return abspath 504 | 505 | def getFontExportedURI(bl_font): 506 | 507 | path = bl_font.filepath 508 | abspath = bpy.path.abspath(bl_font.filepath) 509 | 510 | if path == 
def getFontExportedURI(bl_font):
    """URI under which the font file will be exported."""
    path = bl_font.filepath

    if path == '' or not os.path.isfile(bpy.path.abspath(path)):
        # Missing or packed font: fall back to the bundled default.
        return 'bfont.woff'
    return bpy.path.basename(path)

def getFontExportedMimeType(bl_font):
    """MIME type matching getFontExportedURI's fallback behaviour."""
    path = bl_font.filepath

    if path == '' or not os.path.isfile(bpy.path.abspath(path)):
        return 'font/woff'
    return 'font/ttf'
from string import Template

import bpy

import pluginUtils
log = pluginUtils.log.getLogger('V3D-BL')

from .gltf2_get import *
from .gltf2_extract import *
from .node_material_wrapper import NodeMaterialWrapper
from .utils import *

# Custom property linking a generated mesh back to the object it was made from.
TO_MESH_SOURCE_CUSTOM_PROP = "v3d_to_mesh_source_object"
WORLD_NODE_MAT_NAME = Template('Verge3D_Environment_${name}')

def flattenCollectionUnique(collection, dest_set):
    """
    Recursively collect all objects of a collection (following collection
    instances) into dest_set, descending into each instanced collection only
    the first time it is encountered.
    """
    for bl_obj in collection.all_objects:
        not_seen_before = bl_obj not in dest_set
        dest_set.add(bl_obj)

        if bl_obj.instance_type == 'COLLECTION' and bl_obj.instance_collection != None:
            if not_seen_before:
                flattenCollectionUnique(bl_obj.instance_collection, dest_set)
def meshObjGetExportData(objOriginal, bakeModifiers, optimizeTangents):
    """
    Prepare the data of the given MESH object before export by making such
    operations as:
    - applying all suitable modifiers if any
    - triangulate mesh ngons if tangents export is needed
    - restore shape keys after the previous operations if it's possible or needed
    """

    # Temporary datablocks created along the way; everything except the
    # resulting mesh is removed in the cleanup step at the end.
    generatedObjs = []
    generatedMeshes = []
    generatedShapeKeys = []

    needApplyMods = bakeModifiers is True and objHasExportedModifiers(objOriginal)

    # Stage 1: apply modifiers on a copy of the object. Shape keys block
    # modifier application, so they are cleared from the copy first (and
    # remembered for removal).
    objModsApplied = objOriginal
    if needApplyMods:
        objModsApplied = objOriginal.copy()
        objDelNotExportedModifiers(objModsApplied)

        tmpMesh = objModsApplied.data.copy()
        objModsApplied.data = tmpMesh
        if tmpMesh.shape_keys:
            generatedShapeKeys.append(tmpMesh.shape_keys)
            objModsApplied.shape_key_clear()

        objApplyModifiers(objModsApplied)

        generatedObjs.append(objModsApplied)
        generatedMeshes.append(tmpMesh)
        generatedMeshes.append(objModsApplied.data)

    needTangents = meshNeedTangentsForExport(objModsApplied.data, optimizeTangents)
    if not needTangents:
        log.debug('Tangent attribute will not be exported for mesh "%s"' % objOriginal.data.name)
    # Tangent export requires triangulated geometry, so ngons force a
    # triangulation pass.
    needTriangulation = needTangents and meshHasNgons(objModsApplied.data)

    # Stage 2: triangulate on another copy, same shape-key handling as above.
    objTriangulated = objModsApplied
    if needTriangulation:
        objTriangulated = objModsApplied.copy()
        objDelNotExportedModifiers(objTriangulated)
        objAddTriModifier(objTriangulated)

        tmpMesh = objTriangulated.data.copy()
        objTriangulated.data = tmpMesh
        if tmpMesh.shape_keys:
            generatedShapeKeys.append(tmpMesh.shape_keys)
            objTriangulated.shape_key_clear()

        objApplyModifiers(objTriangulated)

        generatedObjs.append(objTriangulated)
        generatedMeshes.append(tmpMesh)
        generatedMeshes.append(objTriangulated.data)

    # Stage 3: the stages above may have dropped shape keys; transfer them
    # back from the original object when possible.
    needTransferShapeKeys = (objOriginal.data.shape_keys is not None
            and objTriangulated.data.shape_keys is None)

    objShapeKeyTransfered = objTriangulated
    if needTransferShapeKeys:
        objShapeKeyTransfered = objTriangulated.copy()
        # Linking into the scene collection is needed for the transfer to work.
        bpy.context.collection.objects.link(objShapeKeyTransfered)

        success = objTransferShapeKeys(objOriginal, objShapeKeyTransfered,
                generatedObjs, generatedMeshes, generatedShapeKeys)
        if not success:
            log.warning('Could not generate shape keys because they '
                    + 'change vertex count. Object "' + objOriginal.name + '".')

        generatedObjs.append(objShapeKeyTransfered)

    resultingMesh = objShapeKeyTransfered.data

    # Cleanup: drop every temporary datablock except the resulting mesh.
    for obj in generatedObjs:
        bpy.data.objects.remove(obj, do_unlink=True, do_id_user=True, do_ui_user=True)
    for mesh in generatedMeshes:
        if mesh != resultingMesh:
            bpy.data.meshes.remove(mesh, do_unlink=True, do_id_user=True, do_ui_user=True)
    if generatedShapeKeys:
        bpy.data.batch_remove(generatedShapeKeys)

    return resultingMesh
def filterApply(exportSettings):
    """
    Gathers and filters the objects and assets to export.
    Also filters out invalid, deleted and not exportable elements.

    Results are stored under the 'filtered*'/'temporary*' keys of
    exportSettings.
    """

    # Two object sets: a shallow one (scene objects only) and one that also
    # follows collection instances.
    filteredObjectsShallow = set()
    filteredObjectsWithIC = set()
    for bl_scene in bpy.data.scenes:
        filteredObjectsShallow.update(bl_scene.objects)
        flattenCollectionUnique(bl_scene.collection, filteredObjectsWithIC)

    # Keep only objects whose every containing collection is export-enabled.
    def collExpFilter(obj):
        return all(coll.v3d.enable_export for coll in getObjectAllCollections(obj))

    filteredObjectsShallow = list(filter(collExpFilter, filteredObjectsShallow))
    filteredObjectsWithIC = list(filter(collExpFilter, filteredObjectsWithIC))

    exportSettings['filteredObjectsShallow'] = filteredObjectsShallow
    exportSettings['filteredObjectsWithIC'] = filteredObjectsWithIC

    # --- meshes ---------------------------------------------------------
    filteredMeshes = []
    filteredVertexGroups = {}
    temporaryMeshes = []

    for bl_mesh in bpy.data.meshes:

        if bl_mesh.users == 0:
            continue

        current_bl_mesh = bl_mesh
        current_bl_object = None
        skip = True

        # Find the first exported object using this mesh and convert the
        # mesh to its exportable form.
        for bl_obj in filteredObjectsWithIC:

            current_bl_object = bl_obj

            if current_bl_object.type != 'MESH':
                continue

            if current_bl_object.data == current_bl_mesh:

                skip = False

                mesh_for_export = meshObjGetExportData(current_bl_object,
                        exportSettings['bakeModifiers'], exportSettings['optimizeAttrs'])

                if mesh_for_export != current_bl_mesh:
                    mesh_for_export[TO_MESH_SOURCE_CUSTOM_PROP] = current_bl_object
                    temporaryMeshes.append(mesh_for_export)
                    current_bl_mesh = mesh_for_export

                break

        if skip:
            continue

        filteredMeshes.append(current_bl_mesh)
        filteredVertexGroups[getPtr(bl_mesh)] = current_bl_object.vertex_groups

    # --- curves (and text curves) ---------------------------------------
    filteredCurves = []

    for bl_curve in bpy.data.curves:

        if bl_curve.users == 0:
            continue

        if isinstance(bl_curve, bpy.types.TextCurve) and not exportSettings['bakeText']:
            # Text is exported as a curve, not baked to a mesh.
            filteredCurves.append(bl_curve)

        else:
            current_bl_curve = bl_curve
            current_bl_mesh = None
            current_bl_object = None
            skip = True

            for bl_obj in filteredObjectsWithIC:

                current_bl_object = bl_obj

                if current_bl_object.type not in ['CURVE', 'SURFACE', 'FONT']:
                    continue

                if current_bl_object.data == current_bl_curve:

                    skip = False

                    # Bake the curve to a mesh via a temporary evaluated copy.
                    copy_obj = current_bl_object.copy()

                    if not exportSettings['bakeModifiers']:
                        copy_obj.modifiers.clear()

                    dg = bpy.context.evaluated_depsgraph_get()

                    dg.scene.collection.objects.link(copy_obj)
                    copy_obj.update_tag()
                    bpy.context.view_layer.update()

                    copy_obj_eval = copy_obj.evaluated_get(dg)
                    current_bl_mesh = bpy.data.meshes.new_from_object(copy_obj_eval)

                    dg.scene.collection.objects.unlink(copy_obj)

                    if current_bl_mesh is not None:
                        current_bl_mesh.name = bl_curve.name
                        current_bl_mesh[TO_MESH_SOURCE_CUSTOM_PROP] = current_bl_object
                        temporaryMeshes.append(current_bl_mesh)
                    else:
                        skip = True

                    bpy.data.objects.remove(copy_obj)

                    break

            if skip:
                continue

            filteredMeshes.append(current_bl_mesh)
            filteredVertexGroups[getPtr(bl_curve)] = current_bl_object.vertex_groups

    # --- fonts used by exported text curves -----------------------------
    filteredFonts = []

    for bl_curve in filteredCurves:

        font = bl_curve.font if isinstance(bl_curve, bpy.types.TextCurve) else None
        if font is not None and font not in filteredFonts and font.users != 0:
            filteredFonts.append(font)

    # --- metaballs (always baked to meshes) -----------------------------
    for bl_meta in bpy.data.metaballs:

        if bl_meta.users == 0:
            continue

        current_bl_meta = bl_meta
        current_bl_mesh = None
        current_bl_obj = None
        skip = True

        for bl_obj in filteredObjectsWithIC:

            current_bl_obj = bl_obj

            if current_bl_obj.type == 'META' and current_bl_obj.data == current_bl_meta:

                skip = False

                dg = bpy.context.evaluated_depsgraph_get()
                obj_eval = current_bl_obj.evaluated_get(dg)
                current_bl_mesh = bpy.data.meshes.new_from_object(obj_eval)

                if current_bl_mesh is not None:
                    current_bl_mesh.name = bl_meta.name
                    current_bl_mesh[TO_MESH_SOURCE_CUSTOM_PROP] = current_bl_obj
                    temporaryMeshes.append(current_bl_mesh)
                else:
                    skip = True

                break

        if skip:
            continue

        filteredMeshes.append(current_bl_mesh)
        filteredVertexGroups[getPtr(bl_meta)] = current_bl_obj.vertex_groups

    exportSettings['filteredCurves'] = filteredCurves
    exportSettings['filteredFonts'] = filteredFonts
    exportSettings['filteredMeshes'] = filteredMeshes
    exportSettings['filteredVertexGroups'] = filteredVertexGroups
    exportSettings['temporaryMeshes'] = temporaryMeshes

    # --- materials ------------------------------------------------------
    filteredMaterials = []
    temporaryMaterials = []

    for bl_mat in getUsedMaterials():

        if bl_mat.users == 0:
            continue

        # Material referenced by an exported mesh.
        for bl_mesh in filteredMeshes:
            for mat in bl_mesh.materials:
                if mat == bl_mat and mat not in filteredMaterials:
                    filteredMaterials.append(mat)

        # Material referenced by an object-level (non-DATA) material slot.
        for bl_obj in filteredObjectsWithIC:
            if bl_obj.material_slots:
                for bl_material_slot in bl_obj.material_slots:
                    if bl_material_slot.link == 'DATA':
                        continue

                    mat = bl_material_slot.material
                    if mat == bl_mat and mat not in filteredMaterials:
                        filteredMaterials.append(mat)

        # Material referenced by an exported curve.
        for bl_curve in filteredCurves:
            for mat in bl_curve.materials:
                if mat == bl_mat and mat not in filteredMaterials:
                    filteredMaterials.append(mat)

    # The world background is exported as a synthetic temporary material.
    curr_world = bpy.context.scene.world
    if curr_world is not None:

        world_mat = bpy.data.materials.new(WORLD_NODE_MAT_NAME.substitute(
                name=curr_world.name))
        world_mat.use_nodes = True

        world_mat.v3d.dithering = curr_world.v3d.dithering

        world_mat_wrapper = NodeMaterialWrapper(world_mat)

        if bpy.app.version >= (5, 0, 0) or curr_world.use_nodes:
            # Node-based world (always the case from Blender 5.0 on).
            mat_node_tree = curr_world.node_tree.copy()
        else:
            # Legacy flat-color world: synthesize a background node setup.
            mat_node_tree = world_mat.node_tree.copy()
            mat_node_tree.nodes.clear()

            bkg_node = mat_node_tree.nodes.new('ShaderNodeBackground')
            bkg_node.inputs['Color'].default_value[0] = curr_world.color[0]
            bkg_node.inputs['Color'].default_value[1] = curr_world.color[1]
            bkg_node.inputs['Color'].default_value[2] = curr_world.color[2]
            bkg_node.inputs['Color'].default_value[3] = 1
            bkg_node.inputs['Strength'].default_value = 1

            out_node = mat_node_tree.nodes.new('ShaderNodeOutputWorld')

            mat_node_tree.links.new(bkg_node.outputs['Background'], out_node.inputs['Surface'])

        world_mat_wrapper.node_tree = mat_node_tree

        temporaryMaterials.append(world_mat)
        filteredMaterials.append(world_mat_wrapper)

    exportSettings['filteredMaterials'] = filteredMaterials
    exportSettings['temporaryMaterials'] = temporaryMaterials

    # --- node groups used by EEVEE-type materials -----------------------
    filteredNodeGroups = []
    for group in bpy.data.node_groups:
        if group.users == 0:
            continue

        for bl_mat in filteredMaterials:
            mat_type = getMaterialType(bl_mat)
            if mat_type == 'EEVEE':
                if (group not in filteredNodeGroups and
                        group in extractMaterialNodeTrees(bl_mat.node_tree)):
                    filteredNodeGroups.append(group)

    exportSettings['filteredNodeGroups'] = filteredNodeGroups

    # --- texture nodes (valid image, non-zero size, deduplicated) -------
    filteredTextures = []

    for bl_mat in filteredMaterials:
        if bl_mat.node_tree and bl_mat.use_nodes:
            for bl_node in bl_mat.node_tree.nodes:
                if (isinstance(bl_node, (bpy.types.ShaderNodeTexImage, bpy.types.ShaderNodeTexEnvironment)) and
                        getTexImage(bl_node) is not None and
                        getTexImage(bl_node).users != 0 and
                        getTexImage(bl_node).size[0] > 0 and
                        getTexImage(bl_node).size[1] > 0 and
                        bl_node not in filteredTextures):
                    filteredTextures.append(bl_node)

    for node_group in filteredNodeGroups:
        for bl_node in node_group.nodes:
            if (isinstance(bl_node, (bpy.types.ShaderNodeTexImage, bpy.types.ShaderNodeTexEnvironment)) and
                    getTexImage(bl_node) is not None and
                    getTexImage(bl_node).users != 0 and
                    getTexImage(bl_node).size[0] > 0 and
                    getTexImage(bl_node).size[1] > 0 and
                    bl_node not in filteredTextures):
                filteredTextures.append(bl_node)

    exportSettings['filteredTextures'] = filteredTextures

    # --- images behind the texture nodes --------------------------------
    filteredImages = []

    for bl_texture in filteredTextures:
        img = getTexImage(bl_texture)
        if img not in filteredImages:
            img['compression_error_status'] = 0 # no error
            filteredImages.append(img)

    exportSettings['filteredImages'] = filteredImages

    # --- cameras --------------------------------------------------------
    filteredCameras = []

    for bl_camera in bpy.data.cameras:

        if bl_camera.users == 0:
            continue

        filteredCameras.append(bl_camera)

    exportSettings['filteredCameras'] = filteredCameras

    # --- lights ---------------------------------------------------------
    filteredLights = []

    for bl_light in bpy.data.lights:

        if bl_light.users == 0:
            continue

        filteredLights.append(bl_light)

    exportSettings['filteredLights'] = filteredLights

    # --- light probes (only SPHERE and PLANE are supported) -------------
    filteredLightProbes = []

    for bl_probe in bpy.data.lightprobes:

        if bl_probe.users == 0:
            continue

        if bl_probe.type != 'SPHERE' and bl_probe.type != 'PLANE':
            continue

        filteredLightProbes.append(bl_probe)

    exportSettings['filteredLightProbes'] = filteredLightProbes

    # --- skinning: bone name -> joint index per skinned mesh ------------
    jointIndices = {}

    if exportSettings['skins']:
        for bl_obj in filteredObjectsWithIC:

            if bl_obj.type != 'MESH':
                continue

            armature_object = findArmature(bl_obj)
            if armature_object is None or len(armature_object.pose.bones) == 0:
                continue

            grp = jointIndices[bl_obj.data.name] = {}

            for bl_bone in armature_object.pose.bones:
                grp[bl_bone.name] = len(grp)

    exportSettings['jointIndices'] = jointIndices

    # --- clipping planes (empties flagged as such) ----------------------
    filteredClippingPlanes = []

    for bl_obj in bpy.data.objects:
        if bl_obj.type == 'EMPTY' and bl_obj.v3d.clipping_plane:
            filteredClippingPlanes.append(bl_obj)

    exportSettings['filteredClippingPlanes'] = filteredClippingPlanes
import math

import bpy
import numpy as np
import mathutils

import pyosl.glslgen

import pluginUtils
import pluginUtils as pu

log = pluginUtils.log.getLogger('V3D-BL')

# Tolerance used when checking matrix axes for orthogonality.
ORTHO_EPS = 1e-5
DEFAULT_MAT_NAME = 'v3d_default_material'

# Selection state saved by setSelectedObject() for restoreSelectedObjects().
selectedObject = None
selectedObjectsSave = []
prevActiveObject = None

def clamp(val, minval, maxval):
    """Clamp val into the [minval, maxval] range."""
    return max(minval, min(maxval, val))

def integerToBlSuffix(val):
    """Format an integer Blender-style: zero-padded to at least 3 characters."""
    text = str(val)
    return '0' * (3 - len(text)) + text

def setSelectedObject(bl_obj):
    """
    Select object for NLA baking, remembering the previous selection and
    active object so restoreSelectedObjects() can put them back.
    """
    global prevActiveObject
    global selectedObject, selectedObjectsSave

    selectedObject = bl_obj
    selectedObjectsSave = bpy.context.selected_objects.copy()

    for obj in selectedObjectsSave:
        obj.select_set(False)

    prevActiveObject = bpy.context.view_layer.objects.active
    bpy.context.view_layer.objects.active = bl_obj

    bl_obj.select_set(True)

def restoreSelectedObjects():
    """Undo setSelectedObject(): restore the prior selection and active object."""
    global prevActiveObject
    global selectedObject, selectedObjectsSave

    selectedObject.select_set(False)

    for obj in selectedObjectsSave:
        obj.select_set(True)

    bpy.context.view_layer.objects.active = prevActiveObject
    prevActiveObject = None

    selectedObject = None
    selectedObjectsSave = []

def getSceneByObject(obj):
    """Return the scene containing obj, or None if not found."""
    for scene in bpy.data.scenes:
        idx = scene.objects.find(obj.name)
        if idx > -1 and scene.objects[idx] == obj:
            return scene
    return None
96 | """ 97 | 98 | return getattr(bl_tex, 'image', None) 99 | 100 | def getTextureName(bl_texture): 101 | if (isinstance(bl_texture, (bpy.types.ShaderNodeTexImage, 102 | bpy.types.ShaderNodeTexEnvironment))): 103 | tex_name = bl_texture.image.name 104 | else: 105 | tex_name = bl_texture.name 106 | 107 | return tex_name 108 | 109 | def imgNeedsCompression(bl_image, exportSettings): 110 | method = bl_image.v3d.compression_method 111 | 112 | if bl_image.get('compression_error_status') == 1: 113 | return False 114 | elif (exportSettings['compressTextures'] and method != 'DISABLE' and 115 | bl_image.file_format in ['JPEG', 'PNG', 'HDR'] and 116 | pu.isPowerOfTwo(bl_image.size[0]) and pu.isPowerOfTwo(bl_image.size[1])): 117 | return True 118 | else: 119 | return False 120 | 121 | def mat4IsIdentity(mat4): 122 | return np.all(np.isclose(mat4, mathutils.Matrix.Identity(4), atol=1e-6)) 123 | 124 | def mat4IsTRSDecomposable(mat4): 125 | 126 | mat = mat4.to_3x3().transposed() 127 | v0 = mat[0].normalized() 128 | v1 = mat[1].normalized() 129 | v2 = mat[2].normalized() 130 | 131 | return (abs(v0.dot(v1)) < ORTHO_EPS 132 | and abs(v0.dot(v2)) < ORTHO_EPS 133 | and abs(v1.dot(v2)) < ORTHO_EPS) 134 | 135 | def mat4SvdDecomposeToMatrs(mat4): 136 | """ 137 | Decompose the given matrix into a couple of TRS-decomposable matrices or 138 | Returns None in case of an error. 
139 | """ 140 | 141 | try: 142 | u, s, vh = np.linalg.svd(mat4.to_3x3()) 143 | mat_u = mathutils.Matrix(u) 144 | mat_s = mathutils.Matrix([[s[0], 0, 0], [0, s[1], 0], [0, 0, s[2]]]) 145 | mat_vh = mathutils.Matrix(vh) 146 | 147 | mat_trans = mathutils.Matrix.Translation(mat4.to_translation()) 148 | mat_left = mat_trans @ (mat_u @ mat_s).to_4x4() 149 | 150 | return (mat_left, mat_vh.to_4x4()) 151 | 152 | except np.linalg.LinAlgError: 153 | return None 154 | 155 | def findArmature(obj): 156 | 157 | for mod in obj.modifiers: 158 | if mod.type == 'ARMATURE' and mod.object is not None and mod.object.users > 0: 159 | return mod.object 160 | 161 | armature = obj.find_armature() 162 | return armature if armature is not None and armature.users > 0 else None 163 | 164 | def extractAlphaMode(bl_mat): 165 | blendMethod = bl_mat.v3d.blend_method 166 | 167 | if blendMethod in ['OPAQUE', 'BLEND']: 168 | return blendMethod 169 | elif blendMethod == 'CLIP': 170 | return 'MASK' 171 | elif blendMethod == 'HASHED': 172 | return 'BLEND' 173 | 174 | if bl_mat and bl_mat.use_nodes and bl_mat.node_tree != None: 175 | node_trees = extractMaterialNodeTrees(bl_mat.node_tree) 176 | for node_tree in node_trees: 177 | for bl_node in node_tree.nodes: 178 | if isinstance(bl_node, bpy.types.ShaderNodeBsdfPrincipled): 179 | if len(bl_node.inputs['Alpha'].links) > 0: 180 | return 'BLEND' 181 | elif bl_node.inputs['Alpha'].default_value < 1: 182 | return 'BLEND' 183 | elif len(bl_node.inputs['Transmission Weight'].links) > 0: 184 | return 'BLEND' 185 | elif bl_node.inputs['Transmission Weight'].default_value > 0: 186 | return 'BLEND' 187 | elif isinstance(bl_node, bpy.types.ShaderNodeBsdfTransparent): 188 | return 'BLEND' 189 | 190 | return 'OPAQUE' 191 | 192 | def updateOrbitCameraView(cam_obj, scene): 193 | 194 | target_obj = cam_obj.data.v3d.orbit_target_object 195 | 196 | eye = cam_obj.matrix_world.to_translation() 197 | target = (cam_obj.data.v3d.orbit_target if target_obj is None 198 | else 
target_obj.matrix_world.to_translation()) 199 | 200 | quat = getLookAtAlignedUpMatrix(eye, target).to_quaternion() 201 | quat.rotate(cam_obj.matrix_world.inverted()) 202 | quat.rotate(cam_obj.matrix_basis) 203 | 204 | rot_mode = cam_obj.rotation_mode 205 | cam_obj.rotation_mode = 'QUATERNION' 206 | cam_obj.rotation_quaternion = quat 207 | cam_obj.rotation_mode = rot_mode 208 | 209 | bpy.context.view_layer.update() 210 | 211 | def getLookAtAlignedUpMatrix(eye, target): 212 | 213 | """ 214 | This method uses camera axes for building the matrix. 215 | """ 216 | 217 | axis_z = (eye - target).normalized() 218 | 219 | if axis_z.length == 0: 220 | axis_z = mathutils.Vector((0, -1, 0)) 221 | 222 | axis_x = mathutils.Vector((0, 0, 1)).cross(axis_z) 223 | 224 | if axis_x.length == 0: 225 | axis_x = mathutils.Vector((1, 0, 0)) 226 | 227 | axis_y = axis_z.cross(axis_x) 228 | 229 | return mathutils.Matrix([ 230 | axis_x, 231 | axis_y, 232 | axis_z, 233 | ]).transposed() 234 | 235 | def objDataUsesLineRendering(bl_obj_data): 236 | line_settings = getattr(getattr(bl_obj_data, 'v3d', None), 'line_rendering_settings', None) 237 | return bool(line_settings and line_settings.enable) 238 | 239 | def getObjectAllCollections(blObj): 240 | return [coll for coll in bpy.data.collections if blObj in coll.all_objects[:]] 241 | 242 | def objHasExportedModifiers(obj): 243 | """ 244 | Check if an object has any modifiers that should be applied before export. 245 | """ 246 | 247 | return any([modifierNeedsExport(mod) for mod in obj.modifiers]) 248 | 249 | def objDelNotExportedModifiers(obj): 250 | """ 251 | Remove modifiers that shouldn't be applied before export from an object. 
252 | """ 253 | 254 | for mod in obj.modifiers: 255 | if not modifierNeedsExport(mod): 256 | obj.modifiers.remove(mod) 257 | 258 | def objAddTriModifier(obj): 259 | mod = obj.modifiers.new('Temporary_Triangulation', 'TRIANGULATE') 260 | mod.quad_method = 'FIXED' 261 | 262 | def objApplyModifiers(obj): 263 | """ 264 | Creates a new mesh from applying modifiers to the mesh of the given object. 265 | Assignes the newly created mesh to the given object. The old mesh's user 266 | count will be decreased by 1. 267 | """ 268 | 269 | dg = bpy.context.evaluated_depsgraph_get() 270 | 271 | need_linking = dg.scene.collection.objects.find(obj.name) == -1 272 | need_showing = obj.hide_viewport 273 | 274 | if need_linking: 275 | dg.scene.collection.objects.link(obj) 276 | 277 | obj.update_tag() 278 | 279 | if need_showing: 280 | obj.hide_viewport = False 281 | 282 | bpy.context.view_layer.update() 283 | 284 | obj_eval = obj.evaluated_get(dg) 285 | 286 | obj.data = bpy.data.meshes.new_from_object(obj_eval, 287 | preserve_all_data_layers=True, depsgraph=dg) 288 | obj.modifiers.clear() 289 | 290 | if need_linking: 291 | dg.scene.collection.objects.unlink(obj) 292 | if need_showing: 293 | obj.hide_viewport = True 294 | 295 | def applyShapeKey(obj, index): 296 | shapekeys = obj.data.shape_keys.key_blocks 297 | 298 | if index < 0 or index > len(shapekeys): 299 | return 300 | 301 | for i in reversed(range(0, len(shapekeys))): 302 | if i != index: 303 | obj.shape_key_remove(shapekeys[i]) 304 | 305 | obj.shape_key_remove(shapekeys[0]) 306 | 307 | def objTransferShapeKeys(objFrom, objTo, generatedObjs, generatedMeshes, generatedShapeKeys): 308 | names = [shkey.name for shkey in objFrom.data.shape_keys.key_blocks] 309 | weights = [shkey.value for shkey in objFrom.data.shape_keys.key_blocks] 310 | 311 | log.debug('Transferring {} shapekeys on {}'.format(len(names) - 1, objFrom.name)) 312 | 313 | for index in range(1, len(objFrom.data.shape_keys.key_blocks)): 314 | log.debug('Transferring 
shapekey {} with name {}'.format(index, names[index])) 315 | 316 | objShapeKey = objFrom.copy() 317 | tmpMesh = objShapeKey.data.copy() 318 | generatedShapeKeys.append(tmpMesh.shape_keys) 319 | objShapeKey.data = tmpMesh 320 | 321 | bpy.context.collection.objects.link(objShapeKey) 322 | 323 | applyShapeKey(objShapeKey, index) 324 | objApplyModifiers(objShapeKey) 325 | 326 | generatedObjs.append(objShapeKey) 327 | generatedMeshes.append(tmpMesh) 328 | generatedMeshes.append(objShapeKey.data) 329 | 330 | for obj in bpy.context.scene.objects: 331 | obj.select_set(False) 332 | 333 | objShapeKey.select_set(True) 334 | bpy.context.view_layer.objects.active = objTo 335 | bpy.ops.object.join_shapes() 336 | 337 | if objTo.data.shape_keys is None: 338 | return False 339 | 340 | numTransferredKeys = len(objTo.data.shape_keys.key_blocks) - 1 341 | if numTransferredKeys != index: 342 | return False 343 | 344 | objTo.data.shape_keys.key_blocks[index].name = names[index] 345 | objTo.data.shape_keys.key_blocks[index].value = weights[index] 346 | 347 | return True 348 | 349 | def meshNeedTangentsForExport(mesh, optimize_tangents): 350 | """ 351 | Check if it's needed to export tangents for the given mesh. 
def meshNeedTangentsForExport(mesh, optimize_tangents):
    """
    Check if it's needed to export tangents for the given mesh.
    """
    # no UV layers -> no tangents possible at all
    if not meshHasUvLayers(mesh):
        return False
    return meshMaterialsUseTangents(mesh) or not optimize_tangents

def meshHasUvLayers(mesh):
    """Return True if the mesh has an active UV layer."""
    return bool(mesh.uv_layers.active and len(mesh.uv_layers) > 0)

def meshMaterialsUseTangents(mesh):
    """Return True if any material assigned to the mesh needs tangent data."""
    for mat in mesh.materials:
        if mat and mat.use_nodes and mat.node_tree != None:
            for node_tree in extractMaterialNodeTrees(mat.node_tree):
                for bl_node in node_tree.nodes:
                    if matNodeUseTangents(bl_node):
                        return True
        elif mat == None:
            # empty material slot: be conservative and request tangents
            return True

    return False

def matNodeUseTangents(bl_node):
    """Check whether a single shader node makes use of tangent data."""
    if isinstance(bl_node, bpy.types.ShaderNodeNormalMap):
        return True

    if (isinstance(bl_node, bpy.types.ShaderNodeTangent)
            and bl_node.direction_type == 'UV_MAP'):
        return True

    if isinstance(bl_node, bpy.types.ShaderNodeNewGeometry):
        return any(out.identifier == 'Tangent' and out.is_linked
                for out in bl_node.outputs)

    return False

def meshPreferredTangentsUvMap(mesh):
    """
    Pick the UV map used for tangent calculation: the single UV map
    referenced by normal/tangent nodes if unambiguous, otherwise the
    render-active UV layer; '' if none can be found.
    """
    uvMaps = []

    for mat in mesh.materials:
        if mat and mat.use_nodes and mat.node_tree != None:
            for nodeTree in extractMaterialNodeTrees(mat.node_tree):
                for node in nodeTree.nodes:
                    if ((isinstance(node, bpy.types.ShaderNodeNormalMap) or
                            (isinstance(node, bpy.types.ShaderNodeTangent)
                            and node.direction_type == 'UV_MAP'))
                            and node.uv_map):
                        uvMaps.append(node.uv_map)

    if len(uvMaps) == 1 and uvMaps[0] in mesh.uv_layers:
        return uvMaps[0]

    if len(uvMaps) > 1:
        log.warning('More than 1 UV map is used to calculate tangents in material(s) ' +
                'assigned on mesh {}, expect incorrect normal mapping'.format(mesh.name))

    for uvLayer in mesh.uv_layers:
        if uvLayer.active_render:
            return uvLayer.name

    log.error('Tangents UV map not found')
    return ''

def extractMaterialNodeTrees(node_tree):
    """NOTE: located here since it's needed for meshMaterialsUseTangents()"""
    # recursively collect the tree itself plus the trees of nested node groups
    out = [node_tree]

    for bl_node in node_tree.nodes:
        if isinstance(bl_node, bpy.types.ShaderNodeGroup):
            out += extractMaterialNodeTrees(bl_node.node_tree)

    return out

def meshHasNgons(mesh):
    """Return True if the mesh contains polygons with more than 4 corners."""
    return any(poly.loop_total > 4 for poly in mesh.polygons)

def modifierNeedsExport(mod):
    """
    Modifiers that are applied before export shouldn't be:
    - hidden during render (a way to disable export of a modifier)
    - ARMATURE modifiers (used separately via skinning)
    """

    return mod.show_render and mod.type != 'ARMATURE'

def getSocketDefvalCompat(socket, RGBAToRGB=False, isOSL=False):
    """
    Get the default value of input/output sockets in some compatible form.
    Vector types such as bpy_prop_array, Vector, Euler, etc... are converted to lists,
    primitive types are converted to int/float.
    """

    if socket.type == 'VALUE' or socket.type == 'INT':
        return socket.default_value
    elif socket.type == 'BOOLEAN':
        return int(socket.default_value)
    elif socket.type == 'VECTOR':
        return list(socket.default_value)
    elif socket.type == 'RGBA':
        val = list(socket.default_value)
        return val[0:3] if RGBAToRGB else val
    elif socket.type == 'SHADER':
        # shader sockets carry no usable value; zero vec4 placeholder
        return [0, 0, 0, 0]
    elif socket.type == 'STRING' and isOSL:
        return pyosl.glslgen.string_to_osl_const(socket.default_value)
    else:
        # covers 'CUSTOM' and any unknown socket type (the original had a
        # separate 'CUSTOM' branch identical to this fallback)
        return 0

def createCustomProperty(bl_element):
    """
    Filters and creates a custom property, which is stored in the glTF extra field.
    """
    if not bl_element:
        return None

    # internal/engine-specific IDs which must not leak into exported extras
    black_list = ['cycles', 'cycles_visibility', 'cycles_curves', '_RNA_UI', 'v3d']

    props = {}

    for custom_property in bl_element.keys():
        if custom_property in black_list:
            continue

        value = bl_element[custom_property]

        # accept primitives and anything convertible to a plain list
        # (e.g. IDPropertyArray); skip everything else
        if hasattr(value, 'to_list'):
            props[custom_property] = value.to_list()
        elif isinstance(value, (str, int, float)):
            props[custom_property] = value

    return props or None

def calcLightThresholdDist(bl_light, threshold):
    """Calculate the light attenuation distance from the given threshold.

    The light power at this distance equals the threshold value.
    """
    return math.sqrt(max(1e-16,
            max(bl_light.color.r, bl_light.color.g, bl_light.color.b)
            * max(1, bl_light.specular_factor)
            * abs(bl_light.energy / 100)
            / max(threshold, 1e-16)
            ))

def objHasFixOrthoZoom(bl_obj):
    """Check if the object is a child of an ortho camera with "fix ortho zoom" enabled."""
    return bool(bl_obj.parent and bl_obj.parent.type == 'CAMERA' and
            bl_obj.parent.data.type == 'ORTHO' and bl_obj.v3d.fix_ortho_zoom)

def objHasCanvasFitParams(bl_obj):
    """Check if the object is a camera child with canvas-fit parameters set."""
    return bool(bl_obj.parent and bl_obj.parent.type == 'CAMERA' and
            (bl_obj.v3d.canvas_fit_x != 'NONE' or bl_obj.v3d.canvas_fit_y != 'NONE'))

def sceneFrameSetFloat(scene, value):
    """Set the scene frame from a float value (integer frame + subframe)."""
    frame = math.floor(value)
    subframe = value - frame
    scene.frame_set(frame, subframe=subframe)

def nodeIsConnectedTo(blNode, fromOutput, toType):
    """Check if the given output socket links directly to a node of type toType."""
    for link in blNode.outputs[fromOutput].links:
        if link.is_valid and isinstance(link.to_node, toType):
            return True
    return False
return True 547 | return False 548 | -------------------------------------------------------------------------------- /python/pyosl/oslparse.py: -------------------------------------------------------------------------------- 1 | # OSL Parser 2 | 3 | import re, os, sys 4 | 5 | from . import osllex 6 | 7 | from .oslast import Node 8 | 9 | import ply.yacc as yacc 10 | 11 | # Get the token map 12 | tokens = osllex.tokens 13 | 14 | precedence = ( 15 | ('left', 'OR'), 16 | ('left', 'AND'), 17 | ('left', 'BITOR'), 18 | ('left', 'XOR'), 19 | ('left', 'BITAND'), 20 | ('left', 'EQ', 'NE'), 21 | ('left', 'LT', 'LE', 'GT', 'GE'), 22 | ('left', 'LSHIFT', 'RSHIFT'), 23 | ('left', 'PLUS', 'MINUS'), 24 | ('left', 'TIMES', 'DIVIDE', 'MOD'), 25 | ('right', 'UNARY') 26 | ) 27 | 28 | # overall structure 29 | 30 | def p_shader_file(p): 31 | '''shader-file : shader-file global-declaration 32 | | global-declaration''' 33 | if len(p) > 2: 34 | p[0] = p[1] 35 | p[0].append(p[2]) 36 | else: 37 | p[0] = Node('shader-file', p[1]) 38 | 39 | def p_global_declaration(p): 40 | '''global-declaration : function-declaration 41 | | struct-declaration 42 | | shader-declaration''' 43 | p[0] = p[1] 44 | 45 | def p_shader_declaration(p): 46 | '''shader-declaration : shadertype identifier metadata-block-opt LPAREN shader-formal-params RPAREN LBRACE statement-list RBRACE''' 47 | p[0] = Node('shader-declaration', p[1], p[2], p[3], p[5], p[8]) 48 | 49 | def p_shadertype(p): 50 | '''shadertype : DISPLACEMENT 51 | | SHADER 52 | | SURFACE 53 | | VOLUME''' 54 | p[0] = p[1] 55 | 56 | def p_shader_formal_params(p): 57 | '''shader-formal-params : shader-formal-params COMMA shader-formal-param 58 | | shader-formal-param''' 59 | if len(p) > 2: 60 | p[0] = p[1] 61 | p[0].append(p[3]) 62 | else: 63 | p[0] = Node('shader-formal-params', p[1]) 64 | 65 | def p_shader_formal_param(p): 66 | '''shader-formal-param : outputspec typespec identifier initializer metadata-block-opt 67 | | outputspec typespec identifier arrayspec 
initializer-list metadata-block-opt 68 | | empty''' 69 | if len(p) == 6: 70 | p[0] = Node('shader-formal-param', p[1], p[2], p[3], p[4], p[5]) 71 | elif len(p) == 7: 72 | p[0] = Node('shader-formal-param', p[1], p[2], p[3], p[4], p[5], [6]) 73 | else: 74 | p[0] = None 75 | 76 | def p_metadata_block_opt(p): 77 | '''metadata-block-opt : metadata-block 78 | | empty''' 79 | if len(p) > 1: 80 | p[0] = p[1] 81 | else: 82 | p[0] = None 83 | 84 | def p_metadata_block(p): 85 | '''metadata-block : METABEGIN metadata-list RBRACKET RBRACKET''' 86 | p[0] = p[2] 87 | 88 | def p_metadata_list(p): 89 | '''metadata-list : metadata-list COMMA metadata 90 | | metadata''' 91 | if len(p) > 2: 92 | p[0] = p[1] 93 | p[0].append(p[3]) 94 | else: 95 | p[0] = Node('metadata-list', p[1]) 96 | 97 | # NOTE: simple-typespec in spec 98 | def p_metadata(p): 99 | '''metadata : simple-typename identifier initializer 100 | | empty''' 101 | 102 | if len(p) > 2: 103 | p[0] = Node('metadata', p[1], p[2], p[3]) 104 | else: 105 | p[0] = None 106 | 107 | # declarations 108 | 109 | def p_function_declaration(p): 110 | '''function-declaration : typespec identifier LPAREN function-formal-params-opt RPAREN LBRACE statement-list RBRACE''' 111 | p[0] = Node('function-declaration', p[1], p[2], p[4], p[7]) 112 | 113 | def p_function_formal_params_opt(p): 114 | '''function-formal-params-opt : function-formal-params 115 | | empty''' 116 | if len(p) > 1: 117 | p[0] = p[1] 118 | else: 119 | p[0] = None 120 | 121 | def p_function_formal_params(p): 122 | '''function-formal-params : function-formal-params COMMA function-formal-param 123 | | function-formal-param''' 124 | if len(p) > 2: 125 | p[0] = p[1] 126 | p[0].append(p[3]) 127 | else: 128 | p[0] = Node('function-formal-params', p[1]) 129 | 130 | def p_function_formal_param(p): 131 | '''function-formal-param : outputspec typespec identifier arrayspec 132 | | outputspec typespec identifier''' 133 | if len(p) == 5: 134 | p[0] = Node('function-formal-param', p[1], p[2], 
p[3], p[4]) 135 | else: 136 | p[0] = Node('function-formal-param', p[1], p[2], p[3]) 137 | 138 | def p_outputspec(p): 139 | '''outputspec : OUTPUT 140 | | empty''' 141 | if len(p) > 1: 142 | p[0] = Node('outputspec', p[1]) 143 | else: 144 | p[0] = Node('outputspec', None) 145 | 146 | def p_struct_declatation(p): 147 | 'struct-declaration : STRUCT identifier LBRACE field-declarations RBRACE SEMI' 148 | p[0] = Node('struct-declaration', p[2], p[4]) 149 | 150 | def p_field_declarations(p): 151 | '''field-declarations : field-declarations field-declaration 152 | | field-declaration''' 153 | if len(p) > 2: 154 | p[0] = p[1] 155 | p[0].append(p[2]) 156 | else: 157 | p[0] = Node('field-declarations', p[1]) 158 | 159 | def p_field_declaration(p): 160 | '''field-declaration : typespec typed-field-list SEMI''' 161 | p[0] = Node('field-declaration', p[1], p[2]) 162 | 163 | def p_typed_field_list(p): 164 | '''typed-field-list : typed-field-list COMMA typed-field 165 | | typed-field''' 166 | if len(p) > 2: 167 | p[0] = p[1] 168 | p[0].append(p[3]) 169 | else: 170 | p[0] = Node('typed-field-list', p[1]) 171 | 172 | def p_typed_field(p): 173 | 'typed-field : identifier arrayspec-opt' 174 | p[0] = Node('typed-field', p[1], p[2]) 175 | 176 | def p_local_declaration(p): 177 | '''local-declaration : function-declaration 178 | | variable-declaration''' 179 | p[0] = p[1] 180 | 181 | def p_arrayspec_opt(p): 182 | '''arrayspec-opt : arrayspec 183 | | empty''' 184 | if len(p) > 1: 185 | p[0] = p[1] 186 | else: 187 | p[0] = None 188 | 189 | def p_arrayspec(p): 190 | '''arrayspec : LBRACKET integer RBRACKET 191 | | LBRACKET RBRACKET''' 192 | if len(p) > 3: 193 | p[0] = Node('arrayspec', p[2]) 194 | else: 195 | p[0] = Node('arrayspec') 196 | 197 | def p_variable_declaration(p): 198 | 'variable-declaration : typespec def-expressions SEMI' 199 | p[0] = Node('variable-declaration', p[1], p[2]) 200 | 201 | def p_def_expressions(p): 202 | '''def-expressions : def-expressions COMMA def-expression 
203 | | def-expression''' 204 | if len(p) > 2: 205 | p[0] = p[1] 206 | p[0].append(p[3]) 207 | else: 208 | p[0] = Node('def-expressions', p[1]) 209 | 210 | def p_def_expression(p): 211 | '''def-expression : identifier initializer-opt 212 | | identifier arrayspec initializer-list-opt''' 213 | if len(p) == 3: 214 | p[0] = Node('def-expression', p[1], p[2]) 215 | else: 216 | p[0] = Node('def-expression', p[1], p[2], p[3]) 217 | 218 | def p_initializer_opt(p): 219 | '''initializer-opt : initializer 220 | | empty''' 221 | if len(p) > 1: 222 | p[0] = p[1] 223 | else: 224 | p[0] = None 225 | 226 | def p_initializer(p): 227 | 'initializer : EQUALS expression' 228 | p[0] = Node('initializer', p[2]) 229 | 230 | def p_initializer_list_opt(p): 231 | '''initializer-list-opt : initializer-list 232 | | empty''' 233 | if len(p) > 1: 234 | p[0] = p[1] 235 | else: 236 | p[0] = None 237 | 238 | def p_initializer_list(p): 239 | 'initializer-list : EQUALS compound-initializer' 240 | p[0] = Node('initializer-list', p[2]) 241 | 242 | def p_compound_initializer(p): 243 | 'compound-initializer : LBRACE init-expression-list RBRACE' 244 | p[0] = Node('compound-initializer', p[2]) 245 | 246 | def p_init_expression_list(p): 247 | '''init-expression-list : init-expression-list COMMA init-expression 248 | | init-expression''' 249 | if len(p) > 2: 250 | p[0] = p[1] 251 | p[0].append(p[3]) 252 | else: 253 | p[0] = Node('init-expression-list', p[1]) 254 | 255 | def p_init_expression(p): 256 | '''init-expression : expression 257 | | compound-initializer''' 258 | p[0] = Node('init-expression', p[1]) 259 | 260 | # NOTE: identifier-structname in spec 261 | def p_typespec(p): 262 | '''typespec : simple-typename 263 | | CLOSURE simple-typename 264 | | identifier''' 265 | if len(p) == 2: 266 | p[0] = Node('typespec', p[1]) 267 | else: 268 | p[0] = Node('typespec', p[2]) 269 | 270 | def p_simple_typename(p): 271 | '''simple-typename : COLOR 272 | | FLOAT 273 | | INT 274 | | MATRIX 275 | | NORMAL 276 | | 
POINT 277 | | STRING 278 | | VECTOR 279 | | VOID''' 280 | p[0] = Node('simple-typename', p[1]) 281 | 282 | 283 | # statements 284 | 285 | def p_statement_list_opt(p): 286 | '''statement-list-opt : statement-list 287 | | empty''' 288 | if len(p) > 1: 289 | p[0] = p[1] 290 | else: 291 | p[0] = None 292 | 293 | def p_statement_list(p): 294 | '''statement-list : statement-list statement 295 | | statement''' 296 | if len(p) > 2: 297 | p[0] = p[1] 298 | p[0].append(p[2]) 299 | else: 300 | p[0] = Node('statement-list', p[1]) 301 | 302 | def p_statement(p): 303 | '''statement : compound-expression-opt SEMI 304 | | scoped-statements 305 | | local-declaration 306 | | conditional-statement 307 | | loop-statement 308 | | loopmod-statement 309 | | return-statement''' 310 | if len(p) == 2: 311 | p[0] = Node('statement', p[1]) 312 | else: 313 | p[0] = Node('statement-semi', p[1]) 314 | 315 | def p_scoped_statements(p): 316 | 'scoped-statements : LBRACE statement-list-opt RBRACE' 317 | p[0] = Node('scoped-statements', p[2]) 318 | 319 | def p_conditional_statement(p): 320 | '''conditional-statement : IF LPAREN compound-expression RPAREN statement 321 | | IF LPAREN compound-expression RPAREN statement ELSE statement''' 322 | if len(p) == 6: 323 | p[0] = Node('conditional-statement', p[3], p[5]) 324 | else: 325 | p[0] = Node('conditional-statement', p[3], p[5], p[7]) 326 | 327 | def p_loop_statement(p): 328 | '''loop-statement : WHILE LPAREN compound-expression RPAREN statement 329 | | DO statement WHILE LPAREN compound-expression RPAREN SEMI 330 | | FOR LPAREN for-init-statement compound-expression-opt SEMI compound-expression-opt RPAREN statement''' 331 | if len(p) == 6: 332 | p[0] = Node('loop-statement-while', p[3], p[5]) 333 | elif len(p) == 7: 334 | p[0] = Node('loop-statement-do-while', p[2], p[5]) 335 | else: 336 | p[0] = Node('loop-statement-for', p[3], p[4], p[6], p[8]) 337 | 338 | def p_for_init_statement(p): 339 | '''for-init-statement : expression-opt SEMI 340 | | 
variable-declaration''' 341 | if len(p) != 2: 342 | p[0] = Node('for-init-statement-semi', p[1]) 343 | else: 344 | p[0] = Node('for-init-statement', p[1]) 345 | 346 | def p_loopmod_statement(p): 347 | '''loopmod-statement : BREAK SEMI 348 | | CONTINUE SEMI''' 349 | p[0] = Node('loopmod-statement', p[1]) 350 | 351 | def p_return_statement(p): 352 | 'return-statement : RETURN expression-opt SEMI' 353 | p[0] = Node('return-statement', p[2]) 354 | 355 | 356 | # expressions 357 | 358 | def p_expression_list(p): 359 | '''expression-list : expression-list COMMA expression 360 | | expression''' 361 | if len(p) > 2: 362 | p[0] = p[1] 363 | p[0].append(p[3]) 364 | else: 365 | p[0] = Node('expression-list', p[1]) 366 | 367 | def p_expression_opt(p): 368 | '''expression-opt : expression 369 | | empty''' 370 | if len(p) > 1: 371 | p[0] = p[1] 372 | else: 373 | p[0] = None 374 | 375 | #| expression binary-op expression 376 | def p_expression(p): 377 | '''expression : number 378 | | stringliteral 379 | | type-constructor 380 | | incdec-op variable-ref 381 | | variable-ref incdec-op 382 | | unary-op expression %prec UNARY 383 | | LPAREN compound-expression RPAREN 384 | | binary-op 385 | | function-call 386 | | assign-expression 387 | | ternary-expression 388 | | typecast-expression 389 | | variable-ref 390 | | compound-initializer''' 391 | 392 | if len(p) == 2: 393 | p[0] = Node('expression', p[1]) 394 | elif len(p) == 3: 395 | p[0] = Node('expression', p[1], p[2]) 396 | else: 397 | p[0] = Node('expression-paren', p[2]) 398 | 399 | def p_compound_expression_opt(p): 400 | '''compound-expression-opt : compound-expression 401 | | empty''' 402 | if len(p) > 1: 403 | p[0] = p[1] 404 | else: 405 | p[0] = None 406 | 407 | def p_compound_expression(p): 408 | '''compound-expression : compound-expression COMMA expression 409 | | expression''' 410 | if len(p) > 2: 411 | p[0] = p[1] 412 | p[0].append(p[3]) 413 | else: 414 | p[0] = Node('compound-expression', p[1]) 415 | 416 | 417 | def 
p_variable_lvalue(p): 418 | '''variable-lvalue : identifier 419 | | variable-lvalue LBRACKET expression RBRACKET 420 | | variable-lvalue PERIOD identifier''' 421 | if len(p) == 2: 422 | p[0] = Node('variable-lvalue', p[1]) 423 | elif len(p) == 5: 424 | p[0] = Node('variable-lvalue-brackets', p[1], p[3]) 425 | else: 426 | p[0] = Node('variable-lvalue-period', p[1], p[3]) 427 | 428 | def p_variable_ref(p): 429 | 'variable-ref : variable-lvalue' 430 | p[0] = Node('variable-ref', p[1]) 431 | 432 | def p_binary_op(p): 433 | '''binary-op : expression TIMES expression 434 | | expression DIVIDE expression 435 | | expression MOD expression 436 | | expression PLUS expression 437 | | expression MINUS expression 438 | | expression LSHIFT expression 439 | | expression RSHIFT expression 440 | | expression LT expression 441 | | expression LE expression 442 | | expression GT expression 443 | | expression GE expression 444 | | expression EQ expression 445 | | expression NE expression 446 | | expression BITAND expression 447 | | expression XOR expression 448 | | expression BITOR expression 449 | | expression AND expression 450 | | expression OR expression''' 451 | p[0] = Node('binary-op', p[1], p[2], p[3]) 452 | 453 | def p_unary_op(p): 454 | '''unary-op : MINUS 455 | | PLUS 456 | | BITNOT 457 | | NOT''' 458 | p[0] = Node('unary-op', p[1]) 459 | 460 | def p_incdec_op(p): 461 | '''incdec-op : PLUSPLUS 462 | | MINUSMINUS''' 463 | p[0] = Node('incdec-op', p[1]) 464 | 465 | def p_type_constructor(p): 466 | 'type-constructor : typespec LPAREN expression-list RPAREN' 467 | p[0] = Node('type-constructor', p[1], p[3]) 468 | 469 | def p_function_call(p): 470 | 'function-call : identifier LPAREN function-args-opt RPAREN' 471 | p[0] = Node('function-call', p[1], p[3]) 472 | 473 | def p_function_args_opt(p): 474 | '''function-args-opt : function-args 475 | | empty''' 476 | if len(p) > 1: 477 | p[0] = p[1] 478 | else: 479 | p[0] = None 480 | 481 | def p_function_args(p): 482 | '''function-args : 
function-args COMMA expression 483 | | expression''' 484 | if len(p) > 2: 485 | p[0] = p[1] 486 | p[0].append(p[3]) 487 | else: 488 | p[0] = Node('function-args', p[1]) 489 | 490 | def p_assign_expression(p): 491 | 'assign-expression : variable-lvalue assign-op expression' 492 | p[0] = Node('assign-expression', p[1], p[2], p[3]) 493 | 494 | def p_assign_op(p): 495 | '''assign-op : EQUALS 496 | | TIMESEQUAL 497 | | DIVEQUAL 498 | | PLUSEQUAL 499 | | MINUSEQUAL 500 | | ANDEQUAL 501 | | OREQUAL 502 | | XOREQUAL 503 | | LSHIFTEQUAL 504 | | RSHIFTEQUAL''' 505 | p[0] = Node('assign-op', p[1]) 506 | 507 | def p_ternary_expression(p): 508 | 'ternary-expression : expression CONDOP expression COLON expression' 509 | p[0] = Node('ternary-expression', p[1], p[3], p[5]) 510 | 511 | def p_typecast_expression(p): 512 | 'typecast-expression : LPAREN simple-typename RPAREN expression' 513 | p[0] = Node('typecast-expression', p[2], p[4]) 514 | 515 | # lexical elements 516 | 517 | def p_integer(p): 518 | 'integer : ICONST' 519 | # TODO 520 | p[0] = Node('integer', p[1]) 521 | 522 | def p_floating_point(p): 523 | 'floating-point : FCONST' 524 | p[0] = Node('floating-point', p[1]) 525 | 526 | def p_number(p): 527 | '''number : integer 528 | | floating-point''' 529 | p[0] = p[1] 530 | 531 | def p_stringliteral(p): 532 | 'stringliteral : SCONST' 533 | p[0] = Node('stringliteral', p[1]) 534 | 535 | def p_identifier(p): 536 | 'identifier : ID' 537 | p[0] = p[1] 538 | 539 | def p_empty(p): 540 | 'empty : ' 541 | if len(p) > 1: 542 | p[0] = p[1] 543 | else: 544 | p[0] = None 545 | 546 | def p_error(p): 547 | if p: 548 | print('Syntax error at token', p.type, 'line', p.lineno) 549 | else: 550 | print('Syntax error at EOF') 551 | 552 | def get_ast(data): 553 | 554 | # apply shader hacks before parsing 555 | 556 | # double "" "" string literals in metadata 557 | data = re.sub(r'"\s*^\s*"', '', data, flags=re.MULTILINE) 558 | data = re.sub(r'"([^"\n]+)" +"([^"\n]+)"', '"\\1\\2"', data) 559 | 
data = re.sub(r'"([^"\n]+)" +"([^"\n]+)"', '"\\1\\2"', data) 560 | 561 | parser = yacc.yacc(debug=False) 562 | #parser = yacc.yacc(write_tables=False,debug=False) 563 | return parser.parse(data, osllex.lexer, debug=False) 564 | 565 | -------------------------------------------------------------------------------- /python/pluginUtils/gltf.py: -------------------------------------------------------------------------------- 1 | import math, mimetypes, struct, sys 2 | 3 | from .log import getLogger 4 | log = getLogger('V3D-PU') 5 | 6 | import importlib.util 7 | numpySpec = importlib.util.find_spec("numpy") 8 | useNumpy = numpySpec is not None 9 | if useNumpy: 10 | import numpy as np 11 | GLTF_TO_NP_DTYPE = { 12 | "BYTE" : np.int8, 13 | "UNSIGNED_BYTE" : np.uint8, 14 | "SHORT" : np.int16, 15 | "UNSIGNED_SHORT": np.uint16, 16 | "UNSIGNED_INT" : np.uint32, 17 | "FLOAT" : np.float32 18 | } 19 | 20 | WEBGL_FILTERS = { 21 | 'NEAREST' : 9728, 22 | 'LINEAR' : 9729, 23 | 'NEAREST_MIPMAP_NEAREST' : 9984, 24 | 'LINEAR_MIPMAP_NEAREST' : 9985, 25 | 'NEAREST_MIPMAP_LINEAR' : 9986, 26 | 'LINEAR_MIPMAP_LINEAR' : 9987 27 | } 28 | 29 | WEBGL_WRAPPINGS = { 30 | 'CLAMP_TO_EDGE' : 33071, 31 | 'MIRRORED_REPEAT' : 33648, 32 | 'REPEAT' : 10497 33 | } 34 | 35 | WEBGL_BLEND_EQUATIONS = { 36 | 'FUNC_ADD' : 32774, 37 | 'FUNC_SUBTRACT' : 32778, 38 | 'FUNC_REVERSE_SUBTRACT' : 32779 39 | } 40 | 41 | WEBGL_BLEND_FUNCS = { 42 | 'ZERO' : 0, 43 | 'ONE' : 1, 44 | 'SRC_COLOR' : 768, 45 | 'ONE_MINUS_SRC_COLOR' : 769, 46 | 'SRC_ALPHA' : 770, 47 | 'ONE_MINUS_SRC_ALPHA' : 771, 48 | 'DST_ALPHA' : 772, 49 | 'ONE_MINUS_DST_ALPHA' : 773, 50 | 'DST_COLOR' : 774, 51 | 'ONE_MINUS_DST_COLOR' : 775, 52 | 'SRC_ALPHA_SATURATE' : 776, 53 | 54 | # the followings are not supported by the engine yet 55 | # 'CONSTANT_COLOR' : 32769, 56 | # 'ONE_MINUS_CONSTANT_COLOR' : 32770, 57 | # 'CONSTANT_ALPHA' : 32771, 58 | # 'ONE_MINUS_CONSTANT_ALPHA' : 32772 59 | } 60 | 61 | # NOTE: some Windows systems use 'image/hdr' instead of 
# 'image/vnd.radiance'
COMPAT_IMAGE_MIME = ['image/jpeg', 'image/bmp', 'image/png', 'image/x-png', 'image/vnd.radiance', 'image/hdr']

def appendEntity(gltf, name, entity):
    """Append entity to the gltf[name] array (creating it if absent) and return its index."""
    if not gltf.get(name):
        gltf[name] = []

    gltf[name].append(entity)

    # return index
    return (len(gltf[name]) - 1)

def appendExtension(gltf, name, entity=None, extensionData=None, isRequired=False):
    """
    Register the extension "name" in the glTF asset and optionally attach it
    (merged with extensionData) to the given entity.

    Returns the extension dict attached to the entity, or None if no entity
    was given.
    """
    # BUGFIX: extensionData previously used a mutable default argument ({}),
    # which is shared across calls; None + fallback is the safe equivalent
    if extensionData is None:
        extensionData = {}

    if entity is not None:
        if entity.get('extensions') is None:
            entity['extensions'] = {}

        extensions = entity['extensions']

        if extensions.get(name) is None:
            extensions[name] = {}
        extensions[name].update(extensionData)
        extension = extensions[name]
    else:
        extension = None

    # add to used extensions

    if gltf.get('extensionsUsed') is None:
        gltf['extensionsUsed'] = []

    extensionsUsed = gltf['extensionsUsed']

    if name not in extensionsUsed:
        extensionsUsed.append(name)

    # add to required extensions

    if isRequired:
        if gltf.get('extensionsRequired') is None:
            gltf['extensionsRequired'] = []

        extensionsRequired = gltf['extensionsRequired']

        if name not in extensionsRequired:
            extensionsRequired.append(name)

    return extension

def getAssetExtension(asset, extension):
    """
    Get global/local asset extension
    """

    if asset.get('extensions') == None:
        return None

    return asset['extensions'].get(extension)


def createSampler(gltf, magFilter, wrapS, wrapT):
    """
    Creates and appends a texture sampler with the given parameters
    """

    if gltf.get('samplers') is None:
        gltf['samplers'] = []

    samplers = gltf['samplers']

    # sampler 0 is reserved for the default (empty) setup
    if len(samplers) == 0:
        samplers.append({})

    if (magFilter == WEBGL_FILTERS['LINEAR'] and
            wrapS == WEBGL_WRAPPINGS['REPEAT'] and
            wrapT == WEBGL_WRAPPINGS['REPEAT']):
        return 0

    # reuse an existing sampler with identical parameters, if any
    for index, currentSampler in enumerate(samplers):
        # pass by empty one
        if currentSampler.get('magFilter') is None or currentSampler.get('wrapS') is None:
            continue

        if (currentSampler['magFilter'] == magFilter and
                currentSampler['wrapS'] == wrapS and
                currentSampler['wrapT'] == wrapT):
            return index

    minFilter = WEBGL_FILTERS['LINEAR_MIPMAP_LINEAR']

    if magFilter == WEBGL_FILTERS['NEAREST']:
        # looks better while preserving "pixel art" graphics
        minFilter = WEBGL_FILTERS['NEAREST_MIPMAP_LINEAR']

    samplers.append({
        'magFilter': magFilter,
        'minFilter': minFilter,
        'wrapS': wrapS,
        'wrapT': wrapT
    })

    return len(samplers) - 1

def _getIndexByIdname(entities, idname):
    """Find the index of an entity matched by its 'id' (preferred) or 'name', -1 if absent."""
    for index, entity in enumerate(entities):
        key = 'id' if entity.get('id') != None else 'name'
        if entity.get(key) == idname:
            return index
    return -1

def getSceneIndex(gltf, idname):
    """
    Return the scene index in the gltf array.
    """
    if gltf.get('scenes') is None:
        return -1
    return _getIndexByIdname(gltf['scenes'], idname)

def getNodeIndex(gltf, idname):
    """
    Return the node index in the gltf array.
    """
    if gltf.get('nodes') is None:
        return -1
    return _getIndexByIdname(gltf['nodes'], idname)

def getMeshIndex(gltf, idname):
    """
    Return the mesh index in the gltf array.
    """
    if gltf.get('meshes') is None:
        return -1
    return _getIndexByIdname(gltf['meshes'], idname)


def getMaterialIndex(gltf, idname):
    """
    Return the material index in the gltf array.
    """
    if idname is None:
        return -1

    if gltf.get('materials') is None:
        return -1

    return _getIndexByIdname(gltf['materials'], idname)

def getCameraIndex(gltf, idname):
    """
    Return the camera index in the gltf array.
    """
    if gltf.get('cameras') is None:
        return -1
    return _getIndexByIdname(gltf['cameras'], idname)

def getLightIndex(gltf, idname):
    """
    Return the light index in the gltf array (lights live in the
    S8S_v3d_lights extension on the asset root).
    """
    v3dExt = appendExtension(gltf, 'S8S_v3d_lights', gltf)

    if v3dExt.get('lights') == None:
        return -1

    return _getIndexByIdname(v3dExt['lights'], idname)

def getLightProbeIndex(gltf, idname):
    """
    Return the light probe index in the gltf array.
    """
289 | """ 290 | 291 | v3dExt = appendExtension(gltf, 'S8S_v3d_light_probes', gltf) 292 | 293 | if v3dExt.get('lightProbes') == None: 294 | return -1 295 | 296 | lightProbes = v3dExt['lightProbes'] 297 | 298 | index = 0 299 | for probe in lightProbes: 300 | key = 'id' if probe.get('id') != None else 'name' 301 | if probe.get(key) == idname: 302 | return index 303 | 304 | index += 1 305 | 306 | return -1 307 | 308 | def getCurveIndex(gltf, idname): 309 | """ 310 | Return the curve index in the gltf array. 311 | """ 312 | 313 | v3dExt = appendExtension(gltf, 'S8S_v3d_curves', gltf) 314 | 315 | if v3dExt.get('curves') == None: 316 | return -1 317 | 318 | curves = v3dExt['curves'] 319 | 320 | index = 0 321 | for curve in curves: 322 | key = 'id' if curve.get('id') != None else 'name' 323 | if curve.get(key) == idname: 324 | return index 325 | 326 | index += 1 327 | 328 | return -1 329 | 330 | def getTextureIndex(gltf, idname): 331 | 332 | if gltf.get('textures') is None: 333 | return -1 334 | 335 | index = 0 336 | for tex in gltf['textures']: 337 | key = 'id' if tex.get('id') != None else 'name' 338 | if tex.get(key) == idname: 339 | return index 340 | 341 | index += 1 342 | 343 | return -1 344 | 345 | def getImageIndex(gltf, idname): 346 | 347 | if gltf.get('images') is None: 348 | return -1 349 | 350 | index = 0 351 | for image in gltf['images']: 352 | key = 'id' if image.get('id') != None else 'name' 353 | if image.get(key) == idname: 354 | return index 355 | 356 | index += 1 357 | 358 | return -1 359 | 360 | def getFontIndex(gltf, idname): 361 | 362 | v3dExt = appendExtension(gltf, 'S8S_v3d_curves', gltf) 363 | 364 | if v3dExt.get('fonts') == None: 365 | return -1 366 | 367 | fonts = v3dExt['fonts'] 368 | 369 | index = 0 370 | for font in fonts: 371 | key = 'id' if font.get('id') != None else 'name' 372 | if font.get(key) == idname: 373 | return index 374 | 375 | index += 1 376 | 377 | return -1 378 | 379 | def getClippingPlaneIndex(gltf, idname): 380 | 381 | v3dExt 
= appendExtension(gltf, 'S8S_v3d_clipping_planes', gltf) 382 | 383 | if v3dExt.get('clippingPlanes') == None: 384 | return -1 385 | 386 | clippingPlanes = v3dExt['clippingPlanes'] 387 | 388 | index = 0 389 | for plane in clippingPlanes: 390 | key = 'id' if plane.get('id') != None else 'name' 391 | if plane.get(key) == idname: 392 | return index 393 | 394 | index += 1 395 | 396 | return -1 397 | 398 | def generateBufferView(gltf, binary, data_buffer, target, alignment): 399 | 400 | if data_buffer is None: 401 | return -1 402 | 403 | gltf_target_number = [ 34962, 34963 ] 404 | gltf_target_enums = [ "ARRAY_BUFFER", "ELEMENT_ARRAY_BUFFER" ] 405 | 406 | target_number = 0 407 | if target in gltf_target_enums: 408 | target_number = gltf_target_number[gltf_target_enums.index(target)] 409 | 410 | if gltf.get('bufferViews') is None: 411 | gltf['bufferViews'] = [] 412 | 413 | bufferViews = gltf['bufferViews'] 414 | 415 | bufferView = {} 416 | 417 | if target_number != 0: 418 | bufferView['target'] = target_number 419 | 420 | bufferView['byteLength'] = len(data_buffer) 421 | 422 | binary_length = len(binary) 423 | 424 | remainder = 0 425 | 426 | if alignment > 0: 427 | remainder = binary_length % alignment 428 | 429 | if remainder > 0: 430 | padding_byte = struct.pack(bytes(str('<1b').encode()), 0) 431 | for i in range(0, alignment - remainder): 432 | binary.extend(padding_byte) 433 | 434 | 435 | bufferView['byteOffset'] = len(binary) 436 | binary.extend(data_buffer) 437 | 438 | # only have one buffer. 
439 | bufferView['buffer'] = 0 440 | 441 | bufferViews.append(bufferView) 442 | 443 | return len(bufferViews) - 1 444 | 445 | 446 | def generateAccessor(gltf, binary, data, componentType, count, _type, target): 447 | 448 | if data is None: 449 | log.error('No data') 450 | return -1 451 | 452 | gltf_convert_type = [ "b", "B", "h", "H", "I", "f" ] 453 | gltf_enumNames = [ "BYTE", "UNSIGNED_BYTE", "SHORT", "UNSIGNED_SHORT", "UNSIGNED_INT", "FLOAT" ] 454 | gltf_convert_type_size = [ 1, 1, 2, 2, 4, 4 ] 455 | 456 | if componentType not in gltf_enumNames: 457 | log.error('Invalid componentType ' + componentType) 458 | return -1 459 | 460 | componentTypeInteger = [ 5120, 5121, 5122, 5123, 5125, 5126 ][gltf_enumNames.index(componentType)] 461 | 462 | convert_type = gltf_convert_type[gltf_enumNames.index(componentType)] 463 | convert_type_size = gltf_convert_type_size[gltf_enumNames.index(componentType)] 464 | 465 | if count < 1: 466 | log.error('Invalid count ' + str(count)) 467 | return -1 468 | 469 | gltf_type_count = [1, 2, 3, 4, 4, 9, 16] 470 | gltf_type = [ "SCALAR", "VEC2", "VEC3", "VEC4", "MAT2", "MAT3", "MAT4" ] 471 | 472 | if _type not in gltf_type: 473 | log.error('Invalid type ' + _type) 474 | return -1 475 | 476 | type_count = gltf_type_count[gltf_type.index(_type)] 477 | 478 | 479 | if gltf.get('accessors') is None: 480 | gltf['accessors'] = [] 481 | 482 | accessors = gltf['accessors'] 483 | 484 | 485 | accessor = { 486 | 'componentType' : componentTypeInteger, 487 | 'count' : count, 488 | 'type' : _type 489 | } 490 | 491 | 492 | if useNumpy: 493 | if isinstance(data, list): 494 | npData = np.array(data, dtype=GLTF_TO_NP_DTYPE[componentType]) 495 | else: 496 | npData = data 497 | data = npData.tolist() 498 | 499 | npData = npData.reshape(-1, type_count) 500 | accessor['min'] = npData.min(axis=0).tolist() 501 | accessor['max'] = npData.max(axis=0).tolist() 502 | 503 | else: 504 | minimum = [] 505 | maximum = [] 506 | 507 | for component_index in range(0, 
type_count): 508 | for component in range(0, count): 509 | element = data[component * type_count + component_index] 510 | 511 | if component == 0: 512 | minimum.append(element) 513 | maximum.append(element) 514 | else: 515 | minimum[component_index] = min(minimum[component_index], element) 516 | maximum[component_index] = max(maximum[component_index], element) 517 | 518 | accessor['min'] = minimum 519 | accessor['max'] = maximum 520 | 521 | convert_type = '<' + str(count * type_count) + convert_type 522 | 523 | # NOTE: There is a bug in the struct package happened on old 524 | # python versions, reproduced in 3ds max 2017. Need to 525 | # use byte strings in the pack method as a workaround. 526 | # see: https://bugs.python.org/issue19099 527 | 528 | data_buffer = struct.pack(bytes(convert_type.encode()), *data) 529 | 530 | bufferView = generateBufferView(gltf, binary, data_buffer, target, convert_type_size) 531 | 532 | if bufferView < 0: 533 | log.error('Invalid buffer view') 534 | return -1 535 | 536 | accessor['bufferView'] = bufferView 537 | 538 | accessors.append(accessor) 539 | 540 | return len(accessors) - 1 541 | 542 | def createAnimChannel(sampler, nodeIndex, path): 543 | channel = { 544 | 'sampler' : sampler, 545 | 'target': { 546 | 'node': nodeIndex, 547 | 'path': path 548 | } 549 | } 550 | 551 | return channel 552 | 553 | def createAnimSampler(gltf, binary, keys, values, dim, interpolation='LINEAR'): 554 | sampler = {} 555 | 556 | sampler['interpolation'] = interpolation 557 | 558 | input = generateAccessor(gltf, binary, 559 | keys, 'FLOAT', len(keys), 'SCALAR', '') 560 | sampler['input'] = input 561 | 562 | if dim == 1: 563 | accessorType = 'SCALAR' 564 | elif dim == 2: 565 | accessorType = 'VEC2' 566 | elif dim == 3: 567 | accessorType = 'VEC3' 568 | elif dim == 4: 569 | accessorType = 'VEC4' 570 | 571 | output = generateAccessor(gltf, binary, 572 | values, 'FLOAT', len(values) // dim, accessorType, '') 573 | sampler['output'] = output 574 | 575 | 
return sampler 576 | 577 | def mergeAnimations(gltf, animations): 578 | """Find animations with the same name and merge them into one""" 579 | 580 | newAnimations = [] 581 | animMergeInfo = {} 582 | 583 | for anim in animations: 584 | 585 | name = anim['name'] 586 | channels = anim['channels'] 587 | samplers = anim['samplers'] 588 | 589 | if not name in animMergeInfo: 590 | animMergeInfo[name] = [[], [], None] 591 | 592 | for channel in channels: 593 | sampler = samplers[channel['sampler']] 594 | 595 | # fix sampler index in new array 596 | channel['sampler'] = len(animMergeInfo[name][1]) 597 | 598 | animMergeInfo[name][0].append(channel) 599 | animMergeInfo[name][1].append(sampler) 600 | animMergeInfo[name][2] = getAssetExtension(anim, 'S8S_v3d_animation') 601 | 602 | for name, mergeInfoElem in animMergeInfo.items(): 603 | anim = { 604 | 'name': name, 605 | 'channels' : mergeInfoElem[0], 606 | 'samplers' : mergeInfoElem[1] 607 | } 608 | 609 | if mergeInfoElem[2]: 610 | appendExtension(gltf, 'S8S_v3d_animation', anim, mergeInfoElem[2]) 611 | 612 | newAnimations.append(anim) 613 | 614 | return newAnimations 615 | 616 | def isCompatibleImagePath(path): 617 | 618 | # NOTE: add missing HDR mime type to python database 619 | if mimetypes.guess_type('somefile.hdr')[0] == None: 620 | mimetypes.add_type('image/vnd.radiance', '.hdr') 621 | 622 | mime = mimetypes.guess_type(path)[0] 623 | 624 | if mime in COMPAT_IMAGE_MIME: 625 | return True 626 | else: 627 | return False 628 | 629 | 630 | def imageMimeType(path): 631 | 632 | # NOTE: add missing HDR mime type to python database 633 | if mimetypes.guess_type('somefile.hdr')[0] == None: 634 | mimetypes.add_type('image/vnd.radiance', '.hdr') 635 | 636 | mime = mimetypes.guess_type(path)[0] 637 | 638 | # NOTE: no image/x-png 639 | if mime in ['image/jpeg', 'image/bmp', 'image/vnd.radiance', 'image/png']: 640 | return mime 641 | else: 642 | return 'image/png' 643 | 644 | def flatten(arr): 645 | if len(arr) and isinstance(arr[0], 
tuple): 646 | return list(sum(arr, ())) 647 | elif len(arr) and isinstance(arr[0], list): 648 | return list(sum(arr, [])) 649 | else: 650 | return arr 651 | 652 | def getNodeGraph(mat): 653 | if ('extensions' in mat and 'S8S_v3d_materials' in mat['extensions'] 654 | and 'nodeGraph' in mat['extensions']['S8S_v3d_materials']): 655 | return mat['extensions']['S8S_v3d_materials']['nodeGraph'] 656 | else: 657 | return None 658 | 659 | def createBlendMode(equation, srcRGB, dstRGB): 660 | 661 | blendMode = { 662 | 'blendEquation': WEBGL_BLEND_EQUATIONS[equation], 663 | 'srcRGB': WEBGL_BLEND_FUNCS[srcRGB], 664 | 'dstRGB': WEBGL_BLEND_FUNCS[dstRGB] 665 | } 666 | 667 | return blendMode 668 | 669 | def processInfinity(value): 670 | if math.isinf(value): 671 | if value > 0: 672 | return 'Infinity' 673 | else: 674 | return '-Infinity' 675 | else: 676 | return value 677 | --------------------------------------------------------------------------------