├── .github
├── pull_request_template.md
└── stale.yml
├── .gitignore
├── check_blender_release
├── check_module_enabled.py
├── check_module_numpy.py
├── check_module_requests.py
├── check_release.py
├── check_static_binaries.py
├── check_utils.py
└── scripts
│ ├── modules_enabled.py
│ ├── numpy_basic_operation.py
│ ├── numpy_import.py
│ ├── requests_basic_access.py
│ └── requests_import.py
├── check_source
├── check_cmake_consistency.py
├── check_cmake_consistency_config.py
├── check_deprecated.py
├── check_descriptions.py
├── check_header_duplicate.py
├── check_licenses.py
├── check_mypy.py
├── check_mypy_config.py
├── check_spelling.py
├── check_spelling_c_config.py
└── check_unused_defines.py
├── check_wiki
└── check_wiki_file_structure.py
├── config
├── analysis
│ └── valgrind.supp
└── ide
│ ├── eclipse
│ └── code_style_formatter.xml
│ └── qtcreator
│ └── code_style.xml
├── git
├── git_sh1_to_svn_rev.fossils
├── git_sh1_to_svn_rev.py
└── git_sort_commits.py
├── modules
└── blendfile.py
├── pyproject.toml
├── readme.rst
├── svn_rev_map
├── rev_to_sha1.py
└── sha1_to_rev.py
├── utils
├── autopep8_clean.py
├── autopep8_clean_config.py
├── blend2json.py
├── blender_keyconfig_export_permutations.py
├── blender_merge_format_changes.py
├── blender_theme_as_c.py
├── credits_git_gen.py
├── cycles_commits_sync.py
├── cycles_timeit.py
├── gdb_struct_repr_c99.py
├── git_log.py
├── git_log_review_commits.py
├── git_log_review_commits_advanced.py
├── make_cursor_gui.py
├── make_gl_stipple_from_xpm.py
└── make_shape_2d_from_blend.py
├── utils_api
└── bpy_introspect_ui.py
├── utils_build
└── cmake-flags
├── utils_doc
├── doxygen_single_file
└── rna_manual_reference_updater.py
├── utils_ide
└── qtcreator
│ ├── externaltools
│ ├── qtc_assembler_preview.py
│ ├── qtc_assembler_preview.xml
│ ├── qtc_blender_diffusion.py
│ ├── qtc_blender_diffusion.xml
│ ├── qtc_cpp_to_c_comments.py
│ ├── qtc_cpp_to_c_comments.xml
│ ├── qtc_doxy_file.py
│ ├── qtc_doxy_file.xml
│ ├── qtc_expand_tabmix.py
│ ├── qtc_expand_tabmix.xml
│ ├── qtc_project_update.py
│ ├── qtc_project_update.xml
│ ├── qtc_right_align_trailing_char.py
│ ├── qtc_right_align_trailing_char.xml
│ ├── qtc_select_surround.py
│ ├── qtc_select_surround.xml
│ ├── qtc_sort_paths.py
│ ├── qtc_sort_paths.xml
│ ├── qtc_toggle_if0.py
│ └── qtc_toggle_if0.xml
│ └── readme.rst
└── utils_maintenance
├── autopep8_format_paths.py
├── blender_menu_search_coverage.py
├── blender_update_themes.py
├── c_sort_blocks.py
├── c_struct_clean.py
├── clang_format_paths.py
├── cmake_sort_filelists.py
├── code_clean.py
├── modules
└── batch_edit_text.py
├── trailing_space_clean.py
└── trailing_space_clean_config.py
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | This repository is only used as a mirror. Blender development happens on projects.blender.org.
2 |
3 | To get started with contributing code, please see:
4 | https://wiki.blender.org/wiki/Process/Contributing_Code
5 |
--------------------------------------------------------------------------------
/.github/stale.yml:
--------------------------------------------------------------------------------
1 | # Configuration for probot-stale - https://github.com/probot/stale
2 | # This file is used on Blender's GitHub mirror to automatically close any pull request
3 | # and invite contributors to join the official development platform on blender.org
4 |
5 | # Number of days of inactivity before an Issue or Pull Request becomes stale
6 | daysUntilStale: 1
7 |
8 | # Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
9 | # Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
10 | daysUntilClose: 1
11 |
12 | # Label to use when marking as stale
13 | staleLabel: stale
14 |
15 | # Comment to post when closing a stale Issue or Pull Request.
16 | closeComment: >
17 | This issue has been automatically closed, because this repository is only
18 | used as a mirror. Blender development happens on projects.blender.org.
19 |
20 | To get started contributing code, please read:
21 | https://wiki.blender.org/wiki/Process/Contributing_Code
22 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # generic files to ignore
2 | .*
3 |
4 | # python temp paths
5 | __pycache__/
6 | *.py[cod]
7 |
8 | # editors
9 | *~
10 | *.swp
11 | *.swo
12 | *#
13 |
14 |
--------------------------------------------------------------------------------
/check_blender_release/check_module_enabled.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | import unittest
5 |
6 | from check_utils import (sliceCommandLineArguments,
7 | SceiptUnitTesting)
8 |
9 |
class UnitTesting(SceiptUnitTesting):
    """Check that optional modules are enabled in the Blender release build."""

    def test_modulesEnabled(self):
        # Runs scripts/modules_enabled.py inside Blender; the test passes
        # only when Blender exits with a zero status.
        self.checkScript("modules_enabled")
13 |
14 |
def main():
    """Entry point: split arguments at "--" and hand over to unittest."""
    # Everything before "--" belongs to the unittest framework.
    unittest_args, _parser_args = sliceCommandLineArguments()
    # Let the unittest runner drive the test discovery and execution.
    unittest.main(argv=unittest_args)


if __name__ == "__main__":
    main()
24 |
--------------------------------------------------------------------------------
/check_blender_release/check_module_numpy.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | import unittest
5 |
6 | from check_utils import (sliceCommandLineArguments,
7 | SceiptUnitTesting)
8 |
9 |
class UnitTesting(SceiptUnitTesting):
    """Check that the numpy module bundled with the release works."""

    def test_numpyImports(self):
        # Verify numpy can be imported at all.
        self.checkScript("numpy_import")

    def test_numpyBasicOperation(self):
        # Verify a basic numpy array operation succeeds (regression #50703).
        self.checkScript("numpy_basic_operation")
16 |
17 |
def main():
    """Entry point: split arguments at "--" and hand over to unittest."""
    # Arguments before "--" are for the unittest framework.
    unittest_args, _parser_args = sliceCommandLineArguments()
    # Run the tests.
    unittest.main(argv=unittest_args)


if __name__ == "__main__":
    main()
27 |
--------------------------------------------------------------------------------
/check_blender_release/check_module_requests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | import unittest
5 |
6 | from check_utils import (sliceCommandLineArguments,
7 | SceiptUnitTesting)
8 |
9 |
class UnitTesting(SceiptUnitTesting):
    """Check that the requests module bundled with the release works."""

    def test_requestsImports(self):
        # Verify requests can be imported at all.
        self.checkScript("requests_import")

    def test_requestsBasicAccess(self):
        # Verify an actual HTTPS request (with SSL verification) succeeds.
        self.checkScript("requests_basic_access")
16 |
17 |
def main():
    """Entry point: split arguments at "--" and hand over to unittest."""
    # Arguments before "--" go to the unittest framework.
    unittest_args, _parser_args = sliceCommandLineArguments()
    # Run the tests.
    unittest.main(argv=unittest_args)


if __name__ == "__main__":
    main()
27 |
--------------------------------------------------------------------------------
/check_blender_release/check_release.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | # Usage: ./check_release.py -- ../path/to/release/folder
5 |
6 |
7 | import os
8 | import sys
9 | import unittest
10 |
11 | import check_module_enabled
12 | import check_module_numpy
13 | import check_module_requests
14 | import check_static_binaries
15 | from check_utils import sliceCommandLineArguments
16 |
17 |
def load_tests(loader, standard_tests, pattern):
    """Aggregate the test cases of every individual checker module.

    Implements the standard unittest ``load_tests`` protocol; ``pattern``
    is unused but required by that protocol.
    """
    _ = pattern
    for module in (check_module_enabled,
                   check_module_numpy,
                   check_module_requests,
                   check_static_binaries):
        standard_tests.addTests(
            loader.loadTestsFromTestCase(module.UnitTesting))
    return standard_tests
28 |
29 |
def main():
    """Entry point: split arguments at "--" and hand over to unittest."""
    # Arguments before "--" are consumed by the unittest framework.
    unittest_args, _parser_args = sliceCommandLineArguments()
    # Run all aggregated tests.
    unittest.main(argv=unittest_args)


if __name__ == "__main__":
    main()
39 |
--------------------------------------------------------------------------------
/check_blender_release/check_static_binaries.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | import os
5 | from pathlib import Path
6 | import re
7 | import subprocess
8 | import unittest
9 |
10 | from check_utils import (
11 | sliceCommandLineArguments,
12 | parseArguments,
13 | )
14 |
15 |
# Shared libraries a release binary is allowed to depend on; anything else
# is expected to be linked statically.
ALLOWED_LIBS = (
    # Core C/C++ libraries
    "ld-linux.so",
    "ld-linux-x86-64.so",
    "libc.so",
    "libm.so",
    "libstdc++.so",
    "libdl.so",
    "libpthread.so",
    "libgcc_s.so",
    "librt.so",
    "libutil.so",

    # Libraries which are part of the default install.
    "libcrypt.so",
    "libnsl.so",
    "libmvec.so.1",

    # X11 libraries we don't link statically.
    "libX11.so",
    "libXext.so",
    "libXrender.so",
    "libXxf86vm.so",
    "libXi.so",
    "libXfixes.so",

    # OpenGL libraries.
    "libGL.so",
    "libGLU.so",

    # Library the software-GL is linking against and distributes with it.
    'libglapi.so',
    'libxcb.so',

    # Own dependencies we don't link statically.
    "libfreetype.so",
)

# File (base) names which are never checked (launcher scripts and such).
IGNORE_FILES = ("blender-softwaregl", )
# File extensions which are never checked (plain scripts, not binaries).
IGNORE_EXTENSION = (".sh", ".py", )
56 |
57 |
58 | # Library dependencies.
59 |
def getNeededLibrariesLDD(binary_filepath):
    """
    Collect the libraries a binary depends on, using ``ldd``.

    Not totally safe since ldd might actually execute the binary to get its
    symbols, and it also collects indirect dependencies which might not be
    desired.

    Has the advantage of reporting when a dependency library is not found.
    """
    output = subprocess.check_output(
        ("ldd", str(binary_filepath)), stderr=subprocess.STDOUT)
    libraries = []
    for raw_line in output.decode().split("\n"):
        raw_line = raw_line.strip()
        if not raw_line:
            continue
        # Lines look like "libfoo.so.1 => /path/libfoo.so.1 (0x...)";
        # keep only the library name on the left.
        name = raw_line.split("=>")[0]
        name = name.split(" (")[0].strip()
        libraries.append(os.path.basename(name))
    return libraries
83 |
84 |
def getNeededLibrariesOBJDUMP(binary_filepath):
    """
    Collect the direct dependencies of a binary, using ``objdump -p``.

    Totally safe, but will require a manual check for libraries which are
    not found on the system.
    """
    output = subprocess.check_output(
        ("objdump", "-p", str(binary_filepath)), stderr=subprocess.STDOUT)
    libraries = []
    for raw_line in output.decode().split("\n"):
        raw_line = raw_line.strip()
        # Only "NEEDED <libname>" entries describe direct dependencies;
        # empty lines fail this check as well.
        if raw_line.startswith("NEEDED"):
            libraries.append(raw_line[6:].strip())
    return libraries
106 |
107 |
def getNeededLibraries(binary_filepath):
    """
    Get all libraries the given binary depends on.
    """
    # The previous implementation selected between the LDD and OBJDUMP
    # back-ends with a hard-coded `if False:` (dead branch).  The OBJDUMP
    # implementation is the safe one (it never executes the binary), so it
    # is used unconditionally; getNeededLibrariesLDD() remains available
    # as a debugging alternative that also reports missing libraries.
    return getNeededLibrariesOBJDUMP(binary_filepath)
116 |
117 |
def stripLibraryABI(lib_name):
    """
    Strip the trailing ABI version suffix from a ``.so`` file name.

    Example: ``libexample.so.1.0`` => ``libexample.so``.
    """
    # A single anchored substitution removes the entire dotted numeric
    # suffix at once, replacing the old loop which re-ran re.sub once per
    # version component (the code carried a "TODO: Optimize this!").
    return re.sub(r"(\.[0-9]+)+$", "", lib_name)
132 |
133 |
class UnitTesting(unittest.TestCase):
    def checkBinary(self, binary_filepath):
        """
        Check that the given binary only depends on white-listed libraries.
        """
        for lib_name in getNeededLibraries(binary_filepath):
            self.assertTrue(stripLibraryABI(lib_name) in ALLOWED_LIBS,
                            "Error detected in {}: library used {}" . format(
                                binary_filepath, lib_name))

    def checkDirectory(self, directory):
        """
        Recursively traverse the directory and check every binary in it.
        """
        for path in Path(directory).rglob("*"):
            # Directories themselves are never checked.
            if path.is_dir():
                continue
            # Skip white-listed files and plain scripts.
            if path.name in IGNORE_FILES:
                continue
            if path.suffix in IGNORE_EXTENSION:
                continue
            if path.stat().st_mode & 0o111 != 0:
                # Any executable binary.
                self.checkBinary(path)
            elif path.suffix == ".so":
                # Any dynamic library.
                self.checkBinary(path)

    def test_directoryIsStatic(self):
        # Parse arguments which are not handled by the unit testing framework.
        args = parseArguments()
        # Sanity checks before the actual traversal.
        self.assertTrue(os.path.exists(args.directory),
                        "Given directory does not exist: {}" .
                        format(args.directory))
        self.assertTrue(os.path.isdir(args.directory),
                        "Given path is not a directory: {}" .
                        format(args.directory))
        # Perform the actual test.
        self.checkDirectory(args.directory)
180 |
181 |
def main():
    """Entry point: split arguments at "--" and hand over to unittest."""
    # Arguments before "--" belong to the unittest framework.
    unittest_args, _parser_args = sliceCommandLineArguments()
    # Run the tests.
    unittest.main(argv=unittest_args)


if __name__ == "__main__":
    main()
191 |
--------------------------------------------------------------------------------
/check_blender_release/check_utils.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 |
5 | import unittest
6 |
7 |
def sliceCommandLineArguments():
    """
    Split ``sys.argv`` at the ``--`` separator.

    Returns an ``(unittest_args, parser_args)`` tuple: everything before
    the separator is for the unittest framework, everything after it is
    for our own argument parser.  Without a separator all arguments go to
    unittest.
    """

    import sys

    if "--" in sys.argv:
        separator = sys.argv.index("--")
        unittest_args = sys.argv[:separator]
        parser_args = sys.argv[separator + 1:]
    else:
        unittest_args = sys.argv[:]
        parser_args = []

    return unittest_args, parser_args
25 |
26 |
def parseArguments():
    """Parse the arguments which follow the ``--`` separator."""
    import argparse

    parser = argparse.ArgumentParser(description="Static binary checker")
    parser.add_argument('directory', help='Directories to check')
    # Only the part after "--" belongs to this parser; the rest is for
    # the unit testing framework.
    _unittest_args, parser_args = sliceCommandLineArguments()
    # TODO(sergey): Run some checks here?
    return parser.parse_args(args=parser_args)
38 |
39 |
def runScriptInBlender(blender_directory, script):
    """
    Run the given script inside Blender, return True on a zero exit code.

    The script name is resolved relative to the "scripts" directory next
    to this file; ``--python-exit-code 1`` makes any Python error inside
    Blender produce a non-zero status.
    """

    import os
    import subprocess

    blender = os.path.join(blender_directory, "blender")
    python = os.path.join(os.path.dirname(__file__), "scripts", script) + ".py"

    command = (
        blender,
        "-b",
        "--factory-startup",
        "--python-exit-code", "1",
        "--python", python,
    )

    process = subprocess.Popen(command,
                               shell=False,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
    # Output is consumed (and discarded) so the pipe cannot fill up.
    process.communicate()
    return process.returncode == 0
63 |
64 |
class SceiptUnitTesting(unittest.TestCase):
    """
    Base class for tests which run a script inside Blender.

    NOTE(review): the misspelled name ("Sceipt" instead of "Script") is
    kept intentionally -- other modules import the class by this name.
    """

    def checkScript(self, script):
        # Parse arguments which are not handled by the unit testing framework.
        args = parseArguments()
        # Perform the actual check.
        self.assertTrue(runScriptInBlender(args.directory, script),
                        "Failed to run script {}" . format(script))
72 |
--------------------------------------------------------------------------------
/check_blender_release/scripts/modules_enabled.py:
--------------------------------------------------------------------------------
# SPDX-License-Identifier: GPL-2.0-or-later

# Runs inside Blender: a failing import here (non-zero exit code) means the
# bundled Python was built without one of these hashing / SSL /
# multiprocessing modules.
import _sha1
import _sha256
import _md5
import ssl
import multiprocessing.synchronize
7 |
--------------------------------------------------------------------------------
/check_blender_release/scripts/numpy_basic_operation.py:
--------------------------------------------------------------------------------
# SPDX-License-Identifier: GPL-2.0-or-later
# This code tests bug reported in #50703

import numpy

a = numpy.array([[3, 2, 0], [3, 1, 0]], dtype=numpy.int32)
# Simply evaluating the indexing expression is the test: it used to
# crash with the broken numpy build reported in #50703.
a[0]
8 |
--------------------------------------------------------------------------------
/check_blender_release/scripts/numpy_import.py:
--------------------------------------------------------------------------------
# SPDX-License-Identifier: GPL-2.0-or-later
# Runs inside Blender: exits non-zero when the bundled numpy is missing.
import numpy
3 |
--------------------------------------------------------------------------------
/check_blender_release/scripts/requests_basic_access.py:
--------------------------------------------------------------------------------
# SPDX-License-Identifier: GPL-2.0-or-later

# Runs inside Blender: verify the bundled requests module can perform a
# real HTTPS request with certificate verification enabled.
import requests

r = requests.get("https://blender.org/", verify=True)

assert r.status_code == 200
assert r.reason == "OK"
# Was "assert True if r.ok else False" -- a redundant conditional
# expression that is exactly equivalent to asserting r.ok directly.
assert r.ok
assert len(r.content) > 256
10 |
--------------------------------------------------------------------------------
/check_blender_release/scripts/requests_import.py:
--------------------------------------------------------------------------------
# SPDX-License-Identifier: GPL-2.0-or-later
# Runs inside Blender: exits non-zero when the bundled requests is missing.
import requests
3 |
--------------------------------------------------------------------------------
/check_source/check_cmake_consistency_config.py:
--------------------------------------------------------------------------------
# SPDX-License-Identifier: GPL-2.0-or-later

# Configuration for check_cmake_consistency.py: source files which are
# known not to be referenced from any CMakeLists.txt, plus CMake files to
# skip entirely.
import os

# Source files (or directory prefixes) CMake is not expected to reference.
IGNORE_SOURCE = (
    "/test/",
    "/tests/gtests/",
    "/release/",

    # specific source files
    "extern/audaspace/",

    # Use for `WIN32` only.
    "source/creator/blender_launcher_win32.c",

    # specific source files (bullet2 sources not built by our CMake)
    "extern/bullet2/src/BulletCollision/CollisionDispatch/btBox2dBox2dCollisionAlgorithm.cpp",
    "extern/bullet2/src/BulletCollision/CollisionDispatch/btConvex2dConvex2dAlgorithm.cpp",
    "extern/bullet2/src/BulletCollision/CollisionDispatch/btInternalEdgeUtility.cpp",
    "extern/bullet2/src/BulletCollision/CollisionShapes/btBox2dShape.cpp",
    "extern/bullet2/src/BulletCollision/CollisionShapes/btConvex2dShape.cpp",
    "extern/bullet2/src/BulletDynamics/Character/btKinematicCharacterController.cpp",
    "extern/bullet2/src/BulletDynamics/ConstraintSolver/btHinge2Constraint.cpp",
    "extern/bullet2/src/BulletDynamics/ConstraintSolver/btUniversalConstraint.cpp",

    "doc/doxygen/doxygen.extern.h",
    "doc/doxygen/doxygen.intern.h",
    "doc/doxygen/doxygen.main.h",
    "doc/doxygen/doxygen.source.h",
    "extern/bullet2/src/BulletCollision/CollisionDispatch/btBox2dBox2dCollisionAlgorithm.h",
    "extern/bullet2/src/BulletCollision/CollisionDispatch/btConvex2dConvex2dAlgorithm.h",
    "extern/bullet2/src/BulletCollision/CollisionDispatch/btInternalEdgeUtility.h",
    "extern/bullet2/src/BulletCollision/CollisionShapes/btBox2dShape.h",
    "extern/bullet2/src/BulletCollision/CollisionShapes/btConvex2dShape.h",
    "extern/bullet2/src/BulletDynamics/Character/btKinematicCharacterController.h",
    "extern/bullet2/src/BulletDynamics/ConstraintSolver/btHinge2Constraint.h",
    "extern/bullet2/src/BulletDynamics/ConstraintSolver/btUniversalConstraint.h",

    "build_files/build_environment/patches/config_gmpxx.h",
)

# Ignore cmake file, path pairs.
IGNORE_SOURCE_MISSING = (
    ( # Use for `WITH_NANOVDB`.
        "intern/cycles/kernel/CMakeLists.txt", (
            "nanovdb/util/CSampleFromVoxels.h",
            "nanovdb/util/SampleFromVoxels.h",
            "nanovdb/NanoVDB.h",
            "nanovdb/CNanoVDB.h",
        ),
    ),
)

# CMake files excluded from the consistency check.
IGNORE_CMAKE = (
    "extern/audaspace/CMakeLists.txt",
)

# When True, source files are also validated as UTF-8.
UTF8_CHECK = True

# Repository root, derived from this file's location.
SOURCE_DIR = os.path.normpath(os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "..")))

# doesn't have to exist, just use as reference
BUILD_DIR = os.path.normpath(os.path.abspath(os.path.normpath(os.path.join(SOURCE_DIR, "..", "build"))))
63 |
--------------------------------------------------------------------------------
/check_source/check_deprecated.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-2.0-or-later
2 |
3 | """
4 | Utility for reporting deprecated code which should be removed,
5 | noted by the date which must be included with the *DEPRECATED* comment.
6 |
7 | Once this date is past, the code should be removed.
8 | """
9 |
10 | from typing import (
11 | Callable,
12 | Generator,
13 | List,
14 | Tuple,
15 | Optional,
16 | )
17 |
18 |
19 | import os
20 | import datetime
21 | from os.path import splitext
22 |
23 | SKIP_DIRS = (
24 | "extern",
25 | # Not this directory.
26 | "tests",
27 | )
28 |
29 |
class term_colors:
    # ANSI terminal escape sequences used to colorize the report output;
    # ENDC resets the formatting back to the default.
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKCYAN = '\033[96m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
40 |
41 |
def is_c_header(filename: str) -> bool:
    """Return True when the filename has a C/C++ header extension."""
    ext = splitext(filename)[1]
    # NOTE: the original set listed ".hh" twice; the duplicate is removed
    # (set membership is unchanged).
    return (ext in {".h", ".hh", ".hpp", ".hxx"})
45 |
46 |
def is_c(filename: str) -> bool:
    """Return True when the filename has a C/C++ implementation extension."""
    return splitext(filename)[1] in {".c", ".cc", ".cpp", ".cxx", ".m", ".mm", ".rc", ".inl"}
50 |
51 |
def is_c_any(filename: str) -> bool:
    """Return True for any C/C++ file, implementation or header."""
    return is_c(filename) or is_c_header(filename)
54 |
55 |
def is_py(filename: str) -> bool:
    """Return True when the filename has a Python extension."""
    return splitext(filename)[1] == ".py"
59 |
60 |
def is_source_any(filename: str) -> bool:
    """Return True for any file this checker scans (C/C++ or Python)."""
    return is_c_any(filename) or is_py(filename)
63 |
64 |
def source_list(path: str, filename_check: Optional[Callable[[str], bool]] = None) -> Generator[str, None, None]:
    """Yield file paths under *path* which pass *filename_check*.

    Hidden directories (".git" and friends) are pruned from the walk.
    With no *filename_check* every file is yielded.
    """
    for dirpath, dirnames, filenames in os.walk(path):
        # Prune in-place so os.walk never descends into hidden directories.
        dirnames[:] = [d for d in dirnames if not d.startswith(".")]

        for name in filenames:
            if filename_check is None or filename_check(name):
                yield os.path.join(dirpath, name)
73 |
74 |
def deprecations() -> List[Tuple[datetime.datetime, Tuple[str, int], str]]:
    """
    Searches out source code for lines like

    /* *DEPRECATED* 2011/7/17 ``bgl.Buffer.list`` info text. */

    Or...

    # *DEPRECATED* 2010/12/22 ``some.py.func`` more info.

    Returns a list of ``(date, (filename, line_number), info)`` tuples,
    one per marker found.
    """
    SOURCE_DIR = os.path.normpath(os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "..")))

    SKIP_DIRS_ABS = [os.path.join(SOURCE_DIR, p) for p in SKIP_DIRS]

    DEPRECATED_ID = "*DEPRECATED*"
    deprecation_list = []

    scan_count = 0

    print("Scanning in %r for '%s YYYY/MM/DD info'" % (SOURCE_DIR, DEPRECATED_ID), end="...")

    for fn in source_list(SOURCE_DIR, is_source_any):
        # Never report this script's own occurrences of the marker.
        if os.path.samefile(fn, __file__):
            continue

        if any(fn.startswith(p) for p in SKIP_DIRS_ABS):
            continue

        with open(fn, 'r', encoding="utf8") as fh:
            fn = os.path.relpath(fn, SOURCE_DIR)
            buf = fh.read()
            index = 0
            while True:
                index = buf.find(DEPRECATED_ID, index)
                if index == -1:
                    break
                # The marker's payload runs to the end of the line.
                index_end = buf.find("\n", index)
                if index_end == -1:
                    index_end = len(buf)
                line_number = buf[:index].count("\n") + 1
                # Payload format: "YYYY/MM/DD identifier free-form info".
                payload = buf[index + len(DEPRECATED_ID): index_end].strip()
                try:
                    data = [w.strip() for w in payload.split('/', 2)]
                    data[-1], info = data[-1].split(' ', 1)
                    info = info.split("*/", 1)[0].strip()
                    if len(data) != 3:
                        print(
                            " poorly formatting line:\n"
                            "   %r:%d\n"
                            "   %s" %
                            (fn, line_number, data)
                        )
                    else:
                        deprecation_list.append((
                            datetime.datetime(int(data[0]), int(data[1]), int(data[2])),
                            (fn, line_number),
                            info,
                        ))
                except Exception:
                    # Was a bare "except:", which would also swallow
                    # KeyboardInterrupt/SystemExit during a long scan.
                    print("Error file - %r:%d" % (fn, line_number))
                    import traceback
                    traceback.print_exc()

                index = index_end

        scan_count += 1

    print(" {:d} files done, found {:d} deprecation(s)!".format(scan_count, len(deprecation_list)))

    return deprecation_list
151 |
152 |
def main() -> None:
    """Print every deprecation, flagging those whose removal date passed."""
    import datetime
    now = datetime.datetime.now()

    for when, (fn, line_number), info in deprecations():
        days_old = (now - when).days
        info = term_colors.BOLD + info + term_colors.ENDC
        # A positive age means the removal date is in the past.
        if days_old > 0:
            info = "[" + term_colors.FAIL + "REMOVE" + term_colors.ENDC + "] " + info
        else:
            info = "[" + term_colors.OKBLUE + "OK" + term_colors.ENDC + "] " + info

        print("{:s}: days-old({:d}), {:s}:{:d} {:s}".format(
            when.strftime("%Y/%m/%d"),
            days_old,
            fn,
            line_number,
            info,
        ))


if __name__ == '__main__':
    main()
178 |
--------------------------------------------------------------------------------
/check_source/check_descriptions.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-or-later

"""
this script checks for duplicate description strings (mainly operators),
so the same tool-tip is not accidentally re-used by unrelated operators
(the previous docstring wrongly described the XML theme updater)

./blender.bin --background -noaudio --python source/tools/check_source/check_descriptions.py
"""

import bpy
11 |
# These are known duplicates which do not warn.
# Each entry is a sorted tuple of operator identifiers that legitimately
# share one description; check_duplicates() compares against these exact
# tuples, so keep each entry sorted.
DUPLICATE_ACCEPT = (
    # operators
    ('ACTION_OT_clean', 'GRAPH_OT_clean'),
    ('ACTION_OT_clickselect', 'GRAPH_OT_clickselect'),
    ('ACTION_OT_copy', 'GRAPH_OT_copy'),
    ('ACTION_OT_delete', 'GRAPH_OT_delete'),
    ('ACTION_OT_duplicate', 'GRAPH_OT_duplicate'),
    ('ACTION_OT_duplicate_move', 'GRAPH_OT_duplicate_move'),
    ('ACTION_OT_extrapolation_type', 'GRAPH_OT_extrapolation_type'),
    ('ACTION_OT_handle_type', 'GRAPH_OT_handle_type'),
    ('ACTION_OT_interpolation_type', 'GRAPH_OT_interpolation_type'),
    ('ACTION_OT_keyframe_insert', 'GRAPH_OT_keyframe_insert'),
    ('ACTION_OT_mirror', 'GRAPH_OT_mirror'),
    ('ACTION_OT_paste', 'GRAPH_OT_paste'),
    ('ACTION_OT_sample', 'GRAPH_OT_sample'),
    ('ACTION_OT_select_all', 'GRAPH_OT_select_all'),
    ('ACTION_OT_select_border', 'GRAPH_OT_select_border'),
    ('ACTION_OT_select_column', 'GRAPH_OT_select_column'),
    ('ACTION_OT_select_leftright', 'GRAPH_OT_select_leftright'),
    ('ACTION_OT_select_less', 'GRAPH_OT_select_less'),
    ('ACTION_OT_select_linked', 'GRAPH_OT_select_linked'),
    ('ACTION_OT_select_more', 'GRAPH_OT_select_more'),
    ('ACTION_OT_unlink', 'NLA_OT_action_unlink'),
    ('ACTION_OT_view_all', 'CLIP_OT_dopesheet_view_all', 'GRAPH_OT_view_all'),
    ('ACTION_OT_view_frame', 'GRAPH_OT_view_frame'),
    ('ANIM_OT_change_frame', 'CLIP_OT_change_frame', 'IMAGE_OT_change_frame'),
    ('ARMATURE_OT_autoside_names', 'POSE_OT_autoside_names'),
    ('ARMATURE_OT_bone_layers', 'POSE_OT_bone_layers'),
    ('ARMATURE_OT_extrude_forked', 'ARMATURE_OT_extrude_move'),
    ('ARMATURE_OT_flip_names', 'POSE_OT_flip_names'),
    ('ARMATURE_OT_select_all', 'POSE_OT_select_all'),
    ('ARMATURE_OT_select_hierarchy', 'POSE_OT_select_hierarchy'),
    ('ARMATURE_OT_select_linked', 'POSE_OT_select_linked'),
    ('ARMATURE_OT_select_mirror', 'POSE_OT_select_mirror'),
    ('CLIP_OT_cursor_set', 'UV_OT_cursor_set'),
    ('CLIP_OT_disable_markers', 'CLIP_OT_graph_disable_markers'),
    ('CLIP_OT_graph_select_border', 'MASK_OT_select_border'),
    ('CLIP_OT_view_ndof', 'IMAGE_OT_view_ndof', 'VIEW2D_OT_ndof'),
    ('CLIP_OT_view_pan', 'IMAGE_OT_view_pan', 'VIEW2D_OT_pan', 'VIEW3D_OT_view_pan'),
    ('CLIP_OT_view_zoom', 'VIEW2D_OT_zoom'),
    ('CLIP_OT_view_zoom_in', 'VIEW2D_OT_zoom_in'),
    ('CLIP_OT_view_zoom_out', 'VIEW2D_OT_zoom_out'),
    ('CONSOLE_OT_copy', 'FONT_OT_text_copy', 'TEXT_OT_copy'),
    ('CONSOLE_OT_delete', 'FONT_OT_delete', 'TEXT_OT_delete'),
    ('CONSOLE_OT_insert', 'FONT_OT_text_insert', 'TEXT_OT_insert'),
    ('CONSOLE_OT_paste', 'FONT_OT_text_paste', 'TEXT_OT_paste'),
    ('CURVE_OT_handle_type_set', 'MASK_OT_handle_type_set'),
    ('CURVE_OT_shortest_path_pick', 'MESH_OT_shortest_path_pick'),
    ('CURVE_OT_switch_direction', 'MASK_OT_switch_direction'),
    ('FONT_OT_line_break', 'TEXT_OT_line_break'),
    ('FONT_OT_move', 'TEXT_OT_move'),
    ('FONT_OT_move_select', 'TEXT_OT_move_select'),
    ('FONT_OT_select_all', 'TEXT_OT_select_all'),
    ('FONT_OT_text_cut', 'TEXT_OT_cut'),
    ('GRAPH_OT_previewrange_set', 'NLA_OT_previewrange_set'),
    ('GRAPH_OT_properties', 'IMAGE_OT_properties', 'LOGIC_OT_properties', 'NLA_OT_properties'),
    ('IMAGE_OT_clear_render_border', 'VIEW3D_OT_clear_render_border'),
    ('IMAGE_OT_render_border', 'VIEW3D_OT_render_border'),
    ('IMAGE_OT_toolshelf', 'NODE_OT_toolbar', 'VIEW3D_OT_toolshelf'),
    ('LATTICE_OT_select_ungrouped', 'MESH_OT_select_ungrouped', 'PAINT_OT_vert_select_ungrouped'),
    ('MESH_OT_extrude_region_move', 'MESH_OT_extrude_region_shrink_fatten'),
    ('NODE_OT_add_node', 'NODE_OT_add_search'),
    ('NODE_OT_move_detach_links', 'NODE_OT_move_detach_links_release'),
    ('NODE_OT_properties', 'VIEW3D_OT_properties'),
    ('OBJECT_OT_bake', 'OBJECT_OT_bake_image'),
    ('OBJECT_OT_duplicate_move', 'OBJECT_OT_duplicate_move_linked'),
    ('WM_OT_context_cycle_enum', 'WM_OT_context_toggle', 'WM_OT_context_toggle_enum'),
    ('WM_OT_context_set_boolean', 'WM_OT_context_set_enum', 'WM_OT_context_set_float',
     'WM_OT_context_set_int', 'WM_OT_context_set_string', 'WM_OT_context_set_value'),
)

# Description strings which are never reported as duplicates at all
# (currently only the empty description).
DUPLICATE_IGNORE = {
    "",
}
87 |
88 |
def check_duplicates():
    """Print description strings which are shared by multiple operators."""
    import rna_info

    ignore_found = set()
    accept_found = set()

    structs, funcs, ops, props = rna_info.BuildRNAInfo()

    # This is mainly useful for operators,
    # other types have too many false positives

    # for t in (structs, funcs, ops, props):
    for t in (ops, ):
        description_dict = {}
        print("")
        for key, value in t.items():
            if value.description in DUPLICATE_IGNORE:
                ignore_found.add(value.description)
            else:
                id_str = ".".join([s if isinstance(s, str) else s.identifier for s in key if s])
                description_dict.setdefault(value.description, []).append(id_str)

        # Sort for easier viewing.
        sort_ls = sorted((tuple(sorted(v)), k) for k, v in description_dict.items())

        for v, k in sort_ls:
            if len(v) > 1:
                if v in DUPLICATE_ACCEPT:
                    accept_found.add(v)
                else:
                    print("found %d: %r, \"%s\"" % (len(v), v, k))
                    # print("%r," % (v,))

    # Warn about white-list entries which no longer match anything.
    test = (DUPLICATE_IGNORE - ignore_found)
    if test:
        print("Invalid 'DUPLICATE_IGNORE': %r" % test)
    test = (set(DUPLICATE_ACCEPT) - accept_found)
    if test:
        print("Invalid 'DUPLICATE_ACCEPT': %r" % test)
128 |
129 |
def main():
    """Entry point: run the duplicate-description check."""
    check_duplicates()


if __name__ == "__main__":
    main()
136 |
--------------------------------------------------------------------------------
/check_source/check_header_duplicate.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | """
5 | Run this script to check if headers are included multiple times.
6 |
7 | python3 check_header_duplicate.py ../../
8 |
9 | Now build the code to find duplicate errors, resolve them manually.
10 |
11 | Then restore the headers to their original state:
12 |
13 | python3 check_header_duplicate.py --restore ../../
14 | """
15 |
16 | # Use GCC's __INCLUDE_LEVEL__ to find direct duplicate includes
17 |
18 | UUID = 0
19 |
20 |
def source_filepath_guard(filepath):
    """Append a uniquely-numbered include-level guard to *filepath*.

    The footer uses GCC's ``__INCLUDE_LEVEL__`` so a build error is raised
    when the header is directly included more than once.
    """
    global UUID

    uid = UUID
    UUID += 1

    guard_footer = """
#if __INCLUDE_LEVEL__ == 1
# ifdef _DOUBLEHEADERGUARD_%d
# error "duplicate header!"
# endif
#endif

#if __INCLUDE_LEVEL__ == 1
# define _DOUBLEHEADERGUARD_%d
#endif
""" % (uid, uid)

    with open(filepath, 'a', encoding='utf-8') as fh:
        fh.write(guard_footer)
39 |
40 |
def source_filepath_restore(filepath):
    """Restore *filepath* to its committed state, discarding the appended guard.

    Assumes the current working directory is inside the git repository
    (set by ``scan_source_recursive``).
    """
    import subprocess
    # Was `os.system("git co %s" % filepath)`: "co" is only a common user
    # alias for "checkout" (it fails on a stock git install), and the
    # unquoted path broke on spaces/shell metacharacters.  Spell out
    # "checkout" and pass an argument vector so no shell is involved.
    subprocess.call(["git", "checkout", "--", filepath])
44 |
45 |
def scan_source_recursive(dirpath, is_restore):
    """Walk *dirpath* and guard (or, with *is_restore*, un-guard) every header.

    :arg dirpath: root of the git working directory to scan.
    :arg is_restore: when True restore files instead of appending guards.
    """
    import os
    from os.path import join, splitext

    # Operate from the repository root so the git commands resolve paths.
    os.chdir(dirpath)

    def iter_files(path, filename_check=None):
        for root, dirs, files in os.walk(path):
            # Prune hidden directories ('.git' and friends) in-place.
            dirs[:] = [d for d in dirs if not d.startswith(".")]

            for name in files:
                fpath = join(root, name)
                if filename_check is None or filename_check(fpath):
                    yield fpath

    def is_header(filename):
        return splitext(filename)[1] in {".hpp", ".hxx", ".h", ".hh"}

    def is_ignore(filename):
        # Placeholder: no files are currently ignored.
        pass

    for filepath in sorted(iter_files(dirpath, is_header)):
        print("file:", filepath)
        if is_ignore(filepath):
            continue

        if is_restore:
            source_filepath_restore(filepath)
        else:
            source_filepath_guard(filepath)
79 |
80 |
def main():
    # The directory to scan is the last argument; "--restore" may appear
    # anywhere among the preceding arguments.
    import sys
    is_restore = ("--restore" in sys.argv[1:])
    scan_source_recursive(sys.argv[-1], is_restore)


if __name__ == "__main__":
    main()
89 |
--------------------------------------------------------------------------------
/check_source/check_mypy.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | import os
5 | from os.path import join, splitext
6 |
7 | from check_mypy_config import PATHS, PATHS_EXCLUDE
8 |
9 | from typing import (
10 | Any,
11 | Callable,
12 | Generator,
13 | Optional,
14 | Tuple,
15 | Dict,
16 | )
17 |
# A checker work item: (file-path, extra mypy arguments, extra environment).
FileAndArgs = Tuple[str, Tuple[Any, ...], Dict[str, str]]

# print(PATHS)
# Extensions of files that get passed to mypy.
SOURCE_EXT = (
    # Python
    ".py",
)
25 |
26 |
def is_source(filename: str) -> bool:
    """Return True when *filename* ends with a recognized source extension."""
    return any(filename.endswith(ext) for ext in SOURCE_EXT)
29 |
30 |
def path_iter(
        path: str,
        filename_check: Optional[Callable[[str], bool]] = None,
) -> Generator[str, None, None]:
    """Yield file paths below *path*, skipping hidden files and directories.

    When *filename_check* is given, only paths it approves are yielded.
    """
    for dirpath, dirnames, filenames in os.walk(path):
        # Prune hidden directories (".git" etc.) from the walk in-place.
        dirnames[:] = [d for d in dirnames if not d.startswith(".")]

        for filepath in (join(dirpath, f) for f in filenames if not f.startswith(".")):
            if filename_check is None or filename_check(filepath):
                yield filepath
45 |
46 |
def path_expand_with_args(
        paths_and_args: Tuple[FileAndArgs, ...],
        filename_check: Optional[Callable[[str], bool]] = None,
) -> Generator[FileAndArgs, None, None]:
    """Expand each (path, args, env) entry, recursing into directories.

    Missing paths are reported and skipped; every file found under a
    directory entry inherits that entry's arguments.
    """
    for entry in paths_and_args:
        path, rest = entry[0], entry[1:]
        if not os.path.exists(path):
            print("Missing:", path)
        elif os.path.isdir(path):
            for sub_path in path_iter(path, filename_check):
                yield (sub_path, *rest)
        else:
            yield (path, *rest)
60 |
61 |
def main() -> None:
    """Run ``mypy --strict`` over each configured path, one file at a time.

    With no file arguments (the script itself is the last argv entry) the
    configured PATHS are checked; otherwise the given paths are expanded.
    """
    import sys
    import subprocess  # NOTE(review): unused since the Popen call below was commented out.
    import shlex

    # Fixed location, so change the current working directory doesn't create cache everywhere.
    cache_dir = os.path.join(os.getcwd(), ".mypy_cache")

    if os.path.samefile(sys.argv[-1], __file__):
        # No explicit paths given: use the configured defaults.
        paths = path_expand_with_args(PATHS, is_source)
    else:
        # Expand user-supplied paths with empty extra args/env.
        paths = path_expand_with_args(
            tuple((p, (), {}) for p in sys.argv[1:]),
            is_source,
        )

    for f, extra_args, extra_env in paths:
        if f in PATHS_EXCLUDE:
            continue

        # Normalize falsy values so the handling below is uniform.
        if not extra_args:
            extra_args = ()
        if not extra_env:
            extra_env = {}

        print(f)
        cmd = (
            "mypy",
            "--strict",
            "--cache-dir=" + cache_dir,
            "--color-output",
            f,
            *extra_args,
        )
        # p = subprocess.Popen(cmd, env=extra_env, stdout=sys.stdout, stderr=sys.stderr)

        # Temporarily extend the environment (e.g. MYPYPATH) for this file only.
        if extra_env:
            for k, v in extra_env.items():
                os.environ[k] = v

        os.chdir(os.path.dirname(f))

        os.system(" ".join([shlex.quote(arg) for arg in cmd]))

        # Restore the environment for the next iteration.
        if extra_env:
            for k in extra_env.keys():
                del os.environ[k]
113 |
--------------------------------------------------------------------------------
/check_source/check_mypy_config.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-2.0-or-later
2 |
3 | import os
4 | from typing import (
5 | Any,
6 | Tuple,
7 | Dict,
8 | )
9 |
# Each entry is (path, extra mypy arguments, extra environment variables).
# Relative paths are prefixed with the absolute source root at the bottom
# of this file.
PATHS: Tuple[Tuple[str, Tuple[Any, ...], Dict[str, str]], ...] = (
    ("build_files/cmake/", (), {'MYPYPATH': "modules"}),
    ("build_files/utils/", (), {'MYPYPATH': "modules"}),
    ("doc/manpage/blender.1.py", (), {}),
    ("source/tools/check_blender_release/", (), {}),
    ("source/tools/check_source/", (), {'MYPYPATH': "modules"}),
    ("source/tools/check_wiki/", (), {}),
    ("source/tools/utils/", (), {}),
    ("source/tools/utils_api/", (), {}),
    ("source/tools/utils_build/", (), {}),
    ("source/tools/utils_doc/", (), {}),
    ("source/tools/utils_ide/", (), {}),
    ("source/tools/utils_maintenance/", (), {'MYPYPATH': "modules"}),
)
24 |
# Absolute path of Blender's source root (three directories up from this file).
# `os.path.abspath` already returns a normalized path, so the previous
# `normpath(abspath(normpath(...)))` wrapping was redundant.
SOURCE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", ".."))
27 |
# Files under PATHS that are skipped by check_mypy.py, typically because they
# do not pass `mypy --strict` yet or require modules such as `bpy` that are
# unavailable outside Blender.
PATHS_EXCLUDE = set(
    os.path.join(SOURCE_DIR, p.replace("/", os.sep))
    for p in
    (
        "build_files/cmake/clang_array_check.py",
        "build_files/cmake/cmake_netbeans_project.py",
        "build_files/cmake/cmake_qtcreator_project.py",
        "build_files/cmake/cmake_static_check_smatch.py",
        "build_files/cmake/cmake_static_check_sparse.py",
        "build_files/cmake/cmake_static_check_splint.py",
        "source/tools/check_blender_release/check_module_enabled.py",
        "source/tools/check_blender_release/check_module_numpy.py",
        "source/tools/check_blender_release/check_module_requests.py",
        "source/tools/check_blender_release/check_release.py",
        "source/tools/check_blender_release/check_static_binaries.py",
        "source/tools/check_blender_release/check_utils.py",
        "source/tools/check_blender_release/scripts/modules_enabled.py",
        "source/tools/check_blender_release/scripts/requests_basic_access.py",
        "source/tools/check_blender_release/scripts/requests_import.py",
        "source/tools/check_source/check_descriptions.py",
        "source/tools/check_source/check_header_duplicate.py",
        "source/tools/check_source/check_unused_defines.py",
        "source/tools/utils/blend2json.py",
        "source/tools/utils/blender_keyconfig_export_permutations.py",
        "source/tools/utils/blender_merge_format_changes.py",
        "source/tools/utils/blender_theme_as_c.py",
        "source/tools/utils/credits_git_gen.py",
        "source/tools/utils/cycles_commits_sync.py",
        "source/tools/utils/cycles_timeit.py",
        "source/tools/utils/gdb_struct_repr_c99.py",
        "source/tools/utils/git_log.py",
        "source/tools/utils/git_log_review_commits.py",
        "source/tools/utils/git_log_review_commits_advanced.py",
        "source/tools/utils/make_cursor_gui.py",
        "source/tools/utils/make_gl_stipple_from_xpm.py",
        "source/tools/utils/make_shape_2d_from_blend.py",
        "source/tools/utils/weekly_report.py",
        "source/tools/utils_api/bpy_introspect_ui.py",  # Uses `bpy`.
        "source/tools/utils_doc/rna_manual_reference_updater.py",
        "source/tools/utils_ide/qtcreator/externaltools/qtc_assembler_preview.py",
        "source/tools/utils_ide/qtcreator/externaltools/qtc_blender_diffusion.py",
        "source/tools/utils_ide/qtcreator/externaltools/qtc_cpp_to_c_comments.py",
        "source/tools/utils_ide/qtcreator/externaltools/qtc_doxy_file.py",
        "source/tools/utils_ide/qtcreator/externaltools/qtc_project_update.py",
        "source/tools/utils_ide/qtcreator/externaltools/qtc_sort_paths.py",
        "source/tools/utils_maintenance/blender_menu_search_coverage.py",  # Uses `bpy`.
        "source/tools/utils_maintenance/blender_update_themes.py",  # Uses `bpy`.
        "source/tools/utils_maintenance/trailing_space_clean.py",
        "source/tools/utils_maintenance/trailing_space_clean_config.py",
    )
)
79 |
# Prefix every configured path with the absolute source root so the checker
# works regardless of the current working directory.
PATHS = tuple(
    (os.path.join(SOURCE_DIR, p_items[0].replace("/", os.sep)), *p_items[1:])
    for p_items in PATHS
)
84 |
--------------------------------------------------------------------------------
/check_source/check_spelling_c_config.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-2.0-or-later
2 |
3 | # these must be all lower case for comparisons
4 |
5 | dict_custom = {
6 | # Added to newer versions of the dictionary,
7 | # we can remove these when the updated word-lists have been applied to aspell-en.
8 | "accessor",
9 | "accessors",
10 | "completer",
11 | "completers",
12 | "enqueue",
13 | "enqueued",
14 | "enqueues",
15 | "intrinsics",
16 | "iterable",
17 | "parallelization",
18 | "parallelized",
19 | "pipelining",
20 | "polygonization",
21 | "prepend",
22 | "prepends",
23 | "rasterize",
24 | "reachability",
25 | "runtime",
26 | "runtimes",
27 | "serializable",
28 | "unary",
29 | "variadic",
30 |
31 | # Correct spelling, update the dictionary, here:
32 | # https://github.com/en-wl/wordlist
33 | "accessor",
34 | "additively",
35 | "adjoint",
36 | "adjugate",
37 | "affectable",
38 | "alignable",
39 | "allocatable",
40 | "allocator",
41 | "allocators",
42 | "anisotropic",
43 | "anisotropy",
44 | "atomicity",
45 | "bindless",
46 | "bitwise",
47 | "blocky",
48 | "boolean",
49 | "borderless",
50 | "breaked",
51 | "callables",
52 | "canonicalization",
53 | "canonicalized",
54 | "canonicalizing",
55 | "catadioptric",
56 | "checksums",
57 | "clearcoat",
58 | "codecs",
59 | "collapser",
60 | "collinear",
61 | "comparator",
62 | "comparators",
63 | "compilable",
64 | "confusticate",
65 | "confusticated",
66 | "constructability",
67 | "constructible",
68 | "coplanarity",
69 | "copyable",
70 | "counterforce",
71 | "criterium",
72 | "crosstalk",
73 | "customizable",
74 | "deallocate",
75 | "deallocated",
76 | "deallocating",
77 | "decorrelated",
78 | "decrement",
79 | "decrementing",
80 | "deduplicate",
81 | "deduplicating",
82 | "deduplication",
83 | "defocus",
84 | "degeneracies",
85 | "deleter",
86 | "denoised",
87 | "denoiser",
88 | "denoising",
89 | "dereference",
90 | "dereferenced",
91 | "dereferences",
92 | "dereferencing",
93 | "derivates",
94 | "desaturate",
95 | "designator",
96 | "despeckled",
97 | "destructor",
98 | "destructors",
99 | "dialogs",
100 | "digitizers",
101 | "dihedral",
102 | "discoverability",
103 | "discretization",
104 | "discretized",
105 | "discretizes",
106 | "downcasting",
107 | "draggable",
108 | "drawable",
109 | "durations",
110 | "eachother",
111 | "editability",
112 | "effector",
113 | "effectors",
114 | "elementwise",
115 | "embedder",
116 | "enablement",
117 | "enqueueing",
118 | "equiangular",
119 | "extrema",
120 | "fallbacks",
121 | "finalizer",
122 | "flushable",
123 | "formatter",
124 | "formatters",
125 | "foveation",
126 | "generatrix",
127 | "glitchy",
128 | "haptics",
129 | "highlightable",
130 | "homogenous",
131 | "ideographic",
132 | "illuminant",
133 | "impactful",
134 | "incrementation",
135 | "initializer",
136 | "initializers",
137 | "inlining",
138 | "instancer",
139 | "instancers",
140 | "instantiable",
141 | "instantiation",
142 | "instantiations",
143 | "interferences",
144 | "interocular",
145 | "invariant",
146 | "invariants",
147 | "invisibilities",
148 | "irradiance",
149 | "iteratively",
150 | "jitteryness",
151 | "keyless",
152 | "linearize",
153 | "linearized",
154 | "linearizes",
155 | "linearizing",
156 | "linkable",
157 | "lockless",
158 | "losslessly",
159 | "luminances",
160 | "mappable",
161 | "merchantability",
162 | "mergeable",
163 | "minimalistic",
164 | "misconfiguration",
165 | "misconfigured",
166 | "modally",
167 | "modifyability",
168 | "monoscopy",
169 | "monospaced",
170 | "mutators",
171 | "natively",
172 | "occludee",
173 | "occluder",
174 | "occluders",
175 | "optionals",
176 | "orthogonalize",
177 | "orthogonally",
178 | "orthonormalize",
179 | "orthonormalized",
180 | "overridable",
181 | "paddings",
182 | "pannable",
183 | "parallelepiped",
184 | "parallelize",
185 | "parallelizing",
186 | "parameterization",
187 | "parametrization",
188 | "parentless",
189 | "passepartout",
190 | "passthrough",
191 | "performant",
192 | "piecewise",
193 | "pixelate",
194 | "pixelated",
195 | "pixelation",
196 | "pixelisation",
197 | "planarity",
198 | "planarize",
199 | "polygonizer",
200 | "polytope",
201 | "postprocess",
202 | "postprocessed",
203 | "pre-filtered",
204 | "pre-multiplied",
205 | "precalculate",
206 | "precisions",
207 | "precomputations",
208 | "precompute",
209 | "precomputed",
210 | "precomputing",
211 | "prefetch",
212 | "prefetching",
213 | "prefilter",
214 | "prefiltered",
215 | "prefiltering",
216 | "premutliplied",
217 | "preorder",
218 | "prepend",
219 | "prepending",
220 | "preprocess",
221 | "preprocessing",
222 | "preprocessor",
223 | "preprocessors",
224 | "preventively",
225 | "probabilistically",
226 | "procedurally",
227 | "profiler",
228 | "programmatically",
229 | "purgeability",
230 | "quadratically",
231 | "queryable",
232 | "rasterizer",
233 | "rasterizes",
234 | "rasterizing",
235 | "reallocations",
236 | "rebalancing",
237 | "rebase",
238 | "recomputation",
239 | "recurse",
240 | "recursed",
241 | "recurses",
242 | "recursing",
243 | "recursivity",
244 | "redefinitions",
245 | "redistributions",
246 | "registerable",
247 | "reimplement",
248 | "remappable",
249 | "remapper",
250 | "remappings",
251 | "remesher",
252 | "rendeder",
253 | "renderable",
254 | "renormalize",
255 | "renormalized",
256 | "reparameterization",
257 | "reparametization",
258 | "representable",
259 | "reprojecting",
260 | "repurpose",
261 | "respecialized",
262 | "retiming",
263 | "reusability",
264 | "saveable",
265 | "schemas",
266 | "scrollable",
267 | "serializers",
268 | "sharpnesses",
269 | "sidedness",
270 | "simplices",
271 | "situationally",
272 | "skippable",
273 | "sortable",
274 | "stitchable",
275 | "subclass",
276 | "subclasses",
277 | "subclassing",
278 | "subdirectories",
279 | "subdirectory",
280 | "suboptimally",
281 | "subtractive",
282 | "superset",
283 | "symmetrize",
284 | "symmetrized",
285 | "targetless",
286 | "targetless",
287 | "teleporting",
288 | "templating",
289 | "tertiarily",
290 | "tokenize",
291 | "tokenizing",
292 | "transmissive",
293 | "triangulations",
294 | "triangulator",
295 | "trilinear",
296 | "tunable",
297 | "unadjusted",
298 | "unalignable",
299 | "unallocated",
300 | "unapplied",
301 | "unapply",
302 | "unassign",
303 | "unbake",
304 | "unbuffered",
305 | "uncached",
306 | "uncategorized",
307 | "unclaim",
308 | "unclamped",
309 | "unclipped",
310 | "uncollapsed",
311 | "uncomment",
312 | "uncommented",
313 | "uncompacted",
314 | "uncomputed",
315 | "unconfigured",
316 | "unconvert",
317 | "undefine",
318 | "undefined",
319 | "undeform",
320 | "undeformed",
321 | "undeformed",
322 | "undisplaced",
323 | "undistored",
324 | "undistorted",
325 | "undistorting",
326 | "uneditable",
327 | "unflagged",
328 | "unflip",
329 | "unfoldable",
330 | "unformatted",
331 | "unfreed",
332 | "ungrabbed",
333 | "ungrabbing",
334 | "ungroup",
335 | "ungrouped",
336 | "ungrouping",
337 | "ungrown",
338 | "unhandled",
339 | "unhidden",
340 | "unhide",
341 | "unintuitive",
342 | "unkeyed",
343 | "unkeyed",
344 | "unkeyframed",
345 | "unlink",
346 | "unlinkable",
347 | "unlinked",
348 | "unlinking",
349 | "unlinks",
350 | "unmap",
351 | "unmapped",
352 | "unmark",
353 | "unmask",
354 | "unmatching",
355 | "unmaximized",
356 | "unmeasurable",
357 | "unminimize",
358 | "unmute",
359 | "unnormalize",
360 | "unnormalized",
361 | "unoptimized",
362 | "unparameterized",
363 | "unparsed",
364 | "unpause",
365 | "unpaused",
366 | "unpoision",
367 | "unproject",
368 | "unquantifiable",
369 | "unregister",
370 | "unregistering",
371 | "unregisters",
372 | "unreproducible",
373 | "unscaled",
374 | "unselect",
375 | "unselect",
376 | "unselected",
377 | "unsetting",
378 | "unshadowed",
379 | "unsharp",
380 | "unsqueezed",
381 | "unstretch",
382 | "unsubdivided",
383 | "unsubdividing",
384 | "unsubdivisions",
385 | "unsynchronized",
386 | "untag",
387 | "untagging",
388 | "unterminated",
389 | "untracked",
390 | "untransformed",
391 | "untransformed",
392 | "untrusted",
393 | "untyped",
394 | "unusably",
395 | "unvisited",
396 | "unwritable",
397 | "userless",
398 | "vectorial",
399 | "vectorization",
400 | "vectorized",
401 | "versionable",
402 | "videogrammetry",
403 | "viewports",
404 | "virtualized",
405 | "visibilities",
406 | "volumetrics",
407 | "vortices",
408 | "voxelize",
409 | "writeable",
410 | "zoomable",
411 |
412 | # C/C++/Python types (we could quote every instance but it's impractical).
413 | "enum",
414 | "enums",
415 | "int",
416 | "ints",
417 | "nullptr", # C++ NULL-pointer.
418 | "str",
419 | "tuple",
420 | "tuples",
421 |
422 | # python functions
423 | "func",
424 | "repr",
425 |
426 | # Accepted concatenations.
427 | "addon",
428 | "addons",
429 | "autocomplete",
430 | "colospace",
431 | "datablock",
432 | "datablocks",
433 | "keyframe",
434 | "keyframing",
435 | "lookup",
436 | "lookups",
437 | "multithreaded",
438 | "multithreading",
439 | "namespace",
440 | "reparent",
441 | "tooltip",
442 | "unparent",
443 |
444 | # Accepted abbreviations.
445 | # "dir", # direction/directory? Too ambiguous, don't use this.
446 | "anim", # animation.
447 | "attr",
448 | "attrs",
449 | "config", # configuration.
450 | "coord",
451 | "coords",
452 | "iter", # iteration.
453 | "multi",
454 | "numpad", # numeric-pad.
455 | "numpads", # numeric-pads.
456 | "ortho",
457 | "recalc",
458 | "resync",
459 | "struct",
460 | "structs",
461 | "subdir",
462 |
463 | # General computer terms.
464 | "app",
465 | "ascii",
466 | "autocomplete",
467 | "autorepeat",
468 | "bilinear",
469 | "blit",
470 | "blitting",
471 | "boids",
472 | "booleans",
473 | "codepage",
474 | "contructor",
475 | "decimator",
476 | "diff",
477 | "diffs",
478 | "endian",
479 | "endianness",
480 | "env",
481 | "euler",
482 | "eulers",
483 | "foo",
484 | "hashable",
485 | "http",
486 | "intelisense",
487 | "jitter",
488 | "jittered",
489 | "jittering",
490 | "keymap",
491 | "lerp",
492 | "metadata",
493 | "mutex",
494 | "opengl",
495 | "quantized",
496 | "searchable",
497 | "segfault",
498 | "stdin",
499 | "stdin",
500 | "stdout",
501 | "sudo",
502 | "threadsafe",
503 | "touchpad",
504 | "touchpads",
505 | "trackpad",
506 | "trackpads",
507 | "trilinear",
508 | "unicode",
509 | "usr",
510 | "vert",
511 | "verts",
512 | "voxel",
513 | "voxels",
514 | "wiki",
515 |
516 | # specific computer terms/brands
517 | "ack",
518 | "amiga",
519 | "cmake",
520 | "ffmpeg",
521 | "freebsd",
522 | "linux",
523 | "manpage",
524 | "mozilla",
525 | "nvidia",
526 | "openexr",
527 | "posix",
528 | "qtcreator",
529 | "unix",
530 | "valgrind",
531 | "xinerama",
532 |
533 | # general computer graphics terms
534 | "atomics",
535 | "barycentric",
536 | "bezier",
537 | "bicubic",
538 | "bitangent",
539 | "centroid",
540 | "colinear",
541 | "compositing",
542 | "coplanar",
543 | "crypto",
544 | "deinterlace",
545 | "emissive",
546 | "fresnel",
547 | "gaussian",
548 | "grayscale",
549 | "kerning",
550 | "lacunarity",
551 | "lossless",
552 | "lossy",
553 | "luma",
554 | "mipmap",
555 | "mipmapped",
556 | "mipmapping",
557 | "mipmaps",
558 | "musgrave",
559 | "n-gon",
560 | "n-gons",
561 | "normals",
562 | "nurbs",
563 | "octree",
564 | "quaternions",
565 | "radiosity",
566 | "reflectance",
567 | "shader",
568 | "shaders",
569 | "specular",
570 |
571 | # Blender specific terms.
572 | "animsys",
573 | "animviz",
574 | "bmain",
575 | "bmesh",
576 | "bpy",
577 | "depsgraph",
578 | "doctree",
579 | "editmode",
580 | "eekadoodle",
581 | "fcurve",
582 | "look-dev",
583 | "mathutils",
584 | "obdata",
585 | "userpref",
586 | "userprefs",
587 |
588 | # Should have apostrophe but ignore for now unless we want to get really picky!
589 | "indices",
590 | "vertices",
591 | }
592 |
593 | # incorrect spelling but ignore anyway
594 | dict_ignore = {
595 | "a-z",
596 | "animatable",
597 | "arg",
598 | "args",
599 | "bool",
600 | "constness",
601 | "dirpath",
602 | "dupli",
603 | "eg",
604 | "filename",
605 | "filenames",
606 | "filepath",
607 | "filepaths",
608 | "hardcoded",
609 | "id-block",
610 | "inlined",
611 | "loc",
612 | "namespace",
613 | "node-trees",
614 | "ok",
615 | "ok-ish",
616 | "param",
617 | "polyline",
618 | "polylines",
619 | "premultiplied",
620 | "premultiply",
621 | "pylint",
622 | "quad",
623 | "readonly",
624 | "submodule",
625 | "submodules",
626 | "tooltips",
627 | "tri",
628 | "ui",
629 | "unfuzzy",
630 | "utils",
631 | "uv",
632 | "vec",
633 | "wireframe",
634 | "x-axis",
635 | "y-axis",
636 | "z-axis",
637 |
638 | # acronyms
639 | "api",
640 | "cpu",
641 | "gl",
642 | "gpl",
643 | "gpu",
644 | "gzip",
645 | "hg",
646 | "ik",
647 | "lhs",
648 | "nan",
649 | "nla",
650 | "ppc",
651 | "rgb",
652 | "rhs",
653 | "rna",
654 | "smpte",
655 | "svn",
656 | "utf",
657 |
658 | # extensions
659 | "py",
660 | "rst",
661 | "xml",
662 | "xpm",
663 |
664 | # tags
665 | "fixme",
666 | "todo",
667 |
668 | # sphinx/rst
669 | "rtype",
670 |
671 | # slang
672 | "automagically",
673 | "hacky",
674 | "hrmf",
675 |
676 | # names
677 | "campbell",
678 | "jahka",
679 | "mikkelsen",
680 | "morten",
681 |
682 | # Company names.
683 | "Logitech",
684 | "Wacom",
685 |
686 | # Project Names.
687 | "Wayland",
688 |
689 | # clang-tidy (for convenience).
690 | "bugprone-suspicious-enum-usage",
691 | "bugprone-use-after-move",
692 | }
693 |
694 | # Allow: `un-word`, `re-word` ... etc, in this case only check `word`.
# e.g. "un-banana" is checked as "banana" because "un" is listed here.
dict_ignore_hyphenated_prefix = {
    "de",
    "mis",
    "non",
    "post",
    "pre",
    "re",
    "un",
}
704 |
# Allow: `word-ify`, `word-ish`, `word-ness` ... etc, only check `word`.
dict_ignore_hyphenated_suffix = {
    "ify",
    "ish",
    "ness",
}
710 |
# Files skipped entirely by the spell checker (paths relative to the source root).
files_ignore = {
    "source/tools/utils_doc/rna_manual_reference_updater.py", # Contains language ID references.
}
714 |
--------------------------------------------------------------------------------
/check_source/check_unused_defines.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | # Checks for defines which aren't used anywhere.
5 |
6 | import os
7 | import sys
8 |
9 | PWD = os.path.dirname(__file__)
10 | sys.path.append(os.path.join(PWD, "..", "utils_maintenance", "modules"))
11 |
12 | from batch_edit_text import run
13 |
14 | SOURCE_DIR = os.path.normpath(os.path.abspath(os.path.normpath(os.path.join(PWD, "..", "..", ".."))))
15 |
16 | # TODO, move to config file
17 | SOURCE_DIRS = (
18 | "source",
19 | )
20 |
21 | SOURCE_EXT = (
22 | # C/C++
23 | ".c", ".h", ".cpp", ".hpp", ".cc", ".hh", ".cxx", ".hxx", ".inl",
24 | # Objective C
25 | ".m", ".mm",
26 | # GLSL
27 | ".glsl",
28 | )
29 |
# Accumulators shared across all scanned files:
# every identifier-like token seen at least once.
words = set()
# tokens seen two or more times (i.e. used somewhere besides their definition).
words_multi = set()
# define name -> file that defines it.
defines = {}

import re
# An identifier-like token (also matches `#define` names).
re_words = re.compile("[A-Za-z_][A-Za-z_0-9]*")
# The name introduced by a `#define` at the start of a line.
re_defines = re.compile("^\\s*#define\\s+([A-Za-z_][A-Za-z_0-9]*)", re.MULTILINE)
37 |
38 | # From
39 | # https://stackoverflow.com/a/18381470/432509
40 |
41 |
def remove_comments(string):
    """Strip C/C++ comments from *string*, leaving quoted strings intact.

    Comment-looking text inside string literals (e.g. ``"/*x*/"``) is kept,
    because the string-literal alternative is matched first.
    """
    # Group 1: a double or single quoted string (kept verbatim).
    # Group 2: a `/* ... */` or `// ...` comment (dropped).
    comment_or_string = re.compile(
        r"(\".*?\"|\'.*?\')|(/\*.*?\*/|//[^\r\n]*$)",
        re.MULTILINE | re.DOTALL,
    )

    def _keep_or_drop(match):
        # A non-None group 2 means a real (unquoted) comment was captured.
        return "" if match.group(2) is not None else match.group(1)

    return comment_or_string.sub(_keep_or_drop, string)
56 |
57 |
def extract_terms(fn, data_src):
    """Accumulate identifier tokens and `#define` names from one file.

    Populates the module-level `words`, `words_multi` and `defines`
    accumulators; returns None so `batch_edit_text` leaves the file as-is.
    """
    text = remove_comments(data_src)

    for word_match in re_words.finditer(text):
        word = word_match.group()
        # A repeated occurrence (in this or any earlier file) promotes the
        # token into `words_multi`.
        if word in words:
            words_multi.add(word)
        else:
            words.add(word)

    for define_match in re_defines.finditer(text):
        defines[define_match.group(1)] = fn

    # Don't edit the file.
    return None
72 |
73 |
# Scan every source file once, filling the global accumulators.
run(
    directories=[os.path.join(SOURCE_DIR, d) for d in SOURCE_DIRS],
    is_text=lambda fn: fn.endswith(SOURCE_EXT),
    text_operation=extract_terms,
    # Can't be used if we want to accumulate in a global variable.
    use_multiprocess=False,
)

# A define whose name only ever appears once (at its own definition) was
# never promoted into `words_multi`, so it is reported as unused.
print("Found", len(defines), "defines, searching", len(words_multi), "terms...")
for fn, define in sorted([(fn, define) for define, fn in defines.items()]):
    if define not in words_multi:
        print(define, "->", fn)
86 |
--------------------------------------------------------------------------------
/check_wiki/check_wiki_file_structure.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | """
5 | This script is to validate the WIKI page that documents Blender's file-structure, see:
6 |
7 | https://wiki.blender.org/wiki/Source/File_Structure
8 |
9 | It can run without any arguments, where it will download the WIKI to Blender's source root:
10 |
11 | You may pass the wiki text as an argument, e.g.
12 |
13 | check_wiki_file_structure.py --wiki=wiki.txt
14 | """
15 |
16 | import os
17 | import re
18 | import argparse
19 |
20 | from typing import (
21 | List,
22 | Optional,
23 | Tuple,
24 | )
25 |
26 |
27 | # -----------------------------------------------------------------------------
28 | # Constants
29 |
CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))
# Blender's source root: three directories up from this checker script.
SOURCE_DIR = os.path.normpath(os.path.join(CURRENT_DIR, "..", "..", ".."))
WIKI_URL = "https://wiki.blender.org/wiki/Source/File_Structure"
# The "action=edit" page embeds the raw wiki markup in a text-area.
WIKI_URL_EDIT = "https://wiki.blender.org/w/index.php?title=Source/File_Structure&action=edit"
34 |
35 |
36 | # -----------------------------------------------------------------------------
37 | # HTML Utilities
38 |
def text_with_title_underline(text: str, underline: str = "=") -> str:
    """Return *text* framed by blank lines and underlined to its full width."""
    bar = underline * len(text)
    return f"\n{text:s}\n{bar:s}\n"
41 |
42 |
def html_extract_first_textarea(data: str) -> Optional[str]:
    """
    Extract and unescape the text within the first
    ``<textarea ...> ... </textarea>`` found in the HTML text.

    Returns None when no complete text-area element is present.

    NOTE(review): this block was damaged in transit (the HTML-entity string
    literals were decoded away, e.g. ``("<", "<")``, and the tag-search lines
    were lost); reconstructed from the surviving fragments — please verify
    against the original file.
    """
    # Locate the end of the opening <textarea ...> tag.
    beg = data.find("<textarea")
    if beg == -1:
        return None
    beg = data.find(">", beg)
    if beg == -1:
        return None
    beg += 1

    end = data.find("</textarea>", beg)
    if end == -1:
        return None

    data = data[beg:end]
    # Undo the HTML escaping applied to the embedded wiki markup.
    for (src, dst) in (
        ("&lt;", "<"),
        ("&gt;", ">"),
        ("&amp;", "&"),
        ("&quot;", "\""),
    ):
        data = data.replace(src, dst)
    return data
73 |
74 |
def html_extract_first_textarea_from_url(url: str) -> Optional[str]:
    """Download *url* and return its first text-area's contents, or None."""
    import urllib.request

    request = urllib.request.Request(url=url)
    with urllib.request.urlopen(request) as response:
        page = response.read().decode('utf-8')

    return html_extract_first_textarea(page)
86 |
87 |
88 | # -----------------------------------------------------------------------------
89 | # WIKI Text Parsing
90 |
def wiki_to_paths_and_docstrings(wiki_text: str) -> Tuple[List[str], List[str]]:
    """Parse the WIKI table, returning (paths, per-path documentation bodies)."""
    paths: List[str] = []
    docstrings: List[str] = []
    lines = wiki_text.split("\n")
    index = 0
    total = len(lines)
    while index < total:
        line = lines[index]
        if line.startswith("| /"):
            # Convert:
            # `| /source/'''blender/'''` -> `source/blender`.
            paths.append(line[3:].replace("'''", "").split(" ", 1)[0].rstrip("/"))

            # Collect the description rows up to the next table separator.
            body_lines = []
            index += 1
            while lines[index].strip() not in {"|-", "|}"}:
                body_lines.append(lines[index].lstrip("| "))
                index += 1
            index -= 1
            docstrings.append("\n".join(body_lines))

        index += 1

    return paths, docstrings
114 |
115 |
116 | # -----------------------------------------------------------------------------
117 | # Reporting
118 |
def report_known_wiki_paths(file_paths: List[str]) -> None:
    """Print every path parsed from the WIKI table (debugging aid)."""
    print(text_with_title_underline("Paths Found in WIKI Table"))
    for path in file_paths:
        print("-", path)
124 |
125 |
def report_missing_source(file_paths: List[str]) -> int:
    """Report WIKI paths that do not exist on disk; return the issue count."""
    missing = [p for p in file_paths if not os.path.exists(os.path.join(SOURCE_DIR, p))]

    amount = str(len(missing)) if missing else "none found"
    print(text_with_title_underline("{:s} ({:s})".format("Missing in Source Dir", amount)))
    if not missing:
        return 0

    print("The following paths were found in the WIKI text\n"
          "but were not found in Blender's source directory:\n")
    for path in missing:
        print("-", path)

    return len(missing)
142 |
143 |
def report_incomplete(file_paths: List[str]) -> int:
    """Report on-disk directories missing from the WIKI table; return the count."""
    undocumented = []
    # Only look inside directories the table already mentions.
    for base in sorted({os.path.dirname(p) for p in file_paths}):
        base_abs = os.path.join(SOURCE_DIR, base)
        for name in os.listdir(base_abs):
            if name.startswith("."):
                continue
            if not os.path.isdir(os.path.join(base_abs, name)):
                continue
            entry_rel = os.path.join(base, name)
            if entry_rel not in file_paths:
                undocumented.append(entry_rel)

    amount = str(len(undocumented)) if undocumented else "none found"
    print(text_with_title_underline("{:s} ({:s})".format("Missing Documentation", amount)))
    if not undocumented:
        return 0

    print("The following paths were found in Blender's source directory\n"
          "but are missing from the WIKI text:\n")
    for path in sorted(undocumented):
        print("-", path)

    return len(undocumented)
170 |
171 |
def report_alphabetical_order(file_paths: List[str]) -> int:
    """Report adjacent same-directory entries that are out of order; return the count."""
    out_of_order = []

    prev_path = ""
    prev_dir = ""
    for path in file_paths:
        cur_dir = os.path.dirname(path)
        # Only compare an entry against a predecessor in the same directory.
        if prev_path and cur_dir == prev_dir and path < prev_path:
            out_of_order.append((prev_path, path))
        prev_dir = cur_dir
        prev_path = path

    amount = str(len(out_of_order)) if out_of_order else "none found"
    print(text_with_title_underline("{:s} ({:s})".format("Non-Alphabetically Ordered", amount)))
    if not out_of_order:
        return 0

    for prev_path, path in out_of_order:
        print("-", path, "(should be before)\n ", prev_path)

    return len(out_of_order)
196 |
197 |
def report_todo_in_docstrings(file_paths: List[str], file_paths_docstring: List[str]) -> int:
    """Report paths whose WIKI documentation contains a TODO marker; return the count."""
    heading = "Marked as TODO"
    test = []

    re_todo = re.compile(r"\bTODO\b")
    for p, docstring in zip(file_paths, file_paths_docstring):
        # `search` (not `match`): a TODO anywhere in the docstring counts,
        # not only one at the very start of the text.
        if re_todo.search(docstring):
            test.append(p)

    amount = str(len(test)) if test else "none found"
    print(text_with_title_underline("{:s} ({:s})".format(heading, amount)))
    if not test:
        return 0

    for p in test:
        print("-", p)

    return len(test)
216 |
217 |
218 | # -----------------------------------------------------------------------------
219 | # Argument Parser
220 |
def create_parser() -> argparse.ArgumentParser:
    """Create the command-line parser (a single ``--wiki`` path option)."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "-w", "--wiki",
        dest="wiki_text",
        metavar='PATH',
        default=os.path.join(SOURCE_DIR, "wiki_file_structure.txt"),
        help="WIKI text file path, NOTE: this will be downloaded if not found!",
    )
    return parser
233 |
234 |
235 | # -----------------------------------------------------------------------------
236 | # Main Function
237 |
def main() -> None:
    """
    Entry point: load the WIKI text (downloading it when missing),
    then run every report and summarize the issue count.
    """
    parser = create_parser()

    args = parser.parse_args()

    if os.path.exists(args.wiki_text):
        print("Using existing WIKI text:", args.wiki_text)
    else:
        data = html_extract_first_textarea_from_url(WIKI_URL_EDIT)
        if data is not None:
            with open(args.wiki_text, 'w', encoding='utf-8') as fh:
                fh.write(data)
            print("Downloaded WIKI text to:", args.wiki_text)
            print("Update and save to:", WIKI_URL)
        else:
            # Fix: message previously read "Failed to downloaded or extract".
            print("Failed to download or extract WIKI text, aborting!")
            return

    with open(args.wiki_text, 'r', encoding='utf-8') as fh:
        file_paths, file_paths_docstring = wiki_to_paths_and_docstrings(fh.read())

    # Disable, mostly useful when debugging why paths might not be found.
    # report_known_wiki_paths()
    issues = 0
    issues += report_missing_source(file_paths)
    issues += report_incomplete(file_paths)
    issues += report_alphabetical_order(file_paths)
    issues += report_todo_in_docstrings(file_paths, file_paths_docstring)

    if issues:
        print("Warning, found {:d} issues!\n".format(issues))
    else:
        print("Success! The WIKI text is up to date with Blender's source tree!\n")


if __name__ == "__main__":
    main()
275 |
--------------------------------------------------------------------------------
/config/analysis/valgrind.supp:
--------------------------------------------------------------------------------
1 | ##----------------------------------------------------------------------##
2 | # Blender's BLI_mempool
3 | #
4 | # Blender can be built WITH_MEM_VALGRIND.
5 | # This causes internal operations to raise false-positives.
6 |
7 | {
8 | BLI_mempool_alloc_A8
9 | Memcheck:Addr8
10 | fun:BLI_mempool_alloc
11 | fun:*
12 | }
13 |
14 | {
15 | BLI_mempool_alloc_A4
16 | Memcheck:Addr4
17 | fun:BLI_mempool_alloc
18 | fun:*
19 | }
20 |
21 | {
22 | BLI_mempool_add_A8
23 | Memcheck:Addr8
24 | fun:mempool_chunk_add
25 | fun:*
26 | }
27 |
28 | {
29 | BLI_mempool_add_A4
30 | Memcheck:Addr4
31 | fun:mempool_chunk_add
32 | fun:*
33 | }
34 |
35 | {
36 | BLI_mempool_iterstep_A4
37 | Memcheck:Addr4
38 | fun:BLI_mempool_iterstep
39 | fun:*
40 | }
41 |
42 | ##----------------------------------------------------------------------##
# Python Calls aren't so useful unless we're debugging Python itself
44 |
45 | # _PyObject_Free
46 | {
47 | PyFree_C
48 | Memcheck:Cond
49 | fun:_PyObject_Free
50 | fun:*
51 | }
52 |
53 | {
54 | PyFree_A4
55 | Memcheck:Addr4
56 | fun:_PyObject_Free
57 | fun:*
58 | }
59 |
60 | {
61 | PyFree_A8
62 | Memcheck:Addr8
63 | fun:_PyObject_Free
64 | fun:*
65 | }
66 |
67 | {
68 | PyFree_V4
69 | Memcheck:Value4
70 | fun:_PyObject_Free
71 | fun:*
72 | }
73 |
74 | {
75 | PyFree_V8
76 | Memcheck:Value8
77 | fun:_PyObject_Free
78 | fun:*
79 | }
80 |
81 |
82 | # _PyObject_Realloc
83 | {
84 | PyRealloc_C
85 | Memcheck:Cond
86 | fun:_PyObject_Realloc
87 | fun:*
88 | }
89 |
90 | {
91 | PyRealloc_A4
92 | Memcheck:Addr4
93 | fun:_PyObject_Realloc
94 | fun:*
95 | }
96 |
97 | {
98 | PyRealloc_A8
99 | Memcheck:Addr8
100 | fun:_PyObject_Realloc
101 | fun:*
102 | }
103 | {
104 | PyRealloc_V4
105 | Memcheck:Value4
106 | fun:_PyObject_Realloc
107 | fun:*
108 | }
109 |
110 | {
111 | PyRealloc_V8
112 | Memcheck:Value8
113 | fun:_PyObject_Realloc
114 | fun:*
115 | }
116 |
117 |
118 |
--------------------------------------------------------------------------------
/config/ide/qtcreator/code_style.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CodeStyleData
6 |
7 | true
8 | false
9 | true
10 | false
11 | false
12 | false
13 | false
14 | false
15 | true
16 | false
17 | false
18 | false
19 | true
20 | true
21 | false
22 | true
23 | false
24 | false
25 | false
26 | 4
27 | true
28 | true
29 | 1
30 | false
31 | 4
32 |
33 |
34 |
35 | DisplayName
36 | Blender
37 |
38 |
39 |
--------------------------------------------------------------------------------
/git/git_sh1_to_svn_rev.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-or-later

# generate svn rev-sha1 mapping
#
# Matches commit time-stamps from `git log` against the SVN data in
# `git_sh1_to_svn_rev.fossils`, writing two importable Python data files:
# `rev_to_sha1.py` and `sha1_to_rev.py`.

import os
CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))
# Repository root: three directory levels up from this script.
SOURCE_DIR = os.path.normpath(os.path.abspath(os.path.normpath(os.path.join(CURRENT_DIR, "..", "..", ".."))))

print("creating git-log of %r" % SOURCE_DIR)
os.chdir(SOURCE_DIR)
# One line per commit: "<40-char SHA1> <ISO date>", written to a temp file.
os.system('git log --all --format="%H %cd" --date=iso > "' + CURRENT_DIR + '/git_log.txt"')

print("creating mapping...")
os.chdir(CURRENT_DIR)
# Maps "YYYY-MM-DD HH:MM:SS" commit time -> SHA1.
time_to_sha1 = {}
f = "git_log.txt"
with open(f, "r", encoding="utf-8") as fh:
    for l in fh:
        # Fixed-width fields: 40 chars of SHA1, a space, then 19 chars of date-time.
        sha1 = l[:40]
        time = l[41:60]
        time_to_sha1[time] = sha1
os.remove("git_log.txt")

# for reverse mapping
rev_sha1_ls = []

with open("rev_to_sha1.py", "w", encoding="utf-8") as fh_dst:
    fh_dst.write("data = {\n")

    f = "git_sh1_to_svn_rev.fossils"
    with open(f, "r", encoding="utf-8") as fh:
        for l in fh:
            # skip 'SVN:'
            rev, time = l[4:].split("\t", 1)
            # Normalize "YYYY-MM-DDTHH:MM:SSZ..." -> "YYYY-MM-DD HH:MM:SS"
            # so it matches the `git log` key format above.
            time = time.split("Z", 1)[0].replace("T", " ", 1)
            sha1 = time_to_sha1.get(time)
            # Revisions with no matching commit time are silently skipped.
            if sha1 is not None:
                fh_dst.write('%s: "%s",\n' % (rev, sha1))

                rev_sha1_ls.append((rev, sha1))

    fh_dst.write('}\n')

print("written: rev_to_sha1.py")

with open("sha1_to_rev.py", "w", encoding="utf-8") as fh_dst:
    fh_dst.write("data = {\n")
    for rev, sha1 in rev_sha1_ls:
        fh_dst.write('"%s": %s,\n' % (sha1, rev))
    fh_dst.write('}\n')

print("written: sha1_to_rev.py")
54 |
--------------------------------------------------------------------------------
/git/git_sort_commits.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-or-later

"""
Sort commits by date (oldest first)
(useful for collecting commits to cherry pick)

Example:

   git_sort_commits.py < commits.txt
"""

import sys
import os

# Location of the shared `git_log` helper module (in `../utils`).
MODULE_DIR = os.path.normpath(os.path.join(os.path.dirname(__file__), "..", "utils"))
# NOTE(review): resolves to the `.git` directory of the enclosing repository;
# it is passed to `git_log.GitCommit` below — confirm that matches `git_log`'s API.
SOURCE_DIR = os.path.normpath(os.path.join(MODULE_DIR, "..", "..", "..", ".git"))

sys.path.append(MODULE_DIR)

import git_log
22 |
23 |
def main():
    """Read commit hashes from stdin, print them ordered by commit date."""
    import re
    # Strip Phabricator style prefixes: rB, rBA ... etc.
    re_sha1_prefix = re.compile("^r[A-Z]+")

    hashes = (re_sha1_prefix.sub("", word) for word in sys.stdin.read().split())
    commits = sorted(
        (git_log.GitCommit(sha1, SOURCE_DIR) for sha1 in hashes),
        key=lambda commit: commit.date,
    )

    for commit in commits:
        print(commit.sha1)
41 |
42 |
43 | if __name__ == "__main__":
44 | main()
45 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-2.0-or-later
2 |
3 | [tool.autopep8]
# Configuration for `autopep8`, allowing the command: autopep8 .
5 | # to reformat all source files.
6 | #
# NOTE: the settings defined here map directly to command line arguments
8 | # which will override these settings when passed in to autopep8.
9 |
10 | max_line_length = 120
11 |
12 | ignore = [
13 | # Info: Use `isinstance()` instead of comparing types directly.
14 | # Why disable? Changes code logic, in rare cases we want to compare exact types.
15 | "E721",
16 | # Info: Fix bare except.
17 | # Why disable? Disruptive, leave our exceptions alone.
18 | "E722",
19 | # Info: Fix module level import not at top of file.
20 | # Why disable? Re-ordering imports is disruptive and breaks some scripts
21 | # that need to check if a module has already been loaded in the case of reloading.
22 | "E402",
23 | # Info: Try to make lines fit within --max-line-length characters.
24 | # Why disable? Causes lines to be wrapped, where long lines have the trailing bracket moved to the end of the line.
25 | # If trailing commas were respected as they are by clang-format this might be acceptable.
26 | # Note that this doesn't disable all line wrapping.
27 | "E501",
28 | # Info: Fix various deprecated code (via lib2to3)
29 | # Why disable? Does nothing besides incorrectly adding a duplicate import,
30 | # could be reported as a bug except this is likely to be removed soon, see:
31 | # https://github.com/python/cpython/issues/84540.
32 | "W690",
33 | ]
34 |
35 | # Use aggressive as many useful edits are disabled unless it's enabled.
36 | # Any edits which are overly disruptive or risky can be removed in the `ignore` list.
37 | aggressive = 2
38 |
39 | # Exclude:
40 | # - `./svn_rev_map/` contains data-files which are slow to re-format and don't benefit from formatting.
41 | exclude = """
42 | ./svn_rev_map/sha1_to_rev.py,
43 | ./svn_rev_map/rev_to_sha1.py,
44 | """
45 |
46 | # Omit settings such as `jobs`, `in_place` & `recursive` as they can cause editor utilities that auto-format on save
47 | # to fail if the STDIN/STDOUT is used for formatting (which isn't compatible with these options).
48 |
--------------------------------------------------------------------------------
/readme.rst:
--------------------------------------------------------------------------------
1 |
2 | Blender Dev Tools
3 | #################
4 |
5 | This repository is intended for miscellaneous tools, utilities, configurations and
anything that helps with Blender development, but isn't directly related to building Blender.
7 |
Some of the tools included may be used stand-alone; others expect Blender's source code to be available.
9 |
10 |
11 | Usage
12 | =====
13 |
14 | While this is a stand-alone repository,
some of the scripts which access Blender's source code assume this repository will be located at
``source/tools`` within Blender's source code repository. At some point this may be included as a submodule.
17 |
18 | Some tools also rely on the ``blender`` binary, this is assumed to be located at: ``../../blender.bin``.
19 | *The root directory of Blender's git repository*
20 |
21 |
22 | Categories
23 | ==========
24 |
25 | Check Source
26 | ------------
27 |
28 | Any tools for scanning source files to report issues with code, style, conventions, deprecated features etc.
29 |
30 |
31 | Config
32 | ------
33 |
34 | Configuration for 3rd party applications (IDE's, code-analysis, debugging tools... etc).
35 |
36 |
37 | Git
38 | ---
39 |
40 | Scripts and utilities for working with git.
41 |
42 |
43 | Utils
44 | -----
45 |
46 | Programs (scripts) to help with development
47 | (currently for converting formats, creating mouse cursor, updating themes).
48 |
--------------------------------------------------------------------------------
/utils/autopep8_clean.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | import subprocess
5 | import os
6 | from os.path import join
7 |
8 | from autopep8_clean_config import PATHS, PATHS_EXCLUDE
9 |
10 | from typing import (
11 | Callable,
12 | Generator,
13 | Optional,
14 | Sequence,
15 | )
16 |
# Useful to disable when debugging warnings.
USE_MULTIPROCESS = True

# File extensions treated as formattable source files.
# NOTE: removed a stray debug `print(PATHS)` that dumped the whole path
# configuration to stdout every time this module was imported.
SOURCE_EXT = (
    # Python
    ".py",
)
25 |
26 |
def is_source_and_included(filename: str) -> bool:
    """True when `filename` is a formattable source file that isn't excluded."""
    if not filename.endswith(SOURCE_EXT):
        return False
    return filename not in PATHS_EXCLUDE
32 |
33 |
def path_iter(
        path: str,
        filename_check: Optional[Callable[[str], bool]] = None,
) -> Generator[str, None, None]:
    """
    Yield file paths found under `path`, skipping hidden files and
    directories; when given, `filename_check` filters each full path.
    """
    for dirpath, dirnames, filenames in os.walk(path):
        # Prune hidden directories (".git" etc.) in-place so os.walk skips them.
        dirnames[:] = [d for d in dirnames if not d.startswith(".")]

        for name in filenames:
            if name.startswith("."):
                continue
            filepath = join(dirpath, name)
            if filename_check is not None and not filename_check(filepath):
                continue
            yield filepath
48 |
49 |
def path_expand(
        paths: Sequence[str],
        filename_check: Optional[Callable[[str], bool]] = None,
) -> Generator[str, None, None]:
    """
    Expand `paths`: directories are walked recursively (via `path_iter`),
    plain files are yielded as-is, missing paths are reported and skipped.
    """
    for p in paths:
        if not os.path.exists(p):
            print("Missing:", p)
            continue
        if os.path.isdir(p):
            yield from path_iter(p, filename_check)
        else:
            yield p
61 |
62 |
def autopep8_format_file(f: str) -> None:
    """Run autopep8 in-place on file `f` (the path is printed for progress)."""
    print(f)
    # NOTE: these codes mirror the `[tool.autopep8]` ignore list in this
    # repository's `pyproject.toml` — keep the two in sync.
    subprocess.call((
        "autopep8",
        "--ignore",
        ",".join((
            # Info: Use `isinstance()` instead of comparing types directly.
            # Why disable?: Changes code logic, in rare cases we want to compare exact types.
            "E721",
            # Info: Fix bare except.
            # Why disable?: Disruptive, leave our exceptions alone.
            "E722",
            # Info: Fix module level import not at top of file.
            # Why disable?: re-ordering imports is disruptive and breaks some scripts
            # that need to check if a module has already been loaded in the case of reloading.
            "E402",
            # Info: Try to make lines fit within --max-line-length characters.
            # Why disable? Causes lines to be wrapped, where long lines have the
            # trailing bracket moved to the end of the line.
            # If trailing commas were respected as they are by clang-format this might be acceptable.
            # Note that this doesn't disable all line wrapping.
            "E501",
            # Info: Fix various deprecated code (via lib2to3)
            # Why disable?: causes imports to be added/re-arranged.
            "W690",
        )),
        "--aggressive",
        "--in-place",
        "--max-line-length", "120",
        f,
    ))
94 |
95 |
def main() -> None:
    """
    Format files with autopep8: either the paths given on the command line,
    or the configured `PATHS` when the script is run without arguments.
    """
    import sys

    # When the last argument is this script itself the script was run with no
    # trailing path arguments, so fall back to the configured paths.
    if os.path.samefile(sys.argv[-1], __file__):
        paths = path_expand(PATHS, is_source_and_included)
    else:
        paths = path_expand(sys.argv[1:], is_source_and_included)

    if USE_MULTIPROCESS:
        import multiprocessing
        job_total = multiprocessing.cpu_count()
        # Use the pool as a context manager so workers are reliably
        # terminated & joined (previously the pool was leaked).
        with multiprocessing.Pool(processes=job_total * 2) as pool:
            pool.map(autopep8_format_file, paths)
    else:
        for f in paths:
            autopep8_format_file(f)
112 |
113 |
114 | if __name__ == "__main__":
115 | main()
116 |
--------------------------------------------------------------------------------
/utils/autopep8_clean_config.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-2.0-or-later
2 |
3 | import os
4 |
5 | from typing import (
6 | Generator,
7 | Callable,
8 | Set,
9 | Tuple,
10 | )
11 |
# Directories (relative to Blender's source root) scanned for files to format.
PATHS: Tuple[str, ...] = (
    "build_files",
    "doc",
    "release/datafiles",
    "release/lts",
    "release/scripts/freestyle",
    "release/scripts/modules",
    "release/scripts/presets",
    "release/scripts/startup",
    "release/scripts/templates_py",
    "source/blender",
    "source/tools",
    "tests",
)

# Blender's source root: three directory levels up from this file.
SOURCE_DIR = os.path.normpath(os.path.abspath(os.path.normpath(
    os.path.join(os.path.dirname(__file__), "..", "..", ".."))))

PATHS = tuple(
    os.path.join(SOURCE_DIR, p.replace("/", os.sep))
    for p in PATHS
)

# Generated data files which are slow to re-format and must be left as-is.
# NOTE: removed a duplicated "rev_to_sha1.py" entry (harmless in a set, but noise).
PATHS_EXCLUDE: Set[str] = set(
    os.path.join(SOURCE_DIR, p.replace("/", os.sep))
    for p in
    (
        "source/tools/svn_rev_map/sha1_to_rev.py",
        "source/tools/svn_rev_map/rev_to_sha1.py",
        "release/scripts/modules/rna_manual_reference.py",
    )
)
45 |
46 |
def files(path: str, test_fn: Callable[[str], bool]) -> Generator[str, None, None]:
    """
    Yield paths of files under `path` whose *file name* passes `test_fn`,
    skipping hidden directories such as ".git".
    """
    for dirpath, dirnames, filenames in os.walk(path):
        # Prune hidden directories in-place so os.walk never descends into them.
        dirnames[:] = [d for d in dirnames if not d.startswith(".")]
        for filename in filenames:
            if not test_fn(filename):
                continue
            yield os.path.join(dirpath, filename)
55 |
--------------------------------------------------------------------------------
/utils/blender_keyconfig_export_permutations.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 |
5 | r"""
6 | This script exports permutations of key-maps with different settings modified.
7 |
8 | Useful for checking changes intended for one configuration don't impact others accidentally.
9 |
10 | ./blender.bin -b --factory-startup \
11 | --python source/tools/utils/blender_keyconfig_export_permutations.py -- \
12 | --preset=Blender \
13 | --output-dir=./output \
14 | --keymap-prefs=select_mouse:rmb_action
15 |
16 | /blender.bin -b --factory-startup \
17 | --python source/tools/utils/blender_keyconfig_export_permutations.py -- \
18 | --preset=Blender_27x \
19 | --output-dir=output \
20 | --keymap-prefs="select_mouse"
21 |
22 | The preferences setting: ``select_mouse:rmb_action`` expands into:
23 |
24 | config = [
25 | ("select_mouse", ('LEFT', 'RIGHT')),
26 | ("rmb_action", ('TWEAK', 'FALLBACK_TOOL')),
27 | ]
28 | """
29 |
30 | import os
31 | import sys
32 |
33 |
def argparse_create():
    """Build the parser for the script's options (passed after ``--``)."""
    import argparse

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter,
    )

    # All options share the same shape, so declare them table-driven.
    for flag, dest, default, metavar, help_text in (
        (
            "--preset", "preset", "Blender", 'PRESET',
            "The name of the preset to export",
        ),
        (
            "--output-dir", "output_dir", ".", 'OUTPUT_DIR',
            "The directory to output to.",
        ),
        (
            "--keymap-prefs", "keymap_prefs", "select_mouse:rmb_action", 'KEYMAP_PREFS',
            (
                "Colon separated list of attributes to generate key-map configuration permutations. "
                "WARNING: as all combinations are tested, their number increases exponentially!"
            ),
        ),
    ):
        parser.add_argument(
            flag,
            dest=dest,
            default=default,
            metavar=metavar, type=str,
            help=help_text,
            required=False,
        )

    return parser
73 |
74 |
def permutations_from_attrs_impl(config, permutation, index):
    """
    Recursively yield every (attr, value) permutation of `config`
    from `index` onward, using `permutation` as a scratch buffer.
    """
    attr, values = config[index]
    is_last = (index + 1 == len(config))
    for val in values:
        permutation[index] = (attr, val)
        if is_last:
            yield tuple(permutation)
        else:
            # Keep walking down the list of permutations.
            yield from permutations_from_attrs_impl(config, permutation, index + 1)
    # Not necessary, just ensure stale values aren't used.
    permutation[index] = None
87 |
88 |
def permutations_from_attrs(config):
    """
    Take a list of attributes and possible values:

        config = [
            ("select_mouse", ('LEFT', 'RIGHT')),
            ("rmb_action", ('TWEAK', 'FALLBACK_TOOL')),
        ]

    Yielding all permutations:

        [("select_mouse", 'LEFT'), ("rmb_action", 'TWEAK')],
        [("select_mouse", 'LEFT'), ("rmb_action", 'FALLBACK_TOOL')],
        ... etc ...
    """
    if not config:
        return ()
    scratch = [None] * len(config)
    result = list(permutations_from_attrs_impl(config, scratch, 0))
    # The recursive helper must leave the scratch buffer fully reset.
    assert scratch == ([None] * len(config))
    return result
110 |
111 |
def permutation_as_filename(preset, values):
    """
    Takes a preset name and a permutation such as:

        [("select_mouse", 'LEFT'), ("rmb_action", 'TWEAK')]

    and returns a filename-compatible string for it.
    """
    from urllib.parse import quote
    if not values:
        return quote(preset)

    suffix = ".".join(
        "-".join((str(key), str(val)))
        for key, val in values
    )
    # Needed so forward slashes aren't included in the resulting name.
    return quote(preset + "_" + suffix, safe="")
132 |
133 |
def main():
    """Export the key-map preset once per permutation of the requested preferences."""
    # Only arguments after "--" belong to this script (Blender consumes the rest).
    args = sys.argv[sys.argv.index("--") + 1:] if "--" in sys.argv else []
    try:
        import bpy
    except ImportError:
        # Run outside of Blender, just show "--help".
        bpy = None
        args.insert(0, "--help")

    args = argparse_create().parse_args(args)
    if bpy is None:
        return

    from bpy import context

    preset = args.preset
    output_dir = args.output_dir

    os.makedirs(output_dir, exist_ok=True)

    # Needed for background mode.
    preset_filepath = bpy.utils.preset_find(preset, preset_path="keyconfig")
    bpy.ops.preferences.keyconfig_activate(filepath=preset_filepath)

    # Key-map preferences..
    km_prefs = context.window_manager.keyconfigs.active.preferences
    config = []
    # Use RNA introspection to derive every possible value for each attribute:
    if args.keymap_prefs:
        for attr in args.keymap_prefs.split(":"):
            if not hasattr(km_prefs, attr):
                print(f"KeyMap preferences does not have attribute: {attr:s}")
                sys.exit(1)

            prop_def = km_prefs.rna_type.properties.get(attr)
            match prop_def.type:
                case 'ENUM':
                    value = tuple(val.identifier for val in prop_def.enum_items)
                case 'BOOLEAN':
                    value = (True, False)
                case _ as prop_def_type:
                    # Only enum & boolean preferences are supported.
                    raise Exception(f"Unhandled attribute type {prop_def_type:s}")
            config.append((attr, value))
    config = tuple(config)

    # Fall back to a single empty permutation so the preset is exported at
    # least once even when no preferences were requested.
    for attr_permutation in (permutations_from_attrs(config) or ((),)):

        # Reload and set.
        if attr_permutation is not None:
            km_prefs = context.window_manager.keyconfigs.active.preferences
            for attr, value in attr_permutation:
                setattr(km_prefs, attr, value)
            # Re-activate after setting preferences, tsk, ideally this shouldn't be necessary.
            bpy.ops.preferences.keyconfig_activate(filepath=preset_filepath)

        filepath = os.path.join(output_dir, permutation_as_filename(preset, attr_permutation) + ".py")

        print("Writing:", filepath)
        bpy.ops.preferences.keyconfig_export(filepath=filepath, all=True)

    sys.exit()


if __name__ == "__main__":
    main()
199 |
--------------------------------------------------------------------------------
/utils/blender_merge_format_changes.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | import os
5 | import subprocess
6 | import sys
7 |
# We unfortunately ended with three commits instead of a single one to be handled as
# 'clang-format' commit, we are handling them as a single 'block'.
format_commits = (
    'e12c08e8d170b7ca40f204a5b0423c23a9fbc2c1',
    '91a9cd0a94000047248598394c41ac30f893f147',
    '3076d95ba441cd32706a27d18922a30f8fd28b8a',
)
# The last commit before the clang-format block was applied.
pre_format_commit = format_commits[0] + '~1'
16 |
17 |
def get_string(cmd):
    """Run `cmd`, returning its stripped standard output decoded as UTF-8."""
    proc = subprocess.run(cmd, stdout=subprocess.PIPE)
    return proc.stdout.decode('utf8').strip()
20 |
21 |
# Parse arguments.
mode = None
base_branch = 'main'
if len(sys.argv) >= 2:
    # Note that the recursive conflict resolution strategy has to be reversed in rebase compared to merge.
    # See https://git-scm.com/docs/git-rebase#Documentation/git-rebase.txt--m
    if sys.argv[1] == '--rebase':
        mode = 'rebase'
        recursive_format_commit_merge_options = '-Xignore-all-space -Xtheirs'
    elif sys.argv[1] == '--merge':
        mode = 'merge'
        recursive_format_commit_merge_options = '-Xignore-all-space -Xours'
    if len(sys.argv) == 4:
        if sys.argv[2] == '--base_branch':
            base_branch = sys.argv[3]

# No mode selected: print usage and exit.
if mode is None:
    print("Merge or rebase Blender main (or another base branch) into a branch in 3 steps,")
    print("to automatically merge clang-format changes.")
    print("")
    print(" --rebase Perform equivalent of 'git rebase main'")
    print(" --merge Perform equivalent of 'git merge main'")
    print("")
    print("Optional arguments:")
    print(" --base_branch Use given branch instead of main")
    print(" (assuming that base branch has already been updated")
    print(" and has the initial clang-format commit).")
    sys.exit(0)

# Verify we are in the right directory.
root_path = get_string(['git', 'rev-parse', '--show-superproject-working-tree'])
if os.path.realpath(root_path) != os.path.realpath(os.getcwd()):
    print("BLENDER MERGE: must run from blender repository root directory")
    sys.exit(1)

# Abort if a rebase is still in progress.
rebase_merge = get_string(['git', 'rev-parse', '--git-path', 'rebase-merge'])
rebase_apply = get_string(['git', 'rev-parse', '--git-path', 'rebase-apply'])
merge_head = get_string(['git', 'rev-parse', '--git-path', 'MERGE_HEAD'])
if os.path.exists(rebase_merge) or \
        os.path.exists(rebase_apply) or \
        os.path.exists(merge_head):
    print("BLENDER MERGE: rebase or merge in progress, complete it first")
    sys.exit(1)

# Abort if uncommitted changes.
changes = get_string(['git', 'status', '--porcelain', '--untracked-files=no'])
if len(changes) != 0:
    print("BLENDER MERGE: detected uncommitted changes, can't run")
    sys.exit(1)

# Setup command, with commit message for merge commits.
if mode == 'rebase':
    mode_cmd = 'rebase'
else:
    branch = get_string(['git', 'rev-parse', '--abbrev-ref', 'HEAD'])
    mode_cmd = 'merge --no-edit -m "Merge \'' + base_branch + '\' into \'' + branch + '\'"'

# Step 1: rebase/merge up to (but excluding) the clang-format commit block.
code = os.system('git merge-base --is-ancestor ' + pre_format_commit + ' HEAD')
if code != 0:
    code = os.system('git ' + mode_cmd + ' ' + pre_format_commit)
    if code != 0:
        print("BLENDER MERGE: resolve conflicts, complete " + mode + " and run again")
        sys.exit(code)

# Step 2: rebase/merge the clang-format commit itself, ignoring whitespace
# conflicts, then re-run `make format` to resolve remaining differences.
code = os.system('git merge-base --is-ancestor ' + format_commits[-1] + ' HEAD')
if code != 0:
    os.system('git ' + mode_cmd + ' ' + recursive_format_commit_merge_options + ' ' + format_commits[-1])
    paths = get_string(('git', '--no-pager', 'diff', '--name-only', format_commits[-1])).replace('\n', ' ')
    if sys.platform == 'win32' and len(paths) > 8000:
        # Windows command-line does not accept more than 8191 chars.
        os.system('make format')
    else:
        os.system('make format PATHS="' + paths + '"')
    os.system('git add -u')
    count = int(get_string(['git', 'rev-list', '--count', '' + format_commits[-1] + '..HEAD']))
    if count == 1 or mode == 'merge':
        # Amend if we just have a single commit or are merging.
        os.system('git commit --amend --no-edit')
    else:
        # Otherwise create a commit for formatting.
        os.system('git commit -m "Cleanup: apply clang format"')

# Step 3: rebase/merge the remaining commits.
code = os.system('git ' + mode_cmd + ' ' + base_branch)
if code != 0:
    print("BLENDER MERGE: resolve conflicts, complete " + mode + " and you're done")
else:
    print("BLENDER MERGE: done")
sys.exit(code)
114 |
--------------------------------------------------------------------------------
/utils/blender_theme_as_c.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | """
5 | Generates 'userdef_default_theme.c' from a 'userpref.blend' file.
6 |
7 | Pass your user preferenes blend file to this script to update the C source file.
8 |
9 | eg:
10 |
11 | ./source/tools/utils/blender_theme_as_c.py ~/.config/blender/2.80/config/userpref.blend
12 |
13 | .. or find the latest:
14 |
15 | ./source/tools/utils/blender_theme_as_c.py $(find ~/.config/blender -name "userpref.blend" | sort | tail -1)
16 | """
17 |
# Fixed header emitted at the top of the generated `userdef_default_theme.c`.
C_SOURCE_HEADER = r'''/* SPDX-License-Identifier: GPL-2.0-or-later */

/**
 * Generated by 'source/tools/utils/blender_theme_as_c.py'
 *
 * Do not hand edit this file!
 */

#include "DNA_userdef_types.h"

#include "BLO_readfile.h"

/* clang-format off */

#ifdef __LITTLE_ENDIAN__
# define RGBA(c) {((c) >> 24) & 0xff, ((c) >> 16) & 0xff, ((c) >> 8) & 0xff, (c) & 0xff}
# define RGB(c) {((c) >> 16) & 0xff, ((c) >> 8) & 0xff, (c) & 0xff}
#else
# define RGBA(c) {(c) & 0xff, ((c) >> 8) & 0xff, ((c) >> 16) & 0xff, ((c) >> 24) & 0xff}
# define RGB(c) {(c) & 0xff, ((c) >> 8) & 0xff, ((c) >> 16) & 0xff}
#endif

'''
41 |
42 |
def round_float_32(f):
    """Return `f` rounded to the nearest value representable as a 32-bit float."""
    import struct
    # Round-trip through a packed 32-bit float to drop the extra precision.
    return struct.unpack("f", struct.pack("f", f))[0]
46 |
47 |
def repr_f32(f):
    """
    Return the shortest decimal string that still equals `f`
    once both are truncated to 32-bit float precision.
    """
    f_round = round_float_32(f)
    f_str = repr(f)
    frac = f_str.partition(".")[2]
    # No fractional part: nothing to shorten.
    if not frac:
        return f_str
    for ndigits in range(1, len(frac)):
        candidate = round(f, ndigits)
        if round_float_32(candidate) == f_round:
            return "%.*f" % (ndigits, candidate)
    return f_str
60 |
61 |
import os

# Avoid maintaining multiple blendfile modules
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "..", "modules"))
del sys

# Output: the generated C default-theme file inside Blender's source tree.
source_dst = os.path.join(
    os.path.dirname(__file__),
    "..", "..", "..",
    "release", "datafiles", "userdef", "userdef_default_theme.c",
)

# Input: DNA rename definitions, used to map storage names to runtime names.
dna_rename_defs_h = os.path.join(
    os.path.dirname(__file__),
    "..", "..", "..",
    "source", "blender", "makesdna", "intern", "dna_rename_defs.h",
)
80 |
81 |
def dna_rename_defs(blend):
    """
    Apply the DNA_STRUCT_RENAME_ELEM(...) renames from `dna_rename_defs.h`
    to the DNA structs in `blend`, so members can be looked up by their
    runtime names instead of their on-disk storage names.
    """
    from blendfile import DNAName
    import re
    # NOTE: the pattern previously read `DNA_STRUCT_RENAME_ELEM+\(` — the
    # stray `+` repeated the final "M" (also matching "ELEMM(") and was
    # clearly unintended; valid input matches identically without it.
    re_dna_struct_rename_elem = re.compile(
        r'DNA_STRUCT_RENAME_ELEM\('
        r'([a-zA-Z0-9_]+)' r',\s*'
        r'([a-zA-Z0-9_]+)' r',\s*'
        r'([a-zA-Z0-9_]+)' r'\)',
    )
    with open(dna_rename_defs_h, 'r', encoding='utf-8') as fh:
        data = fh.read()
    for l in data.split('\n'):
        m = re_dna_struct_rename_elem.match(l)
        if m is not None:
            struct_name, member_storage, member_runtime = m.groups()
            # DNA identifiers are handled as bytes throughout `blendfile`.
            struct_name = struct_name.encode('utf-8')
            member_storage = member_storage.encode('utf-8')
            member_runtime = member_runtime.encode('utf-8')
            dna_struct = blend.structs[blend.sdna_index_from_id[struct_name]]
            for field in dna_struct.fields:
                dna_name = field.dna_name
                if member_storage == dna_name.name_only:
                    # Replace the DNAName with a fresh copy so shared
                    # instances aren't mutated, then re-key the lookup table.
                    field.dna_name = dna_name = DNAName(dna_name.name_full)
                    del dna_struct.field_from_name[dna_name.name_only]
                    dna_name.name_full = dna_name.name_full.replace(member_storage, member_runtime)
                    dna_name.name_only = member_runtime
                    dna_struct.field_from_name[dna_name.name_only] = field
111 |
112 |
def theme_data(userpref_filename):
    """Open a `userpref.blend` file, returning (blend_file, first_theme_block)."""
    import blendfile
    blend = blendfile.open_blend(userpref_filename)
    dna_rename_defs(blend)
    user_block = next((block for block in blend.blocks if block.code == b'USER'), None)
    # theme_type = b.sdna_index_from_id[b'bTheme']
    theme = user_block.get_pointer((b'themes', b'first'))
    theme.refine_type(b'bTheme')
    return blend, theme
122 |
123 |
def is_ignore_dna_name(name):
    """True for DNA member names that should be omitted from the generated C theme."""
    # Skip padding/private members (leading underscore) and the
    # explicitly excluded `active_theme_area` member.
    return name.startswith(b'_') or name == b'active_theme_area'
133 |
134 |
def write_member(fw, indent, b, theme, ls):
    """
    Write the flattened ``(path, value)`` pairs of `ls` through `fw` as
    nested C99 designated initializers.

    `ls` must keep members of the same struct adjacent (the order produced
    by ``items_recursive_iter``); braces are opened/closed by comparing each
    entry's path against the previous one.
    """
    path_old = ()

    for key, value in ls:
        # `key` is the member's full path; the last element is the member
        # name itself, everything before it addresses the nested structs.
        key = key if type(key) is tuple else (key,)
        path_new = key[:-1]

        if tuple(path_new) != tuple(path_old):
            if path_old:
                # Close the braces of structs we are leaving.
                p = len(path_old) - 1
                while p >= 0 and (p >= len(path_new) or path_new[p] != path_old[p]):
                    indent = p + 1
                    fw('\t' * indent)
                    fw('},\n')
                    p -= 1
                del p

            # `p` ends up as the length of the common path prefix.
            p = 0
            for p in range(min(len(path_old), len(path_new))):
                if path_old[p] != key[p]:
                    break
            else:
                p = p + 1

            # Open the braces of structs we are entering.
            for i, c in enumerate(path_new[p:]):
                indent = p + i + 1
                fw('\t' * indent)
                if type(c) is bytes:
                    attr = c.decode('ascii')
                    fw(f'.{attr} = ')
                fw('{\n')

        if not is_ignore_dna_name(key[-1]):
            indent = '\t' * (len(path_new) + 1)
            attr = key[-1].decode('ascii')
            if isinstance(value, float):
                # Zero values are skipped (C initializers default to zero).
                if value != 0.0:
                    value_repr = repr_f32(value)
                    fw(f'{indent}.{attr} = {value_repr}f,\n')
            elif isinstance(value, int):
                if value != 0:
                    fw(f'{indent}.{attr} = {value},\n')
            elif isinstance(value, bytes):
                if set(value) != {0}:
                    # 3/4 byte values are written via the RGB(A) helper macros.
                    if len(value) == 3:
                        value_repr = "".join(f'{ub:02x}' for ub in value)
                        fw(f'{indent}.{attr} = RGB(0x{value_repr}),\n')
                    elif len(value) == 4:
                        value_repr = "".join(f'{ub:02x}' for ub in value)
                        fw(f'{indent}.{attr} = RGBA(0x{value_repr}),\n')
                    else:
                        value = value.rstrip(b'\x00')
                        # Printable ASCII becomes a string literal, anything
                        # else a byte-array initializer.
                        is_ascii = True
                        for ub in value:
                            if not (ub >= 32 and ub < 127):
                                is_ascii = False
                                break
                        if is_ascii:
                            value_repr = value.decode('ascii')
                            fw(f'{indent}.{attr} = "{value_repr}",\n')
                        else:
                            value_repr = "".join(f'{ub:02x}' for ub in value)
                            fw(f'{indent}.{attr} = {{{value_repr}}},\n')
            else:
                fw(f'{indent}.{attr} = {value},\n')
        path_old = path_new
201 |
202 |
def convert_data(blend, theme, f):
    """Write `theme` to `f` as the C initializer for ``U_theme_default``."""
    write = f.write
    write(C_SOURCE_HEADER)
    write('const bTheme U_theme_default = {\n')
    members = list(theme.items_recursive_iter(use_nil=False))
    write_member(write, 1, blend, theme, members)
    for tail in ('};\n', '\n', '/* clang-format on */\n'):
        write(tail)
213 |
214 |
def file_remove_empty_braces(source_dst):
    """
    Post-process the generated C source in-place:
    remove members assigned an empty brace initializer and replace tabs
    with two spaces.
    """
    import re

    with open(source_dst, 'r', encoding='utf-8') as fh:
        data = fh.read()

    # Remove:
    #   .foo = { },
    re_empty_member = re.compile(r'\s+\.[a-zA-Z_0-9]+\s+=\s+\{\s*\},')

    # Braces may become empty by removing nested members,
    # so repeat until the text no longer changes.
    # NOTE: the 4th positional argument of `re.sub` is `count`, not `flags`;
    # passing `re.MULTILINE` (== 8) there silently limited each pass to 8
    # replacements (harmless only thanks to this loop). Use the compiled
    # pattern instead; the pattern contains no '^'/'$' so no flag is needed.
    data_prev = None
    while data != data_prev:
        data_prev = data
        data = re_empty_member.sub('', data)

    # Use two spaces instead of tabs.
    data = data.replace('\t', '  ')

    with open(source_dst, 'w', encoding='utf-8') as fh:
        fh.write(data)
238 |
239 |
def main():
    """Convert the userpref.blend given as the last CLI argument to C source."""
    import sys
    blend, theme = theme_data(sys.argv[-1])
    with open(source_dst, 'w', encoding='utf-8') as out:
        convert_data(blend, theme, out)

    # Microsoft Visual Studio doesn't support empty braces.
    file_remove_empty_braces(source_dst)


if __name__ == "__main__":
    main()
252 |
--------------------------------------------------------------------------------
/utils/credits_git_gen.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | """
5 | Example use:
6 |
7 | credits_git_gen.py --source=/src/blender --range=SHA1..HEAD
8 | """
9 |
10 | from git_log import GitCommitIter
11 | import unicodedata as ud
12 |
13 | # -----------------------------------------------------------------------------
14 | # Lookup Table to clean up the credits
15 | #
16 | # This is a combination of unifying git logs as well as
17 | # name change requested by the authors.
18 |
# Maps the author name as it appears in the git log to the canonical
# credited name. Keys must already be NFC-normalized (see process_commit).
# NOTE(review): several entries map a name to itself ("Antonio Vazquez",
# "Clément Foucault", "Sybren A. Stüvel") — these presumably originally had
# differently-encoded (mojibake) keys that were normalized in this copy;
# confirm against the upstream file.
author_table = {
    "Aaron": "Aaron Carlisle",
    "Your Name": "Aaron Carlisle",
    "Alan": "Alan Troth",
    "andreas atteneder": "Andreas Atteneder",
    "Ankit": "Ankit Meel",
    "Antonioya": "Antonio Vazquez",
    "Antonio Vazquez": "Antonio Vazquez",
    "Antony Ryakiotakis": "Antony Riakiotakis",
    "Amélie Fondevilla": "Amelie Fondevilla",
    "bastien": "Bastien Montagne",
    "mont29": "Bastien Montagne",
    "bjornmose": "Bjorn Mose",
    "meta-androcto": "Brendon Murphy",
    "Brecht van Lommel": "Brecht Van Lommel",
    "Brecht Van Lömmel": "Brecht Van Lommel",
    "recht Van Lommel": "Brecht Van Lommel",
    "Clément Foucault": "Clément Foucault",
    "Clément": "Clément Foucault",
    "fclem": "Clément Foucault",
    "Clment Foucault": "Clément Foucault",
    "christian brinkmann": "Christian Brinkmann",
    "ZanQdo": "Daniel Salazar",
    "unclezeiv": "Davide Vercelli",
    "dilithjay": "Dilith Jayakody",
    "gaiaclary": "Gaia Clary",
    "Diego Hernan Borghetti": "Diego Borghetti",
    "Dotsnov Valentin": "Dontsov Valentin",
    "Eitan": "Eitan Traurig",
    "EitanSomething": "Eitan Traurig",
    "Erik": "Erik Abrahamsson",
    "Erick Abrahammson": "Erik Abrahamsson",
    "Eric Abrahamsson": "Erik Abrahamsson",
    "Ethan-Hall": "Ethan Hall",
    "filedescriptor": "Falk David",
    "Germano": "Germano Cavalcante",
    "Germano Cavalcantemano-wii": "Germano Cavalcante",
    "mano-wii": "Germano Cavalcante",
    "gsr": "Guillermo S. Romero",
    "Henrik Dick (weasel)": "Henrik Dick",
    "howardt": "Howard Trickey",
    "Iliay Katueshenock": "Iliya Katueshenock",
    "MOD": "Iliya Katueshenock",
    "Inês Almeida": "Ines Almeida",
    "brita": "Ines Almeida",
    "Ivan": "Ivan Perevala",
    "jensverwiebe": "Jens Verwiebe",
    "Jesse Y": "Jesse Yurkovich",
    "Joe Eagar": "Joseph Eagar",
    "Johnny Matthews (guitargeek)": "Johnny Matthews",
    "guitargeek": "Johnny Matthews",
    "jon denning": "Jon Denning",
    "julianeisel": "Julian Eisel",
    "Severin": "Julian Eisel",
    "Alex Strand": "Kenzie Strand",
    "Kevin Dietrich": "Kévin Dietrich",
    "Leon Leno": "Leon Schittek",
    "Lukas Toenne": "Lukas Tönne",
    "Mikhail": "Mikhail Matrosov",
    "OmarSquircleArt": "Omar Emara",
    "lazydodo": "Ray Molenkamp",
    "Ray molenkamp": "Ray Molenkamp",
    "Author Name": "Robert Guetzkow",
    "Sybren A. Stüvel": "Sybren A. Stüvel",
    "Simon": "Simon G",
    "Stephan": "Stephan Seitz",
    "Sebastian Herhoz": "Sebastian Herholz",
    "blender": "Sergey Sharybin",
    "Vuk GardaÅ¡eviÄ": "Vuk Gardašević",
    "ianwill": "Willian Padovani Germano",
    "Yiming Wu": "YimingWu",
}
91 |
92 |
93 | # -----------------------------------------------------------------------------
94 | # Class for generating credits
95 |
class CreditUser:
    """Per-author accumulator: commit count and first/last year of activity."""
    __slots__ = (
        "commit_total",
        "year_min",
        "year_max",
    )

    def __init__(self):
        self.commit_total = 0
        # Initialize the year range so attribute access is safe before the
        # first commit is processed (previously these slots were left unset,
        # raising AttributeError when read).
        self.year_min = None
        self.year_max = None
105 |
106 |
class Credits:
    """Accumulates per-author commit statistics and writes an HTML credits page."""
    __slots__ = (
        "users",
    )

    def __init__(self):
        # Maps credited author name -> CreditUser.
        self.users = {}

    def process_commit(self, c):
        # Normalize author string into canonical form, prevents duplicate credit users
        author = ud.normalize('NFC', c.author)
        author = author_table.get(author, author)
        year = c.date.year
        cu = self.users.get(author)
        if cu is None:
            cu = self.users[author] = CreditUser()
            cu.year_min = year
            cu.year_max = year

        cu.commit_total += 1
        cu.year_min = min(cu.year_min, year)
        cu.year_max = max(cu.year_max, year)

    def process(self, commit_iter):
        # Progress output every 100 commits.
        for i, c in enumerate(commit_iter):
            self.process_commit(c)
            if not (i % 100):
                print(i)

    def write(self, filepath,
              is_main_credits=True,
              contrib_companies=(),
              sort="name"):
        """
        Write the collected credits to `filepath` as an HTML fragment,
        sorted by author name or (when ``sort == "commit"``) commit count.

        NOTE(review): the HTML tags in the string literals below were
        missing in this copy of the file (the literals were split across
        lines, a syntax error); they have been reconstructed and should be
        confirmed against the upstream file.
        """
        # patch_word = "patch", "patches"
        commit_word = "commit", "commits"

        sorted_authors = {}
        if sort == "commit":
            sorted_authors = dict(sorted(self.users.items(), key=lambda item: item[1].commit_total))
        else:
            sorted_authors = dict(sorted(self.users.items()))

        with open(filepath, 'w', encoding="ascii", errors='xmlcharrefreplace') as file:
            file.write("<h3>Individual Contributors</h3>\n\n")
            for author, cu in sorted_authors.items():
                file.write("{:s}, {:,d} {:s} {:s}<br />\n".format(
                    author,
                    cu.commit_total,
                    commit_word[cu.commit_total > 1],
                    ("" if not is_main_credits else
                     ("- {:d}".format(cu.year_min) if cu.year_min == cu.year_max else
                      ("({:d} - {:d})".format(cu.year_min, cu.year_max))))))
            file.write("\n\n")

            # -------------------------------------------------------------------------
            # Companies, hard coded
            if is_main_credits:
                file.write("<h3>Contributions from Companies & Organizations</h3>\n")
                file.write("\n")
                for line in contrib_companies:
                    file.write("{:s}<br />\n".format(line))
                file.write("<br />\n")

                import datetime
                now = datetime.datetime.now()
                # Basename of this script, portable across path separators.
                fn = __file__.split("\\")[-1].split("/")[-1]
                file.write(
                    "<p><i>Generated by '{:s}' {:d}/{:d}/{:d}</i></p>\n".format(
                        fn, now.year, now.month, now.day
                    ))
178 |
179 |
def argparse_create():
    """Build the command line argument parser for the credits generator."""
    import argparse

    # When --help or no args are given, print this help
    parser = argparse.ArgumentParser(
        description="Review revisions.",
        epilog="This script is used to generate credits",
    )

    for flag, kwargs in (
            ("--source", dict(dest="source_dir", metavar='PATH', required=True,
                              help="Path to git repository")),
            ("--range", dict(dest="range_sha1", metavar='SHA1_RANGE', required=True,
                             help="Range to use, eg: 169c95b8..HEAD")),
            ("--sort", dict(dest="sort", metavar='METHOD', required=False,
                            help="Sort credits by 'name' (default) or 'commit'")),
    ):
        parser.add_argument(flag, **kwargs)

    return parser
212 |
213 |
def main():
    """Generate ``credits.html`` for the commit range given on the command line."""

    # ----------
    # Parse Args

    args = argparse_create().parse_args()

    ignore_dir = (
        b"blender/extern/",
        b"blender/intern/opennl/",
    )

    def is_credit_commit_valid(c):
        # A commit counts only when it touches at least one file
        # outside of the ignored directories.
        return any(f for f in c.files if not f.startswith(ignore_dir))

    # TODO, there are for sure more companies then are currently listed.
    # 1 liners for in html syntax
    contrib_companies = (
        "Unity Technologies - FBX Exporter",
        "BioSkill GmbH - H3D compatibility for X3D Exporter, "
        "OBJ Nurbs Import/Export",
        "AutoCRC - Improvements to fluid particles, vertex color baking",
        "Adidas - Principled BSDF shader in Cycles",
        "AMD - Cycles HIP GPU rendering, CPU optimizations",
        "Intel - Cycles oneAPI GPU rendering, CPU optimizations",
        "NVIDIA - Cycles OptiX GPU rendering, USD importer",
        "Facebook - Cycles subsurface scattering improvements",
        "Apple - Cycles Metal GPU backend",
    )

    # commit_range = "HEAD~10..HEAD"
    # commit_range = "blender-v2.81-release..blender-v2.82-release"
    # commit_range = "blender-v2.82-release"
    credits = Credits()
    citer = GitCommitIter(args.source_dir, args.range_sha1)
    credits.process(c for c in citer if is_credit_commit_valid(c))
    credits.write("credits.html",
                  is_main_credits=True,
                  contrib_companies=contrib_companies,
                  sort=args.sort)
    print("Written: credits.html")


if __name__ == "__main__":
    main()
264 |
--------------------------------------------------------------------------------
/utils/cycles_commits_sync.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | import collections
5 | import os
6 | import subprocess
7 | import sys
8 |
# Hashes to be ignored
#
# The system sometimes fails to match commits and suggests to backport
# revision which was already ported. In order to solve that we can:
#
# - Explicitly ignore some of the commits.
# - Move the synchronization point forward.
#
# All values in this file are bytes, matching the raw output of the
# git plumbing commands used below.
IGNORE_HASHES = {
}

# Start revisions from both repositories (commits known to be in sync).
CYCLES_START_COMMIT = b"b941eccba81bbb1309a0eb4977fc3a77796f4ada"  # blender-v2.92
BLENDER_START_COMMIT = b"02948a2cab44f74ed101fc1b2ad9fe4431123e85"  # v2.92

# Prefix which is common for all the subjects.
GIT_SUBJECT_COMMON_PREFIX = b"Subject: [PATCH] "

# Marker which indicates begin of new file in the patch set.
GIT_FILE_SECTION_MARKER = b"diff --git"

# Marker of the end of the patchset.
GIT_PATCHSET_END_MARKER = b"-- "
31 |
# Prefix of topic to be omitted
SUBJECT_SKIP_PREFIX = (
    b"Cycles: ",
    b"cycles: ",
    b"Cycles Standalone: ",
    b"Cycles standalone: ",
    b"cycles standalone: ",
)


def subject_strip(common_prefix, subject):
    """
    Strip a redundant "Cycles:"-style topic prefix from `subject`.

    `common_prefix` is prepended to each candidate prefix when matching
    and is preserved in the returned subject.
    """
    for skip in SUBJECT_SKIP_PREFIX:
        candidate = common_prefix + skip
        if not subject.startswith(candidate):
            continue
        # Drop the topic, re-capitalize what remains.
        return common_prefix + subject[len(candidate):].capitalize()
    return subject
50 |
51 |
def replace_file_prefix(path, prefix, replace_prefix):
    """
    Replace `prefix` with `replace_prefix` in every "a/..."/"b/..." token
    of the space-separated patch line `path` (all arguments are bytes).
    """
    prefix_len = len(prefix)
    result = []
    for token in path.split(b' '):
        new_token = token
        for side in (b"a/", b"b/"):
            if token.startswith(side + prefix):
                # 2 == len(side); the "a/"/"b/" marker itself is kept.
                new_token = side + replace_prefix + token[prefix_len + 2:]
        result.append(new_token)
    return b' '.join(result)
60 |
61 |
def cleanup_patch(patch, accept_prefix, replace_prefix):
    """
    Rewrite the patch file `patch` in place:

    - Strip the redundant "Cycles:"-style topic from the subject line.
    - Drop file sections outside of `accept_prefix`.
    - Rename file paths from `accept_prefix` to `replace_prefix`.
    """
    # NOTE: indexing bytes yields an int, so the previous
    # `accept_prefix[0] != b'/'` compared int with bytes and was always
    # true -- the assertions never fired. Compare the leading byte instead.
    assert not accept_prefix.startswith(b'/')
    assert not replace_prefix.startswith(b'/')

    full_accept_prefix = GIT_FILE_SECTION_MARKER + b" a/" + accept_prefix

    with open(patch, "rb") as f:
        content = f.readlines()

    clean_content = []
    do_skip = False
    for line in content:
        if line.startswith(GIT_SUBJECT_COMMON_PREFIX):
            # Skip possible prefix like "Cycles:", we already know change is
            # about Cycles since it's being committed to a Cycles repository.
            line = subject_strip(GIT_SUBJECT_COMMON_PREFIX, line)

            # Dots usually are omitted in the topic
            line = line.replace(b".\n", b"\n")
        elif line.startswith(GIT_FILE_SECTION_MARKER):
            # Skip the whole file section unless it is under the accepted prefix.
            if not line.startswith(full_accept_prefix):
                do_skip = True
            else:
                do_skip = False
                line = replace_file_prefix(line, accept_prefix, replace_prefix)
        elif line.startswith(GIT_PATCHSET_END_MARKER):
            do_skip = False
        elif line.startswith(b"---") or line.startswith(b"+++"):
            line = replace_file_prefix(line, accept_prefix, replace_prefix)

        if not do_skip:
            clean_content.append(line)

    with open(patch, "wb") as f:
        f.writelines(clean_content)
97 |
98 |
99 | # Get mapping from commit subject to commit hash.
100 | #
101 | # It'll actually include timestamp of the commit to the map key, so commits with
102 | # the same subject wouldn't conflict with each other.
def commit_map_get(repository, path, start_commit):
    """
    Return an OrderedDict mapping ``b"<timestamp> <normalized subject>"``
    to the commit sha1, for all commits in `repository` after
    `start_commit` which touch `path` (``intern/cycles/blender`` excluded).
    """
    command = (b"git",
               b"--git-dir=" + os.path.join(repository, b'.git'),
               b"--work-tree=" + repository,
               b"log", b"--format=%H %at %s", b"--reverse",
               start_commit + b'..HEAD',
               b'--',
               os.path.join(repository, path),
               b':(exclude)' + os.path.join(repository, b'intern/cycles/blender'))
    lines = subprocess.check_output(command).split(b"\n")
    commit_map = collections.OrderedDict()
    for line in lines:
        if line:
            commit_sha, stamped_subject = line.split(b' ', 1)
            stamp, subject = stamped_subject.split(b' ', 1)
            # Normalize the subject the same way for both repositories so
            # identical changes compare equal despite the "Cycles:" prefix.
            subject = subject_strip(b"", subject).rstrip(b".")
            stamped_subject = stamp + b" " + subject

            if commit_sha in IGNORE_HASHES:
                continue
            commit_map[stamped_subject] = commit_sha
    return commit_map
125 |
126 |
127 | # Get difference between two lists of commits.
128 | # Returns two lists: first are the commits to be ported from Cycles to Blender,
129 | # second one are the commits to be ported from Blender to Cycles.
def commits_get_difference(cycles_map, blender_map):
    """
    Return ``(cycles_to_blender, blender_to_cycles)``: the commit hashes
    present in one map but missing from the other (keyed by the stamped
    subject, see commit_map_get).
    """
    cycles_to_blender = [
        commit_hash for stamped_subject, commit_hash in cycles_map.items()
        if stamped_subject not in blender_map
    ]
    blender_to_cycles = [
        commit_hash for stamped_subject, commit_hash in blender_map.items()
        if stamped_subject not in cycles_map
    ]
    return cycles_to_blender, blender_to_cycles
142 |
143 |
144 | # Transfer commits from one repository to another.
145 | # Doesn't do actual commit just for the safety.
def transfer_commits(commit_hashes,
                     from_repository,
                     to_repository,
                     dst_is_cycles):
    """
    Write one numbered ``.patch`` file per commit in `commit_hashes` from
    `from_repository` into `to_repository` (no commits are made, the user
    applies them manually with ``git am``).

    `dst_is_cycles` selects the direction of the path prefix rewrite.
    """
    patch_index = 1
    for commit_hash in commit_hashes:
        command = (
            b"git",
            b"--git-dir=" + os.path.join(from_repository, b'.git'),
            b"--work-tree=" + from_repository,
            b"format-patch", b"-1",
            b"--start-number", bytes(str(patch_index), 'utf-8'),
            b"-o", to_repository,
            commit_hash,
            b'--',
            b':(exclude)' + os.path.join(from_repository, b'intern/cycles/blender'),
        )
        # `git format-patch` prints the path of the file it wrote.
        patch_file = subprocess.check_output(command).rstrip(b"\n")
        if dst_is_cycles:
            cleanup_patch(patch_file, b"intern/cycles", b"src")
        else:
            cleanup_patch(patch_file, b"src", b"intern/cycles")
        patch_index += 1
169 |
170 |
def main():
    """
    Compare the Cycles and Blender repositories given on the command line
    and write the missing commits of each into the other as patch files.
    """
    if len(sys.argv) != 3:
        print("Usage: %s /path/to/cycles/ /path/to/blender/" % sys.argv[0])
        return

    # Paths are handled as bytes throughout, matching git's raw output.
    cycles_repository = sys.argv[1].encode()
    blender_repository = sys.argv[2].encode()

    cycles_map = commit_map_get(cycles_repository, b'', CYCLES_START_COMMIT)
    blender_map = commit_map_get(blender_repository, b"intern/cycles", BLENDER_START_COMMIT)
    diff = commits_get_difference(cycles_map, blender_map)

    # diff[0]: missing in Blender, diff[1]: missing in Cycles.
    transfer_commits(diff[0], cycles_repository, blender_repository, False)
    transfer_commits(diff[1], blender_repository, cycles_repository, True)

    print("Missing commits were saved to the blender and cycles repositories.")
    print("Check them and if they're all fine run:")
    print("")
    print(" git am *.patch")


if __name__ == '__main__':
    main()
194 |
--------------------------------------------------------------------------------
/utils/cycles_timeit.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | import argparse
5 | import re
6 | import shutil
7 | import subprocess
8 | import sys
9 | import time
10 |
11 |
class COLORS:
    # ANSI terminal escape sequences used for colored log output.
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'  # reset all attributes
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
21 |
22 |
# Global verbosity flag, enabled by the --verbose command line option in main().
VERBOSE = False
24 |
25 | #########################################
26 | # Generic helper functions.
27 |
28 |
def logVerbose(*args):
    """Print only when global VERBOSE mode is enabled."""
    if not VERBOSE:
        return
    print(*args)
32 |
33 |
def logHeader(*args):
    """Print `args` highlighted with the bold header color."""
    text = " ".join(str(arg) for arg in args)
    print(f"{COLORS.HEADER}{COLORS.BOLD}{text}{COLORS.ENDC}")
38 |
39 |
def logWarning(*args):
    """Print `args` highlighted with the bold warning color."""
    text = " ".join(str(arg) for arg in args)
    print(f"{COLORS.WARNING}{COLORS.BOLD}{text}{COLORS.ENDC}")
44 |
45 |
def logOk(*args):
    """Print `args` highlighted with the bold success color."""
    text = " ".join(str(arg) for arg in args)
    print(f"{COLORS.OKGREEN}{COLORS.BOLD}{text}{COLORS.ENDC}")
50 |
51 |
def progress(count, total, prefix="", suffix=""):
    """Draw a single-line progress bar on stdout (no-op in verbose mode)."""
    if VERBOSE:
        return

    columns = shutil.get_terminal_size((80, 20)).columns

    if prefix:
        prefix = prefix + " "
    if suffix:
        suffix = " " + suffix

    # Leave room for the brackets, percentage and some padding.
    bar_len = columns - len(prefix) - len(suffix) - 10
    filled_len = int(round(bar_len * count / float(total)))
    percents = round(100.0 * count / float(total), 1)
    bar = '=' * filled_len + '-' * (bar_len - filled_len)

    sys.stdout.write(f'{prefix}[{bar}] {percents}%{suffix}\r')
    sys.stdout.flush()
71 |
72 |
def progressClear():
    """Erase the progress bar line (no-op in verbose mode)."""
    if VERBOSE:
        return

    width = shutil.get_terminal_size((80, 20)).columns
    sys.stdout.write(" " * width + "\r")
    sys.stdout.flush()
80 |
81 |
def humanReadableTimeDifference(seconds):
    """Format a duration in seconds as "MM:SS.ss" or "HH:MM:SS.ss"."""
    hours = int(seconds) // 3600
    remainder = seconds - hours * 3600
    minutes = int(remainder) // 60
    secs = remainder - minutes * 60
    if hours:
        return "%02d:%02d:%05.2f" % (hours, minutes, secs)
    return "%02d:%05.2f" % (minutes, secs)
91 |
92 |
def humanReadableTimeToSeconds(time):
    """Parse a "[HH:]MM:SS[.ff]" style string back into seconds."""
    tokens = time.split(".")
    # Fractional part first (if any), then the colon-separated fields
    # from right to left with increasing weight (1, 60, 3600, ...).
    result = float("0." + tokens[1]) if len(tokens) == 2 else 0
    multiplier = 1
    for part in reversed(tokens[0].split(":")):
        result += int(part) * multiplier
        multiplier *= 60
    return result
103 |
104 | #########################################
105 | # Benchmark specific helper functions.
106 |
107 |
def configureArgumentParser():
    """Create the command line argument parser for the benchmark helper."""
    parser = argparse.ArgumentParser(
        description="Cycles benchmark helper script.")
    parser.add_argument(
        "-b", "--binary",
        default="blender",
        help="Full file path to Blender's binary to use for rendering")
    parser.add_argument("-f", "--files", nargs='+')
    parser.add_argument(
        "-v", "--verbose",
        default=False,
        action="store_true",
        help="Perform fully verbose communication")
    return parser
121 |
122 |
def benchmarkFile(blender, blendfile, stats):
    """
    Render `blendfile` once with the `blender` binary and record timing
    information into `stats` (mutated in place, keyed by file name).

    Returns False when Blender exited with a non-zero code, True otherwise.
    """
    logHeader("Begin benchmark of file {}" . format(blendfile))
    # Prepare some regex for parsing
    re_path_tracing = re.compile(".*Path Tracing Tile ([0-9]+)/([0-9]+)$")
    re_total_render_time = re.compile(r".*Total render time: ([0-9]+(\.[0-9]+)?)")
    re_render_time_no_sync = re.compile(
        ".*Render time \\(without synchronization\\): ([0-9]+(\\.[0-9]+)?)")
    re_pipeline_time = re.compile(r"Time: ([0-9:\.]+) \(Saving: ([0-9:\.]+)\)")
    # Prepare output folder.
    # TODO(sergey): Use some proper output folder.
    output_folder = "/tmp/"
    # Configure command for the current file.
    command = (blender,
               "--background",
               "-noaudio",
               "--factory-startup",
               blendfile,
               "--engine", "CYCLES",
               "--debug-cycles",
               "--render-output", output_folder,
               "--render-format", "PNG",
               "-f", "1")
    # Run Blender with configured command line.
    logVerbose("About to execute command: {}" . format(command))
    start_time = time.time()
    process = subprocess.Popen(command,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
    # Keep reading status while Blender is alive.
    total_render_time = "N/A"
    render_time_no_sync = "N/A"
    pipeline_render_time = "N/A"
    while True:
        line = process.stdout.readline()
        # EOF together with a set return code means Blender has exited.
        if line == b"" and process.poll() is not None:
            break
        line = line.decode().strip()
        if line == "":
            continue
        logVerbose("Line from stdout: {}" . format(line))
        match = re_path_tracing.match(line)
        if match:
            current_tiles = int(match.group(1))
            total_tiles = int(match.group(2))
            elapsed_time = time.time() - start_time
            elapsed_time_str = humanReadableTimeDifference(elapsed_time)
            progress(current_tiles,
                     total_tiles,
                     prefix="Path Tracing Tiles {}" . format(elapsed_time_str))
        match = re_total_render_time.match(line)
        if match:
            total_render_time = float(match.group(1))
        match = re_render_time_no_sync.match(line)
        if match:
            render_time_no_sync = float(match.group(1))
        match = re_pipeline_time.match(line)
        if match:
            pipeline_render_time = humanReadableTimeToSeconds(match.group(1))

    if process.returncode != 0:
        return False

    # Clear line used by progress.
    progressClear()
    # NOTE(review): when none of the timing lines matched, the values below
    # are still the string "N/A" and humanReadableTimeDifference() raises
    # ValueError -- presumably that only happens with a non-zero exit code,
    # which returns above; confirm.
    print("Total pipeline render time: {} ({} sec)"
          . format(humanReadableTimeDifference(pipeline_render_time),
                   pipeline_render_time))
    print("Total Cycles render time: {} ({} sec)"
          . format(humanReadableTimeDifference(total_render_time),
                   total_render_time))
    print("Pure Cycles render time (without sync): {} ({} sec)"
          . format(humanReadableTimeDifference(render_time_no_sync),
                   render_time_no_sync))
    logOk("Successfully rendered")
    stats[blendfile] = {'PIPELINE_TOTAL': pipeline_render_time,
                        'CYCLES_TOTAL': total_render_time,
                        'CYCLES_NO_SYNC': render_time_no_sync}
    return True
201 |
202 |
def benchmarkAll(blender, files):
    """
    Benchmark every blend-file in `files` with the given `blender` binary.

    Returns the dict of collected statistics, keyed by blend-file name
    (previously the collected stats were built and then discarded).
    """
    stats = {}
    for blendfile in files:
        try:
            benchmarkFile(blender, blendfile, stats)
        except KeyboardInterrupt:
            # Allow aborting the whole run with Ctrl-C, keep what we have.
            print("")
            logWarning("Rendering aborted!")
            return stats
    return stats
212 |
213 |
def main():
    """Parse command line arguments and run all requested benchmarks."""
    args = configureArgumentParser().parse_args()
    if args.verbose:
        global VERBOSE
        VERBOSE = True
    benchmarkAll(args.binary, args.files)


if __name__ == "__main__":
    main()
225 |
--------------------------------------------------------------------------------
/utils/gdb_struct_repr_c99.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-2.0-or-later
2 |
3 | '''
4 | Define the command 'print_struct_c99' for gdb,
5 | useful for creating a literal for a nested runtime struct.
6 |
7 | Example use:
8 |
9 | (gdb) source source/tools/utils/gdb_struct_repr_c99.py
10 | (gdb) print_struct_c99 scene->toolsettings
11 | '''
12 |
13 |
class PrintStructC99(gdb.Command):
    """gdb user command: print a struct as a C99 designated initializer."""

    def __init__(self):
        super(PrintStructC99, self).__init__(
            "print_struct_c99",
            gdb.COMMAND_USER,
        )

    def get_count_heading(self, string):
        # Number of leading spaces of `string`.
        # NOTE(review): raises NameError for an empty string and returns a
        # misleading value for all-space input -- presumably gdb's pretty
        # printed output never produces such lines; confirm.
        for i, s in enumerate(string):
            if s != ' ':
                break
        return i

    def extract_typename(self, string):
        # First line of `ptype` output looks like "type = StructName *";
        # take what follows '=' and drop the trailing character.
        first_line = string.split('\n')[0]
        return first_line.split('=')[1][:-1].strip()

    def invoke(self, arg, from_tty):
        # Print "<type> <arg> = {" then re-emit every member line of
        # gdb's `p` output in ".member = value" form, keeping indentation.
        ret_ptype = gdb.execute('ptype {}'.format(arg), to_string=True)
        tname = self.extract_typename(ret_ptype)
        print('{} {} = {{'.format(tname, arg))
        r = gdb.execute('p {}'.format(arg), to_string=True)
        r = r.split('\n')
        for rr in r[1:]:
            # Lines without '=' (e.g. closing braces) pass through unchanged.
            if '=' not in rr:
                print(rr)
                continue
            hs = self.get_count_heading(rr)
            rr_s = rr.strip().split('=', 1)
            rr_rval = rr_s[1].strip()
            print(' ' * hs + '.' + rr_s[0] + '= ' + rr_rval)
45 |
46 |
print('Running GDB from: %s\n' % (gdb.PYTHONDIR))
# Make gdb's value output complete and predictable so `invoke` can parse it.
gdb.execute("set print pretty")
gdb.execute('set pagination off')
gdb.execute('set print repeats 0')
gdb.execute('set print elements unlimited')
# instantiate, registering the command with gdb
PrintStructC99()
54 |
--------------------------------------------------------------------------------
/utils/git_log.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-2.0-or-later
2 |
3 | # Simple module for inspecting git commits
4 |
5 | import os
6 | import subprocess
7 |
8 |
class GitCommit:
    """
    A single commit in a git repository, identified by its sha1.

    All metadata is fetched lazily by running ``git log`` and cached on
    first access.
    """
    __slots__ = (
        "sha1",
        # to extract more info
        "_git_dir",

        # cached values
        "_author",
        "_date",
        "_body",
        "_files",
        "_files_status",
    )

    def __init__(self, sha1, git_dir):
        self.sha1 = sha1
        self._git_dir = git_dir

        self._author = \
            self._date = \
            self._body = \
            self._files = \
            self._files_status = \
            None

    def cache(self):
        """ Cache all properties
        """
        self.author
        self.date
        self.body
        self.files
        self.files_status

    def _log_format(self, format, args=()):
        """Run ``git log -1`` on this commit with `format`, return raw bytes."""
        # sha1 = self.sha1.decode('ascii')
        cmd = (
            "git",
            "--git-dir",
            self._git_dir,
            "log",
            "-1",  # only this rev
            self.sha1,
            "--format=" + format,
        ) + args
        # print(" ".join(cmd))

        p = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
        )
        return p.stdout.read()

    @property
    def sha1_short(self):
        # Abbreviated sha1 as produced by `git rev-parse --short`.
        cmd = (
            "git",
            "--git-dir",
            self._git_dir,
            "rev-parse",
            "--short",
            self.sha1,
        )
        p = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
        )
        return p.stdout.read().strip().decode('ascii')

    @property
    def author(self):
        # Author name ("%an") as str, cached.
        ret = self._author
        if ret is None:
            content = self._log_format("%an")[:-1]
            ret = content.decode("utf8", errors="ignore")
            self._author = ret
        return ret

    @property
    def date(self):
        # Commit date ("%ct", committer timestamp) as datetime, cached.
        ret = self._date
        if ret is None:
            import datetime
            ret = datetime.datetime.fromtimestamp(int(self._log_format("%ct")))
            self._date = ret
        return ret

    @property
    def body(self):
        # Full commit message ("%B") as str, cached.
        ret = self._body
        if ret is None:
            content = self._log_format("%B")[:-1]
            ret = content.decode("utf8", errors="ignore")
            self._body = ret
        return ret

    @property
    def subject(self):
        # First line of the commit message.
        return self.body.lstrip().partition("\n")[0]

    @property
    def files(self):
        # Paths touched by this commit, as a list of bytes, cached.
        ret = self._files
        if ret is None:
            ret = [f for f in self._log_format("format:", args=("--name-only",)).split(b"\n") if f]
            self._files = ret
        return ret

    @property
    def files_status(self):
        # List of (status, path) byte pairs (from `--name-status`), cached.
        ret = self._files_status
        if ret is None:
            ret = [f.split(None, 1) for f in self._log_format("format:", args=("--name-status",)).split(b"\n") if f]
            self._files_status = ret
        return ret
124 |
125 |
class GitCommitIter:
    """Iterate ``GitCommit`` objects over a sha1 range of a repository."""
    __slots__ = (
        "_path",
        "_git_dir",
        "_sha1_range",
        "_process",
    )

    def __init__(self, path, sha1_range):
        self._path = path
        self._git_dir = os.path.join(path, ".git")
        self._sha1_range = sha1_range
        self._process = None

    def __iter__(self):
        # Spawn `git log` once; commit sha1's are then read lazily,
        # one line at a time, in __next__.
        cmd = (
            "git",
            "--git-dir",
            self._git_dir,
            "log",
            self._sha1_range,
            "--format=%H",
        )
        # print(" ".join(cmd))

        self._process = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
        )
        return self

    def __next__(self):
        # One sha1 per line; an empty read means `git log` is exhausted.
        sha1 = self._process.stdout.readline()[:-1]
        if sha1:
            return GitCommit(sha1, self._git_dir)
        else:
            raise StopIteration
163 |
164 |
class GitRepo:
    """Light-weight accessor for a git repository at a fixed path."""
    __slots__ = (
        "_path",
        "_git_dir",
    )

    def __init__(self, path):
        self._path = path
        self._git_dir = os.path.join(path, ".git")

    @property
    def branch(self):
        """Current branch name as raw bytes (trailing newline included)."""
        cmd = (
            "git",
            "--git-dir",
            self._git_dir,
            "rev-parse",
            "--abbrev-ref",
            "HEAD",
        )
        # print(" ".join(cmd))

        process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        return process.stdout.read()
192 |
--------------------------------------------------------------------------------
/utils/git_log_review_commits.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | """
5 | This is a tool for reviewing commit ranges, writing into accept/reject files.
6 |
7 | Useful for reviewing revisions to backport to stable builds.
8 |
9 | Example usage:
10 |
11 | ./git_log_review_commits.py --source=../../.. --range=HEAD~40..HEAD --filter=BUGFIX
12 | """
13 |
14 |
class _Getch:
    """
    Gets a single character from standard input.
    Does not echo to the screen.
    """

    def __init__(self):
        # Prefer the Windows backend; constructing it raises ImportError
        # on other platforms, where the Unix backend is used instead.
        try:
            impl = _GetchWindows()
        except ImportError:
            impl = _GetchUnix()
        self.impl = impl

    def __call__(self):
        return self.impl()
29 |
30 |
class _GetchUnix:
    """Read one raw (unbuffered, unechoed) character from a POSIX terminal."""

    def __init__(self):
        # Probing imports only: fail early (ImportError) when the
        # termios/tty machinery is unavailable.
        import tty
        import sys

    def __call__(self):
        import sys
        import termios
        import tty
        stdin_fd = sys.stdin.fileno()
        saved_attrs = termios.tcgetattr(stdin_fd)
        try:
            # Raw mode: return on a single keypress, no echo.
            tty.setraw(sys.stdin.fileno())
            char = sys.stdin.read(1)
        finally:
            # Always restore the terminal, even on interrupt.
            termios.tcsetattr(stdin_fd, termios.TCSADRAIN, saved_attrs)
        return char
49 |
50 |
class _GetchWindows:
    """Read one raw character on Windows via the console API."""

    def __init__(self):
        # Probing import: raises ImportError off-Windows, which _Getch
        # uses to fall back to the Unix implementation.
        import msvcrt

    def __call__(self):
        import msvcrt
        return msvcrt.getch()
59 |
60 |
# Single shared key-reader instance.
getch = _Getch()
# ------------------------------------------------------------------------------
# Pretty Printing

USE_COLOR = True

if USE_COLOR:
    # ANSI terminal escape sequences, keyed by color name.
    color_codes = {
        'black': '\033[0;30m',
        'bright_gray': '\033[0;37m',
        'blue': '\033[0;34m',
        'white': '\033[1;37m',
        'green': '\033[0;32m',
        'bright_blue': '\033[1;34m',
        'cyan': '\033[0;36m',
        'bright_green': '\033[1;32m',
        'red': '\033[0;31m',
        'bright_cyan': '\033[1;36m',
        'purple': '\033[0;35m',
        'bright_red': '\033[1;31m',
        'yellow': '\033[0;33m',
        'bright_purple': '\033[1;35m',
        'dark_gray': '\033[1;30m',
        'bright_yellow': '\033[1;33m',
        'normal': '\033[0m',
    }

    def colorize(msg, color=None):
        # Wrap `msg` in the requested color code, resetting afterwards.
        return (color_codes[color] + msg + color_codes['normal'])
else:
    def colorize(msg, color=None):
        # Colors disabled: return the message unchanged.
        return msg
# NOTE(review): `bugfix` appears unused anywhere in this script — confirm
# against callers before removing.
bugfix = ""
# avoid encoding issues
import os
import sys
import io

# Re-open stdin in binary mode so `getch` yields bytes, and wrap
# stdout/stderr so arbitrary commit text cannot raise UnicodeEncodeError.
sys.stdin = os.fdopen(sys.stdin.fileno(), "rb")
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='surrogateescape', line_buffering=True)
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='surrogateescape', line_buffering=True)
102 |
103 |
def print_commit(c):
    """Pretty-print one commit: wiki-style sha1 tag, author, message body,
    then the list of changed files with their status letters."""
    print("------------------------------------------------------------------------------")
    sha1_tag = "{{GitCommit|%s}}" % c.sha1.decode()
    print(colorize(sha1_tag, color='green'), end=" ")
    # print("Author: %s" % colorize(c.author, color='bright_blue'))
    print(colorize(c.author, color='bright_blue'))
    print()
    print(colorize(c.body, color='normal'))
    print()
    file_entries = c.files_status
    print(colorize("Files: (%d)" % len(file_entries), color='yellow'))
    for entry in file_entries:
        status, filename = entry[0], entry[1]
        print(colorize("    %s %s" % (status.decode('ascii'), filename.decode('ascii')), 'yellow'))
    print()
116 |
117 |
def argparse_create():
    """Build and return the argument parser for this review tool."""
    import argparse

    # When --help or no args are given, print this help
    usage_text = "Review revisions."

    epilog = "This script is typically used to help write release notes"

    parser = argparse.ArgumentParser(description=usage_text, epilog=epilog)

    parser.add_argument(
        "--source", dest="source_dir",
        metavar='PATH', required=True,
        help="Path to git repository")
    parser.add_argument(
        "--range", dest="range_sha1",
        metavar='SHA1_RANGE', required=True,
        help="Range to use, eg: 169c95b8..HEAD")
    parser.add_argument(
        "--author", dest="author",
        metavar='AUTHOR', type=str, required=False,
        # Fix: help text was copy-pasted from --filter.
        help=("Only include commits by this author"))
    parser.add_argument(
        "--filter", dest="filter_type",
        metavar='FILTER', type=str, required=False,
        help=("Method to filter commits in ['BUGFIX', todo]"))

    return parser
146 |
147 |
def main():
    """Interactively step through the configured commit range, showing each
    diff and appending the sha1 to an accept or reject log file based on
    a single keypress."""
    ACCEPT_FILE = "review_accept.txt"
    REJECT_FILE = "review_reject.txt"

    # ----------
    # Parse Args

    args = argparse_create().parse_args()

    from git_log import GitCommitIter

    # --------------
    # Filter Commits

    def match(c):
        """Return True when commit ``c`` passes the --filter and --author criteria."""
        # filter_type
        if not args.filter_type:
            pass
        elif args.filter_type == 'BUGFIX':
            # Bug-fix heuristic: any word on the first line of the message
            # starts with "fix"/"bugfix"/"bug-fix" (case-insensitive).
            first_line = c.body.strip().split("\n")[0]
            assert len(first_line)
            if any(w for w in first_line.split() if w.lower().startswith(("fix", "bugfix", "bug-fix"))):
                pass
            else:
                return False
        elif args.filter_type == 'NOISE':
            # Noise heuristic: cleanup-only commits.
            first_line = c.body.strip().split("\n")[0]
            assert len(first_line)
            if any(w for w in first_line.split() if w.lower().startswith("cleanup")):
                pass
            else:
                return False
        else:
            raise Exception("Filter type %r isn't known" % args.filter_type)

        # author
        if not args.author:
            pass
        elif args.author != c.author:
            return False

        return True

    commits = [c for c in GitCommitIter(args.source_dir, args.range_sha1) if match(c)]

    # oldest first
    commits.reverse()

    tot_accept = 0
    tot_reject = 0

    def exit_message():
        # Summarize where the accepted/rejected sha1's were written.
        print("  Written",
              colorize(ACCEPT_FILE, color='green'), "(%d)" % tot_accept,
              colorize(REJECT_FILE, color='red'), "(%d)" % tot_reject,
              )

    for i, c in enumerate(commits):
        if os.name == "posix":
            # Also clears scroll-back.
            os.system("tput reset")
        else:
            print('\x1b[2J')  # clear

        sha1 = c.sha1

        # diff may scroll off the screen, that's OK
        os.system("git --git-dir %s show %s --format=%%n" % (c._git_dir, sha1.decode('ascii')))
        print("")
        print_commit(c)
        sys.stdout.flush()
        # print(ch)
        # Loop until an understood key is pressed.
        while True:
            print("Space=" + colorize("Accept", 'green'),
                  "Enter=" + colorize("Skip", 'red'),
                  "Ctrl+C or Q=" + colorize("Quit", color='white'),
                  "[%d of %d]" % (i + 1, len(commits)),
                  "(+%d | -%d)" % (tot_accept, tot_reject),
                  )
            ch = getch()

            if ch == b'\x03' or ch == b'q':
                # Ctrl+C
                exit_message()
                print("Goodbye! (%s)" % c.sha1.decode())
                return

            elif ch == b' ':
                log_filepath = ACCEPT_FILE
                tot_accept += 1
                break
            elif ch == b'\r':
                log_filepath = REJECT_FILE
                tot_reject += 1
                break
            else:
                print("Unknown input %r" % ch)

        # Append (binary mode matches the bytes sha1).
        with open(log_filepath, 'ab') as f:
            f.write(sha1 + b'\n')

    exit_message()
250 |
251 |
252 | if __name__ == "__main__":
253 | main()
254 |
--------------------------------------------------------------------------------
/utils/make_cursor_gui.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | # Created by Robert Wenzlaff (Det. Thorn).
5 | # Oct. 30, 2003
6 |
7 | from tkinter import (
8 | Button,
9 | Canvas,
10 | Checkbutton,
11 | END,
12 | Frame,
13 | IntVar,
14 | Label,
15 | RIDGE,
16 | Text,
17 | Tk,
18 | )
19 |
# Palette indexed by cell state: 0=black, 1=white, 2=transparent
# (shown as dark green background), 3=grid lines.
color = ("black", "white", "darkgreen", "gray")
21 |
22 |
class App:
    """Tkinter editor for painting a 16x16 (or 32x32) cursor bitmap and mask,
    with parsing and printing of the C hex-array representation."""

    def __init__(self, master):
        frame = Frame(master, borderwidth=5)
        frame.grid(column=0, row=0, pady=5)

        # Per-cell state, see `color`: 0=Black, 1=White, 2=Transparent.
        self.state = []
        self.states = 256
        self.laststate = 2  # 0=Black, 1=White, 2=Transp.

        self.size = 16
        self.gridsz = 20

        for x in range(1024):
            self.state.append(2)

        self.screen = Canvas(frame, height=320, width=320, bg=color[2])
        # Fix: these bind() calls used empty event sequences; in Tk,
        # bind("") only *queries* bindings, so no handler was ever attached
        # and the editor received no mouse input at all.
        self.screen.bind("<Button-1>", self.scrnclick1)
        self.screen.bind("<Button-3>", self.scrnclick2)
        self.screen.bind("<B1-Motion>", self.scrndrag)

        for x in range(16):
            self.screen.create_line((x * 20, 0, x * 20, 320), fill=color[3])
            self.screen.create_line((0, x * 20, 320, x * 20), fill=color[3])

        self.screen.grid(row=0, column=0, columnspan=5)

        frame2 = Frame(master, borderwidth=5)
        frame2.grid(column=0, row=1, pady=5)

        self.clear = Button(frame2, text="Clear", command=self.clearit)
        self.clear.grid(row=0, column=0, pady=20)

        # NOTE(review): this assignment shadows the `doit` method with the
        # Button widget; `command=self.doit` binds the method *before* the
        # overwrite so it still works — confirm before renaming.
        self.doit = Button(frame2, text="Print", command=self.doit)
        self.doit.grid(row=0, column=1, pady=20)

        #self.doitlab = Label(frame2, text="(Output to stdout)");
        #self.doitlab.grid(row=1, column=1);

        self.parse = Button(frame2, text="Parse", command=self.parsetext)
        self.parse.grid(row=0, column=2, pady=20)

        self.large = 0
        self.dummy = IntVar()
        self.largeb = Checkbutton(frame2, text="Large", var=self.dummy, command=self.changesize)
        self.largeb.grid(row=0, column=3, pady=20)

        self.prev = Canvas(frame2, height=17, width=17, bg=color[2], relief=RIDGE)
        self.prev.grid(row=0, column=4, pady=20, padx=20)

        # DataParsers
        self.bmlabel = Label(frame2, text="Bitmap Data (paste hex from code)")
        self.bmlabel.grid(row=2, column=0, columnspan=5, sticky="W")

        self.bmentry = Text(frame2, width=80, height=9, font="Times 8")
        # NOTE(review): the original event sequence here was also lost
        # (empty string); <KeyRelease> re-cleans the text after a keyboard
        # paste — confirm against upstream history.
        self.bmentry.bind("<KeyRelease>", self.bmtextpaste)
        self.bmentry.grid(row=3, column=0, columnspan=5, pady=5)

        self.msklabel = Label(frame2, text="Mask Data (paste hex from code)")
        self.msklabel.grid(row=4, column=0, columnspan=5, sticky="W")

        self.mskentry = Text(frame2, width=80, height=9, font="Times 8")
        # NOTE(review): see bmentry bind above.
        self.mskentry.bind("<KeyRelease>", self.msktextpaste)
        self.mskentry.grid(row=5, column=0, columnspan=5, pady=5)

    def changesize(self):
        """Toggle between 16x16 and 32x32, remapping existing cell states."""
        self.large = ~self.large
        if self.large:
            self.size = 32
            self.gridsz = 10
            self.states = 1024
            oldstate = self.state
            self.state = []
            for n in range(1024):
                # Each 16x16 cell maps onto a 2x2 block of the 32x32 grid.
                col = (n // 2) % 16
                row = int(n // 64)
                self.state.append(oldstate[16 * row + col])
            oldstate = []
        else:
            self.size = 16
            self.gridsz = 20
            self.states = 256
            oldstate = self.state
            self.state = []
            for n in range(1024):
                # Keep every second cell of every second row.
                if not ((n % 2) or ((n // 32) % 2)):
                    self.state.append(oldstate[n])
            for n in range(256, 1024):
                self.state.append(2)
            oldstate = []

        # Insert scaling here

        self.updatescrn()
        self.prev.config(width=self.size + 1, height=self.size + 1)
        for n in range(self.states):
            self.updateprev(n)
        #self.prev.grid(row=0, column=4, padx=self.gridsz, pady=self.gridsz)

    def scrnclick1(self, event):
        # Left click: cycle the cell state forward.
        self.scrnclick(event, 1)

    def scrnclick2(self, event):
        # Right click: cycle the cell state backward.
        self.scrnclick(event, -1)

    def scrnclick(self, event, direction):
        """Cycle the clicked cell through black/white/transparent and redraw it."""
        if (event.x > 319) or (event.y > 319) or (event.x < 0) or (event.y < 0):
            return

        n = (event.x // self.gridsz) + self.size * (event.y // self.gridsz)

        self.state[n] += direction
        self.state[n] %= 3

        row = n % self.size
        col = n // self.size

        self.screen.create_rectangle((self.gridsz * row + 1,
                                      self.gridsz * col + 1,
                                      self.gridsz * row + self.gridsz - 1,
                                      self.gridsz * col + self.gridsz - 1),
                                     fill=color[self.state[n]], outline="")

        self.laststate = self.state[n]
        self.updateprev(n)

    def scrndrag(self, event):
        """Paint cells with the last-used state while dragging."""
        if (event.x > 319) or (event.y > 319) or (event.x < 0) or (event.y < 0):
            return

        n = (event.x // self.gridsz) + self.size * (event.y // self.gridsz)

        row = n % self.size
        col = n // self.size

        self.screen.create_rectangle((self.gridsz * row + 1,
                                      self.gridsz * col + 1,
                                      self.gridsz * row + self.gridsz - 1,
                                      self.gridsz * col + self.gridsz - 1),
                                     fill=color[self.laststate], outline="")
        self.state[n] = self.laststate

        self.updateprev(n)

    def updateprev(self, n):
        """Draw cell *n* as a single pixel on the preview canvas."""
        x = n % self.size + 1
        y = n // self.size + 1

        if self.large:
            pad = 12
        else:
            pad = 20

        self.prev.create_line(x + 1, y + 1, x + 2, y + 1, fill=color[self.state[n]])
        self.prev.grid(row=0, column=4, padx=pad, pady=pad)

    def updatescrn(self):
        """Redraw the entire editing grid from the state array."""
        self.screen.create_rectangle(0, 0, 320, 320, fill=color[2])
        for x in range(self.size):
            self.screen.create_line((x * self.gridsz, 0, x * self.gridsz, 320), fill=color[3])
            self.screen.create_line((0, x * self.gridsz, 320, x * self.gridsz), fill=color[3])
        for n in range(self.states):
            row = n % self.size
            col = n // self.size
            self.screen.create_rectangle((self.gridsz * row + 1,
                                          self.gridsz * col + 1,
                                          self.gridsz * row + self.gridsz - 1,
                                          self.gridsz * col + self.gridsz - 1),
                                         fill=color[self.state[n]], outline="")

    def bmtextpaste(self, event):
        # Strip tabs that come along with pasted, indented C code.
        string = self.bmentry.get(1.0, END)
        self.bmentry.delete(1.0, END)
        string = string.replace("\t", "")
        self.bmentry.insert(END, string)

    def msktextpaste(self, event):
        # Strip tabs that come along with pasted, indented C code.
        string = self.mskentry.get(1.0, END)
        self.mskentry.delete(1.0, END)
        string = string.replace("\t", "")
        self.mskentry.insert(END, string)

    def parsetext(self):
        """Parse the pasted bitmap/mask hex bytes back into cell states."""
        bmstring = self.bmentry.get(1.0, END)
        bmstring = bmstring.replace(",", " ")
        bmstring = bmstring.split()

        mskstring = self.mskentry.get(1.0, END)
        mskstring = mskstring.replace(",", " ")
        mskstring = mskstring.split()

        if len(bmstring) != len(mskstring):
            print("Mismatched data. Bitmap and mask must be same size,")
            return
        elif not (len(bmstring) == 32 or len(bmstring) == 128):
            print("Size Error, input must be 32 or 128 hex bytes. ")
            return

        for n in range(self.states):
            self.state[n] = 0

        # Bitmap bits (LSB first within each byte): 1=white, 0=black.
        m = 0
        for entry in bmstring:
            e = int(entry, 16)
            for bit in range(8):
                self.state[m] = (e & 1)
                e = e >> 1
                m += 1

        # Mask bits: a 0 bit marks the cell transparent.
        m = 0
        for entry in mskstring:
            e = int(entry, 16)
            for bit in range(8):
                if not (e & 1):
                    self.state[m] = 2
                e = e >> 1
                m += 1

        self.updatescrn()
        for n in range(self.states):
            self.updateprev(n)

    def clearit(self):
        """Reset every cell to transparent and clear both text fields."""
        for n in range(self.states):
            self.state[n] = 2
            self.updateprev(n)
        self.updatescrn()
        self.bmentry.delete(0.0, END)
        self.mskentry.delete(0.0, END)

    def doit(self):
        """Print the current bitmap and mask as C char arrays on stdout."""
        mask = []
        bitmap = []
        numbytes = self.size * self.size // 8
        for i in range(numbytes):
            m = 0
            b = 0
            for j in range(8):
                m <<= 1
                b <<= 1
                # Mask bit set for any non-transparent cell,
                # bitmap bit set only for white cells.
                if (self.state[(i * 8) + (7 - j)] != 2):
                    m |= 1
                if (self.state[(i * 8) + (7 - j)] == 1):
                    b |= 1
                #print((i * 8) + (7 - j), self.state[(i * 8) + (7 - j)], m)
            mask.append(m)
            bitmap.append(b)

        print("\n\nstatic char bitmap[] = {", end=' ')
        for i in range(numbytes):
            b1 = bitmap[i]
            if not (i % 8):
                print("\n\t", end=' ')
            print("0x%(b1)02x, " % vars(), end=' ')
        print("\n};")

        print("\nstatic char mask[] = {", end=' ')
        for i in range(numbytes):
            b1 = mask[i]
            if not (i % 8):
                print("\n\t", end=' ')
            print("0x%(b1)02x, " % vars(), end=' ')
        print("\n};")
286 |
287 |
################## Main App #######################
# Build the Tk root window, attach the editor and run the event loop.
root = Tk()

app = App(root)
root.title("Cursor Maker")

root.mainloop()
295 |
--------------------------------------------------------------------------------
/utils/make_gl_stipple_from_xpm.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-2.0-or-later
2 |
# Converts 32x32 XPM images written by the Gimp to GL stipples
4 | # takes XPM files as arguments, prints out C style definitions.
5 |
6 | import sys
7 | import os
8 |
9 |
def main():
    """Convert each 32x32 monochrome ``.xpm`` file named on the command line
    into a 128-byte GL stipple array, printed as a C definition on stdout.

    Files that are not 32x32 / 2-color are reported and skipped.
    """
    xpm_ls = [f for f in sys.argv[1:] if f.lower().endswith(".xpm")]

    print("Converting: " + " ".join(xpm_ls))

    for xpm in xpm_ls:
        # Fix: use a context manager so the file is always closed.
        with open(xpm, "r") as fh:
            data = fh.read()

        # all after first {
        data = data.split("{", 1)[1]

        # all before first }
        data = data.rsplit("}", 1)[0]

        data = data.replace("\n", "")

        data = data.split(",")

        # XPM header: width, height, number of colors, chars per pixel.
        w, h, c, dummy = map(int, data[0].strip("\"").split())

        if w != 32 or h != 32 or c != 2:
            print("Skipping %r, expected 32x32, monochrome, got %s" %
                  (xpm, data[0]))
            continue

        # col_1 = data[1][1]
        # Character used for the second color ('1' bits).
        col_2 = data[2][1]

        # Strip the surrounding quotes from each pixel row.
        data = [d[1:-1] for d in data[3:]]

        bits = []

        for d in data:
            # Renamed loop var (was `c`, shadowing the color count above);
            # the unused enumerate() index is dropped.
            for pixel in d:
                bits.append('01'[(pixel == col_2)])

        if len(bits) != 1024:
            print("Skipping %r, expected 1024 bits, got %d" %
                  (xpm, len(bits)))
            continue

        bits = "".join(bits)

        chars = []

        # Pack 8 bits per byte, MSB first.
        for i in range(0, len(bits), 8):
            chars.append("0x%.2x" % int(bits[i:i + 8], 2))

        fout = sys.stdout

        var = os.path.basename(xpm)
        var = os.path.splitext(var)[0]

        # NOTE(review): the emitted "GLubyte x[128] {" lacks "=" before the
        # brace, which is not valid C (only C++11) — kept unchanged to
        # preserve the tool's output; confirm with downstream consumers.
        fout.write("GLubyte stipple_%s[128] {\n\t" % var)
        for i, token in enumerate(chars):
            if i != 127:
                fout.write("%s, " % token)
            else:
                fout.write("%s" % token)

            # 8 bytes per output line.
            if not ((i + 1) % 8):
                fout.write("\n\t")
        fout.write("};\n")
75 |
76 |
77 | if __name__ == "__main__":
78 | main()
79 |
--------------------------------------------------------------------------------
/utils/make_shape_2d_from_blend.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: Apache-2.0
2 | '''
3 | Created compact byte arrays which can be decoded into 2D shapes.
4 | (See 'GPU_batch_from_poly_2d_encoded').
5 |
6 | - Objects must use the prefix "shape_"
7 | - Meshes and Curves are supported as input.
8 | - C and Python output is written to "output/"
9 |
10 | The format is simple: a series of (X, Y) locations one byte each.
11 | Repeating the same value terminates the polygon, moving onto the next.
12 |
13 | Example Use::
14 |
15 | blender.bin -b --factory-startup my_shapes.blend --python make_shape_2d_from_blend.py
16 | '''
17 | import bpy
18 | import os
19 |
# Which output flavors to write for every exported shape object.
USE_C_STYLE = True
USE_PY_STYLE = True

# Formatting limits for the generated source text.
WRAP_LIMIT = 79
TAB_WIDTH = 4

# Directory (relative to the CWD) the generated files are written to.
SUBDIR = "output"
27 |
28 |
def float_to_ubyte(f):
    """Map a float in [0, 1] to an int in [0, 255], clamping out-of-range input."""
    scaled = int(round(f * 255.0))
    if scaled < 0:
        return 0
    if scaled > 255:
        return 255
    return scaled
31 |
32 |
def curve_to_loops(ob):
    """
    Convert object *ob* (mesh or curve) into a list of
    ``(points, material_index)`` tuples, one per face, where ``points``
    is a list of ``(x, y)`` ubyte pairs describing the polygon loop.
    """
    import bmesh
    # NOTE(review): `cu` appears unused in this function — confirm before removing.
    cu = ob.data

    # Evaluate the object to a temporary mesh and load it into bmesh.
    me = ob.to_mesh()
    bm = bmesh.new()
    bm.from_mesh(me)
    me = ob.to_mesh_clear()

    bmesh.ops.beautify_fill(bm, faces=bm.faces, edges=bm.edges)

    # Join faces across manifold edges, longest edges first,
    # merging triangles into larger polygons.
    edges = bm.edges[:]
    edges.sort(key=lambda e: e.calc_length(), reverse=True)

    for e in edges:
        if e.is_manifold:
            f_a, f_b = [f for f in e.link_faces]
            bmesh.utils.face_join((f_a, f_b), False)

    # Remove edges left without any face after the joins.
    edges = bm.edges[:]
    for e in edges:
        if e.is_wire:
            bm.edges.remove(e)

    bm.normal_update()

    data_all = []
    for f in bm.faces:
        points = []
        # Ensure all faces are pointing the correct direction
        # Note, we may want to use polygon sign for a second color
        # (via the material index).
        if f.normal.z > 0.0:
            loops = f.loops
        else:
            loops = reversed(f.loops)
        for l in loops:
            points.append(
                tuple(float_to_ubyte(axis) for axis in l.vert.co.xy)
            )
        data_all.append((points, f.material_index))
    bm.free()
    return data_all
76 |
77 |
def write_c(ob):
    """Write the encoded 2D shape of *ob* as a C ``uchar`` array to
    ``SUBDIR/<name>.c`` (removed the unused ``cu = ob.data`` local)."""
    name = ob.name
    with open(os.path.join(SUBDIR, name + ".c"), 'w') as fh:
        fw = fh.write
        fw(f"/* {name} */\n")
        fw(f"const uchar {name}[] = {{")
        line_len = WRAP_LIMIT
        line_is_first = True
        array_len = 0
        data_all = curve_to_loops(ob)
        for (points, material_index) in data_all:
            # TODO, material_index
            # The last point is repeated: the decoder treats a repeated
            # coordinate as the end-of-polygon terminator.
            for p in points + [points[-1]]:
                line_len += 12
                if line_len >= WRAP_LIMIT:
                    fw("\n\t")
                    line_len = TAB_WIDTH
                    line_is_first = True
                if not line_is_first:
                    fw(" ")
                fw(", ".join([f"0x{axis:02x}" for axis in p]) + ",")
                line_is_first = False
            array_len += (len(points) + 1) * 2
        fw("\n};\n")
        # fw(f"const int data_len = {array_len}\n")
104 |
105 |
def write_py(ob):
    """Write the encoded 2D shape of *ob* as a Python ``bytes`` literal to
    ``SUBDIR/<name>.py`` (removed the unused ``cu`` local and the duplicate
    ``fw = fh.write`` assignment)."""
    name = ob.name
    with open(os.path.join(SUBDIR, name + ".py"), 'w') as fh:
        fw = fh.write
        fw(f"# {name}\n")
        fw("data = (")
        line_len = WRAP_LIMIT
        data_all = curve_to_loops(ob)
        for (points, material_index) in data_all:
            # TODO, material_index
            # The last point is repeated as the end-of-polygon terminator.
            for p in points + [points[-1]]:
                line_len += 8
                if line_len >= WRAP_LIMIT:
                    # Close the previous bytes literal before starting a new line.
                    if p is not points[0]:
                        fw("'")
                    fw("\n    b'")
                    line_len = 6
                fw("".join([f"\\x{axis:02x}" for axis in p]))
        fw("'\n)\n")
127 |
128 |
def main():
    """Export every mesh/curve object whose name starts with ``shape_``."""
    os.makedirs(SUBDIR, exist_ok=True)
    for ob in bpy.data.objects:
        if ob.type not in {'MESH', 'CURVE'}:
            continue
        if not ob.name.startswith('shape_'):
            continue
        # Emit whichever output flavors are enabled.
        if USE_C_STYLE:
            write_c(ob)
        if USE_PY_STYLE:
            write_py(ob)
140 |
141 |
142 | if __name__ == "__main__":
143 | main()
144 |
--------------------------------------------------------------------------------
/utils_doc/doxygen_single_file:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | This script takes 2-3 args: [--browse]
4 |
5 | Where Doxyfile is a path relative to source root,
6 | and the sourcefile as an absolute path.
7 |
8 | --browse will open the resulting docs in a web browser.
9 | """
10 | import sys
11 | import os
12 | import subprocess
13 | import tempfile
14 |
15 | print(sys.argv)
16 |
17 |
def find_gitroot(filepath_reference):
    """Walk up from *filepath_reference* and return the first directory
    containing a ``.git`` entry (or the filesystem root if none is found)."""
    current = filepath_reference
    previous = None
    while current != previous:
        if os.path.exists(os.path.join(current, ".git")):
            break
        previous = current
        current = os.path.dirname(current)
    return current
25 |
26 |
# Positional args: Doxyfile (relative to the source root) then source file.
doxyfile, sourcefile = sys.argv[-2:]

doxyfile = os.path.join(find_gitroot(sourcefile), doxyfile)
os.chdir(os.path.dirname(doxyfile))

# Build a temporary Doxyfile: the original's contents with an INPUT line
# appended for the single source file.
# Fix: use a distinct name so the `tempfile` module is no longer shadowed
# by the instance, and close the source Doxyfile explicitly (was
# `open(...).read()` with no close).
doxyfile_handle = tempfile.NamedTemporaryFile(mode='w+b')
doxyfile_tmp = doxyfile_handle.name
with open(doxyfile, "rb") as f:
    doxyfile_handle.write(f.read())
doxyfile_handle.write(b'\n\n')
doxyfile_handle.write(b'INPUT=' + os.fsencode(sourcefile) + b'\n')
doxyfile_handle.flush()

subprocess.call(("doxygen", doxyfile_tmp))
# Closing removes the temporary file (equivalent to the old `del tempfile`).
doxyfile_handle.close()

# Maybe handy, but also annoying?
if "--browse" in sys.argv:
    import webbrowser
    webbrowser.open("html/files.html")
46 |
--------------------------------------------------------------------------------
/utils_doc/rna_manual_reference_updater.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-2.0-or-later
2 |
3 | '''
4 | RNA Manual Reference Mapping Updater
5 |
6 | This script generates a file that maps RNA strings to online URL's
7 | for the context menu documentation access.
8 |
9 | This script either downloads a sphinx requirement file from the manual
10 | or optionally can take a path to the file using `--input`.
11 |
12 | To make international, we made a script,
13 | pointing the manuals to the proper language,
14 | specified in the 'User Preferences Window' by the users.
15 | Some Languages have their manual page, using a prefix or
16 | being preceded by their respective reference, for example:
17 |
18 | manual/ --> manual/ru/
19 |
The table in the script contains all of the languages available on the
Blender manual website. For languages that do not yet have a team of
translators and/or do not have a manual, the corresponding lines are
commented out; add them to the language table once they have a proper
manual, or once they are added to the Blender UI translation table.
26 |
27 | URL is the: url_manual_prefix + url_manual_mapping[#id]
28 | '''
29 |
30 | import os
31 | import argparse
32 | import re
33 | import sys
34 |
35 | try:
36 | import sphobjinv
37 | except ImportError:
38 | print("The module \"sphobjinv\" was not found, it may be installed via \"pip install sphobjinv\", exiting!")
39 | sys.exit(1)
40 |
41 | # The root of Blender's source directory.
42 | BASE_DIR = os.path.join(os.path.dirname(__file__), "..", "..", "..")
43 |
44 | # Names that don't match this regex can't be used as URL's.
45 | re_name_sanity_match = re.compile("[a-zA-Z0-9._*]+")
46 |
47 |
def sphobjinv_sanity_check(o):
    """
    Ensure ``o`` can be used to make a URL.
    """
    name = o.name
    # Perhaps other characters could be excluded too.
    if re_name_sanity_match.fullmatch(name) is None:
        # Report where the first invalid character appears.
        partial = re_name_sanity_match.match(name)
        fail_char = partial.span(0)[1] if partial else 0
        msg = "WARNING: invalid char found for name:"
        print(msg, name, "(at index %d)" % fail_char, "skipping!")
        print(" " * (len(msg) + fail_char), "^")
        return False

    return not (" " in name or "/" in name)
67 |
68 |
def write_mappings(inv, output):
    """Generate the ``rna_manual_reference.py`` mapping file at *output*
    from the sphinx inventory *inv*."""
    print("Writing...")
    # Fix: open with a context manager — the file was previously never
    # closed, relying on GC to flush the buffered writes.
    with open(output, "w", encoding="utf-8") as file:
        fw = file.write

        fw("# SPDX-License-Identifier: GPL-2.0-or-later\n")
        fw("# Do not edit this file.")
        fw(" This file is auto generated from rna_manual_reference_updater.py\n\n")
        # Prevent systems with autopep8 configured from re-formatting the file.
        fw("# autopep8: off\n")

        fw(
            "import bpy\n"
            "\n"
            "url_manual_prefix = \"https://docs.blender.org/manual/%s/%d.%d/\" % (\n"
            "    bpy.utils.manual_language_code(),\n"
            "    *bpy.app.version[:2],\n"
            ")\n"
            "\n"
        )

        fw("url_manual_mapping = (\n")

        # Logic to manipulate strings from objects.inv
        lines = [
            o.data_line() for o in inv.objects
            if o.name.startswith(("bpy.types", "bpy.ops"))
            if sphobjinv_sanity_check(o)
        ]
        # Finding first space will return length of rna path
        lines.sort(key=lambda l: l.find(" "), reverse=True)
        for line in lines:
            split = line.split(" ")
            fw("    (\"" + split[0] + "*\", \"" + split[3] + "\"),\n")

        fw(")\n\n")
        fw("# autopep8: on\n")
109 |
110 |
def is_valid_file(parser, arg):
    """Argparse type helper: return *arg* when it names an existing file,
    otherwise abort via ``parser.error``."""
    if os.path.isfile(arg):
        return arg
    parser.error("The file %s does not exist!" % arg)
116 |
117 |
def main():
    """Parse command-line options, load the sphinx inventory from a local
    file (``--input``) or by downloading it (``--url``), and write the
    mapping file to ``--output``."""
    parser = argparse.ArgumentParser(
        usage=__doc__
    )

    parser.add_argument(
        "--input",
        dest="filename",
        required=False,
        help="sphinx inventory file (objects.inv)",
        metavar="FILE",
        type=lambda x: is_valid_file(parser, x))

    parser.add_argument(
        "--output",
        dest="output",
        default=os.path.join(BASE_DIR, "release", "scripts", "modules", "rna_manual_reference.py"),
        required=False,
        # Fix: typo "extentsion" in the user-visible help text.
        help="path to output including filename and extension",
        metavar="FILE")

    parser.add_argument(
        "--url",
        dest="url",
        default="https://docs.blender.org/manual/en/dev/objects.inv",
        required=False,
        help="url to sphinx inventory file (objects.inv)",
        metavar="FILE")

    args = parser.parse_args()

    if args.filename:
        # Load and decode a local objects.inv (comments were swapped
        # between the two branches previously).
        print("Loading from file...")
        inv = sphobjinv.Inventory(args.filename)
    else:
        # Download and decode objects.inv from the manual website.
        print("Downloading...")
        inv = sphobjinv.Inventory(url=args.url)

    write_mappings(inv, args.output)
    print("Done!")
160 |
161 |
162 | if __name__ == "__main__":
163 | main()
164 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_assembler_preview.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | """
5 | Takes 2 args
6 |
7 | qtc_assembler_preview.py
8 |
9 | Currently GCC is assumed
10 | """
11 |
12 |
13 | import sys
14 | import os
15 | import shlex
16 | import subprocess
17 |
# Emit extra diagnostics when the VERBOSE environment variable is set.
VERBOSE = os.environ.get("VERBOSE", False)
# Positional arguments: build directory then source file (see module docstring).
BUILD_DIR = sys.argv[-2]
SOURCE_FILE = sys.argv[-1]

# TODO, support other compilers
COMPILER_ID = 'GCC'
24 |
25 |
def find_arg(source, data):
    """Return the first command line in *data* that compiles *source*,
    or None when no line matches."""
    source_base = os.path.basename(source)
    for line in data:
        # Cheap pre-filter: the basename must at least appear in the line.
        if source_base not in line:
            continue
        # Check each shell word for a path ending in the source file.
        for word in shlex.split(line):
            if not word.endswith(source_base):
                continue
            if os.path.isabs(word):
                if os.path.samefile(word, source):
                    # print(line)
                    return line
            else:
                # check trailing path (a/b/c/d/e.c == d/e.c)
                word_parts = os.path.normpath(word).split(os.sep)
                source_parts = os.path.normpath(source).split(os.sep)
                overlap = min(len(word_parts), len(source_parts))
                if word_parts[-overlap:] == source_parts[-overlap:]:
                    # print(line)
                    return line
47 |
48 |
def find_build_args_ninja(source):
    """Return the ninja build command line for *source*, or None."""
    make_exe = "ninja"
    process = subprocess.Popen(
        [make_exe, "-t", "commands"],
        stdout=subprocess.PIPE,
        cwd=BUILD_DIR,
    )
    # Fix: the old poll()/time.sleep() loop referenced a `time` module that
    # was never imported (latent NameError); communicate() reads all output
    # and waits for the process to exit.
    out, _ = process.communicate()
    # print("done!", len(out), "bytes")
    data = out.decode("utf-8", errors="ignore").split("\n")
    return find_arg(source, data)
64 |
65 |
def find_build_args_make(source):
    """Return the make build command line for *source*, or None."""
    make_exe = "make"
    process = subprocess.Popen(
        [make_exe, "--always-make", "--dry-run", "--keep-going", "VERBOSE=1"],
        stdout=subprocess.PIPE,
        cwd=BUILD_DIR,
    )
    # Fix: the old poll()/time.sleep() loop referenced a `time` module that
    # was never imported (latent NameError); communicate() reads all output
    # and waits for the process to exit.
    out, _ = process.communicate()
    # print("done!", len(out), "bytes")
    data = out.decode("utf-8", errors="ignore").split("\n")
    return find_arg(source, data)
82 |
83 |
def main():
    """Find the build command for SOURCE_FILE, strip the flags that prevent
    assembler output, re-run the compiler with -S and write the result next
    to the source as ``<file>.asm`` (never overwriting existing output)."""
    import re

    # currently only supports ninja or makefiles
    build_file_ninja = os.path.join(BUILD_DIR, "build.ninja")
    build_file_make = os.path.join(BUILD_DIR, "Makefile")
    if os.path.exists(build_file_ninja):
        if VERBOSE:
            print("Using Ninja")
        arg = find_build_args_ninja(SOURCE_FILE)
    elif os.path.exists(build_file_make):
        if VERBOSE:
            print("Using Make")
        arg = find_build_args_make(SOURCE_FILE)
    else:
        sys.stderr.write(f"Can't find Ninja or Makefile ({build_file_ninja!r} or {build_file_make!r}), aborting")
        return

    if arg is None:
        sys.stderr.write(f"Can't find file {SOURCE_FILE!r} in build command output of {BUILD_DIR!r}, aborting")
        return

    # now we need to get arg and modify it to produce assembler
    arg_split = shlex.split(arg)

    # get rid of: 'cd /a/b/c && ' prefix used by make (ninja doesn't need)
    try:
        i = arg_split.index("&&")
    except ValueError:
        i = -1
    if i != -1:
        del arg_split[:i + 1]

    if COMPILER_ID == 'GCC':
        # --- Switch debug for optimized ---
        # NOTE(review): this loop reuses the name `arg`, clobbering the
        # original command string — harmless since `arg_split` already
        # holds the parsed command, but confusing.
        # Each tuple is (flag or regex, number of argv items to delete).
        for arg, n in (
            # regular flags which prevent asm output
            ("-o", 2),
            ("-MF", 2),
            ("-MT", 2),
            ("-MMD", 1),

            # debug flags
            ("-O0", 1),
            (re.compile(r"\-g\d*"), 1),
            (re.compile(r"\-ggdb\d*"), 1),
            ("-fno-inline", 1),
            ("-fno-builtin", 1),
            ("-fno-nonansi-builtins", 1),
            ("-fno-common", 1),
            ("-DDEBUG", 1), ("-D_DEBUG", 1),

            # ASAN flags.
            (re.compile(r"\-fsanitize=.*"), 1),
        ):
            if isinstance(arg, str):
                # exact string compare
                while arg in arg_split:
                    i = arg_split.index(arg)
                    del arg_split[i: i + n]
            else:
                # regex match
                # (reversed so deletions don't shift unvisited indices)
                for i in reversed(range(len(arg_split))):
                    if arg.match(arg_split[i]):
                        del arg_split[i: i + n]

        # add optimized args
        arg_split += ["-O3", "-fomit-frame-pointer", "-DNDEBUG", "-Wno-error"]

        # not essential but interesting to know
        arg_split += ["-ftree-vectorizer-verbose=1"]

        arg_split += ["-S"]
        # arg_split += ["-masm=intel"] # optional
        # arg_split += ["-fverbose-asm"] # optional but handy
    else:
        sys.stderr.write(f"Compiler {COMPILER_ID!r} not supported")
        return

    source_asm = f"{SOURCE_FILE}.asm"

    # Never overwrite existing files
    i = 1
    while os.path.exists(source_asm):
        source_asm = f"{SOURCE_FILE}.asm.{i:d}"
        i += 1

    arg_split += ["-o", source_asm]

    # print("Executing:", arg_split)
    kwargs = {}
    if not VERBOSE:
        kwargs["stdout"] = subprocess.DEVNULL

    os.chdir(BUILD_DIR)
    subprocess.call(arg_split, **kwargs)

    del kwargs

    if not os.path.exists(source_asm):
        sys.stderr.write(f"Did not create {source_asm!r} from calling {arg_split!r}")
        return
    if VERBOSE:
        print(f"Running: {arg_split}")
        print(f"Created: {source_asm!r}")
189 |
190 |
191 | if __name__ == "__main__":
192 | main()
193 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_assembler_preview.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | Create an assembler file from source (C/C++)
4 | Assembler Preview
5 | Compiler
6 |
7 | qtc_assembler_preview.py
8 | %{CurrentProject:BuildPath} %{CurrentDocument:FilePath}
9 | %{CurrentProject:BuildPath}
10 |
11 |
12 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_blender_diffusion.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | """
5 | Takes 1 arg
6 |
7 | qtc_blender_diffusion.py
8 |
9 | Currently GCC is assumed
10 | """
11 |
12 | import sys
13 | import os
14 | import subprocess
15 |
16 | SOURCE_FILE = sys.argv[-2]
17 | SOURCE_ROW = sys.argv[-1]
18 |
19 | BASE_URL = "https://developer.blender.org/diffusion/B/browse"
20 |
21 |
def main():
    """Print (and optionally open) the Diffusion URL for SOURCE_FILE at SOURCE_ROW."""
    dirname, _filename = os.path.split(SOURCE_FILE)

    # Short name of the upstream branch the file's repository tracks.
    proc = subprocess.run(
        ["git", "rev-parse", "--symbolic-full-name", "--abbrev-ref", "@{u}"],
        stdout=subprocess.PIPE,
        cwd=dirname,
        universal_newlines=True,
    )
    branchname = proc.stdout.rstrip().rsplit('/', 1)[-1]

    # Repository root, used to make the file path repository-relative.
    proc = subprocess.run(
        ["git", "rev-parse", "--show-toplevel"],
        stdout=subprocess.PIPE,
        cwd=dirname,
        universal_newlines=True,
    )
    toplevel = proc.stdout.rstrip()
    filepath = os.path.relpath(SOURCE_FILE, toplevel)

    url = '/'.join([BASE_URL, branchname, filepath]) + "$" + SOURCE_ROW

    print(url)

    # Maybe handy, but also annoying?
    if "--browse" in sys.argv:
        import webbrowser
        webbrowser.open(url)


if __name__ == "__main__":
    main()
50 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_blender_diffusion.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | Print a URL to Diffusion on developer.blender.org for online reference
4 | Blender Diffusion
5 | Documentation
6 |
7 | qtc_blender_diffusion.py
8 | %{CurrentDocument:FilePath} %{CurrentDocument:Row}
9 | %{CurrentDocument:Path}
10 |
11 |
12 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_cpp_to_c_comments.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | """
5 | Convert C++ Style Comments:
6 |
7 | // hello
8 | // world
9 |
10 | To This:
11 |
12 | /* hello
13 | * world
14 | */
15 | """
16 |
17 | import sys
18 |
19 | txt = sys.stdin.read()
20 | data = txt.split("\n")
21 | # TODO. block comments
22 |
23 |
24 | # first detect blocks
def block_data(data, i_start):
    """Convert one aligned run of `//` comments (at or after *i_start*)
    into a `/* ... */` block, editing *data* in place.

    A run is two or more consecutive lines whose `//` sits at the same
    column and starts the stripped line.  Returns the index to resume
    scanning from (always > *i_start*).
    """
    # Locate the first line containing a C++ style comment.
    run_begin = -1
    comment_col = -1
    scan = i_start
    while scan < len(data):
        line = data[scan]
        if "//" in line:
            run_begin = scan
            comment_col = line.index("//")
            break
        scan += 1

    # Extend the run over following lines whose `//` leads the line and
    # sits at the same column.
    run_end = -1
    if run_begin != -1:
        run_end = run_begin
        for scan in range(run_begin + 1, len(data)):
            line = data[scan]
            if "//" in line and line.lstrip().startswith("//") and line.index("//") == comment_col:
                run_end = scan
            else:
                break

    if run_begin != run_end:
        # Do a block comment replacement.
        data[run_begin] = data[run_begin].replace("//", "/*", 1)
        for scan in range(run_begin + 1, run_end + 1):
            data[scan] = data[scan].replace("//", " *", 1)
        data[run_end] = "%s */" % data[run_end].rstrip()
    # Done with the block comment; the caller still does the single-line pass.
    return max(run_end, i_start + 1)
54 |
55 |
# First convert aligned multi-line `//` runs into `/* ... */` blocks.
i = 0
while i < len(data):
    i = block_data(data, i)

# FIX: a dead scan loop used to live here; its condition (`i > len(data)`)
# could never be true for i = 0 and its result was never used, so it has
# been removed.

# Then convert any remaining single-line `//` comments.
for i, l in enumerate(data):
    if "//" in l:  # should check if it's in a string.

        text, comment = l.split("//", 1)

        l = "%s/* %s */" % (text, comment.strip())

        data[i] = l


print("\n".join(data), end="")
76 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_cpp_to_c_comments.xml:
--------------------------------------------------------------------------------
1 |
2 |
12 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_doxy_file.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | """
This script takes 2-3 args: [--browse] <Doxyfile> <sourcefile>
6 |
7 | Where Doxyfile is a path relative to source root,
8 | and the sourcefile as an absolute path.
9 |
10 | --browse will open the resulting docs in a web browser.
11 | """
12 | import sys
13 | import os
14 | import subprocess
15 | import tempfile
16 |
17 |
def find_gitroot(filepath_reference):
    """Walk up from *filepath_reference* until a directory containing
    ".git" is found, and return it.

    Falls back to the filesystem root when no repository is found.
    """
    path = filepath_reference
    while not os.path.exists(os.path.join(path, ".git")):
        parent = os.path.dirname(path)
        if parent == path:
            # Reached the top, nothing more to strip off.
            break
        path = parent
    return path
25 |
26 |
doxyfile, sourcefile = sys.argv[-2:]

doxyfile = os.path.join(find_gitroot(sourcefile), doxyfile)
os.chdir(os.path.dirname(doxyfile))

# FIX: the temporary file object used to shadow the `tempfile` module itself,
# and the Doxyfile handle was opened without ever being closed; both are
# handled with context managers now.  The temp file lives for the duration of
# the doxygen run and is removed when the `with` block exits (matching the
# old `del tempfile` behavior).
with tempfile.NamedTemporaryFile(mode='w+b') as doxyfile_tmp_file:
    doxyfile_tmp = doxyfile_tmp_file.name
    # Copy the project Doxyfile, then append an INPUT override so that only
    # `sourcefile` is processed.
    with open(doxyfile, "rb") as fh:
        doxyfile_tmp_file.write(fh.read())
    doxyfile_tmp_file.write(b'\n\n')
    doxyfile_tmp_file.write(b'INPUT=' + os.fsencode(sourcefile) + b'\n')
    doxyfile_tmp_file.flush()

    subprocess.call(("doxygen", doxyfile_tmp))

# Maybe handy, but also annoying?
if "--browse" in sys.argv:
    import webbrowser
    webbrowser.open("html/files.html")
46 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_doxy_file.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | Doxygen a single file
4 | Doxygen File
5 | Documentation
6 |
7 | qtc_doxy_file.py
8 | --browse doc/doxygen/Doxyfile %{CurrentDocument:FilePath}
9 | %{CurrentProject:BuildPath}
10 |
11 |
12 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_expand_tabmix.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 | import sys
4 |
# TODO, get from QtCreator
TABSIZE = 4

# Filter: read the selection from stdin, write the converted text to stdout.
txt = sys.stdin.read()
data = txt.split("\n")

for i, line in enumerate(data):
    # Split off the leading run of tabs; it is kept verbatim.
    body = line.lstrip("\t")
    leading = line[:len(line) - len(body)]
    # Expand any remaining (non-leading) tabs and drop trailing whitespace.
    data[i] = leading + body.expandtabs(TABSIZE).rstrip(" \t")


print("\n".join(data), end="")
23 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_expand_tabmix.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | Expand non-leading tabs into spaces.
4 | Expand Tab Mix
5 | Formatting
6 |
7 | qtc_expand_tabmix.py
8 | %{CurrentDocument:Selection}
9 | %{CurrentDocument:Path}
10 |
11 |
12 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_project_update.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | """
5 | This is just a wrapper to run Blender's QtCreator project file generator,
6 | knowing only the CMake build path.
7 |
8 | qtc_project_update.py
9 | """
10 |
11 | import sys
12 | import os
13 |
14 | PROJECT_DIR = sys.argv[-1]
15 |
16 |
def cmake_find_source(path):
    """Return the CMAKE_HOME_DIRECTORY value from the CMakeCache.txt in *path*.

    Returns an empty string when the variable is not present.
    """
    import re
    pattern = re.compile(r"^CMAKE_HOME_DIRECTORY\b")
    cache = os.path.join(path, "CMakeCache.txt")
    with open(cache, 'r', encoding='utf-8') as fh:
        for line in fh:
            if pattern.match(line):
                # Everything after the first "=" is the value.
                return line[line.index("=") + 1:].strip()
    return ""
26 |
27 |
import subprocess

SOURCE_DIR = cmake_find_source(PROJECT_DIR)

cmd = (
    "python",
    os.path.join(SOURCE_DIR, "build_files/cmake/cmake_qtcreator_project.py"),
    "--build-dir",
    PROJECT_DIR,
)

print(cmd)
# FIX: pass the argument vector directly instead of
# `os.system(" ".join(cmd))`, which broke on paths containing spaces or
# shell meta-characters.
subprocess.call(cmd)
39 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_project_update.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | Regenerate the project file
4 | Project File Regenerate
5 | Project
6 |
7 | qtc_project_update.py
8 | %{CurrentProject:BuildPath}
9 |
10 |
11 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_right_align_trailing_char.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
import sys

# TODO, get from QtCreator
TABSIZE = 4

# Filter: the selection arrives on stdin, the result goes to stdout.
txt = sys.stdin.read()
data = txt.split("\n")

maxlen = 0
# tabs -> spaces
# First pass: expand tabs and strip trailing whitespace so the longest
# line length can be measured consistently.
for i, l in enumerate(data):
    l = l.replace("\t", " " * TABSIZE)
    l = l.rstrip()
    maxlen = max(maxlen, len(l))
    data[i] = l

# Second pass: widen the gap before the last word of each line until its
# final character lines up with the longest line (useful for `\` in
# multi-line macros).
for i, l in enumerate(data):
    ws = l.rsplit(" ", 1)
    if len(l.strip().split()) == 1 or len(ws) == 1:
        # Single-word lines (or lines with no space to widen) stay as-is.
        pass
    else:
        # Re-join with a growing separator until the target width is reached.
        j = 1
        while len(l) < maxlen:
            l = (" " * j).join(ws)
            j += 1
        data[i] = l

# add tabs back in
# Third pass: convert leading indentation back to tabs (one per TABSIZE
# spaces), keeping any remainder as spaces.
for i, l in enumerate(data):
    ls = l.lstrip()
    d = len(l) - len(ls)
    indent = ""
    while d >= TABSIZE:
        d -= TABSIZE
        indent += "\t"
    if d:
        indent += (" " * d)
    data[i] = indent + ls


print("\n".join(data), end="")
45 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_right_align_trailing_char.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | Right align the last character of each line to the existing furthermost character (useful for multi-line macros).
4 | Right Align Trailing Char
5 | Formatting
6 |
7 | qtc_right_align_trailing_char.py
8 | %{CurrentDocument:Selection}
9 | %{CurrentDocument:Path}
10 |
11 |
12 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_select_surround.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
import sys

# TODO, accept other characters as args

# Filter: wrap the selection (piped in on stdin) in parentheses.
sys.stdout.write("(" + sys.stdin.read() + ")")
12 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_select_surround.xml:
--------------------------------------------------------------------------------
1 |
2 |
Surround selection with parentheses or, optionally, other characters.
4 | Surround selection with parentheses
5 | Formatting
6 |
7 | qtc_select_surround.py
8 | %{CurrentDocument:Selection}
9 | %{CurrentDocument:Path}
10 |
11 |
12 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_sort_paths.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 | import sys
4 |
5 | txt = sys.stdin.read()
6 | data = txt.split("\n")
7 |
8 |
class PathCMP:
    """Sort key for path strings.

    Paths containing ".." rank after project-relative paths; relative paths
    rank by slash depth; paths of equal rank compare alphabetically.
    """

    def __init__(self, path):
        # FIX: dead code removed — an earlier version also stripped a
        # leading "./", a leading "/" and a trailing "/" here, but the
        # stripped result was never stored or used.
        path = path.strip()
        self.path = path

        # Rank: each ".." counts 1; paths without ".." rank below zero by
        # their slash count (offset by -10000 so they always sort first).
        self.level = self.path.count("..")
        if self.level == 0:
            self.level = (self.path.count("/") - 10000)

    def __eq__(self, other):
        return self.path == other.path

    def __lt__(self, other):
        return self.path < other.path if self.level == other.level else self.level < other.level

    def __gt__(self, other):
        return self.path > other.path if self.level == other.level else self.level > other.level
35 |
36 |
# PathCMP is itself callable on a raw line, so it can be the key directly.
data.sort(key=PathCMP)

print("\n".join(data), end="")
40 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_sort_paths.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | Path sort selection, taking into account path depth.
4 | Sort (Path Depths)
5 | Formatting
6 |
7 | qtc_sort_paths.py
8 | %{CurrentDocument:Selection}
9 | %{CurrentDocument:Path}
10 |
11 |
12 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_toggle_if0.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 | import sys
4 |
# Filter: toggle an `#if 0` / `#endif` wrapper around the stdin selection.
txt = sys.stdin.read()
data = txt.split("\n")

# Check if we're if0
# The first non-blank line decides: if it starts with "#if 0" the selection
# is treated as already wrapped and will be un-wrapped, otherwise wrapped.
is_comment = False
for l in data:
    l_strip = l.strip()
    if l_strip:
        if l_strip.startswith("#if 0"):
            is_comment = True
        else:
            is_comment = False
        break

if is_comment:
    # Un-wrap: locate the first "#if 0" and the last "#endif".
    # NOTE(review): `pop_b` keeps updating, so this pairs the first "#if 0"
    # with the *last* "#endif" — verify this is the intent for selections
    # containing nested #if/#endif blocks.
    pop_a = None
    pop_b = None
    for i, l in enumerate(data):
        l_strip = l.strip()

        if pop_a is None:
            if l_strip.startswith("#if 0"):
                pop_a = i

        if l_strip.startswith("#endif"):
            pop_b = i

    # Delete the higher index first so the lower index stays valid.
    if pop_a is not None and pop_b is not None:
        del data[pop_b]
        del data[pop_a]
else:
    # Wrap: drop trailing blank lines, then surround with #if 0 / #endif.
    while data and not data[-1].strip():
        data.pop()
    data = ["#if 0"] + data + ["#endif\n"]


print("\n".join(data), end="")
42 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/externaltools/qtc_toggle_if0.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | Toggle if 0 preprocessor block.
4 | Toggle #if 0
5 | Formatting
6 |
7 | qtc_toggle_if0.py
8 | %{CurrentDocument:Selection}
9 | %{CurrentDocument:Path}
10 |
11 |
12 |
--------------------------------------------------------------------------------
/utils_ide/qtcreator/readme.rst:
--------------------------------------------------------------------------------
1 | This repository contains utilities to perform various editing operations as well as some utilities to integrate
2 | Uncrustify and Meld.
3 |
4 |
5 | This is for my own personal use, but I have tried to make the tools generic (where possible) and useful to others.
6 |
7 |
8 | Installing
9 | ==========
10 |
All the scripts install to QtCreator's ``externaltools`` path:
12 |
13 | eg:
14 | ``~/.config/QtProject/qtcreator/externaltools/``
15 |
16 | Currently QtCreator has no way to reference commands relative to this directory so the ``externaltools`` dir **must**
be added to the system's ``PATH``.
18 |
19 |
20 | Tools
21 | =====
22 |
Here is a list of the tools with some details on how they work.
24 |
25 |
26 | Assembler Preview
27 | -----------------
28 |
29 | ``External Tools -> Compiler -> Assembler Preview``
30 |
31 | This tool generates the assembly for the current open document,
32 | saving it to a file in the same path with an ".asm" extension.
33 |
34 | This can be handy for checking if the compiler is really optimizing out code as expected.
35 |
36 | Or if some change really doesn't change any functionality.
37 |
38 | The way it works is to get a list of the build commands that would run, and get those commands for the current file.
39 |
40 | Then this command runs, swapping out object creation args for arguments that create the assembly.
41 |
.. note:: It would be nice to open this file, but currently this isn't supported. It's just created alongside the source.
43 |
44 | .. note:: Currently only GCC is supported.
45 |
--------------------------------------------------------------------------------
/utils_maintenance/autopep8_format_paths.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 | """
4 | This script runs autopep8 on multiple files/directories.
5 |
6 | While it can be called directly, you may prefer to run this from Blender's root directory with the command:
7 |
8 | make format
9 |
10 | Otherwise you may call this script directly, for example:
11 |
12 | ./source/tools/utils_maintenance/autopep8_format_paths.py --changed-only tests/python
13 | """
14 |
15 | import os
16 | import sys
17 |
18 | import subprocess
19 | import argparse
20 |
21 | from typing import (
22 | List,
23 | Tuple,
24 | Optional,
25 | )
26 |
27 | VERSION_MIN = (1, 6, 0)
28 | VERSION_MAX_RECOMMENDED = (1, 6, 0)
29 | AUTOPEP8_FORMAT_CMD = "autopep8"
30 |
31 | BASE_DIR = os.path.normpath(os.path.join(os.path.dirname(__file__), "..", "..", ".."))
32 | os.chdir(BASE_DIR)
33 |
34 |
35 | extensions = (
36 | ".py",
37 | )
38 |
39 | ignore_files = {
40 | "release/scripts/modules/rna_manual_reference.py", # Large generated file, don't format.
41 | }
42 |
43 |
def compute_paths(paths: List[str], use_default_paths: bool) -> List[str]:
    """Return the paths to operate on.

    With *use_default_paths* the hard-coded project directories are used;
    otherwise *paths* is reduced to directories and Python files.  "/" is
    converted to the native separator on non-POSIX systems.
    """
    if use_default_paths:
        result = [
            "build_files",
            "intern",
            "release",
            "doc",
            "source",
            "tests",
        ]
    else:
        def keep(f: str) -> bool:
            # Keep directories, and files with a recognized extension.
            return os.path.isdir(f) or (os.path.isfile(f) and f.endswith(extensions))
        result = list(filter(keep, paths))

    if os.sep != "/":
        result = [f.replace("/", os.sep) for f in result]
    return result
64 |
65 |
def source_files_from_git(paths: List[str], changed_only: bool) -> List[str]:
    """Return git-tracked files under *paths* (NUL-delimited listing).

    With *changed_only*, only files modified relative to HEAD are returned.
    """
    cmd = (
        ("git", "diff", "HEAD", "--name-only", "-z", "--", *paths)
        if changed_only else
        ("git", "ls-tree", "-r", "HEAD", *paths, "--name-only", "-z")
    )
    output = subprocess.check_output(cmd)
    return [name.decode('ascii') for name in output.split(b'\0')]
73 |
74 |
def autopep8_ensure_version(autopep8_format_cmd_argument: str) -> Optional[Tuple[int, int, int]]:
    """Resolve the autopep8 command and return its version as a 3-tuple.

    Tries *autopep8_format_cmd_argument* first (only if it exists on disk),
    then falls back to a plain "autopep8" on the PATH.  On success the
    resolved command is stored in the module-level AUTOPEP8_FORMAT_CMD.
    Returns None when no working autopep8 is found or no version number can
    be parsed from its output.
    """
    global AUTOPEP8_FORMAT_CMD
    autopep8_format_cmd = None
    version_output = None
    # Attempt to use `--autopep8-command` passed in from `make format`
    # so the autopep8 distributed with Blender will be used.
    for is_default in (True, False):
        if is_default:
            autopep8_format_cmd = autopep8_format_cmd_argument
            if autopep8_format_cmd and os.path.exists(autopep8_format_cmd):
                pass
            else:
                # Argument command missing on disk, fall through to "autopep8".
                continue
        else:
            autopep8_format_cmd = "autopep8"

        cmd = [autopep8_format_cmd]
        # A ".py" command must be run through the current interpreter.
        if cmd[0].endswith(".py"):
            cmd = [sys.executable, *cmd]

        try:
            version_output = subprocess.check_output((*cmd, "--version")).decode('utf-8')
        except FileNotFoundError:
            continue
        AUTOPEP8_FORMAT_CMD = autopep8_format_cmd
        break
    if version_output is not None:
        # First whitespace-separated token that starts with a digit, e.g. "1.6.0".
        version_str = next(iter(v for v in version_output.split() if v[0].isdigit()), None)
        if version_str is not None:
            # Ensure exactly 3 numbers.
            major, minor, patch = (tuple(int(n) for n in version_str.split("-")[0].split(".")) + (0, 0, 0))[0:3]
            print("Using %s (%d.%d.%d)..." % (AUTOPEP8_FORMAT_CMD, major, minor, patch))
            return major, minor, patch
    return None
109 |
110 |
def autopep8_format(files: List[str]) -> bytes:
    """Run autopep8 in-place over *files* and return its combined output."""
    cmd = [
        AUTOPEP8_FORMAT_CMD,
        # Operate on all directories recursively.
        "--recursive",
        # Update the files in-place.
        "--in-place",
        # Auto-detect the number of jobs to use.
        "--jobs=0",
        *files,
    ]

    # Support executing from the module directory because Blender does not
    # distribute the command.
    if cmd[0].endswith(".py"):
        cmd.insert(0, sys.executable)

    return subprocess.check_output(cmd, stderr=subprocess.STDOUT)
127 |
128 |
def argparse_create() -> argparse.ArgumentParser:
    """Construct the command line parser for this formatter."""
    parser = argparse.ArgumentParser(
        description="Format Python source code.",
        epilog=__doc__,
        # Don't re-wrap text, keep newlines & indentation.
        formatter_class=argparse.RawTextHelpFormatter,
    )

    parser.add_argument(
        "--changed-only",
        dest="changed_only",
        action='store_true',
        default=False,
        required=False,
        help=(
            "Format only edited files, including the staged ones. "
            "Using this with \"paths\" will pick the edited files lying on those paths. "
            "(default=False)"
        ),
    )
    parser.add_argument(
        "--autopep8-command",
        dest="autopep8_command",
        default=AUTOPEP8_FORMAT_CMD,
        required=False,
        help="The command to call autopep8.",
    )
    # Everything after the options is treated as a path.
    parser.add_argument(
        "paths",
        nargs=argparse.REMAINDER,
        help="All trailing arguments are treated as paths.",
    )

    return parser
163 |
164 |
def main() -> None:
    """Entry point: check the autopep8 version, resolve paths, then format."""
    args = argparse_create().parse_args()

    # Refuse to run with an unusable autopep8; warn about untested versions.
    version = autopep8_ensure_version(args.autopep8_command)
    if version is None:
        print("Unable to detect 'autopep8 --version'")
        sys.exit(1)
    if version < VERSION_MIN:
        print("Version of autopep8 is too old:", version, "<", VERSION_MIN)
        sys.exit(1)
    if version > VERSION_MAX_RECOMMENDED:
        print(
            "WARNING: Version of autopep8 is too recent:",
            version, ">", VERSION_MAX_RECOMMENDED,
        )
        print(
            "You may want to install autopep8-%d.%d, "
            "or use the precompiled libs repository." %
            (VERSION_MAX_RECOMMENDED[0], VERSION_MAX_RECOMMENDED[1]),
        )

    use_default_paths = not (bool(args.paths) or bool(args.changed_only))

    paths = compute_paths(args.paths, use_default_paths)
    print("Operating on:" + (" (%d changed paths)" % len(paths) if args.changed_only else ""))
    for path in paths:
        print(" ", path)

    files = [
        f for f in source_files_from_git(paths, args.changed_only)
        if f.endswith(extensions) and f not in ignore_files
    ]

    # Happens when users run "make format" passing in individual C/C++ files
    # (and no Python files).
    if not files:
        return

    autopep8_format(files)


if __name__ == "__main__":
    main()
209 |
--------------------------------------------------------------------------------
/utils_maintenance/blender_update_themes.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-2.0-or-later
2 |
3 | # this script updates XML themes once new settings are added
4 | #
5 | # ./blender.bin --background --python ./source/tools/utils_maintenance/blender_update_themes.py
6 |
7 | import bpy
8 | import os
9 |
10 |
def update(filepath):
    """Round-trip a theme XML file through RNA so newly added settings appear."""
    import rna_xml
    context = bpy.context

    print("Updating theme: %r" % filepath)
    preset_xml_map = (
        ("preferences.themes[0]", "Theme"),
        ("preferences.ui_styles[0]", "Theme"),
    )
    # Load the theme into the current preferences...
    rna_xml.xml_file_run(context, filepath, preset_xml_map)
    # ...then write it back out, which serializes any new settings too.
    rna_xml.xml_file_write(context, filepath, preset_xml_map)
31 |
32 |
def update_default(filepath):
    """Overwrite *filepath* with the stub contents for the default theme."""
    # NOTE(review): the literal written below appears truncated in this view;
    # confirm the full payload against the original file before relying on it.
    with open(filepath, 'w', encoding='utf-8') as fh:
        fh.write('''




''')
42 |
43 |
def main():
    """Update every bundled theme preset; the default theme gets the stub."""
    for path in bpy.utils.preset_paths("interface_theme"):
        for fn in os.listdir(path):
            if not fn.endswith(".xml"):
                continue
            fn_full = os.path.join(path, fn)
            # The default theme is special-cased with a minimal stub.
            if fn == "blender_dark.xml":
                update_default(fn_full)
            else:
                update(fn_full)


if __name__ == "__main__":
    main()
57 |
--------------------------------------------------------------------------------
/utils_maintenance/c_sort_blocks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | import os
5 | import sys
6 |
7 | PWD = os.path.dirname(__file__)
8 | sys.path.append(os.path.join(PWD, "modules"))
9 |
10 | from batch_edit_text import run
11 |
12 | from typing import (
13 | Optional,
14 | )
15 |
16 | SOURCE_DIR = os.path.normpath(os.path.abspath(os.path.normpath(os.path.join(PWD, "..", "..", ".."))))
17 |
18 | # TODO, move to config file
19 | SOURCE_DIRS = (
20 | "source",
21 | "intern/ghost",
22 | )
23 |
24 | SOURCE_EXT = (
25 | # C/C++
26 | ".c", ".h", ".cpp", ".hpp", ".cc", ".hh", ".cxx", ".hxx", ".inl",
27 | # Objective C
28 | ".m", ".mm",
29 | )
30 |
31 |
def sort_struct_lists(fn: str, data_src: str) -> Optional[str]:
    """Alphabetically sort consecutive runs of single-line forward
    declarations (`struct Foo;`, `struct Foo Bar;`, `typedef struct ...;`,
    `enum Foo;`).

    Only adjacent lines of the *same* declaration kind are sorted together.
    Returns the new file contents, or None when nothing changed.
    """
    import re

    ident = "[A-Za-z_][A-Za-z_0-9]*"
    # One pattern per declaration kind; the 1-based index is the kind id.
    declaration_patterns = (
        re.compile(r"struct\s+%s\s*;" % ident),                          # struct Foo;
        re.compile(r"struct\s+%s\s+%s\s*;" % (ident, ident)),            # struct Foo Bar;
        re.compile(r"typedef\s+struct\s+%s\s+%s\s*;" % (ident, ident)),  # typedef struct Foo Bar;
        re.compile(r"enum\s+%s\s*;" % ident),                            # enum Foo;
        # NOTE: sorting `extern char datatoc_...` declarations is disabled.
    )

    def sort_kind(line: str) -> Optional[int]:
        # Return the 1-based kind of a sortable declaration, else None.
        for kind, pat in enumerate(declaration_patterns, 1):
            if pat.match(line):
                return kind
        return None

    lines = data_src.splitlines(keepends=True)

    index = 0
    while index < len(lines):
        kind = sort_kind(lines[index])
        if kind is not None:
            # Extend over the run of lines with the same kind, then sort it.
            end = index
            while end + 1 < len(lines) and sort_kind(lines[end + 1]) == kind:
                end += 1
            if end != index:
                lines[index:end + 1] = sorted(lines[index:end + 1])
            index = end
        index += 1

    data_dst = "".join(lines)
    return data_dst if data_dst != data_src else None
86 |
87 |
# Apply `sort_struct_lists` to every C/C++/ObjC source file under the
# configured directories, editing files in place.
run(
    directories=[os.path.join(SOURCE_DIR, d) for d in SOURCE_DIRS],
    is_text=lambda fn: fn.endswith(SOURCE_EXT),
    text_operation=sort_struct_lists,
    use_multiprocess=True,
)
94 |
--------------------------------------------------------------------------------
/utils_maintenance/c_struct_clean.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | """
5 | When a source file declares a struct which isn't used anywhere else in the file.
6 | Remove it.
7 |
8 | There may be times this is needed, however they can typically be removed
9 | and any errors caused can be added to the headers which require the forward declarations.
10 | """
11 |
12 | import os
13 | import sys
14 | import re
15 |
16 | from typing import (
17 | Dict,
18 | Optional,
19 | )
20 |
21 | PWD = os.path.dirname(__file__)
22 | sys.path.append(os.path.join(PWD, "modules"))
23 |
24 | from batch_edit_text import run
25 |
26 | SOURCE_DIR = os.path.normpath(os.path.abspath(os.path.normpath(os.path.join(PWD, "..", "..", ".."))))
27 |
28 | # TODO: move to configuration file.
29 | SOURCE_DIRS = (
30 | "source",
31 | os.path.join("intern", "ghost"),
32 | )
33 |
34 | SOURCE_EXT = (
35 | # C/C++
36 | ".c", ".h", ".cpp", ".hpp", ".cc", ".hh", ".cxx", ".hxx", ".inl",
37 | # Objective C
38 | ".m", ".mm",
39 | )
40 |
41 | re_words = re.compile("[A-Za-z_][A-Za-z_0-9]*")
42 | re_match_struct = re.compile(r"struct\s+([A-Za-z_][A-Za-z_0-9]*)\s*;")
43 |
44 |
def clean_structs(fn: str, data_src: str) -> Optional[str]:
    """Remove forward `struct Foo;` declarations never referenced again in *fn*.

    Identifier occurrences are counted from the lexed tokens, skipping
    comments and strings (but keeping pre-processor text), so a struct name
    that only appears in a comment doesn't count as a use.
    Returns the modified text, or None when nothing changed.
    """
    from pygments.token import Token
    from pygments import lexers

    word_occurance: Dict[str, int] = {}

    lex = lexers.get_lexer_by_name("c++")

    # Pre-processor tokens are classified under `Token.Comment` by pygments
    # but must still be counted (struct names can be used in macros).
    ty_exact = (Token.Comment.Preproc, Token.Comment.PreprocFile)

    # FIX: previously the inner loop scanned the *entire* source once per
    # token (`re_words.finditer(data_src)`), which made the comment/string
    # filtering a no-op and the scan quadratic; also a duplicate discarded
    # `lex.get_tokens(data_src)` call has been removed.  Count the words of
    # each token's own text instead.
    for ty, text in lex.get_tokens(data_src):
        if ty not in ty_exact:
            if ty in Token.String:  # type: ignore
                continue
            if ty in Token.Comment:  # type: ignore
                continue

        for w_match in re_words.finditer(text):
            w = w_match.group(0)
            try:
                word_occurance[w] += 1
            except KeyError:
                word_occurance[w] = 1

    lines = data_src.splitlines(keepends=True)

    i = 0
    while i < len(lines):
        m = re_match_struct.match(lines[i])
        if m is not None:
            struct_name = m.group(1)
            # The declaration itself accounts for the single occurrence.
            if word_occurance.get(struct_name, 0) == 1:
                print(struct_name, fn)
                del lines[i]
                i -= 1

        i += 1

    data_dst = "".join(lines)
    if data_src != data_dst:
        return data_dst
    return None
88 |
89 |
# Apply `clean_structs` to every C/C++/ObjC source file under the
# configured directories, editing files in place (single process).
run(
    directories=[os.path.join(SOURCE_DIR, d) for d in SOURCE_DIRS],
    is_text=lambda fn: fn.endswith(SOURCE_EXT),
    text_operation=clean_structs,
    use_multiprocess=False,
)
96 |
--------------------------------------------------------------------------------
/utils_maintenance/clang_format_paths.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 | """
4 | This script runs clang-format on multiple files/directories.
5 |
6 | While it can be called directly, you may prefer to run this from Blender's root directory with the command:
7 |
8 | make format
9 |
10 | """
11 |
12 | import argparse
13 | import multiprocessing
14 | import os
15 | import sys
16 | import subprocess
17 |
18 | from typing import (
19 | List,
20 | Optional,
21 | Sequence,
22 | Tuple,
23 | )
24 |
# Minimum clang-format version accepted; older versions abort the script.
VERSION_MIN = (8, 0, 0)
# Versions newer than this trigger a warning in `main`.
VERSION_MAX_RECOMMENDED = (12, 0, 0)
# Executable used to invoke clang-format; `clang_format_ensure_version`
# may replace this with a versioned name such as "clang-format-8".
CLANG_FORMAT_CMD = "clang-format"

# Repository root (three levels up from this script); the script runs from
# there so the paths reported by git are repository-relative.
BASE_DIR = os.path.normpath(os.path.join(os.path.dirname(__file__), "..", "..", ".."))
os.chdir(BASE_DIR)


# File extensions that are passed to clang-format.
extensions = (
    ".c", ".cc", ".cpp", ".cxx",
    ".h", ".hh", ".hpp", ".hxx",
    ".m", ".mm",
    ".osl", ".glsl",
)

# Files that only get the tab-expansion pre-pass, never clang-format.
extensions_only_retab = (
    ".cmake",
    "CMakeLists.txt",
    ".sh",
)

# Files excluded from all processing.
ignore_files = {
    "intern/cycles/render/sobol.cpp",  # Too heavy for clang-format
}
49 |
50 |
def compute_paths(paths: List[str], use_default_paths: bool) -> List[str]:
    """Resolve the set of paths to operate on.

    With ``use_default_paths`` the hard-coded project directory list is
    returned; otherwise ``paths`` is narrowed down to directories and files
    carrying a recognized extension. Separators are made native on
    non-POSIX platforms.
    """
    if use_default_paths:
        resolved = [
            "intern/atomic",
            "intern/audaspace",
            "intern/clog",
            "intern/cycles",
            "intern/dualcon",
            "intern/eigen",
            "intern/ffmpeg",
            "intern/ghost",
            "intern/glew-mx",
            "intern/guardedalloc",
            "intern/iksolver",
            "intern/libmv",
            "intern/locale",
            "intern/memutil",
            "intern/mikktspace",
            "intern/opencolorio",
            "intern/opensubdiv",
            "intern/openvdb",
            "intern/rigidbody",
            "intern/utfconv",
            "source",
            "tests/gtests",
        ]
    else:
        # Cosmetic pre-filter: without it the script would print that it is
        # "Operating" on files which later stages filter out anyway.
        resolved = [
            p for p in paths
            if os.path.isdir(p) or (os.path.isfile(p) and p.endswith(extensions))
        ]

    if os.sep != "/":
        resolved = [p.replace("/", os.sep) for p in resolved]
    return resolved
89 |
90 |
def source_files_from_git(paths: Sequence[str], changed_only: bool) -> List[str]:
    """Return the files git tracks (or, with ``changed_only``, files
    modified relative to HEAD) under ``paths``.

    NUL-separated output (``-z``) is requested so file names containing
    spaces or newlines survive splitting. Since that output is
    NUL-*terminated*, the final separator produces an empty entry which is
    filtered out here. Names are decoded as UTF-8 (not ASCII) so non-ASCII
    file names don't raise ``UnicodeDecodeError``.
    """
    if changed_only:
        cmd = ("git", "diff", "HEAD", "--name-only", "-z", "--", *paths)
    else:
        cmd = ("git", "ls-tree", "-r", "HEAD", *paths, "--name-only", "-z")
    output = subprocess.check_output(cmd)
    return [f.decode('utf-8') for f in output.split(b'\0') if f]
98 |
99 |
def convert_tabs_to_spaces(files: Sequence[str]) -> None:
    """Expand tabs to spaces, in-place, in every file of ``files``.

    Each *leading* tab is replaced by a single space, while any remaining
    tabs (e.g. used inside comments for alignment) are expanded with a
    tab-size of 4. NOTE(review): one space per leading tab looks
    intentional — presumably clang-format re-indents afterwards so only a
    placeholder indent is needed — confirm before reusing stand-alone.
    """
    # The original code carried an unreachable `if False:` simple-expansion
    # branch; it has been removed. The per-line helper is hoisted out of the
    # file loop so it is defined once.
    def expand_line(line: str) -> str:
        stripped = line.lstrip("\t")
        n_leading = len(line) - len(stripped)  # Number of leading tabs.
        if n_leading != 0:
            return (" " * n_leading) + stripped.expandtabs(4)
        return line.expandtabs(4)

    for filepath in files:
        print("TabExpand", filepath)
        with open(filepath, 'r', encoding="utf-8") as fh:
            data = fh.read()
        data = "".join(expand_line(line) for line in data.splitlines(keepends=True))
        with open(filepath, 'w', encoding="utf-8") as fh:
            fh.write(data)
124 |
125 |
def clang_format_ensure_version() -> Optional[Tuple[int, int, int]]:
    """Locate a usable clang-format binary and return its version.

    Probes ``clang-format-8.0``, then ``clang-format-8``, then plain
    ``clang-format``; the first one found is stored in the module-level
    ``CLANG_FORMAT_CMD``. Returns the version as a 3-tuple of ints, or
    ``None`` when no binary (or no parsable version string) was found.
    """
    global CLANG_FORMAT_CMD
    version_output = ""
    for n_components in (2, 1, 0):
        if n_components > 0:
            suffix = ".".join("%d" % v for v in VERSION_MIN[:n_components])
            candidate = "clang-format-" + suffix
        else:
            candidate = "clang-format"
        try:
            version_output = subprocess.check_output((candidate, "-version")).decode('utf-8')
        except FileNotFoundError:
            continue
        CLANG_FORMAT_CMD = candidate
        break

    # First white-space separated word that starts with a digit.
    version_str: Optional[str] = next(
        (word for word in version_output.split() if word[0].isdigit()),
        None,
    )
    if version_str is None:
        return None

    version_str = version_str.split("-")[0]
    # Pad/truncate to exactly three numeric components.
    version_num: Tuple[int, int, int] = (
        tuple(int(n) for n in version_str.split(".")) + (0, 0, 0)
    )[:3]  # type: ignore
    print("Using %s (%d.%d.%d)..." % (CLANG_FORMAT_CMD, version_num[0], version_num[1], version_num[2]))
    return version_num
151 |
152 |
def clang_format_file(files: List[str]) -> bytes:
    """Run clang-format in-place on ``files`` and return its combined output."""
    cmd = [
        CLANG_FORMAT_CMD,
        "-i",        # Re-write the files in-place.
        "-verbose",  # Print each processed file.
        *files,
    ]
    return subprocess.check_output(cmd, stderr=subprocess.STDOUT)
162 |
163 |
def clang_print_output(output: bytes) -> None:
    """Decode clang-format output and print it without surrounding white-space."""
    text = output.decode('utf8', errors='ignore')
    print(text.strip())
166 |
167 |
def clang_format(files: List[str]) -> None:
    """Format ``files`` with clang-format, distributed over a process pool."""
    pool = multiprocessing.Pool()

    # Hand each worker a chunk of files rather than one at a time, to
    # reduce the overhead of starting clang-format processes.
    n_cpus = multiprocessing.cpu_count()
    chunk_size = min(max(len(files) // n_cpus // 2, 1), 32)
    for start in range(0, len(files), chunk_size):
        chunk = files[start:start + chunk_size]
        pool.apply_async(clang_format_file, args=[chunk], callback=clang_print_output)

    pool.close()
    pool.join()
180 |
181 |
def argparse_create() -> argparse.ArgumentParser:
    """Build and return the command-line parser for this script."""
    parser = argparse.ArgumentParser(
        # Don't re-wrap text, keep newlines & indentation.
        formatter_class=argparse.RawTextHelpFormatter,
        description="Format C/C++/GLSL & Objective-C source code.",
        epilog=__doc__,
    )
    parser.add_argument(
        "--expand-tabs",
        dest="expand_tabs",
        action='store_true',
        default=False,
        required=False,
        help="Run a pre-pass that expands tabs "
        "(default=False)",
    )
    parser.add_argument(
        "--changed-only",
        dest="changed_only",
        action='store_true',
        default=False,
        required=False,
        help=(
            "Format only edited files, including the staged ones. "
            "Using this with \"paths\" will pick the edited files lying on those paths. "
            "(default=False)"
        ),
    )
    parser.add_argument(
        "paths",
        help="All trailing arguments are treated as paths.",
        nargs=argparse.REMAINDER,
    )
    return parser
219 |
220 |
def main() -> None:
    """Entry point: validate clang-format, gather files from git, format them."""
    detected = clang_format_ensure_version()
    if detected is None:
        print("Unable to detect 'clang-format -version'")
        sys.exit(1)
    if detected < VERSION_MIN:
        print("Version of clang-format is too old:", detected, "<", VERSION_MIN)
        sys.exit(1)
    if detected > VERSION_MAX_RECOMMENDED:
        print(
            "WARNING: Version of clang-format is too recent:",
            detected, ">", VERSION_MAX_RECOMMENDED,
        )
        print(
            "You may want to install clang-format-%d.%d, "
            "or use the precompiled libs repository." %
            (VERSION_MAX_RECOMMENDED[0], VERSION_MAX_RECOMMENDED[1]),
        )

    cli_args = argparse_create().parse_args()

    use_default_paths = not (bool(cli_args.paths) or bool(cli_args.changed_only))
    paths = compute_paths(cli_args.paths, use_default_paths)

    print("Operating on:" + (" (%d changed paths)" % len(paths) if cli_args.changed_only else ""))
    for path in paths:
        print(" ", path)

    format_files = [
        f for f in source_files_from_git(paths, cli_args.changed_only)
        if f.endswith(extensions) and f not in ignore_files
    ]

    # Always operate on all CMAKE files (when expanding tabs and no paths given).
    retab_files = [
        f for f in source_files_from_git((".",) if use_default_paths else paths, cli_args.changed_only)
        if f.endswith(extensions_only_retab) and f not in ignore_files
    ]

    if cli_args.expand_tabs:
        convert_tabs_to_spaces(format_files + retab_files)
    clang_format(format_files)
265 |
266 |
267 | if __name__ == "__main__":
268 | main()
269 |
--------------------------------------------------------------------------------
/utils_maintenance/cmake_sort_filelists.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | """
5 | Sorts CMake path lists
6 | - Don't cross blank newline boundaries.
7 | - Don't cross different path prefix boundaries.
8 | """
9 |
10 | import os
11 | import sys
12 |
13 | from typing import (
14 | Optional,
15 | )
16 |
17 | PWD = os.path.dirname(__file__)
18 | sys.path.append(os.path.join(PWD, "modules"))
19 |
20 | from batch_edit_text import run
21 |
# Repository root: three directory levels up from this script.
SOURCE_DIR = os.path.normpath(os.path.abspath(os.path.normpath(os.path.join(PWD, "..", "..", ".."))))

# Directories (relative to SOURCE_DIR) whose CMakeLists.txt files are sorted.
# TODO, move to config file
SOURCE_DIRS = (
    "source",
    "intern/ghost",
)

# Source-file extensions recognized as sortable entries in CMake file lists.
SOURCE_EXT = (
    # C/C++
    ".c", ".h", ".cpp", ".hpp", ".cc", ".hh", ".cxx", ".hxx", ".inl",
    # Objective C
    ".m", ".mm",
)
36 |
37 |
def sort_cmake_file_lists(fn: str, data_src: str) -> Optional[str]:
    """Sort consecutive path entries in a CMake file's contents.

    Sorted runs never cross blank lines, non-path lines, differing
    indentation, or differing leading directory components.

    :arg fn: path of the CMake file (used to resolve directory entries).
    :arg data_src: the file's current text.
    :return: the new text, or None when nothing changed.
    """
    fn_dir = os.path.dirname(fn)
    lines = data_src.splitlines(keepends=True)

    def can_sort(l: str) -> bool:
        """True when this line is a sortable path/library entry."""
        l = l.split("#", 1)[0].strip()
        # Source files.
        if l.endswith(SOURCE_EXT):
            if "(" not in l and ')' not in l:
                return True
        # Headers (directory entries, relative to the CMake file).
        if l and os.path.isdir(os.path.join(fn_dir, l)):
            return True
        # Libs.
        if l.startswith(("bf_", "extern_")) and "." not in l and "/" not in l:
            return True
        return False

    def can_sort_compat(a: str, b: str) -> bool:
        """True when two sortable lines belong in the same sorted run."""
        # Strip comments.
        a = a.split("#", 1)[0]
        b = b.split("#", 1)[0]

        # Lines must share the same leading white-space. Computed as an
        # explicit prefix length: the former `a[:-(len(a.lstrip()))]` form
        # yielded "" instead of the whole string for an all-white-space
        # line, since `a[:-0]` is `a[:0]`. (Such lines never reach this
        # helper today because `can_sort` rejects them, but the slice is
        # now correct regardless.) The commented-out `# return False`
        # dead code that used to sit here has been removed.
        if a[:len(a) - len(a.lstrip())] != b[:len(b) - len(b.lstrip())]:
            return False

        # Lines must share the same leading directory components.
        a_ls = a.split("/")
        b_ls = b.split("/")
        if len(a_ls) != len(b_ls):
            return False
        if len(a_ls) == 1:
            return True
        return a_ls[:-1] == b_ls[:-1]

    i = 0
    while i < len(lines):
        if can_sort(lines[i]):
            # Grow the run while lines stay sortable & compatible with line i.
            j = i
            while j + 1 < len(lines):
                if not can_sort(lines[j + 1]):
                    break
                if not can_sort_compat(lines[i], lines[j + 1]):
                    break
                j = j + 1
            if i != j:
                lines[i:j + 1] = sorted(lines[i:j + 1])
            i = j
        i = i + 1

    data_dst = "".join(lines)
    if data_src != data_dst:
        return data_dst
    return None
96 |
97 |
# Sort the path lists in every CMakeLists.txt below the configured directories.
run(
    directories=[os.path.join(SOURCE_DIR, subdir) for subdir in SOURCE_DIRS],
    is_text=lambda filepath: filepath.endswith("CMakeLists.txt"),
    text_operation=sort_cmake_file_lists,
    use_multiprocess=True,
)
104 |
--------------------------------------------------------------------------------
/utils_maintenance/modules/batch_edit_text.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-2.0-or-later
2 |
3 | from typing import (
4 | Callable,
5 | Generator,
6 | Optional,
7 | Sequence,
8 | )
9 |
# Signature of a text-transform callback: receives the file path and the
# file's current contents; returns the replacement text, or None when the
# file should be left untouched.
TextOpFn = Callable[
    # file_name, data_src
    [str, str],
    # data_dst or None when no change is made.
    Optional[str]
]
16 |
17 |
def operation_wrap(fn: str, text_operation: TextOpFn) -> None:
    """Read ``fn``, apply ``text_operation``, and write back any changed text.

    The file is left untouched when the operation returns None or text
    identical to the current contents.
    """
    with open(fn, "r", encoding="utf-8") as fh:
        text_before = fh.read()
    text_after = text_operation(fn, text_before)

    if text_after is None or text_after == text_before:
        return

    with open(fn, "w", encoding="utf-8") as fh:
        fh.write(text_after)
28 |
29 |
def run(
        *,
        directories: Sequence[str],
        is_text: Callable[[str], bool],
        text_operation: TextOpFn,
        use_multiprocess: bool,
) -> None:
    """Apply ``text_operation`` to every matching file under ``directories``.

    :arg directories: directories that are walked recursively.
    :arg is_text: predicate deciding (by file path) which files to process.
    :arg text_operation: transform applied to each file (see ``TextOpFn``).
    :arg use_multiprocess: process files with a pool of worker processes.
    """
    print(directories)

    import os

    def source_files(path: str) -> Generator[str, None, None]:
        """Yield non-hidden file paths under ``path``, pruning dot-directories."""
        for dirpath, dirnames, filenames in os.walk(path):
            dirnames[:] = [d for d in dirnames if not d.startswith(".")]
            for filename in filenames:
                if filename.startswith("."):
                    continue
                filepath = os.path.join(dirpath, filename)
                if is_text(filepath):
                    yield filepath

    if use_multiprocess:
        args = [
            (fn, text_operation) for directory in directories
            for fn in source_files(directory)
        ]
        import multiprocessing
        job_total = multiprocessing.cpu_count()
        # Use the pool as a context manager so its worker processes are
        # reliably cleaned up (the previous code never called close/join,
        # leaking workers until interpreter shutdown).
        with multiprocessing.Pool(processes=job_total * 2) as pool:
            pool.starmap(operation_wrap, args)
    else:
        for directory in directories:
            for fn in source_files(directory):
                operation_wrap(fn, text_operation)
64 |
--------------------------------------------------------------------------------
/utils_maintenance/trailing_space_clean.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # SPDX-License-Identifier: GPL-2.0-or-later
3 |
4 | import os
5 | from os.path import join
6 |
7 | from trailing_space_clean_config import PATHS
8 |
# File extensions whose trailing white-space is cleaned.
SOURCE_EXT = (
    # C/C++
    ".c", ".h", ".cpp", ".hpp", ".cc", ".hh", ".cxx", ".hxx", ".inl",
    # Objective C
    ".m", ".mm",
    # GLSL
    ".glsl",
    # Python
    ".py",
    # Text (also CMake)
    ".txt", ".cmake", ".rst",
    # MS-Windows Scripts.
    ".bat", ".cmd",
)
23 |
24 |
def is_source(filename):
    """Return True when ``filename`` carries a recognized source extension."""
    return any(filename.endswith(ext) for ext in SOURCE_EXT)
27 |
28 |
def path_iter(path, filename_check=None):
    """Recursively yield file paths under ``path``.

    Hidden files and directories (leading ".", e.g. ".git") are skipped;
    when ``filename_check`` is given, only paths it accepts are yielded.
    """
    for dirpath, dirnames, filenames in os.walk(path):
        # Prune hidden directories in-place so os.walk never descends into them.
        dirnames[:] = [name for name in dirnames if not name.startswith(".")]

        for name in filenames:
            if name.startswith("."):
                continue
            filepath = join(dirpath, name)
            if filename_check is None or filename_check(filepath):
                yield filepath
40 |
41 |
def path_expand(paths, filename_check=None):
    """Yield files from ``paths``.

    Directories are walked recursively (honoring ``filename_check``),
    plain files are yielded as-is, and missing paths are reported & skipped.
    """
    for path in paths:
        if not os.path.exists(path):
            print("Missing:", path)
        elif os.path.isdir(path):
            yield from path_iter(path, filename_check)
        else:
            yield path
50 |
51 |
def rstrip_file(filename):
    """Strip trailing white-space from ``filename``, re-writing it in-place.

    Every line is right-stripped, trailing blank lines are removed, the
    text is normalized to end with a single newline, and a leading UTF-8
    BOM is dropped. Returns a tuple of report strings, empty when the file
    was already clean (in which case nothing is written).
    """
    with open(filename, "r", encoding="utf-8") as fh:
        data_src = fh.read()

    # Right-strip each line & drop trailing blank lines; every kept line is
    # re-terminated with "\n" (this also adds a final newline when missing).
    data_dst = "".join(
        line.rstrip() + "\n"
        for line in data_src.rstrip().splitlines(True)
    )

    # Remove the byte-order-mark.
    if data_dst and (data_dst[0] == '\ufeff'):
        data_dst = data_dst[1:]

    reports = []
    len_strip = len(data_src) - len(data_dst)
    # The original code tested `len_strip` twice (once to report, once to
    # write); the two identical conditions are merged here.
    if len_strip != 0:
        # Can be negative when a missing trailing newline was added.
        reports.append("STRIP=%d" % len_strip)
        with open(filename, "w", encoding="utf-8") as fh:
            fh.write(data_dst)
    return tuple(reports)
76 |
77 |
def main():
    """Clean trailing white-space from every configured path, reporting each change."""
    for filepath in path_expand(PATHS, is_source):
        stripped = rstrip_file(filepath)
        if stripped:
            print("Strip (%s): %s" % (', '.join(stripped), filepath))
83 |
84 |
85 | if __name__ == "__main__":
86 | main()
87 |
--------------------------------------------------------------------------------
/utils_maintenance/trailing_space_clean_config.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-2.0-or-later
2 |
3 | import os
# Directories and individual files (relative to the repository root) that
# are scanned for trailing white-space.
PATHS = (
    "build_files/build_environment/cmake",
    "build_files/cmake",
    "doc/python_api",
    "intern/clog",
    "intern/cycles",
    "intern/ghost",
    "intern/guardedalloc",
    "intern/memutil",
    "release/scripts/modules",
    "release/scripts",
    "source",
    "tests",

    # files
    "GNUmakefile",
    "make.bat",
)


# Repository root: three directory levels up from this script.
SOURCE_DIR = os.path.normpath(os.path.abspath(os.path.normpath(
    os.path.join(os.path.dirname(__file__), "..", "..", ".."))))

# Make every entry an absolute path with native separators.
PATHS = tuple(
    os.path.join(SOURCE_DIR, p.replace("/", os.sep))
    for p in PATHS
)
31 |
32 |
def files(path, test_fn):
    """Recursively yield file paths under ``path`` whose *file name*
    (not the full path) is accepted by ``test_fn``.

    Hidden directories such as ".git" are pruned from the walk.
    """
    for dirpath, dirnames, filenames in os.walk(path):
        # Prune hidden directories in-place so the walk skips them.
        dirnames[:] = [name for name in dirnames if not name.startswith(".")]
        for name in filenames:
            if test_fn(name):
                yield os.path.join(dirpath, name)
41 |
42 |
# Extend PATHS with every CMake file found in the repository tree.
# NOTE(review): `(".cmake")` is a plain string, not a 1-tuple (no comma) —
# str.endswith accepts a string, so the behavior is correct as written.
PATHS = PATHS + tuple(
    files(
        os.path.join(SOURCE_DIR),
        lambda filename: filename in {"CMakeLists.txt"} or filename.endswith((".cmake"))
    )
)
49 |
--------------------------------------------------------------------------------