├── .gitignore
├── LICENSE
├── MetsTools
│   ├── __init__.py
│   ├── bbone_ops.py
│   ├── cleanup_blend.py
│   ├── cleanup_mesh.py
│   ├── convert_images.py
│   ├── copy_vert_ids.py
│   ├── create_lightmap_uvs.py
│   ├── force_apply_mirror.py
│   ├── join_as_shape_key_by_uvs.py
│   ├── join_as_shape_key_by_weights.py
│   ├── make_modifiers_consistent.py
│   ├── make_physics_bones.py
│   ├── mark_sharp_by_autosmooth.py
│   ├── mirror_constraints.py
│   ├── psk_cleanup.py
│   ├── rename_skeleton_to_metsrig.py
│   ├── smart_weight_transfer.py
│   ├── utils.py
│   └── weighted_normals.py
├── blender_addons.code-workspace
├── clean_weight_islands.py
├── cloudrig.py
├── copy_drivers.py
├── io_witcher3_fbx
│   ├── __init__.py
│   ├── cleanup_mesh.py
│   ├── import_witcher3_fbx.py
│   ├── weighted_normals.py
│   └── witcher3_materials.blend
├── merge_w3_skeleton_into_metsrig.py
├── metsrig.py
├── mirror_vertex_groups.py
└── ue4map-tools
    ├── README.md
    ├── map_material_cleanup.py
    └── map_mesh_import.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # vscode
2 | *.code-workspace
3 |
4 | # Byte-compiled / optimized / DLL files
5 | __pycache__/
6 | *.py[cod]
7 | *$py.class
8 |
9 | # C extensions
10 | *.so
11 |
12 | # Distribution / packaging
13 | .Python
14 | build/
15 | develop-eggs/
16 | dist/
17 | downloads/
18 | eggs/
19 | .eggs/
20 | lib/
21 | lib64/
22 | parts/
23 | sdist/
24 | var/
25 | wheels/
26 | *.egg-info/
27 | .installed.cfg
28 | *.egg
29 | MANIFEST
30 |
31 | # PyInstaller
32 | # Usually these files are written by a python script from a template
33 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
34 | *.manifest
35 | *.spec
36 |
37 | # Installer logs
38 | pip-log.txt
39 | pip-delete-this-directory.txt
40 |
41 | # Unit test / coverage reports
42 | htmlcov/
43 | .tox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | .hypothesis/
51 | .pytest_cache/
52 |
53 | # Translations
54 | *.mo
55 | *.pot
56 |
57 | # Django stuff:
58 | *.log
59 | local_settings.py
60 | db.sqlite3
61 |
62 | # Flask stuff:
63 | instance/
64 | .webassets-cache
65 |
66 | # Scrapy stuff:
67 | .scrapy
68 |
69 | # Sphinx documentation
70 | docs/_build/
71 |
72 | # PyBuilder
73 | target/
74 |
75 | # Jupyter Notebook
76 | .ipynb_checkpoints
77 |
78 | # pyenv
79 | .python-version
80 |
81 | # celery beat schedule file
82 | celerybeat-schedule
83 |
84 | # SageMath parsed files
85 | *.sage.py
86 |
87 | # Environments
88 | .env
89 | .venv
90 | env/
91 | venv/
92 | ENV/
93 | env.bak/
94 | venv.bak/
95 |
96 | # Spyder project settings
97 | .spyderproject
98 | .spyproject
99 |
100 | # Rope project settings
101 | .ropeproject
102 |
103 | # mkdocs documentation
104 | /site
105 |
106 | # mypy
107 | .mypy_cache/
108 |
--------------------------------------------------------------------------------
/MetsTools/__init__.py:
--------------------------------------------------------------------------------
1 | # MetsTools addon for Blender
2 | # Copyright (C) 2019 Mets 3D
3 | #
4 | # This program is free software: you can redistribute it and/or modify
5 | # it under the terms of the GNU General Public License as published by
6 | # the Free Software Foundation, either version 3 of the License, or
7 | # (at your option) any later version.
8 | #
9 | # This program is distributed in the hope that it will be useful,
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | # GNU General Public License for more details.
13 | #
14 | # You should have received a copy of the GNU General Public License
15 | # along with this program. If not, see <https://www.gnu.org/licenses/>.
16 |
17 | # TODO
18 | # Mirror selected bones (names, transforms, constraints, drivers, settings)
19 | # UnfuckSFMArmature - Could add more SFM clean-up related stuff. Probably fix/re-write the SFM to Cycles material converter. Best wait for Red Eye to improve his mdl importer though.
20 | # Copy Cloth Settings (for some reason Copy Attributes doesn't do this)
21 | # this __init__ and the way we register stuff could probably be a lot nicer.
22 | bl_info = {
23 | "name": "MetsTools",
24 | "author": "Mets3D",
25 | "version": (2,3),
26 | "blender": (2, 80, 0),
27 | "location": "View3D > Search ",
28 | "description": "Random collection of tools I built for myself",
29 | "category": "3D View"}
30 |
31 | import bpy
32 |
33 | from . import create_lightmap_uvs
34 | from . import mark_sharp_by_autosmooth
35 | from . import make_physics_bones
36 | from . import cleanup_blend
37 | from . import make_modifiers_consistent
38 | from . import cleanup_mesh
39 | from . import weighted_normals
40 | from . import convert_images
41 | from . import smart_weight_transfer
42 | from . import join_as_shape_key_by_uvs
43 | from . import force_apply_mirror
44 | from . import rename_skeleton_to_metsrig
45 | from . import mirror_constraints
46 |
47 | def register():
48 | from bpy.utils import register_class
49 | create_lightmap_uvs.register()
50 | mark_sharp_by_autosmooth.register()
51 | make_physics_bones.register()
52 | cleanup_blend.register()
53 | make_modifiers_consistent.register()
54 | cleanup_mesh.register()
55 | weighted_normals.register()
56 | convert_images.register()
57 | smart_weight_transfer.register()
58 | join_as_shape_key_by_uvs.register()
59 | force_apply_mirror.register()
60 | rename_skeleton_to_metsrig.register()
61 | mirror_constraints.register()
62 |
63 | #bpy.types.VIEW3D_MT_pose_specials.append(draw_func_MakePhysicsBones)
64 | #bpy.types.VIEW3D_MT_edit_mesh.append(draw_func_MarkSharpByAutoSmooth)
65 | #bpy.types.VIEW3D_MT_uv_map.append(draw_func_CreateLightMapUVs)
66 |
67 | def unregister():
68 | from bpy.utils import unregister_class
69 | create_lightmap_uvs.unregister()
70 | mark_sharp_by_autosmooth.unregister()
71 | make_physics_bones.unregister()
72 | cleanup_blend.unregister()
73 | make_modifiers_consistent.unregister()
74 | cleanup_mesh.unregister()
75 | weighted_normals.unregister()
76 | convert_images.unregister()
77 | smart_weight_transfer.unregister()
78 | join_as_shape_key_by_uvs.unregister()
79 | force_apply_mirror.unregister()
80 | rename_skeleton_to_metsrig.unregister()
81 | mirror_constraints.unregister()
82 |
83 | #bpy.types.VIEW3D_MT_pose_specials.remove(draw_func_MakePhysicsBones)
84 | #bpy.types.VIEW3D_MT_edit_mesh.remove(draw_func_MarkSharpByAutoSmooth)
85 | #bpy.types.VIEW3D_MT_uv_map.remove(draw_func_CreateLightMapUVs)
--------------------------------------------------------------------------------
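
Every submodule imported by this __init__ follows the same contract: it defines its classes and exposes module-level register()/unregister() functions that the addon forwards to. A minimal sketch of a submodule written against that pattern (hypothetical operator, for illustration only):

    import bpy

    class ExampleOperator(bpy.types.Operator):
        """Placeholder operator showing the register()/unregister() contract."""
        bl_idname = "object.metstools_example"
        bl_label = "MetsTools Example"
        bl_options = {'REGISTER', 'UNDO'}

        def execute(self, context):
            return {'FINISHED'}

    def register():
        bpy.utils.register_class(ExampleOperator)

    def unregister():
        bpy.utils.unregister_class(ExampleOperator)
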
/MetsTools/bbone_ops.py:
--------------------------------------------------------------------------------
1 | import bpy
2 |
3 | # I want an operator that sets up tangent controls for a BBone. Might need to be two operators, one for the start handle and one for the end handle.
4 |
5 | # Usage:
6 | # First select the first bbone
7 | # Then select the 2nd bbone
8 | # Run 'Create BBone Tangent' Operator.
9 | # Parent the created bone to the control bone.
10 |
11 | # Workings:
12 | # Get unit vectors from each bone and average them (?)
13 | # Spawn a bone at the head of the 2nd bone
14 | # Tail = Head+that vector (maybe negative?)
15 | # Set the bbone settings (First bone's end handle, second bone's start handle, is tangent of the new bone.)
16 | # Name the new bone? First bone's name.replace('DEF', 'TAN')?
17 |
18 | scalar = 0.005
19 | bone_shape = bpy.data.objects.get('Shape_Arrow')
20 |
21 | def CreateBBoneTangent(context):
22 | """ Create a tangent control for two connected BBones. """
23 | assert len(context.selected_pose_bones) == 2, "Only two bones should be selected."
24 | armature = context.object
25 | assert armature.data.use_mirror_x==False, "Things glitch out when X axis mirror is enabled, disable it plz."
26 |
27 | bpy.ops.object.mode_set(mode='EDIT')
28 |
29 | # Identifying bones
30 | eb1 = None
31 | eb2 = None
32 | for b in context.selected_editable_bones:
33 | # Removing bbone drivers
34 | for d in armature.animation_data.drivers: # Look through every driver on the armature
35 | if('pose.bones["' + b.name + '"]' in d.data_path): # If the driver belongs to the active bone
36 | # The way drivers on bones work is weird af. You have to create the driver relative to the bone, but you have to read the driver relative to the armature. So d.data_path might look like "pose.bones["bone_name"].bone_property" but when we create a driver we just need the "bone_property" part.
37 | data_path = d.data_path.split("].")[1]
38 | pb = armature.pose.bones[b.name]
39 | pb.driver_remove(data_path)
40 |
41 | if(b==context.active_bone):
42 | eb2 = b
43 | else:
44 | eb1 = b
45 |
46 | bone1_name = eb1.name
47 | bone2_name = eb2.name
48 |
49 | bone1_vec = (eb1.head - eb1.tail).normalized()
50 | bone2_vec = (eb2.head - eb2.tail).normalized()
51 | tan_eb_vec = (bone1_vec + bone2_vec) * 1/2 *-1
52 |
53 | bone_name = eb1.name.replace("DEF", "TAN")
54 |
55 | armature.data.use_mirror_x = False
56 | bpy.ops.armature.bone_primitive_add(name=bone_name)
57 | flipped_name = bone_name.replace(".L", ".R")#utils.flip_name(bone_name)
58 | if(flipped_name!=bone_name):
59 | bpy.ops.armature.bone_primitive_add(name=flipped_name)
60 |
61 | # For data and pose bone datablocks to be initialized, we need to enter pose mode.
62 | bpy.ops.object.mode_set(mode='POSE')
63 | bpy.ops.object.mode_set(mode='EDIT')
64 | # This apparently makes my bone pointers point to the wrong bones, so I'll re-initialize those too.
65 | eb1 = armature.data.edit_bones.get(bone1_name)
66 | eb2 = armature.data.edit_bones.get(bone2_name)
67 |
68 | tan_b = armature.data.bones.get(bone_name)
69 | tan_b.use_deform=False
70 | tan_b.layers = [False, False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
71 | tan_eb = armature.data.edit_bones.get(bone_name)
72 | tan_pb = armature.pose.bones.get(bone_name)
73 |
74 | if(bone_shape):
75 | tan_b.show_wire=True
76 | tan_pb.custom_shape = bone_shape
77 | tan_pb.use_custom_shape_bone_size = False
78 |
79 | # Setting parent
80 | ctr_bone_name = eb2.name.replace("DEF", "CTR")
81 | ctr_eb = armature.data.edit_bones.get(ctr_bone_name)
82 | if(ctr_eb):
83 | tan_eb.parent = ctr_eb
84 |
85 | armature.data.use_mirror_x = True # TODO: Set back to original value, and operate according to original value(ie. if it was set to off originally, don't do it on both sides)
86 |
87 | tan_eb.head = eb1.tail
88 | tan_eb.tail = tan_eb.head + tan_eb_vec * scalar
89 | tan_eb.roll = (eb1.roll + eb2.roll) / 2
90 |
91 | tan_eb.bbone_x = tan_eb.bbone_z = scalar * .05
92 | armature.data.use_mirror_x = False
93 |
94 | eb1.bbone_handle_type_end = 'TANGENT'
95 | eb1.bbone_custom_handle_end = tan_eb
96 | eb2.bbone_handle_type_start = 'TANGENT'
97 | eb2.bbone_custom_handle_start = tan_eb
98 |
99 | bpy.ops.object.mode_set(mode='POSE')
100 |
101 | CreateBBoneTangent(bpy.context)
--------------------------------------------------------------------------------
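
bbone_ops.py is a run-in-place script rather than a registered part of the addon: the CreateBBoneTangent(bpy.context) call at the bottom runs as soon as the file is executed from Blender's text editor. A hedged sketch of how it is meant to be driven (armature and bone names are placeholders):

    import bpy

    arm = bpy.data.objects["Armature"]            # placeholder object name
    bpy.context.view_layer.objects.active = arm
    bpy.ops.object.mode_set(mode='POSE')
    bpy.ops.pose.select_all(action='DESELECT')

    # Select the first BBone, then make the second BBone the active bone.
    arm.data.bones["DEF-Spine.001.L"].select = True
    arm.data.bones["DEF-Spine.002.L"].select = True
    arm.data.bones.active = arm.data.bones["DEF-Spine.002.L"]

    # Executing bbone_ops.py now creates the TAN control bone and assigns it
    # as the custom end/start handle of the two selected BBones.
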
/MetsTools/cleanup_blend.py:
--------------------------------------------------------------------------------
1 | import bmesh
2 | import bpy
3 | from bpy.props import *
4 | from . import utils
5 |
6 | # TODO testing:
7 | # Mirror modifier vertex groups getting spared when they should
8 | # Oh, and also everything else.
9 | # More specifically, I wonder if the "All" settings, to operate on bpy.data.objects, will work, when some objects are hidden or disabled, etc.
10 |
11 | class DeleteUnusedMaterialSlots(bpy.types.Operator):
12 | """ Delete material slots on selected objects that have no faces assigned. """
13 | bl_idname = "object.delete_unused_material_slots"
14 | bl_label = "Delete Unused Material Slots"
15 | bl_options = {'REGISTER', 'UNDO'}
16 |
17 | # TODO: Add this to the UI, to the material arrow panel, with opt_active_only=True.
18 |
19 | opt_objects: EnumProperty(name="Objects",
20 | items=[ ('Active', 'Active', 'Active'),
21 | ('Selected', 'Selected', 'Selected'),
22 | ('All', 'All', 'All')
23 | ],
24 | default='Selected',
25 | description="Which objects to operate on")
26 |
27 | def draw(self, context):
28 | operator = self.layout.operator(DeleteUnusedMaterialSlots.bl_idname, text="Delete Unused Slots", icon='X')
29 | operator.opt_objects = 'Active'
30 |
31 | def execute(self, context):
32 | org_active = context.object
33 |
34 | objs = context.selected_objects
35 | if(self.opt_objects=='Active'):
36 | objs = [context.object]
37 | elif(self.opt_objects=='All'):
38 | objs = bpy.data.objects
39 |
40 | for obj in objs:
41 | if(type(obj)!=bpy.types.Object or
42 | obj.type!='MESH' or
43 | len(obj.data.polygons)==0): continue
44 |
45 | bpy.context.view_layer.objects.active = obj
46 | used_mat_indices = []
47 | for f in obj.data.polygons:
48 | if(f.material_index not in used_mat_indices):
49 | used_mat_indices.append(f.material_index)
50 |
51 | # To remove the material slots, we iterate in reverse.
52 | for i in range(len(obj.material_slots)-1, -1, -1):
53 | if(i not in used_mat_indices):
54 | obj.active_material_index = i
55 | print("Removed material slot " + str(i))
56 | bpy.ops.object.material_slot_remove()
57 |
58 | bpy.context.view_layer.objects.active = org_active
59 | return {'FINISHED'}
60 |
61 | class DeleteUnusedVGroups(bpy.types.Operator):
62 | """ Delete vertex groups that have no weights and/or aren't being used by any modifiers and/or don't correlate to any bones. """
63 | bl_idname = "object.delete_unused_vgroups"
64 | bl_label = "Delete Unused Vertex Groups"
65 | bl_options = {'REGISTER', 'UNDO'}
66 |
67 | # TODO: Should also consider vertex groups used by any kinds of constraints.
68 | # Oof. We'd have to look through the objects and bones of the entire scene for that. Maybe worth?
69 |
70 | # TODO: Should also consider shape keys masks.
71 |
72 | opt_objects: EnumProperty(name="Objects",
73 | items=[ ('Active', 'Active', 'Active'),
74 | ('Selected', 'Selected', 'Selected'),
75 | ('All', 'All', 'All')
76 | ],
77 | description="Which objects to operate on")
78 |
79 | opt_save_bone_vgroups: BoolProperty(name="Save Bone Vertex Groups",
80 | default=True,
81 | description="Don't delete vertex groups that correspond with a bone name in any of the object's armatures")
82 |
83 | opt_save_nonzero_vgroups: BoolProperty(name="Save Any Weights",
84 | default=False,
85 | description="Don't delete vertex groups that have any non-zero weights. Having this disabled shouldn't break Mirror modifiers")
86 |
87 | opt_save_modifier_vgroups: BoolProperty(name="Save Modifier Groups",
88 | default=True,
89 | description="Don't delete vertex groups that are referenced by a modifier, including physics settings")
90 |
91 | opt_save_shapekey_vgroups: BoolProperty(name="Save Shape Key Groups",
92 | default=True,
93 | description="Save vertex groups that are used by a shape key as a mask")
94 |
95 | @classmethod
96 | def poll(cls, context):
97 | return len(context.object.vertex_groups) > 0
98 |
99 | def draw_delete_unused(self, context):
100 | operator = self.layout.operator(DeleteUnusedVGroups.bl_idname, text="Delete Unused Groups", icon='X')
101 | operator.opt_objects = 'Active'
102 |
103 | def draw_delete_empty(self, context):
104 | operator = self.layout.operator(DeleteUnusedVGroups.bl_idname, text="Delete Empty Groups", icon='X')
105 | operator.opt_objects = 'Active'
106 | operator.opt_save_nonzero_vgroups = True
107 |
108 | def execute(self, context):
109 | org_active = context.object
110 |
111 | objs = context.selected_objects
112 | if(self.opt_objects=='Active'):
113 | objs = [context.object]
114 | elif(self.opt_objects=='All'):
115 | objs = bpy.data.objects
116 |
117 | for obj in objs:
118 | if(len(obj.vertex_groups) == 0): continue
119 |
120 | bpy.context.view_layer.objects.active = obj
121 |
122 | # Clean 0 weights
123 | bpy.ops.object.vertex_group_clean(group_select_mode='ALL', limit=0)
124 |
125 | # Saving vertex groups that are used by modifiers and therefore should not be removed
126 | safe_groups = []
127 | def save_groups_by_attributes(owner):
128 | # Look through an object's attributes. If its value is a string, try to find a vertex group with the same name. If found, make sure we don't delete it.
129 | for attr in dir(owner):
130 | value = getattr(owner, attr)
131 | if(type(value)==str):
132 | vg = obj.vertex_groups.get(value)
133 | if(vg):
134 | safe_groups.append(vg)
135 |
136 | # Save any vertex groups used by modifier parameters.
137 | if(self.opt_save_modifier_vgroups):
138 | for m in obj.modifiers:
139 | save_groups_by_attributes(m)
140 | if(hasattr(m, 'settings')): #Physics modifiers
141 | save_groups_by_attributes(m.settings)
142 |
143 | # Getting a list of bone names from all armature modifiers.
144 | bone_names = []
145 | for m in obj.modifiers:
146 | if(m.type == 'ARMATURE'):
147 | armature = m.object
148 | if armature is None:
149 | continue
150 | if(bone_names is None):
151 | bone_names = list(map(lambda x: x.name, armature.pose.bones))
152 | else:
153 | bone_names.extend(list(map(lambda x: x.name, armature.pose.bones)))
154 |
155 | # Saving any vertex groups that correspond to a bone name
156 | if(self.opt_save_bone_vgroups):
157 | for bn in bone_names:
158 | vg = obj.vertex_groups.get(bn)
159 | if(vg):
160 | safe_groups.append(vg)
161 |
162 | # Saving vertex groups that have any weights assigned to them, also considering mirror modifiers
163 | if(self.opt_save_nonzero_vgroups):
164 | for vg in obj.vertex_groups: # For each vertex group
165 | for i in range(0, len(obj.data.vertices)): # For each vertex
166 | try:
167 | vg.weight(i) # If there's a weight assigned to this vert (else exception)
168 | if(vg not in safe_groups):
169 | safe_groups.append(vg)
170 |
171 | opp_name = utils.flip_name(vg.name)
172 | opp_group = obj.vertex_groups.get(opp_name)
173 | if(opp_group):
174 | safe_groups.append(opp_group)
175 | break
176 | except RuntimeError:
177 | continue
178 |
179 | # Clearing vertex groups that didn't get saved
180 | for vg in obj.vertex_groups:
181 | if(vg not in safe_groups):
182 | print("Unused vgroup removed: "+vg.name)
183 | obj.vertex_groups.remove(vg)
184 |
185 | bpy.context.view_layer.objects.active = org_active
186 | return {'FINISHED'}
187 |
188 | def get_linked_nodes(nodes, node): # Recursive function to collect all nodes connected BEFORE the second parameter.
189 | nodes.append(node)
190 | for i in node.inputs:
191 | if(len(i.links) > 0):
192 | get_linked_nodes(nodes, i.links[0].from_node)
193 | return nodes
194 |
195 | def clean_node_tree(node_tree, delete_unused_nodes=True, fix_groups=False, center_nodes=True, fix_tex_refs=False, rename_tex_nodes=True, hide_sockets=False, min_sockets=2, tex_width=300): # nodes = nodeTree.nodes
196 | nodes = node_tree.nodes
197 | if(len(nodes)==0): return
198 |
199 | if(delete_unused_nodes):
200 | # Deleting unconnected nodes
201 | output_nodes = list(filter(lambda x: x.type in ['OUTPUT_MATERIAL', 'OUTPUT_WORLD', 'COMPOSITE', 'VIEWER'], nodes))
202 | used_nodes = []
203 | for on in output_nodes:
204 | used_nodes.extend(get_linked_nodes([], on))
205 |
206 | for n in nodes:
207 | if(n not in used_nodes and n.type != 'FRAME'):
208 | print("Removing unconnected node: Type: " + n.type + " Name: " + n.name + " Label: " + n.label)
209 | nodes.remove(n)
210 | continue
211 |
212 | # Finding bounding box of all nodes
213 | x_min = min(n.location.x for n in nodes if n.type!= 'FRAME')
214 | x_max = max(n.location.x for n in nodes if n.type!= 'FRAME')
215 | y_min = min(n.location.y for n in nodes if n.type!= 'FRAME')
216 | y_max = max(n.location.y for n in nodes if n.type!= 'FRAME')
217 |
218 | # Finding bounding box center
219 | x_mid = (x_min+x_max)/2
220 | y_mid = (y_min+y_max)/2
221 |
222 | for n in nodes:
223 | if(fix_tex_refs):
224 | # Changing .xxx texture references
225 | if(n.type == 'TEX_IMAGE'):
226 | if(n.image is not None and n.image.name[-4] == '.'):
227 | existing = bpy.data.images.get(n.image.name[:-4])
228 | if(existing):
229 | print("Changed a texture reference to: "+n.image.name)
230 | n.image = bpy.data.images.get(n.image.name[:-4])
231 | else:
232 | n.image.name = n.image.name[:-4]
233 |
234 | if(n.type=='TEX_IMAGE'):
235 | # Resizing image texture nodes
236 | if(tex_width!=-1):
237 | n.width=tex_width
238 | n.width_hidden=tex_width
239 |
240 | if(rename_tex_nodes):
241 | # Renaming and relabelling image texture nodes
242 | if(n.image is not None):
243 | extension = "." + n.image.filepath.split(".")[-1]
244 | n.name = n.image.name.replace(extension, "")
245 | n.label = n.name
246 | if(n.label[-4] == '.'):
247 | n.label = n.label[:-4]
248 |
249 | if(hide_sockets):
250 | # Hiding unplugged sockets, if there are more than min_sockets.
251 | unplugged = []
252 | for i in n.inputs:
253 | if(len(i.links) == 0):
254 | unplugged.append(i)
255 | if(len(unplugged) > min_sockets):
256 | for u in unplugged:
257 | u.hide = True
258 |
259 | for i in n.outputs:
260 | if(len(i.links) == 0):
261 | unplugged.append(i)
262 | if(len(unplugged) > min_sockets):
263 | for u in unplugged:
264 | u.hide = True
265 |
266 | if(center_nodes):
267 | # Moving all nodes by the amount of the bounding box center(therefore making the center 0,0) - except Frame nodes, which move by themselves.
268 | if(n.type != 'FRAME'):
269 | n.location.x -= x_mid
270 | n.location.y -= y_mid
271 |
272 | if(fix_groups):
273 | # Changing references to nodegroups ending in .00x to their original. If the original doesn't exist, rename the nodegroup.
274 | for n in nodes:
275 | if(n.type=='GROUP'):
276 | if('.00' in n.node_tree.name):
277 | existing = bpy.data.node_groups.get(n.node_tree.name[:-4])
278 | if(existing):
279 | n.node_tree = existing
280 | else:
281 | n.node_tree.name = n.node_tree.name[:-4]
282 |
283 | class CleanUpArmature(bpy.types.Operator):
284 | # TODO: turn into a valid operator
285 | # TODO: disable Deform tickbox on bones with no corresponding vgroups. (This would ideally be done before vgroup cleanup) - Always print a warning for this.
286 | # TODO: vice versa, warn is a non-deform bone has a corresponding vgroup.
287 | def execute(self, context):
288 | armature = context.object
289 |
290 | if(type(armature) != bpy.types.Object or
291 | armature.type != 'ARMATURE' ): return {'CANCELLED'}
292 |
293 | for b in armature.pose.bones:
294 | # Closing and naming bone constraints
295 | for c in b.constraints:
296 | c.show_expanded = False
297 | if(c.type=='ACTION'):
298 | c.name = "Action_" + c.action.name
299 |
300 | # Making B-Bone thickness and envelope properties consistent
301 | for b in armature.data.bones:
302 | b.bbone_x = 0.005
303 | b.bbone_z = 0.005
304 | b.head_radius = 0.01
305 | b.tail_radius = 0.01
306 | b.envelope_distance = 0.03
307 | b.envelope_weight = 1
308 |
309 | return {'FINISHED'}
310 |
311 | class CleanUpMaterials(bpy.types.Operator):
312 | bl_idname = "material.clean_up"
313 | bl_label = "Clean Up Material"
314 | bl_options = {'REGISTER', 'UNDO'}
315 |
316 | opt_objects: EnumProperty(name="Objects",
317 | items=[ ('Active', 'Active', 'Active'),
318 | ('Selected', 'Selected', 'Selected'),
319 | ('All', 'All', 'All')
320 | ],
321 | default='Selected',
322 | description="Which objects to operate on")
323 |
324 | opt_fix_name: BoolProperty(name="Fix .00x Material Names",
325 | default=False,
326 | 		description="Try to rename materials whose names end in .001 or similar")
327 |
328 | opt_delete_unused_nodes: BoolProperty(name="Clear Unused Nodes",
329 | default=False,
330 | description="Clear all nodes (except Frames) in all materials that aren't linked to the 'Material Output' node")
331 |
332 | opt_hide_sockets: BoolProperty(name="Hide Node Sockets",
333 | default=False,
334 | description="Hide all unplugged sockets on either side of group nodes if they have more than 2 unplugged sockets on either side")
335 |
336 | opt_fix_groups: BoolProperty(name="Fix .00x Group Nodes",
337 | default=True,
338 | description="If a group node's nodegroup ends in .00x but a nodegroup exists without it, replace the reference. If such nodegroup doesn't exist, rename it")
339 |
340 | opt_fix_tex_refs: BoolProperty(name="Fix .00x Texture References",
341 | default=True,
342 | description="If a texture node references a texture ending in .00x but a texture without it exists, change the reference. If such texture doesn't exist, rename it")
343 |
344 | opt_rename_nodes: BoolProperty(name="Rename Texture Nodes",
345 | default=False,
346 | description="Rename and relabel texture nodes to the filename of their image, without extension")
347 |
348 | opt_set_tex_widths: IntProperty(name="Set Texture Node Widths",
349 | default=400,
350 | description="Set all Texture Node widths to this value")
351 |
352 | # TODO: Can be added to the UI the same place as delete unused material slots.
353 |
354 | def execute(self, context):
355 | mats_done = []
356 |
357 | objs = context.selected_objects
358 | if(self.opt_objects=='Active'):
359 | objs = [context.object]
360 | elif(self.opt_objects=='All'):
361 | objs = bpy.data.objects
362 |
363 | for o in objs:
364 | for ms in o.material_slots:
365 | m = ms.material
366 | if(m==None or m in mats_done): continue
367 | if(self.opt_fix_name):
368 | # Clearing .00x from end of names
369 | if(('.' in m.name) and (m.name[-4] == '.')):
370 | existing = bpy.data.materials.get(m.name[:-4])
371 | if(not existing):
372 | m.name = m.name[:-4]
373 | print("...Renamed to " + m.name)
374 | # Cleaning nodetree
375 | if(m.use_nodes):
376 | clean_node_tree(m.node_tree,
377 | delete_unused_nodes=self.opt_delete_unused_nodes,
378 | fix_groups=self.opt_fix_groups,
379 | center_nodes=True,
380 | fix_tex_refs=self.opt_fix_tex_refs,
381 | rename_tex_nodes=self.opt_rename_nodes,
382 | hide_sockets=self.opt_hide_sockets,
383 | min_sockets=2,
384 | tex_width=self.opt_set_tex_widths)
385 | mats_done.append(m)
386 | return {'FINISHED'}
387 |
388 | class CleanUpObjects(bpy.types.Operator):
389 | bl_idname = "object.clean_up"
390 | bl_label = "Clean Up Objects"
391 | bl_options = {'REGISTER', 'UNDO'}
392 |
393 | opt_objects: EnumProperty(name="Objects",
394 | items=[ ('Active', 'Active', 'Active'),
395 | ('Selected', 'Selected', 'Selected'),
396 | ('All', 'All', 'All')
397 | ],
398 | description="Which objects to operate on")
399 |
400 | opt_rename_data: BoolProperty(
401 | name="Rename Datas",
402 | default=True,
403 | description="If an object or armature is named 'Apple', its data will be renamed to 'Data_Apple'")
404 |
405 | opt_rename_uvs: BoolProperty(
406 | name="Rename UV Maps",
407 | default=True,
408 | description="If an object has only one UVMap, rename that to the default: 'UVMap'")
409 |
410 | opt_clean_material_slots: BoolProperty(
411 | name="Clean Material Slots",
412 | default=True,
413 | description="Delete material slots on selected objects that have no faces assigned")
414 |
415 | opt_rename_materials: BoolProperty(
416 | name="Fix .00x Material Names",
417 | default=False,
418 | 		description="Try to rename materials whose names end in .001 or similar")
419 |
420 | opt_clean_materials: BoolProperty(
421 | name="Clean Material Nodes",
422 | default=False,
423 | description="Remove unused nodes, resize and rename image nodes, hide unused group node sockets, and center nodes")
424 |
425 | opt_clean_vgroups: BoolProperty(name="Clear Unused Vertex Groups",
426 | default=True,
427 | description="Clear unused vertex groups")
428 |
429 | opt_create_mirror_vgroups: BoolProperty(
430 | name="Create Mirror Vertex Groups",
431 | default=True,
432 | description="If there is a Mirror modifier, create any missing left/right sided vertex groups")
433 |
434 | def execute(self, context):
435 | org_active = context.object
436 |
437 | objs = context.selected_objects
438 | if(self.opt_objects=='Active'):
439 | objs = [context.object]
440 | elif(self.opt_objects=='All'):
441 | objs = bpy.data.objects
442 |
443 | for obj in objs:
444 | if(type(obj) != bpy.types.Object or
445 | (obj.type != 'MESH' and
446 | obj.type != 'ARMATURE') ): continue
447 |
448 | bpy.ops.object.mode_set(mode="OBJECT")
449 | bpy.context.view_layer.objects.active = obj
450 |
451 | # Naming mesh/skeleton data blocks
452 | if(self.opt_rename_data):
453 | obj.data.name = "Data_" + obj.name
454 |
455 | # Closing and naming object constraints
456 | for c in obj.constraints:
457 | c.show_expanded = False
458 | if(c.type=='ACTION'):
459 | c.name = "Action_" + c.action.name
460 |
461 | # Closing modifiers
462 | for m in obj.modifiers:
463 | m.show_expanded = False
464 |
465 | # That's it for armatures.
466 | if(obj.type == 'ARMATURE'):
467 | continue
468 |
469 | # Wireframes
470 | obj.show_wire = False
471 | obj.show_all_edges = True
472 |
473 | # Sorting vertex groups by hierarchy
474 | bpy.ops.object.vertex_group_sort(sort_type='BONE_HIERARCHY')
475 |
476 | # Renaming UV map if there is only one
477 | if(self.opt_rename_uvs):
478 | if(len(obj.data.uv_layers) == 1):
479 | obj.data.uv_layers[0].name = "UVMap"
480 |
481 | # Creating missing vertex groups for Mirror modifier
482 | if(self.opt_create_mirror_vgroups):
483 | for m in obj.modifiers:
484 | if(m.type=='MIRROR'):
485 | vgs = obj.vertex_groups
486 | for vg in vgs:
487 | flippedName = utils.flip_name(vg.name)
488 | print(flippedName)
489 | if(flippedName not in vgs):
490 | obj.vertex_groups.new(name=flippedName)
491 | break
492 |
493 | # Deleting unused material slots
494 | if(self.opt_clean_material_slots):
495 | bpy.ops.object.delete_unused_material_slots(opt_objects=self.opt_objects)
496 |
497 | # Cleaning node trees
498 | bpy.ops.material.clean_up(opt_objects=self.opt_objects,
499 | opt_fix_name=self.opt_rename_materials,
500 | opt_delete_unused_nodes=self.opt_clean_materials,
501 | opt_fix_groups=self.opt_clean_materials,
502 | opt_fix_tex_refs=self.opt_clean_materials,
503 | opt_rename_nodes=self.opt_clean_materials)
504 |
505 | if(self.opt_clean_vgroups):
506 | bpy.ops.object.delete_unused_vgroups(opt_objects=self.opt_objects)
507 |
508 | bpy.context.view_layer.objects.active = org_active
509 | return {'FINISHED'}
510 |
511 | class CleanUpScene(bpy.types.Operator):
512 | bl_idname = "scene.clean_up"
513 | bl_label = "Clean Up Scene"
514 | bl_options = {'REGISTER', 'UNDO'}
515 |
516 | opt_freeze: BoolProperty(
517 | name="Freeze Operator",
518 | default=False,
519 | description="Freeze the operator to change settings without having to wait for the operator to run")
520 |
521 | opt_selected_only: BoolProperty(
522 | name="Selected Objects",
523 | default=True,
524 | 		description="Disable to affect all objects")
525 |
526 | opt_removeUnusedMats: BoolProperty(
527 | name="Clean Material Slots",
528 | default=True,
529 | description="If a material has no faces assigned to it, it will be removed from the object. Objects with no faces are ignored")
530 |
531 | opt_clean_worlds: BoolProperty(
532 | name="Clean Worlds",
533 | default=True,
534 | description="Clean up World node setups")
535 |
536 | opt_clean_comp: BoolProperty(
537 | name="Clean Compositing",
538 | default=True,
539 | description="Clean up Compositing nodes")
540 |
541 | opt_clean_nodegroups: BoolProperty(
542 | name="Clean Nodegroups",
543 | default=True,
544 | description="Clean up Nodegroups")
545 |
546 | opt_clean_vgroups: BoolProperty(name="Clear Unused Vertex Groups",
547 | default=True,
548 | description="Clear unused vertex groups")
549 |
550 | opt_clean_material_slots: BoolProperty(
551 | name="Clean Material Slots",
552 | default=True,
553 | description="Delete material slots on selected objects that have no faces assigned")
554 |
555 | opt_rename_materials: BoolProperty(
556 | name="Fix .00x Material Names",
557 | default=False,
558 | 		description="Try to rename materials whose names end in .001 or similar")
559 |
560 | opt_clean_materials: BoolProperty(
561 | name="Clean Material Nodes",
562 | default=True,
563 | description="Remove unused nodes, resize and rename image nodes, hide unused group node sockets, and center nodes")
564 |
565 | def execute(self, context):
566 | if(self.opt_freeze):
567 | return {'FINISHED'}
568 |
569 | org_active = bpy.context.view_layer.objects.active
570 |
571 | if(self.opt_clean_worlds):
572 | for w in bpy.data.worlds:
573 | if(w.use_nodes):
574 | clean_node_tree(w.node_tree)
575 |
576 | if(self.opt_clean_comp):
577 | for s in bpy.data.scenes:
578 | if(s.use_nodes):
579 | clean_node_tree(s.node_tree)
580 |
581 | if(self.opt_clean_nodegroups):
582 | for nt in bpy.data.node_groups:
583 | clean_node_tree(nt)
584 |
585 | objects = 'Selected' if self.opt_selected_only else 'All'
586 | bpy.ops.object.clean_up(opt_objects=objects,
587 | opt_clean_vgroups=self.opt_clean_vgroups,
588 | opt_clean_material_slots=self.opt_clean_material_slots,
589 | opt_rename_materials=self.opt_rename_materials,
590 | opt_clean_materials=self.opt_clean_materials)
591 |
592 | bpy.context.view_layer.objects.active = org_active
593 | return {'FINISHED'}
594 |
595 | def register():
596 | from bpy.utils import register_class
597 | bpy.types.MATERIAL_MT_context_menu.prepend(DeleteUnusedMaterialSlots.draw)
598 | bpy.types.MESH_MT_vertex_group_context_menu.prepend(DeleteUnusedVGroups.draw_delete_unused)
599 | bpy.types.MESH_MT_vertex_group_context_menu.prepend(DeleteUnusedVGroups.draw_delete_empty)
600 | register_class(DeleteUnusedMaterialSlots)
601 | register_class(DeleteUnusedVGroups)
602 | register_class(CleanUpObjects)
603 | #register_class(CleanUpMeshes)
604 | #register_class(CleanUpArmatures)
605 | register_class(CleanUpMaterials)
606 | register_class(CleanUpScene)
607 |
608 | def unregister():
609 | bpy.types.MATERIAL_MT_context_menu.remove(DeleteUnusedMaterialSlots.draw)
610 | 	bpy.types.MESH_MT_vertex_group_context_menu.remove(DeleteUnusedVGroups.draw_delete_unused)
611 | 	bpy.types.MESH_MT_vertex_group_context_menu.remove(DeleteUnusedVGroups.draw_delete_empty)
612 | from bpy.utils import unregister_class
613 | unregister_class(DeleteUnusedMaterialSlots)
614 | unregister_class(DeleteUnusedVGroups)
615 | unregister_class(CleanUpObjects)
616 | #unregister_class(CleanUpMeshes)
617 | #unregister_class(CleanUpArmatures)
618 | unregister_class(CleanUpMaterials)
619 | unregister_class(CleanUpScene)
--------------------------------------------------------------------------------
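
Once the addon is enabled, the operators above are available through bpy.ops; a minimal usage sketch with a few mesh objects selected (the values shown simply mirror the operator defaults):

    import bpy

    # Remove material slots that have no faces assigned on the selected objects.
    bpy.ops.object.delete_unused_material_slots(opt_objects='Selected')

    # Drop vertex groups that aren't referenced by bones, modifiers or weights.
    bpy.ops.object.delete_unused_vgroups(opt_objects='Selected',
                                         opt_save_bone_vgroups=True,
                                         opt_save_modifier_vgroups=True)

    # Or run the scene-level cleanup, which chains the object-level operators.
    bpy.ops.scene.clean_up(opt_selected_only=True,
                           opt_clean_material_slots=True,
                           opt_clean_vgroups=True)
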
/MetsTools/cleanup_mesh.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | from math import pi
3 | import bmesh
4 |
5 | # TODO: test if removing useless UV maps works.
6 |
7 | def cleanup_mesh(obj,
8 | remove_doubles=False,
9 | quadrangulate=False,
10 | weight_normals=True,
11 | seams_from_islands=True,
12 | clear_unused_UVs=True,
13 | rename_single_UV=True):
14 |
15 | # Mode management
16 | org_active = bpy.context.object
17 | org_mode = org_active.mode
18 | org_selected = bpy.context.selected_objects[:]
19 | bpy.ops.object.mode_set(mode='OBJECT')
20 | bpy.ops.object.select_all(action='DESELECT')
21 | bpy.context.view_layer.objects.active = obj
22 | obj.select_set(True)
23 | bpy.ops.object.mode_set(mode='EDIT')
24 |
25 | # Unhide and deselect verts
26 | bpy.ops.mesh.reveal()
27 | bpy.ops.mesh.select_all(action='DESELECT')
28 |
29 | # Renaming shape key blocks
30 | if(obj.data.shape_keys is not None):
31 | obj.data.shape_keys.name = "Keys_" + obj.name
32 |
33 | # Setting auto-smooth to 180 is necessary so that splitnormals_clear() doesn't mark sharp edges
34 | obj.data.use_auto_smooth = True
35 | org_angle = obj.data.auto_smooth_angle
36 | obj.data.auto_smooth_angle = pi
37 | bpy.ops.mesh.customdata_custom_splitnormals_clear()
38 | obj.data.auto_smooth_angle = org_angle
39 |
40 | # Tris to Quads
41 | if(quadrangulate):
42 | bpy.ops.mesh.tris_convert_to_quads(shape_threshold=1.0472, uvs=True, materials=True)
43 |
44 | # Remove Doubles / Merge By Distance
45 | if(remove_doubles):
46 | bpy.ops.mesh.remove_doubles(threshold=0.0001)
47 |
48 | bpy.ops.object.mode_set(mode='OBJECT')
49 | if(weight_normals):
50 | bpy.ops.object.calculate_weighted_normals()
51 | bpy.ops.object.mode_set(mode='EDIT')
52 |
53 | ### Removing useless UVMaps
54 | if(clear_unused_UVs):
55 | mesh = obj.data
56 | bm = bmesh.from_edit_mesh(mesh)
57 |
58 | # Invalid UV maps usually have all the verts on the top left or top right corner, so that's what we'll be checking for.
59 | # If all verts of a UV map have an X coordinate of 0, we're deleting it.
60 |
61 | for uv_idx in reversed(range(0, len(mesh.uv_layers))): # For each UV layer (in reverse, since we're deleting)
62 | delet_this=True
63 | mesh.uv_layers.active_index = uv_idx
64 | bm.faces.ensure_lookup_table()
65 | for f in bm.faces: # For each face
66 | for l in f.loops: # For each loop(whatever that means)
67 | if(l[bm.loops.layers.uv.active].uv[0] != 0.0): # If the loop's UVs first vert's x coord is NOT 0
68 | delet_this=False
69 | break
70 | if(delet_this==False):
71 | break
72 | if(delet_this):
73 | obj.data.uv_layers.remove(obj.data.uv_layers[uv_idx])
74 |
75 | bmesh.update_edit_mesh(mesh, True)
76 |
77 | # Renaming single UV maps
78 | if(len(mesh.uv_layers)==1 and rename_single_UV):
79 | mesh.uv_layers[0].name = 'UVMap'
80 |
81 | # Seams from islands
82 | if(seams_from_islands):
83 | bpy.ops.uv.seams_from_islands(mark_seams=True, mark_sharp=False)
84 |
85 | # Mode management
86 | bpy.ops.object.mode_set(mode='OBJECT')
87 | for o in org_selected:
88 | o.select_set(True)
89 | bpy.context.view_layer.objects.active = org_active
90 | bpy.ops.object.mode_set(mode=org_mode)
91 |
92 | class CleanUpMesh(bpy.types.Operator):
93 | """ Clean up meshes of selected objects. """
94 | bl_idname = "object.mesh_cleanup"
95 | bl_label = "Clean Up Mesh"
96 | bl_options = {'REGISTER', 'UNDO'}
97 |
98 | # TODO: unhide all verts in edit mode.
99 |
100 | remove_doubles: bpy.props.BoolProperty(
101 | name="Remove Doubles",
102 | description="Enable remove doubles",
103 | default=False
104 | )
105 |
106 | quadrangulate: bpy.props.BoolProperty(
107 | name="Tris to Quads",
108 | 		description="Enable Tris to Quads (UV seams enabled)",
109 | default=False
110 | )
111 |
112 | weight_normals: bpy.props.BoolProperty(
113 | name="Weight Normals",
114 | description="Enable weighted normals",
115 | default=False
116 | )
117 |
118 | seams_from_islands: bpy.props.BoolProperty(
119 | name="Seams from Islands",
120 | description="Create UV seams based on UV islands",
121 | default=False
122 | )
123 |
124 | clear_unused_UVs: bpy.props.BoolProperty(
125 | name="Delete Unused UV Maps",
126 | description="If all UV verts' X coordinate is 0, the UV map will be deleted.",
127 | default=True
128 | )
129 |
130 | rename_single_UV: bpy.props.BoolProperty(
131 | name="Rename Singular UV Maps",
132 | description="If an object is only left with one UV map, rename it to the default name, 'UVMap'.",
133 | default=True
134 | )
135 |
136 | def execute(self, context):
137 | for o in bpy.context.selected_objects:
138 | cleanup_mesh(o,
139 | self.remove_doubles,
140 | self.quadrangulate,
141 | self.weight_normals,
142 | self.seams_from_islands,
143 | self.clear_unused_UVs,
144 | self.rename_single_UV)
145 | return {'FINISHED'}
146 |
147 | def register():
148 | from bpy.utils import register_class
149 | register_class(CleanUpMesh)
150 |
151 | def unregister():
152 | from bpy.utils import unregister_class
153 | unregister_class(CleanUpMesh)
--------------------------------------------------------------------------------
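
A short usage sketch for the operator above, run with the target meshes selected. Note that weight_normals=True calls bpy.ops.object.calculate_weighted_normals(), which is registered by weighted_normals.py, so it only works with the full addon enabled:

    import bpy

    # Merge doubles, convert tris to quads, drop degenerate UV maps and
    # rename the remaining UV map to the default "UVMap".
    bpy.ops.object.mesh_cleanup(remove_doubles=True,
                                quadrangulate=True,
                                weight_normals=False,
                                seams_from_islands=True,
                                clear_unused_UVs=True,
                                rename_single_UV=True)
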
/MetsTools/convert_images.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | from bpy.props import *
3 |
4 | class ConvertImages(bpy.types.Operator):
5 | """ Convert images with one or any extension to whatever you have set in your render output settings. Uses Image.save_as_render(). """
6 | bl_idname = "image.convert_images"
7 | bl_label = "Convert Images"
8 | bl_options = {'REGISTER', 'UNDO'}
9 |
10 | from_ext: StringProperty(
11 | name="From Extension",
12 | default="dds",
13 | description="Images with this file extension will be converted. The old images will still be in their original directory, but will no longer be referenced by this .blend file. Leaving this empty will convert all images. The target format is defined by your render output settings."
14 | )
15 |
16 | rename_files: BoolProperty(
17 | name="Rename Files",
18 | default=False,
19 | description="If enabled, rename the converted files to the name of the image datablock (the name displayed in the image editor's header) - IMPORTANT: Image datablock name should NOT contain extension ",
20 | options={'SKIP_SAVE'}
21 | )
22 |
23 | start: BoolProperty(
24 | name="Go",
25 | default=False,
26 | description="Tick to begin converting. DO NOT TOUCH THIS PANEL ONCE YOU'RE DONE!",
27 | options={'SKIP_SAVE'}
28 | )
29 |
30 | def execute(self, context):
31 | # Saving view settings
32 | view_settings = context.scene.view_settings
33 | org_view_transform = view_settings.view_transform
34 | org_exposure = view_settings.exposure
35 | org_gamma = view_settings.gamma
36 | org_look = view_settings.look
37 | org_curve = view_settings.use_curve_mapping
38 |
39 | # Resetting view settings to default values
40 | view_settings.view_transform = 'Standard'
41 | view_settings.exposure = 0
42 | view_settings.gamma = 1
43 | view_settings.look = 'None'
44 | view_settings.use_curve_mapping = False
45 |
46 | to_format = context.scene.render.image_settings.file_format
47 | to_ext = to_format.lower()
48 | ext_dict = {
49 | 'IRIS' : 'rgb',
50 | 'JPEG' : 'jpg',
51 | 'JPEG2000' : 'jp2',
52 | 'TARGA' : 'tga',
53 | 'TARGA_RAW' : 'tga',
54 | 'CINEON' : 'cin',
55 | 'OPEN_EXR_MULTILAYER' : 'exr',
56 | 'OPEN_EXR' : 'exr',
57 | 'TIFF' : 'tif'
58 | }
59 |
60 | if(to_format in ext_dict.keys()):
61 | 			to_ext = ext_dict[to_format]
62 |
63 | assert bpy.data.is_saved, "Please save your file, open the system console, and make a backup of your textures before running this operator."
64 | # Check some things first, to make sure conversion will go smoothly.
65 | for img in bpy.data.images:
66 | if(not img.filepath.endswith(self.from_ext)): continue
67 |
68 | assert len(img.packed_files) == 0, "Image has packed files:\n" + img.filepath +"\nPlease unpack all files (ideally pack everything first, then unpack all to current directory)"
69 | if(self.rename_files):
70 | assert "." not in img.name, "It looks like you want to rename files to the image datablock's name, but your image datablock contains an extension:\n" + img.name + "\nMake sure your image names don't contain a period."
71 |
72 | if(not self.start):
73 | return {'FINISHED'}
74 |
75 | for img in bpy.data.images:
76 | if(not img.filepath.endswith(self.from_ext)): continue
77 |
78 | print("Working on: "+img.filepath)
79 | old_path = img.filepath # Full path
80 | old_name = img.filepath.split("\\")[-1] # Name with extension
81 | old_ext = old_name.split(".")[-1]
82 | if(old_ext == self.from_ext):
83 | new_path = old_path.replace("."+self.from_ext, "."+to_ext)
84 | # Optional: Change file name to the image object's name (make sure image object names do not contain extension)
85 | if(self.rename_files):
86 | old_name_no_ext = old_name[:-(len(old_ext)+1)]
87 | new_path = new_path.replace(old_name_no_ext, img.name)
88 |
89 | if new_path == old_path:
90 | print('Skipping ' + img.filepath )
91 | continue
92 | try:
93 | # Convert the image
94 | img.save_render(bpy.path.abspath(new_path))
95 | # Load the converted image over the old one
96 | img.filepath = new_path
97 | except RuntimeError:
98 | print( "FAILED:" )
99 | 				print( "...Dimensions: " + str(img.size[0]) + " x " + str(img.size[1]) ) # If it's 0x0 then the .dds failed to be read by Blender to begin with, nothing we can do(these textures are usually bogus anyways, don't need them). Otherwise, something's gone wrong.
100 |
101 | # Resetting view settings to original values
102 | view_settings.view_transform = org_view_transform
103 | view_settings.exposure = org_exposure
104 | view_settings.gamma = org_gamma
105 | view_settings.look = org_look
106 | view_settings.use_curve_mapping = org_curve
107 |
108 | bpy.context.scene.view_settings.view_transform = org_view_transform
109 | print("FIN")
110 | return {'FINISHED'}
111 |
112 | def register():
113 | from bpy.utils import register_class
114 | register_class(ConvertImages)
115 |
116 | def unregister():
117 | from bpy.utils import unregister_class
118 | unregister_class(ConvertImages)
--------------------------------------------------------------------------------
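
A hedged usage sketch: the target format is taken from the scene's render output settings, so set that first, then run the operator with start=True. The path handling above assumes unpacked images and backslash-separated (Windows) file paths:

    import bpy

    # Convert every image whose filepath ends in .dds to PNG.
    bpy.context.scene.render.image_settings.file_format = 'PNG'
    bpy.ops.image.convert_images(from_ext="dds", rename_files=False, start=True)
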
/MetsTools/copy_vert_ids.py:
--------------------------------------------------------------------------------
1 | # ##### BEGIN GPL LICENSE BLOCK #####
2 | #
3 | # This program is free software; you can redistribute it and/or
4 | # modify it under the terms of the GNU General Public License
5 | # as published by the Free Software Foundation; either version 2
6 | # of the License, or (at your option) any later version.
7 | #
8 | # This program is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU General Public License
14 | # along with this program; if not, write to the Free Software Foundation,
15 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
16 | #
17 | # ##### END GPL LICENSE BLOCK #####
18 |
19 | bl_info = {
20 | "name": "Copy Vert IDs",
21 | "author": "Jose Conseco based on UV Transfer from MagicUV Nutti",
22 | "version": (1, 0),
23 | "blender": (2, 79, 0),
24 | "location": "CTRL+C - requires \'Copy attributes Menu\' to be enabled",
25 |     "description": "Transfer Vert IDs from selected obj",
26 | "warning": "",
27 | "wiki_url": "",
28 | "category": "Object",
29 | }
30 |
31 | # TODO: make this compatible with 2.8.
32 |
33 | from collections import OrderedDict
34 |
35 | import bpy
36 | import bmesh
37 | from bpy.props import BoolProperty
38 | from mathutils import kdtree
39 |
40 |
41 | # import sys
42 | # dir = 'C:\\Users\\JoseConseco\\AppData\\Local\\Programs\\Python\\Python35\\Lib\\site-packages'
43 | # if not dir in sys.path:
44 | # sys.path.append(dir )
45 | # import ipdb
46 | # Properties used in this add-on.
47 | class CopyIDs():
48 | def __init__(self):
49 | self.transuv = ID_DATA()
50 |
51 | class ID_DATA():
52 | topology_copied = []
53 |
54 |
55 | def append_copy_ids(self, context):
56 | layout = self.layout
57 | layout.separator()
58 | layout.operator("object.vert_id_transfer_proximity")
59 |
60 | def append_copy_ids_editmode(self, context):
61 | layout = self.layout
62 | layout.separator()
63 | layout.menu(CopySub.bl_idname)
64 |
65 | class CopySub(bpy.types.Menu):
66 | bl_label = 'Copy Vert IDs topological'
67 | bl_idname = 'view3d.copyid_submenu'
68 |
69 | def draw(self, context):
70 | layout = self.layout
71 | layout.operator("object.copy_vert_id")
72 | layout.operator("object.paste_vert_id")
73 |
74 |
75 | class TransferVertId(bpy.types.Operator):
76 | """Transfer vert ID by vert proximity"""
77 | bl_label = "Transfer vert ID by vert proximity"
78 | bl_idname = "object.vert_id_transfer_proximity"
79 | bl_description = "Transfer vert ID by vert proximity (for meshes with same vertex positions)"
80 | bl_options = {'REGISTER', 'UNDO'}
81 |
82 | delta = bpy.props.FloatProperty(name="Delta", description="SearchDistance", default=0.1, min=0, max=1, precision = 4)
83 | def execute(self, context):
84 |
85 | sourceObj=context.active_object
86 | TargetObjs = [obj for obj in context.selected_objects if obj!=sourceObj and obj.type=='MESH']
87 |
88 | mesh = sourceObj.data
89 | size = len(mesh.vertices)
90 | kdSourceObj = kdtree.KDTree(size)
91 |
92 | for i, v in enumerate(mesh.vertices):
93 | kdSourceObj.insert(v.co, i)
94 |
95 | kdSourceObj.balance()
96 | preocessedVertsIdDict = {}
97 | for target in TargetObjs:
98 | copiedCount = 0
99 | preocessedVertsIdDict.clear()
100 | bm = bmesh.new() # load mesh
101 | bm.from_mesh(target.data)
102 | for targetVert in bm.verts:
103 | co, index, dist = kdSourceObj.find(targetVert.co)
104 | if dist 0:
278 | vert1 = shared_edge.verts[0]
279 | vert2 = shared_edge.verts[1]
280 | else:
281 | vert1 = shared_edge.verts[1]
282 | vert2 = shared_edge.verts[0]
283 |
284 | # get active face stuff and uvs
285 | # ipdb.set_trace()
286 | face_stuff = get_other_verts_edges(active_face, vert1, vert2, shared_edge)
287 | all_sorted_faces[active_face] = face_stuff
288 | used_verts.update(active_face.verts)
289 | used_edges.update(active_face.edges)
290 |
291 | # get first selected face stuff and uvs as they share shared_edge
292 | second_face = sel_faces[0]
293 | if second_face is active_face:
294 | second_face = sel_faces[1]
295 | face_stuff = get_other_verts_edges(second_face, vert1, vert2, shared_edge)
296 | all_sorted_faces[second_face] = face_stuff
297 | used_verts.update(second_face.verts)
298 | used_edges.update(second_face.edges)
299 |
300 | # first Grow
301 | faces_to_parse.append(active_face)
302 | faces_to_parse.append(second_face)
303 |
304 | else:
305 | self.report({'WARNING'}, "Two faces should share one edge")
306 | return None
307 |
308 | # parse all faces
309 | while True:
310 | new_parsed_faces = []
311 |
312 | if not faces_to_parse:
313 | break
314 | for face in faces_to_parse:
315 | face_stuff = all_sorted_faces.get(face)
316 | new_faces = parse_faces(face, face_stuff, used_verts, used_edges, all_sorted_faces)
317 | if new_faces == 'CANCELLED':
318 | self.report({'WARNING'}, "More than 2 faces share edge")
319 | return None
320 |
321 | new_parsed_faces += new_faces
322 | faces_to_parse = new_parsed_faces
323 |
324 | return all_sorted_faces
325 |
326 |
327 | def parse_faces(check_face, face_stuff, used_verts, used_edges, all_sorted_faces):
328 | """recurse faces around the new_grow only"""
329 |
330 | new_shared_faces = []
331 | for sorted_edge in face_stuff[1]:
332 | shared_faces = sorted_edge.link_faces
333 | if shared_faces:
334 | if len(shared_faces) > 2:
335 | bpy.ops.mesh.select_all(action='DESELECT')
336 | for face_sel in shared_faces:
337 | face_sel.select = True
338 | shared_faces = []
339 | return 'CANCELLED'
340 |
341 | clear_shared_faces = get_new_shared_faces(check_face, sorted_edge, shared_faces, all_sorted_faces.keys())
342 | if clear_shared_faces:
343 | shared_face = clear_shared_faces[0]
344 | # get vertices of the edge
345 | vert1 = sorted_edge.verts[0]
346 | vert2 = sorted_edge.verts[1]
347 |
348 | if face_stuff[0].index(vert1) > face_stuff[0].index(vert2):
349 | vert1 = sorted_edge.verts[1]
350 | vert2 = sorted_edge.verts[0]
351 |
352 | new_face_stuff = get_other_verts_edges(shared_face, vert1, vert2, sorted_edge)
353 | all_sorted_faces[shared_face] = new_face_stuff
354 | used_verts.update(shared_face.verts)
355 | used_edges.update(shared_face.edges)
356 |
357 | new_shared_faces.append(shared_face)
358 |
359 | return new_shared_faces
360 |
361 |
362 | def get_new_shared_faces(orig_face, shared_edge, check_faces, used_faces):
363 | shared_faces = []
364 |
365 | for face in check_faces:
366 | is_shared_edge = shared_edge in face.edges
367 | not_used = face not in used_faces
368 | not_orig = face is not orig_face
369 | not_hide = face.hide is False
370 | if is_shared_edge and not_used and not_orig and not_hide:
371 | shared_faces.append(face)
372 |
373 | return shared_faces
374 |
375 |
376 | def get_other_verts_edges(face, vert1, vert2, first_edge):
377 | face_edges = [first_edge]
378 | face_verts = [vert1, vert2]
379 |
380 | other_edges = [edge for edge in face.edges if edge not in face_edges]
381 |
382 | for _ in range(len(other_edges)):
383 | found_edge = None
384 | # get sorted verts and edges
385 | for edge in other_edges:
386 | if face_verts[-1] in edge.verts:
387 | other_vert = edge.other_vert(face_verts[-1])
388 |
389 | if other_vert not in face_verts:
390 | face_verts.append(other_vert)
391 |
392 | found_edge = edge
393 | if found_edge not in face_edges:
394 | face_edges.append(edge)
395 | break
396 |
397 | other_edges.remove(found_edge)
398 |
399 | return [face_verts, face_edges]
400 |
401 | import addon_utils
402 |
403 | def register():
404 | bpy.utils.register_module(__name__)
405 | bpy.types.Scene.copy_indices = CopyIDs()
406 | # bpy.ops.wm.addon_enable(module="space_view3d_copy_attributes")
407 | addon_utils.enable("space_view3d_copy_attributes", default_set=True, persistent=True)
408 | bpy.types.VIEW3D_MT_copypopup.append(append_copy_ids) #add copy ids to Ctrl+C 'Copy Attributes addon'
409 | bpy.types.MESH_MT_CopyFaceSettings.append(append_copy_ids_editmode) #add copy ids to Ctrl+C 'Copy Attributes addon'
410 |
411 |
412 | def unregister():
413 | bpy.utils.unregister_module(__name__)
414 | del bpy.types.Scene.copy_indices
415 | bpy.types.VIEW3D_MT_copypopup.remove(append_copy_ids) #remove copy ids to Ctrl+C 'Copy Attributes addon'
416 | bpy.types.MESH_MT_CopyFaceSettings.remove(append_copy_ids_editmode) #remove copy ids to Ctrl+C 'Copy Attributes addon'
417 |
418 | if __name__ == "__main__":
419 | register()
420 |
--------------------------------------------------------------------------------
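
This file still targets Blender 2.79: bpy.utils.register_module() was removed in 2.80, which is what the "make this compatible with 2.8" TODO refers to. A hedged sketch of what 2.8-style registration could look like, covering only the classes visible above (the copy/paste operators referenced by the submenu would need to be added to the list):

    import bpy

    classes = (CopySub, TransferVertId)

    def register():
        for cls in classes:
            bpy.utils.register_class(cls)
        bpy.types.VIEW3D_MT_copypopup.append(append_copy_ids)

    def unregister():
        bpy.types.VIEW3D_MT_copypopup.remove(append_copy_ids)
        for cls in reversed(classes):
            bpy.utils.unregister_class(cls)
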
/MetsTools/create_lightmap_uvs.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | from bpy.props import *
3 |
4 | class CreateLightMapUVs(bpy.types.Operator):
5 | """ Create Lightmap UVs using Smart UV Project on the second UV channel. Useful for exporting to UE4, since the result should be better than default UE4 generated lightmaps. """
6 | bl_idname = "object.create_lightmap_uvs"
7 | bl_label = "Create LightMap UVs"
8 | bl_options = {'REGISTER', 'UNDO'}
9 |
10 | opt_angle: IntProperty(name="Angle Limit",
11 | description="Angle limit for Smart UV Project Operator",
12 | default=66, min=1, max=89, soft_min=1, soft_max=89)
13 |
14 | opt_margin: FloatProperty(name="Island Margin",
15 | description="Island Margin for Smart UV Project Operator",
16 | default=0.01, min=0, max=1, soft_min=0, soft_max=1)
17 |
18 | opt_overwrite: BoolProperty(name="Overwrite Existing",
19 | 		description="Overwrite any existing UV maps generated by this operator",
20 | default=True)
21 | opt_reset_slot: BoolProperty(name="Keep active layer",
22 | description="Keep the original active UV layer, rather than making the newly created one active",
23 | default=True)
24 |
25 | def execute(self, context):
26 | # For each selected object:
27 | # Create new UV map (hopefully this will become selected by default)
28 | # Rename it to UV_LightMap
29 | # Make mesh active
30 | # Go edit mode
31 | # Select all
32 | # Smart UV project with default values
33 | # re-Select first UV map
34 |
35 | org_active = bpy.context.view_layer.objects.active
36 | org_mode = org_active.mode
37 | bpy.ops.object.mode_set(mode='OBJECT')
38 |
39 | for o in bpy.context.selected_objects:
40 | o_mode = o.mode
41 | bpy.context.view_layer.objects.active = o
42 | if(self.opt_overwrite):
43 | UVLayer = o.data.uv_layers.get("UV_LightMap")
44 | if(UVLayer is not None):
45 | UVLayer.active = True
46 | bpy.ops.mesh.uv_texture_remove()
47 |
48 | 			if(len(o.data.uv_layers) == 1):
49 | bpy.ops.object.mode_set(mode='EDIT')
50 | bpy.ops.mesh.uv_texture_add()
51 | o.data.uv_layers[-1].name = "UV_LightMap"
52 | bpy.ops.mesh.select_all(action='SELECT')
53 | bpy.ops.uv.smart_project(island_margin=self.opt_margin, angle_limit=self.opt_angle)
54 | if(self.opt_reset_slot):
55 | o.data.uv_layers[0].active = True
56 |
57 | bpy.ops.object.mode_set(mode=o_mode)
58 |
59 | bpy.context.view_layer.objects.active = org_active
60 | bpy.ops.object.mode_set(mode=org_mode)
61 |
62 | return { 'FINISHED' }
63 |
64 | def draw_func_CreateLightMapUVs(self, context):
65 | # No UI beside spacebar menu.
66 | self.layout.operator(CreateLightMapUVs.bl_idname, text=CreateLightMapUVs.bl_label)
67 |
68 | def register():
69 | from bpy.utils import register_class
70 | register_class(CreateLightMapUVs)
71 |
72 | def unregister():
73 | from bpy.utils import unregister_class
74 | unregister_class(CreateLightMapUVs)
--------------------------------------------------------------------------------
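
Usage sketch, with one or more mesh objects selected (values mirror the operator defaults):

    import bpy

    # Adds a second UV channel named "UV_LightMap" via Smart UV Project.
    bpy.ops.object.create_lightmap_uvs(opt_angle=66,
                                       opt_margin=0.01,
                                       opt_overwrite=True,
                                       opt_reset_slot=True)
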
/MetsTools/force_apply_mirror.py:
--------------------------------------------------------------------------------
1 |
2 | import bpy
3 | from . import utils
4 |
5 | # TODO: Should find a way to select the X axis verts before doing Remove Doubles, or don't Remove Doubles at all. Also need to select the Basis shape before doing Remove Doubles.
6 | # TODO: Implement our own Remove Doubles algo with kdtree, which would average the vertex weights of the merged verts rather than just picking the weights of one of them at random.
7 |
8 | class ForceApplyMirror(bpy.types.Operator):
9 | """ Force apply mirror modifier by duplicating the object and flipping it on the X axis. """
10 | bl_idname = "object.force_apply_mirror_modifier"
11 | bl_label = "Force Apply Mirror Modifier"
12 | bl_options = {'REGISTER', 'UNDO'}
13 |
14 | def execute(self, context):
15 | # Remove Mirror Modifier
16 | # Copy mesh
17 | # Scale it -1 on X
18 | # Flip vgroup names
19 | # Join into original mesh
20 | # Remove doubles
21 | # Recalc Normals
22 | # Weight Normals
23 |
24 | o = context.object
25 | bpy.ops.object.mode_set(mode='OBJECT')
26 | bpy.ops.object.select_all(action='DESELECT')
27 | org_scale = o.scale[:]
28 | for m in o.modifiers:
29 | if(m.type=='MIRROR'):
30 | o.modifiers.remove(m)
31 | o.select_set(True)
32 | context.view_layer.objects.active = o
33 |
34 | # Removing Doubles - This should print out removed 0, otherwise we're gonna remove some important verts.
35 | #print("Checking for doubles pre-mirror. If it doesn't say Removed 0 vertices, you should undo.")
36 | #bpy.ops.object.mode_set(mode='EDIT')
37 | #bpy.ops.mesh.remove_doubles(use_unselected=True)
38 |
39 | # Reset scale
40 | #bpy.ops.object.mode_set(mode='OBJECT')
41 | o.scale = (1, 1, 1)
42 |
43 | bpy.ops.object.duplicate() # Duplicated object becomes selected and active
44 |
45 | # We continue operating on the original half, since it shouldn't matter
46 | o.scale = (-1, 1, 1)
47 |
48 | # Flipping vertex group names
49 | for vg in o.vertex_groups:
50 | old_name = vg.name
51 | flipped_name = utils.flip_name(vg.name)
52 | if(old_name == flipped_name): continue
53 |
54 | if(flipped_name in o.vertex_groups): # If the target name is already taken
55 | vg.name = "temp_lskjghsjdfkhbnsdf" # Rename this to some garbage
56 | opp_group = o.vertex_groups[flipped_name] # Find the other group that's taking the name
57 | opp_group.name = old_name # Rename it to our original name
58 |
59 | vg.name = flipped_name
60 |
61 | o.select_set(True)
62 | context.view_layer.objects.active = o # We want to be sure the original is the active so the object name doesn't get a .001
63 | bpy.ops.object.join()
64 | bpy.ops.object.mode_set(mode='EDIT')
65 | bpy.ops.mesh.select_all(action='SELECT')
66 | bpy.ops.mesh.normals_make_consistent(inside=False)
67 | #bpy.ops.mesh.remove_doubles()
68 | bpy.ops.object.mode_set(mode='OBJECT')
69 | bpy.ops.object.calculate_weighted_normals()
70 | break
71 | context.object.scale = org_scale
72 | return {'FINISHED'}
73 |
74 | def register():
75 | from bpy.utils import register_class
76 | register_class(ForceApplyMirror)
77 |
78 | def unregister():
79 | from bpy.utils import unregister_class
80 | unregister_class(ForceApplyMirror)
--------------------------------------------------------------------------------
/MetsTools/join_as_shape_key_by_uvs.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | import bmesh
3 | from bpy.props import *
4 | from mathutils import Vector
5 |
6 | def uv_from_vert_average(uv_layer, v):
7 | uv_average = Vector((0.0, 0.0))
8 | total = 0.0
9 | for loop in v.link_loops:
10 | uv_average += loop[uv_layer].uv
11 | total += 1.0
12 |
13 | if total != 0.0:
14 | return uv_average * (1.0 / total)
15 | else:
16 | return None
17 |
18 | class JoinAsShapeKeyByUVs(bpy.types.Operator):
19 | """ Transfer the shape of selected objects into shape keys on the active object. The objects need to have identical topology and UV layout on UV layer 1. Those UV layers shouldn't have any overlapping UVs. """
20 | bl_idname = "object.join_as_shape_key_by_uvs"
21 | bl_label = "Join as Shape Key by UVs"
22 | bl_options = {'REGISTER', 'UNDO'}
23 |
24 | # Some of this code is from StackOverflow, but then again, what code isn't?
25 |
26 | precision: FloatProperty(
27 | name='Precision',
28 | default=0.0001,
29 | description="UV coord matching precision. Higher values are less precise. Too high will cause mismatches and too low will cause no matches. Ideally your UVs are absolutely exactly the same and you can keep this value very low without getting any non-matches."
30 | )
31 |
32 | def execute(self, context):
33 | # Saving active object's verts and average UV coords
34 | bpy.ops.object.mode_set(mode='EDIT')
35 | active = bpy.context.object
36 | active_bm = bmesh.from_edit_mesh(active.data)
37 | active_uv_layer = active_bm.loops.layers.uv.active
38 | active_verts = []
39 | active_verts_uv_averages = []
40 | for active_v in active_bm.verts:
41 | active_verts.append( Vector((active_v.co.x, active_v.co.y, active_v.co.z)) )
42 | active_verts_uv_averages.append(uv_from_vert_average(active_uv_layer, active_v))
43 |
44 | bpy.ops.object.mode_set(mode='OBJECT')
45 |
46 | for obj in bpy.context.selected_objects:
47 | if(obj == active): continue
48 | #if(len(obj.data.vertices) != len(active.data.vertices)): continue # Forcing matching vert count is not important.
49 |
50 | if(active.data.shape_keys == None):
51 | active.shape_key_add(name='Basis', from_mix=False)
52 | sk = active.shape_key_add(name=obj.name, from_mix=False)
53 |
54 | bpy.ops.object.mode_set(mode='EDIT')
55 |
56 | obj_bm = bmesh.from_edit_mesh(obj.data)
57 | obj_uv_layer = obj_bm.loops.layers.uv.active
58 | obj_verts = []
59 | obj_verts_uv_averages = []
60 | for obj_v in obj_bm.verts:
61 | obj_verts.append( Vector((obj_v.co.x, obj_v.co.y, obj_v.co.z)) )
62 | obj_verts_uv_averages.append(uv_from_vert_average(obj_uv_layer, obj_v))
63 |
64 | # Changing the shape key data needs to be done in object mode, otherwise the shape key data disappears when leaving edit mode. I'm not sure if this is a 2.8 bug.
65 | # This is the whole reason I have to save the vert coords and UV coords before doing this. Otherwise everything could be done in multi-object edit mode.
66 | bpy.ops.object.mode_set(mode='OBJECT')
67 |
68 | for oi, obj_v in enumerate(obj_verts):
69 | obj_uv_average = obj_verts_uv_averages[oi]
70 | for ai, active_v in enumerate(active_verts):
71 | active_uv_average = active_verts_uv_averages[ai]
72 | diff = active_uv_average - obj_uv_average
73 | if(abs(diff.x) < self.precision and
74 | abs(diff.y) < self.precision):
75 | sk.data[ai].co = obj_v
76 | break
77 | return {'FINISHED'}
78 |
79 | def register():
80 | from bpy.utils import register_class
81 | register_class(JoinAsShapeKeyByUVs)
82 |
83 | def unregister():
84 | from bpy.utils import unregister_class
85 | unregister_class(JoinAsShapeKeyByUVs)
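# A note on performance: the matching in execute() compares every vert of every selected object
# against every vert of the active object. For dense meshes, a lookup keyed on rounded UV
# coordinates gets this down to roughly linear time. Sketch only, assuming the UVs of both
# objects round to the same key at the chosen precision:
def build_uv_lookup(uv_averages, precision=0.0001):
    """ Map rounded (u, v) tuples to vertex indices for constant-time matching. """
    import math
    digits = max(0, int(round(-math.log10(precision))))
    lookup = {}
    for i, uv in enumerate(uv_averages):
        if(uv is None): continue # Loose verts have no linked loops, so no UV average.
        lookup[(round(uv.x, digits), round(uv.y, digits))] = i
    return lookup
# Usage idea: active_lookup = build_uv_lookup(active_verts_uv_averages, self.precision), then
# look up each of the other object's rounded UV averages instead of scanning all active verts.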
--------------------------------------------------------------------------------
/MetsTools/join_as_shape_key_by_weights.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | import bmesh
3 | from mathutils import Vector
4 |
5 | # TODO: I never actually made use of this code, but I think it works? Or at least worked in 2.7.
6 |
7 | def build_weight_dict(obj, vgroups=None, mask_vgroup=None, bone_combine_dict=None):
8 | # Returns a dictionary that matches the vertex indices of the object to a list of tuples containing the vertex group names that the vertex belongs to and the weight of the vertex in that group.
9 | # Optionally, if vgroups is passed, don't bother saving groups that aren't in vgroups.
10 | # Also optionally, bone_combine_dict can be specified if we want some bones to be merged into others, eg. passing in {'Toe_Main' : ['Toe1', 'Toe2', 'Toe3']} will combine the weights in the listed toe bones into Toe_Main. You would do this when transferring weights from a model of actual feet onto shoes.
11 |
12 | weight_dict = {} # {vert index : [('vgroup_name', vgroup_value), ...], ...}
13 |
14 | if(vgroups==None):
15 | vgroups = obj.vertex_groups
16 |
17 | for v in obj.data.vertices:
18 | # TODO: instead of looking through all vgroups we should be able to get only the groups that this vert is assigned to via v.groups[0].group which gives the group id which we can use to get the group via Object.vertex_groups[id]
19 | # With this maybe it's useless altogether to save the weights into a dict? idk.
20 | # Although the reason we are doing it this way is because we wanted some bones to be considered the same as others. (eg. toe bones should be considered a single combined bone)
21 | for vg in vgroups:
22 | w = 0
23 | try:
24 | w = vg.weight(v.index)
25 | except:
26 | pass
27 |
28 | # Adding the weights from any sub-vertexgroups defined in bone_combine_dict
29 | if(bone_combine_dict and vg.name in bone_combine_dict): # Guard against the default of None.
30 | for sub_vg_name in bone_combine_dict[vg.name]:
31 | sub_vg = obj.vertex_groups.get(sub_vg_name)
32 | if(sub_vg==None): continue
33 | try:
34 | w = w + sub_vg.weight(v.index)
35 | except RuntimeError:
36 | pass
37 |
38 | if(w==0): continue
39 |
40 | # Masking transfer influence
41 | if(mask_vgroup):
42 | try:
43 | multiplier = mask_vgroup.weight(v.index)
44 | w = w * multiplier
45 | except:
46 | pass
47 |
48 | # Create or append entry in the dict.
49 | if(v.index not in weight_dict):
50 | weight_dict[v.index] = [(vg.name, w)]
51 | else:
52 | weight_dict[v.index].append((vg.name, w))
53 |
54 | return weight_dict
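# Example usage (illustrative; apart from the toe bones mentioned above, the group names are
# made up, and the resulting weights are just an example):
#
#   weights = build_weight_dict(
#       bpy.context.object,
#       bone_combine_dict={'Toe_Main' : ['Toe1', 'Toe2', 'Toe3']})
#   # -> {0 : [('Toe_Main', 0.75), ('Foot_Def.L', 0.25)], 1 : [...], ...}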
55 |
56 | active = bpy.context.object
57 | active_weights = build_weight_dict(active)
58 |
59 | for obj in bpy.context.selected_objects:
60 | if(obj == active): continue
61 |
62 | if(active.data.shape_keys == None):
63 | active.shape_key_add(name='Basis', from_mix=False)
64 | sk = active.shape_key_add(name=obj.name, from_mix=False)
65 |
66 | obj_weights = build_weight_dict(obj)
67 |
68 | for active_vert_index in active_weights.keys():
69 | active_vert_weights = active_weights[active_vert_index]
70 | for obj_vert_index in obj_weights.keys():
71 | obj_vert_weights = obj_weights[obj_vert_index]
72 | matching = True # Stores whether this vert has the same weights as the active vert.
73 | for weight_tuple in obj_vert_weights:
74 | if(weight_tuple not in active_vert_weights):
75 | matching=False
76 | break
77 | if(matching):
78 | sk.data[active_vert_index].co = obj.data.vertices[obj_vert_index].co # Copy the matching vert's position into the shape key.
--------------------------------------------------------------------------------
/MetsTools/make_modifiers_consistent.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | from bpy.props import *
3 |
4 | class MakeModifiersConsistent(bpy.types.Operator):
5 | """ Set certain settings on modifiers of selected objects, or copy them from an active object. """
6 | bl_idname = "object.make_modifiers_consistent"
7 | bl_label = "Make Modifiers Consistent"
8 | bl_options = {'REGISTER', 'UNDO'}
9 |
10 | use_active: BoolProperty(
11 | name="From Active",
12 | default=False,
13 | description="If enabled, use the active object's modifier settings. Otherwise, use hard coded settings.")
14 |
15 | do_mirror: BoolProperty(
16 | name="Mirror",
17 | default=True,
18 | description="Affect Mirror modifiers")
19 | do_subsurf: BoolProperty(
20 | name="SubSurf",
21 | default=True,
22 | description="Affect SubSurf modifiers")
23 | do_armature: BoolProperty(
24 | name="Armature",
25 | default=True,
26 | description="Affect Armature modifiers")
27 | do_solidify: BoolProperty(
28 | name="Solidify",
29 | default=True,
30 | description="Affect Solidify modifiers")
31 | do_bevel: BoolProperty(
32 | name="Bevel",
33 | default=True,
34 | description="Affect Bevel modifiers")
35 |
36 | def execute(self, context):
37 | active = context.object
38 | active_mirror = None
39 | active_solidify = None
40 | active_subsurf = None
41 | active_bevel = None
42 | active_armature = None
43 |
44 | if(self.use_active):
45 | for m in active.modifiers:
46 | if(m.type=='MIRROR'):
47 | active_mirror = m
48 | if(m.type=='SOLIDIFY'):
49 | active_solidify = m
50 | if(m.type=='SUBSURF'):
51 | active_subsurf = m
52 | if(m.type=='BEVEL'):
53 | active_bevel = m
54 | if(m.type=='ARMATURE'):
55 | active_armature = m
56 |
57 | objs = context.selected_objects
58 | for obj in objs:
59 | if(obj.type != 'MESH'): continue
60 |
61 | obj.show_wire = False
62 | obj.show_all_edges = True
63 | for m in obj.modifiers:
64 | if(m.type == 'MIRROR' and self.do_mirror):
65 | m.name = 'Mirror'
66 | if(active_mirror):
67 | m.show_viewport = active_mirror.show_viewport
68 | m.show_render = active_mirror.show_render
69 | m.show_in_editmode = active_mirror.show_in_editmode
70 | m.use_clip = active_mirror.use_clip
71 | else:
72 | m.show_viewport = True
73 | m.show_render = True
74 | m.show_in_editmode = True
75 | m.use_clip = True
76 | elif(m.type == 'ARMATURE' and self.do_armature):
77 | m.name = 'Armature'
78 | if(active_armature):
79 | m.show_viewport = active_armature.show_viewport
80 | m.show_render = active_armature.show_render
81 | m.show_in_editmode = active_armature.show_in_editmode
82 | m.show_on_cage = active_armature.show_on_cage
83 | else:
84 | m.show_viewport = True
85 | m.show_render = True
86 | m.show_in_editmode = True
87 | m.show_on_cage = True
88 | elif(m.type == 'SOLIDIFY' and self.do_solidify):
89 | m.name = 'Solidify'
90 | if(active_solidify):
91 | m.show_viewport = active_solidify.show_viewport
92 | m.show_render = active_solidify.show_render
93 | else:
94 | m.show_viewport = False
95 | m.show_render = True
96 | #print("Object: "+o.name + " Only Rim: " + str(m.use_rim_only))
97 | elif(m.type == 'BEVEL' and self.do_bevel):
98 | m.name = 'Bevel'
99 | if(active_bevel):
100 | m.show_viewport = active_bevel.show_viewport
101 | m.show_render = active_bevel.show_render
102 | m.segments = active_bevel.segments
103 | m.limit_method = active_bevel.limit_method
104 | m.offset_type = active_bevel.offset_type
105 | m.harden_normals = active_bevel.harden_normals
106 | m.width_pct = active_bevel.width_pct
107 | m.width = active_bevel.width
108 | else:
109 | m.show_viewport = False
110 | m.show_render = True
111 | m.segments = 2
112 | m.limit_method = 'WEIGHT'
113 | m.offset_type = 'PERCENT'
114 | m.harden_normals = True
115 | m.width_pct = 1
116 | m.width = 0.1
117 | elif(m.type == 'SUBSURF' and self.do_subsurf):
118 | m.name = 'Subdivision'
119 | if(active_subsurf):
120 | m.show_viewport = active_subsurf.show_viewport
121 | m.show_render = active_subsurf.show_render
122 | m.show_in_editmode = active_subsurf.show_in_editmode
123 | m.levels = active_subsurf.levels
124 | m.render_levels = active_subsurf.render_levels
125 | m.show_only_control_edges = active_subsurf.show_only_control_edges
126 | m.quality = active_subsurf.quality
127 | else:
128 | m.show_viewport = True
129 | m.show_render = True
130 | m.show_in_editmode = True
131 | m.levels = 0
132 | m.render_levels = 2
133 | m.show_only_control_edges = True
134 | m.quality = 3
135 | return {'FINISHED'}
136 |
137 | def register():
138 | from bpy.utils import register_class
139 | register_class(MakeModifiersConsistent)
140 |
141 | def unregister():
142 | from bpy.utils import unregister_class
143 | unregister_class(MakeModifiersConsistent)
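# The per-type branches above could also be driven by a table of attribute names. A sketch,
# assuming this whitelist (it just mirrors the hard coded branches, not every modifier setting):
MODIFIER_COPY_ATTRS = {
    'MIRROR' : ['show_viewport', 'show_render', 'show_in_editmode', 'use_clip'],
    'ARMATURE' : ['show_viewport', 'show_render', 'show_in_editmode', 'show_on_cage'],
    'SOLIDIFY' : ['show_viewport', 'show_render'],
    'BEVEL' : ['show_viewport', 'show_render', 'segments', 'limit_method', 'offset_type', 'harden_normals', 'width_pct', 'width'],
    'SUBSURF' : ['show_viewport', 'show_render', 'show_in_editmode', 'levels', 'render_levels', 'show_only_control_edges', 'quality'],
}

def copy_modifier_settings(from_mod, to_mod):
    """ Copy the whitelisted settings of from_mod onto to_mod (expects both to be of the same type). """
    for attr in MODIFIER_COPY_ATTRS.get(from_mod.type, []):
        setattr(to_mod, attr, getattr(from_mod, attr))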
--------------------------------------------------------------------------------
/MetsTools/make_physics_bones.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | from bpy.props import *
3 | import mathutils
4 | import math
5 | import bmesh
6 |
7 | def make_physics_bone_chain(armature, bones, pMesh=None):
8 | """ Apply physics to a single chain of bones. Armature needs to have clean transforms and be in rest pose.
9 | bones: list of bones in the chain, in correct hierarchical order"""
10 | # pMesh: if given, add this chain to the existing physics mesh instead of creating a new one.
11 |
12 | parent_bone = bones[0].parent # Can be None.
13 |
14 | if(not pMesh):
15 | # Create physics mesh
16 | bpy.ops.mesh.primitive_plane_add(enter_editmode=True)
17 | # The new object is active and in edit mode
18 | pMesh = bpy.context.object
19 | bpy.context.object.name = "_Phys_" + bones[0].name
20 |
21 | # Deleting all verts
22 | bpy.ops.mesh.delete(type='VERT')
23 |
24 | bpy.context.view_layer.objects.active = pMesh
25 | bpy.ops.object.mode_set(mode='EDIT')
26 | bpy.ops.mesh.select_mode(use_extend=False, use_expand=False, type='VERT')
27 |
28 | # Using bmesh to add first vertex
29 | bm = bmesh.new()
30 | bm.from_mesh(pMesh.data)
31 | bm.verts.ensure_lookup_table()
32 | bpy.ops.object.mode_set(mode='OBJECT')
33 | for v in bm.verts:
34 | v.select_set(False)
35 | vert = bm.verts.new(bones[0].head) # Add a new vert at first bone's location
36 | vert.select_set(True)
37 | bm.verts.ensure_lookup_table()
38 | bm.to_mesh(pMesh.data)
39 | bm.free()
40 |
41 | pin_group = pMesh.vertex_groups.get("Pin")
42 | if(not pin_group):
43 | pin_group = pMesh.vertex_groups.new(name='Pin')
44 |
45 | bpy.ops.object.mode_set(mode='EDIT')
46 | pMesh.vertex_groups.active_index = pin_group.index
47 | bpy.context.scene.tool_settings.vertex_group_weight = 1
48 | bpy.ops.object.vertex_group_assign()
49 |
50 | # Extruding verts to each bone's head
51 | for b in bones:
52 | bpy.ops.mesh.extrude_region_move(TRANSFORM_OT_translate={"value":(
53 | b.tail.x - b.head.x,
54 | b.tail.y - b.head.y,
55 | b.tail.z - b.head.z)})
56 |
57 | bpy.ops.object.vertex_group_remove_from()
58 | vg = pMesh.vertex_groups.new(name=b.name)
59 | pMesh.vertex_groups.active_index = vg.index
60 | bpy.ops.object.vertex_group_assign()
61 |
62 | bpy.ops.object.mode_set(mode='OBJECT')
63 |
64 | # Adding Cloth modifier
65 | bpy.ops.object.modifier_add(type='CLOTH')
66 | m_cloth = pMesh.modifiers["Cloth"]
67 | m_cloth.settings.vertex_group_mass = "Pin"
68 |
69 | if(parent_bone):
70 | bpy.ops.object.mode_set(mode='OBJECT') # Go to object mode
71 | bpy.ops.object.select_all(action='DESELECT') # Deselect everything
72 | pMesh.select_set(True) # Select physics mesh
73 | armature.select_set(True) # Select armature
74 | bpy.context.view_layer.objects.active = armature # Make armature active
75 | bpy.ops.object.mode_set(mode='POSE') # Go into pose mode
76 | bpy.ops.pose.select_all(action='DESELECT') # Deselect everything
77 | parent_bone.bone.select = True # Select parent bone
78 | armature.data.bones.active = parent_bone.bone # Make the parent bone active
79 | bpy.ops.object.parent_set(type='BONE') # Set parent (=Ctrl+P->Bone)
80 | parent_bone.bone.select = False
81 |
82 | # Setting up bone constraints
83 | bpy.context.view_layer.objects.active = armature
84 | for i, b in enumerate(bones):
85 | b.bone.select=True
86 | # Removing any existing constraints
87 | for c in list(b.constraints): # Iterate over a copy, since we remove items as we go.
88 | b.constraints.remove(c)
89 | DT = bones[i].constraints.new(type='DAMPED_TRACK')
90 | DT.name = 'phys'
91 | DT.target = pMesh
92 | DT.subtarget = b.name
93 |
94 | bpy.ops.object.mode_set(mode='POSE')
95 |
96 | return pMesh
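# Rough usage sketch (the bone names are just examples, borrowed from the Overwatch table in
# rename_skeleton_to_metsrig.py). The armature must already be in rest pose with cleared
# transforms, as the docstring above notes:
#
#   arm = bpy.context.object
#   chain = [arm.pose.bones[n] for n in ("Hair_1", "Hair_2", "Hair_3", "Hair_4")]
#   make_physics_bone_chain(arm, chain)
#
# The MakePhysicsBones operator below does exactly this for every selected chain and restores
# the armature's transform state afterwards.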
97 |
98 | class MakePhysicsBones(bpy.types.Operator):
99 | """ Set up physics to all selected bone chains. Only the first bone of each chain should be selected. """
100 | bl_idname = "pose.make_physics_bones"
101 | bl_label = "Make Physics Bones"
102 | bl_options = {'REGISTER', 'UNDO'}
103 |
104 | def execute(self, context):
105 | armature = bpy.context.object
106 |
107 | if(armature.type!='ARMATURE'):
108 | print( "ERROR: Active object must be an armature. Select a chain of bones.")
109 | return { "CANCELLED" }
110 |
111 | # Preparing the armature and saving state
112 | org_pose = armature.data.pose_position
113 | armature.data.pose_position = 'REST'
114 | org_loc = armature.location[:]
115 | armature.location = (0,0,0)
116 | org_rot_euler = armature.rotation_euler[:]
117 | armature.rotation_euler = (0,0,0)
118 | org_rot_quat = armature.rotation_quaternion[:]
119 | armature.rotation_quaternion = (1, 0, 0, 0) # Identity quaternion.
120 | org_scale = armature.scale[:]
121 | armature.scale = (1,1,1)
122 | org_mode = armature.mode
123 | org_layers = armature.data.layers[:]
124 | armature.data.layers = [True]*32
125 |
126 | org_transform_orientation = bpy.context.scene.transform_orientation_slots[0].type
127 | bpy.context.scene.transform_orientation_slots[0].type = 'GLOBAL'
128 | org_cursor = bpy.context.scene.cursor.location[:]
129 | bpy.context.scene.cursor.location = ((0, 0, 0))
130 |
131 | bpy.ops.object.mode_set(mode='POSE')
132 |
133 |
134 | def get_chain(bone, ret=None):
135 | """ Recursively build a list of a bone and its chain of first children. """
136 | if(ret is None): ret = [] # Fresh list on every call; a mutable default would persist between calls.
137 | ret.append(bone)
138 | if(len(bone.children) > 0): return get_chain(bone.children[0], ret)
139 | return ret
140 |
141 | bones = bpy.context.selected_pose_bones
142 | pMesh = None
143 | for b in bones:
144 | if(b.parent not in bones):
145 | chain = get_chain(b) # The None default above gives us a fresh list on every call.
146 | if(not pMesh):
147 | pMesh = make_physics_bone_chain(armature, chain)
148 | else:
149 | pMesh = make_physics_bone_chain(armature, chain, pMesh)
150 |
151 |
152 | # Extruding all verts to have faces, which is necessary for collision.
153 | # Additionally, the Angular bending model won't move if it has faces with 0 area, so I'm spreading the verts out a tiny amount on the X axis (picked arbitrarily).
154 | bpy.context.view_layer.objects.active = pMesh
155 | bpy.ops.object.mode_set(mode='EDIT')
156 | bpy.ops.mesh.select_all(action='SELECT')
157 | bpy.ops.mesh.extrude_region_move(TRANSFORM_OT_translate={"value":(0.01, 0, 0)})
158 | bpy.ops.mesh.select_linked()
159 | bpy.ops.transform.translate(value=(-0.005, 0, 0))
160 |
161 | # Reset armature
162 | bpy.ops.object.mode_set(mode='OBJECT')
163 | bpy.context.view_layer.objects.active = armature
164 | armature.data.pose_position = org_pose
165 | armature.location = org_loc
166 | armature.rotation_euler = org_rot_euler
167 | armature.rotation_quaternion = org_rot_quat
168 | armature.scale = org_scale
169 | bpy.ops.object.mode_set(mode=org_mode)
170 | armature.data.layers = org_layers
171 |
172 | bpy.context.scene.transform_orientation_slots[0].type = org_transform_orientation
173 | bpy.context.scene.cursor.location = (org_cursor)
174 |
175 | return { 'FINISHED' }
176 |
177 | def draw_func_MakePhysicsBones(self, context):
178 | self.layout.operator(MakePhysicsBones.bl_idname, text=MakePhysicsBones.bl_label)
179 |
180 | def register():
181 | from bpy.utils import register_class
182 | register_class(MakePhysicsBones)
183 |
184 | def unregister():
185 | from bpy.utils import unregister_class
186 | unregister_class(MakePhysicsBones)
--------------------------------------------------------------------------------
/MetsTools/mark_sharp_by_autosmooth.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | import bmesh
3 | from bpy.props import *
4 |
5 | class MarkSharpByAutoSmooth(bpy.types.Operator):
6 | """ Marks hard edges of all selected meshes based on the Auto Smooth angle. Only works if auto smooth is turned on for the object."""
7 | bl_idname = "object.mark_sharp_by_auto_smooth"
8 | bl_label = "Mark Sharp By Auto Smooth"
9 | bl_options = {'REGISTER', 'UNDO'}
10 |
11 | def execute(self, context):
12 | org_active = bpy.context.view_layer.objects.active
13 | org_mode = org_active.mode
14 |
15 | for o in bpy.context.selected_objects:
16 | bpy.context.view_layer.objects.active = o
17 | if(o.data.use_auto_smooth == False):
18 | print(o.name + ": Auto Smooth is off, skipping this object.")
19 | else:
20 | bpy.ops.object.mode_set(mode='EDIT')
21 | bpy.ops.mesh.select_all(action='SELECT')
22 | bpy.ops.mesh.faces_shade_smooth()
23 | #bpy.context.object.data.show_edge_sharp = True # This is no longer per-object in 2.8, and the new setting in bpy.context.screen.overlays doesn't seem to be readable from python.
24 |
25 | bm = bmesh.from_edit_mesh(o.data)
26 |
27 | for e in bm.edges:
28 | if( e.calc_face_angle(0) >= o.data.auto_smooth_angle ):
29 | e.smooth = False
30 |
31 | bpy.ops.object.mode_set(mode='OBJECT')
32 |
33 | bpy.context.view_layer.objects.active = org_active
34 | bpy.ops.object.mode_set(mode=org_mode)
35 |
36 | return { 'FINISHED' }
37 |
38 | def draw_func_MarkSharpByAutoSmooth(self, context):
39 | self.layout.operator(MarkSharpByAutoSmooth.bl_idname, text=MarkSharpByAutoSmooth.bl_label)
40 |
41 | def register():
42 | from bpy.utils import register_class
43 | register_class(MarkSharpByAutoSmooth)
44 |
45 | def unregister():
46 | from bpy.utils import unregister_class
47 | unregister_class(MarkSharpByAutoSmooth)
--------------------------------------------------------------------------------
/MetsTools/mirror_constraints.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | from . import utils
3 |
4 | def copy_attributes(from_thing, to_thing):
5 | bad_stuff = ['__doc__', '__module__', '__slots__', 'active', 'bl_rna', 'error_location', 'error_rotation']
6 | for prop in dir(from_thing):
7 | if(prop in bad_stuff): continue
8 | if(hasattr(to_thing, prop)):
9 | value = getattr(from_thing, prop)
10 | try:
11 | setattr(to_thing, prop, value)
12 | except AttributeError: # Read Only properties
13 | continue
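# Usage sketch: clone a constraint's settings onto the opposite bone, before the
# transform-specific mirroring below adjusts individual values (the names are just examples):
#
#   src = armature.pose.bones["Hand_Def.L"].constraints["phys"]
#   dst = armature.pose.bones["Hand_Def.R"].constraints.new(type=src.type)
#   copy_attributes(src, dst)
#
# Read-only properties are skipped by catching AttributeError above.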
14 |
15 | class XMirrorConstraints(bpy.types.Operator):
16 | """ Mirror constraints to the opposite of all selected bones. """
17 | bl_idname = "armature.x_mirror_constraints"
18 | bl_label = "X Mirror Selected Bones' Constraints"
19 | bl_options = {'REGISTER', 'UNDO'}
20 |
21 | def execute(self, context):
22 | for b in context.selected_pose_bones:
23 | #TODO: Finish adding all the constraint types.
24 | #TODO: Should also make sure constraints are in the correct order. - They should already be, though. Are we not wiping constraints before copying them? I thought we did.
25 | #TODO: Make a separate operator for "splitting" constraints in left/right parts. (by halving their influence, then mirror copying them onto the same bone)
26 | #TODO: mirror constraint's name.
27 | #TODO: copy axis locks and rotation mode.
28 |
29 | # would be cool if we could mirror on any axis, not just X. How on earth would that work though.
30 | # Maybe this can be done as an afterthought. Consider that during X mirroring, the X axis is the "mirror" axis, the Y axis is the forward axis and Z is the up axis.
31 | # If we wanted to mirror on the Y axis, it would be Y=Mirror, X = Forward, Z = Up
32 | # For Z axis mirroring though, X and Y are interchangeable, are they not? I mean, neither of them are strictly forward or up. One of them is Forward and the other is Left.
33 |
34 | armature = context.object
35 |
36 | flipped_name = utils.flip_name(b.name)
37 | opp_b = armature.pose.bones.get(flipped_name)
38 |
39 | data_b = armature.data.bones.get(b.name)
40 | opp_data_b = armature.data.bones.get(opp_b.name)
41 |
42 | for c in b.constraints:
43 | opp_c = opp_b.constraints.get(c.name)
44 | if(not opp_c):
45 | opp_c = opp_b.constraints.new(type=c.type)
46 |
47 | copy_attributes(c, opp_c)
48 |
49 | # Targets
50 | opp_c.target = c.target # TODO: could argue that this should be attempted to be flipped as well.
51 | opp_subtarget = utils.flip_name(c.subtarget)
52 | opp_c.subtarget = opp_subtarget
53 |
54 | if(c.type=='TRANSFORM'):
55 | ###### SOURCES #######
56 |
57 | ### Source Locations
58 | # X Loc: Flipped and Inverted
59 | opp_c.from_min_x = c.from_max_x *-1
60 | opp_c.from_max_x = c.from_min_x *-1
61 | # Y Loc: Same
62 | # Z Loc: Same
63 |
64 | ### Source Rotations
65 | # X Rot: Same
66 | # Y Rot: Flipped and Inverted
67 | opp_c.from_min_y_rot = c.from_max_y_rot * -1
68 | opp_c.from_max_y_rot = c.from_min_y_rot * -1
69 | # Z Rot: Flipped and Inverted
70 | opp_c.from_min_z_rot = c.from_max_z_rot * -1
71 | opp_c.from_max_z_rot = c.from_min_z_rot * -1
72 |
73 | ### Source Scales are same.
74 |
75 | ###### DESTINATIONS #######
76 |
77 | ### Destination Rotations
78 |
79 | ### Location to Rotation
80 | if(c.map_from == 'LOCATION'):
81 | # X Loc to X Rot: Flipped
82 | if(c.map_to_x_from == 'X'):
83 | opp_c.to_min_x_rot = c.to_max_x_rot
84 | opp_c.to_max_x_rot = c.to_min_x_rot
85 | # X Loc to Y Rot: Same
86 | # X Loc to Z Rot: Flipped and Inverted
87 | if(c.map_to_z_from == 'X'):
88 | opp_c.to_min_z_rot = c.to_max_z_rot *-1
89 | opp_c.to_max_z_rot = c.to_min_z_rot *-1
90 |
91 | # Y Loc to X Rot: Same
92 | # Y Loc to Y Rot: Inverted
93 | if(c.map_to_y_from == 'Y'):
94 | opp_c.to_min_y_rot = c.to_min_y_rot *-1
95 | opp_c.to_max_y_rot = c.to_max_y_rot *-1
96 | # Y Loc to Z Rot: Inverted
97 | if(c.map_to_z_from == 'Y'):
98 | opp_c.to_min_z_rot = c.to_min_z_rot *-1
99 | opp_c.to_max_z_rot = c.to_max_z_rot *-1
100 |
101 | # Z Loc to X Rot: Same
102 | # Z Loc to Y Rot: Inverted
103 | if(c.map_to_y_from == 'Z'):
104 | opp_c.to_min_y_rot = c.to_min_y_rot *-1
105 | opp_c.to_max_y_rot = c.to_max_y_rot *-1
106 | # Z Loc to Z Rot: Inverted
107 | if(c.map_to_z_from == 'Z'):
108 | opp_c.to_min_z_rot = c.to_min_z_rot *-1
109 | opp_c.to_max_z_rot = c.to_max_z_rot *-1
110 |
111 | ### Rotation to Rotation
112 | if(c.map_from == 'ROTATION'):
113 | # X Rot to X Rot: Same
114 | # X Rot to Y Rot: Inverted
115 | if(c.map_to_y_from == 'X'):
116 | opp_c.to_min_y_rot = c.to_min_y_rot *-1
117 | opp_c.to_max_y_rot = c.to_max_y_rot *-1
118 | # X Rot to Z Rot: Inverted
119 | if(c.map_to_z_from == 'X'):
120 | opp_c.to_min_z_rot = c.to_min_z_rot *-1
121 | opp_c.to_max_z_rot = c.to_max_z_rot *-1
122 |
123 | # Y Rot to X Rot: Flipped
124 | if(c.map_to_x_from == 'Y'):
125 | opp_c.to_min_x_rot = c.to_max_x_rot
126 | opp_c.to_max_x_rot = c.to_min_x_rot
127 | # Y Rot to Y Rot: Same
128 | # Y Rot to Z Rot: Flipped and Inverted
129 | if(c.map_to_z_from == 'Y'):
130 | opp_c.to_min_z_rot = c.to_max_z_rot * -1
131 | opp_c.to_max_z_rot = c.to_min_z_rot * -1
132 |
133 | # Z Rot to X Rot: Flipped
134 | if(c.map_to_x_from == 'Z'):
135 | opp_c.to_min_x_rot = c.to_max_x_rot
136 | opp_c.to_max_x_rot = c.to_min_x_rot
137 | # Z Rot to Y Rot: Flipped and Inverted
138 | if(c.map_to_y_from == 'Z'):
139 | opp_c.to_min_y_rot = c.to_max_y_rot * -1
140 | opp_c.to_max_y_rot = c.to_min_y_rot * -1
141 | # Z Rot to Z Rot: Flipped and Inverted
142 | if(c.map_to_z_from == 'Z'):
143 | opp_c.to_min_z_rot = c.to_max_z_rot * -1
144 | opp_c.to_max_z_rot = c.to_min_z_rot * -1
145 |
146 | ### Scale to Rotation
147 | if(c.map_from == 'SCALE'):
148 | # ALL Scale to X Rot: Same
149 | # All Scale to Y Rot: Inverted
150 | opp_c.to_min_y_rot = c.to_min_y_rot *-1
151 | opp_c.to_max_y_rot = c.to_max_y_rot *-1
152 | # All Scale to Z Rot: Inverted
153 | opp_c.to_min_z_rot = c.to_min_z_rot *-1
154 | opp_c.to_max_z_rot = c.to_max_z_rot *-1
155 |
156 | ### Destination Locations
157 | ### Location to Location
158 | if(c.map_from == 'LOCATION'):
159 | # X Loc to X Loc: Flipped and Inverted
160 | if(c.map_to_x_from == 'X'):
161 | opp_c.to_min_x = c.to_max_x *-1
162 | opp_c.to_max_x = c.to_min_x *-1
163 | # X Loc to Y Loc: Flipped
164 | if(c.map_to_y_from == 'X'):
165 | opp_c.to_min_y = c.to_max_y
166 | opp_c.to_max_y = c.to_min_y
167 | # X Loc to Z Loc: Flipped
168 | if(c.map_to_z_from == 'X'):
169 | opp_c.to_min_z = c.to_max_z
170 | opp_c.to_max_z = c.to_min_z
171 |
172 | # Y Loc to X Loc: Inverted
173 | if(c.map_to_x_from == 'Y'):
174 | opp_c.to_min_x = c.to_min_x *-1
175 | opp_c.to_max_x = c.to_max_x *-1
176 | # Y Loc to Y Loc: Same
177 | # Y Loc to Z Loc: Same
178 |
179 | # Z Loc to X Loc: Inverted
180 | if(c.map_to_x_from == 'Z'):
181 | opp_c.to_min_x = c.to_min_x *-1
182 | opp_c.to_max_x = c.to_max_x *-1
183 | # Z Loc to Y Loc: Same
184 | # Z Loc to Z Loc: Same
185 |
186 | ### Rotation to Location
187 | if(c.map_from == 'ROTATION'):
188 | # X Rot to X Loc: Inverted
189 | if(c.map_to_x_from == 'X'):
190 | opp_c.to_min_x = c.to_min_x * -1
191 | opp_c.to_max_x = c.to_max_x * -1
192 | # X Rot to Y Loc: Same
193 | # X Rot to Z Loc: Same
194 |
195 | # Y Rot to X Loc: Flipped and Inverted
196 | if(c.map_to_x_from == 'Y'):
197 | opp_c.to_min_x = c.to_max_x * -1
198 | opp_c.to_max_x = c.to_min_x * -1
199 | # Y Rot to Y Loc: Flipped
200 | if(c.map_to_y_from == 'Y'):
201 | opp_c.to_min_y = c.to_max_y
202 | opp_c.to_max_y = c.to_min_y
203 | # Y Rot to Z Loc: Flipped
204 | if(c.map_to_z_from == 'Y'):
205 | opp_c.to_min_z = c.to_max_z
206 | opp_c.to_max_z = c.to_min_z
207 |
208 | # Z Rot to X Loc: Flipped and inverted
209 | if(c.map_to_x_from == 'Z'):
210 | opp_c.to_min_x = c.to_max_x * -1
211 | opp_c.to_max_x = c.to_min_x * -1
212 | # Z Rot to Y Loc: Flipped
213 | if(c.map_to_y_from == 'Z'):
214 | opp_c.to_min_y = c.to_max_y
215 | opp_c.to_max_y = c.to_min_y
216 | # Z Rot to Z Loc: Flipped
217 | if(c.map_to_z_from == 'Z'):
218 | opp_c.to_min_z = c.to_max_z
219 | opp_c.to_max_z = c.to_min_z
220 |
221 | ### Scale to Location
222 | if(c.map_from == 'SCALE'):
223 | # All Scale to X Loc: Inverted
224 | opp_c.to_min_x = c.to_min_x *-1
225 | opp_c.to_max_x = c.to_max_x *-1
226 | # All Scale to Y Loc: Same
227 | # All Scale to Z Loc: Same
228 |
229 | ### Destination Scales
230 | # Location to Scale
231 | if(c.map_from == 'LOCATION'):
232 | # X Loc to All Scale: Flipped
233 | if(c.map_to_x_from == 'X'):
234 | opp_c.to_min_x_scale = c.to_max_x_scale
235 | opp_c.to_max_x_scale = c.to_min_x_scale
236 | if(c.map_to_y_from == 'X'):
237 | opp_c.to_min_y_scale = c.to_max_y_scale
238 | opp_c.to_max_y_scale = c.to_min_y_scale
239 | if(c.map_to_z_from == 'X'):
240 | opp_c.to_min_z_scale = c.to_max_z_scale
241 | opp_c.to_max_z_scale = c.to_min_z_scale
242 | # Y Loc to All Scale: Same
243 | # Z Loc to All Scale: Same
244 |
245 | # Rotation to Scale
246 | if(c.map_from == 'ROTATION'):
247 | # X Rot to All Scale: Same
248 | # Y Rot to All Scale: Flipped
249 | if(c.map_to_x_from == 'Y'):
250 | opp_c.to_min_x_scale = c.to_max_x_scale
251 | opp_c.to_max_x_scale = c.to_min_x_scale
252 | if(c.map_to_y_from == 'Y'):
253 | opp_c.to_min_y_scale = c.to_max_y_scale
254 | opp_c.to_max_y_scale = c.to_min_y_scale
255 | if(c.map_to_z_from == 'Y'):
256 | opp_c.to_min_z_scale = c.to_max_z_scale
257 | opp_c.to_max_z_scale = c.to_min_z_scale
258 | # Z Rot to All Scale: Flipped
259 | if(c.map_to_x_from == 'Z'):
260 | opp_c.to_min_x_scale = c.to_max_x_scale
261 | opp_c.to_max_x_scale = c.to_min_x_scale
262 | if(c.map_to_y_from == 'Z'):
263 | opp_c.to_min_y_scale = c.to_max_y_scale
264 | opp_c.to_max_y_scale = c.to_min_y_scale
265 | if(c.map_to_z_from == 'Z'):
266 | opp_c.to_min_z_scale = c.to_max_z_scale
267 | opp_c.to_max_z_scale = c.to_min_z_scale
268 |
269 | # Scale to Scale is all same.
270 |
271 | # Mirroring Bendy Bone settings
272 | opp_data_b.bbone_handle_type_start = data_b.bbone_handle_type_start
273 | opp_data_b.bbone_handle_type_end = data_b.bbone_handle_type_end
274 | if(data_b.bbone_custom_handle_start):
275 | opp_data_b.bbone_custom_handle_start = armature.data.bones.get(utils.flip_name(data_b.bbone_custom_handle_start.name))
276 | else:
277 | opp_data_b.bbone_custom_handle_start = None
278 | if(data_b.bbone_custom_handle_end):
279 | opp_data_b.bbone_custom_handle_end = armature.data.bones.get(utils.flip_name(data_b.bbone_custom_handle_end.name))
280 | else:
281 | opp_data_b.bbone_custom_handle_end = None
282 | opp_data_b.bbone_segments = data_b.bbone_segments
283 | # Inherit End Roll
284 | opp_data_b.use_endroll_as_inroll = data_b.use_endroll_as_inroll
285 |
286 | # Edit mode curve settings
287 | opp_data_b.bbone_curveinx = data_b.bbone_curveinx *-1
288 | opp_data_b.bbone_curveoutx = data_b.bbone_curveoutx *-1
289 | opp_data_b.bbone_curveiny = data_b.bbone_curveiny
290 | opp_data_b.bbone_curveouty = data_b.bbone_curveouty
291 | opp_data_b.bbone_rollin = data_b.bbone_rollin *-1
292 | opp_data_b.bbone_rollout = data_b.bbone_rollout *-1
293 | opp_data_b.bbone_scaleinx = data_b.bbone_scaleinx
294 | opp_data_b.bbone_scaleiny = data_b.bbone_scaleiny
295 | opp_data_b.bbone_scaleoutx = data_b.bbone_scaleoutx
296 | opp_data_b.bbone_scaleouty = data_b.bbone_scaleouty
297 |
298 | # Mirroring bone shape
299 | if(b.custom_shape):
300 | opp_b.custom_shape = bpy.data.objects.get(utils.flip_name(b.custom_shape.name))
301 | opp_data_b.show_wire = data_b.show_wire
302 | opp_b.custom_shape_scale = b.custom_shape_scale
303 | opp_b.use_custom_shape_bone_size = b.use_custom_shape_bone_size
304 | if(b.custom_shape_transform):
305 | opp_b.custom_shape_transform = armature.pose.bones.get(utils.flip_name(b.custom_shape_transform.name))
306 |
307 |
308 | #NOTE: curve values are not mirrored, since as far as my use cases go, they would always be the default values, unless there are drivers on them, and driver mirroring is a TODO.
309 |
310 | return {"FINISHED"}
311 |
312 | def register():
313 | from bpy.utils import register_class
314 | register_class(XMirrorConstraints)
315 |
316 | def unregister():
317 | from bpy.utils import unregister_class
318 | unregister_class(XMirrorConstraints)
--------------------------------------------------------------------------------
/MetsTools/psk_cleanup.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | import bmesh
3 |
4 | # .psk models import as a single mesh with multiple UV channels. Each UV channel corresponds to a material.
5 | # This script will select every UV face that isn't at 0.0 on each UV layer, separate that part of the mesh into its own object, and clean up the material and UV slots.
6 | # Works on one selected object for now.
7 |
8 | def assign_materials_by_uv_layers():
9 | obj = bpy.context.object
10 | mesh = obj.data
11 | orig_mode = obj.mode
12 | bpy.ops.object.mode_set(mode='EDIT')
13 |
14 | bm = bmesh.from_edit_mesh(mesh)
15 |
16 | for uv_idx in range(0, len(mesh.uv_layers)): # For each UV layer
17 | obj.active_material_index = uv_idx
18 | mesh.uv_layers.active_index = uv_idx
19 | bpy.ops.mesh.select_all(action='DESELECT')
20 | bm.faces.ensure_lookup_table()
21 | for f in bm.faces: # For each face
22 | for l in f.loops: # For each loop (face corner)
23 | if(l[bm.loops.layers.uv.active].uv[0] != 0.0): # If this corner's U coordinate isn't 0, the face is actually mapped on this UV layer
24 | f.select_set(True) # Select this face
25 | bpy.ops.object.material_slot_assign()
26 | obj.active_material.name = mesh.uv_layers[uv_idx].name
27 |
28 | bmesh.update_edit_mesh(mesh, True)
29 | bpy.ops.object.mode_set(mode=orig_mode)
30 |
31 | assign_materials_by_uv_layers()
32 |
33 | # Separating object by materials
34 | bpy.ops.object.mode_set(mode='EDIT')
35 | bpy.ops.mesh.select_all(action='SELECT')
36 | bpy.ops.mesh.separate(type='MATERIAL')
37 | bpy.ops.object.mode_set(mode='OBJECT')
38 |
39 | # Cleaning up objects
40 | for o in bpy.context.selected_objects:
41 | # Cleaning up UV Maps
42 | mat_name = o.material_slots[0].name
43 | for uv_map in reversed(o.data.uv_layers):
44 | if(uv_map.name not in mat_name):
45 | o.data.uv_layers.remove(uv_map)
46 | if(len(o.data.uv_layers) > 0):
47 | o.data.uv_layers[0].name = "UVMap"
48 |
49 | # Cleaning mesh
50 | bpy.context.view_layer.objects.active = o
51 | bpy.ops.object.mode_set(mode='EDIT')
52 | bpy.ops.mesh.select_all(action='SELECT')
53 | bpy.ops.mesh.tris_convert_to_quads(uvs=True, seam=True)
54 | bpy.ops.mesh.faces_shade_smooth()
55 | if(len(o.data.uv_layers) > 0):
56 | bpy.ops.uv.seams_from_islands()
57 | bpy.ops.object.mode_set(mode='OBJECT')
58 | if(hasattr(bpy.ops.object, "calculate_weighted_normals")): # Check if the Weighted Normals addon is available
59 | bpy.ops.object.calculate_weighted_normals()
--------------------------------------------------------------------------------
/MetsTools/rename_skeleton_to_metsrig.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | from bpy.props import *
3 |
4 | prefix_ow = "bone_0"
5 | prefix_bi = "GenericHuman"
6 | prefix_valve = "ValveBiped.Bip01_"
7 |
8 | valve = { # TODO: I haven't tested this one.
9 | 'Pelvis' : 'Spine1_Def',
10 | 'ValveBiped.Bip01_Spine1' : 'Spine2_Def',
11 | 'ValveBiped.Bip01_Spine2' : 'Spine3_Def',
12 | 'ValveBiped.Bip01_Hips' : 'Hip_Def',
13 | 'ValveBiped.Bip01_Neck' : 'Neck_Def',
14 | 'ValveBiped.Bip01_Head' : 'Head_Def',
15 |
16 | 'ValveBiped.Bip01_L_Clavicle' : 'Clavicle_Def.L',
17 | 'ValveBiped.Bip01_R_Clavicle' : 'Clavicle_Def.R',
18 | 'ValveBiped.Bip01_L_Upperarm' : 'Shoulder_Def.L',
19 | 'ValveBiped.Bip01_R_Upperarm' : 'Shoulder_Def.R',
20 | 'ValveBiped.Bip01_L_Forearm' : 'Elbow_Def.L',
21 | 'ValveBiped.Bip01_R_Forearm' : 'Elbow_Def.R',
22 | 'ValveBiped.Bip01_L_Hand' : 'Hand_Def.L',
23 | 'ValveBiped.Bip01_R_Hand' : 'Hand_Def.R',
24 |
25 | 'ValveBiped.Bip01_L_Finger0' : 'Thumb1.L',
26 | 'ValveBiped.Bip01_L_Finger01' : 'Thumb2.L',
27 | 'ValveBiped.Bip01_L_Finger02' : 'Thumb3.L',
28 | 'ValveBiped.Bip01_L_Finger1' : 'Index1.L',
29 | 'ValveBiped.Bip01_L_Finger11' : 'Index2.L',
30 | 'ValveBiped.Bip01_L_Finger12' : 'Index3.L',
31 | 'ValveBiped.Bip01_L_Finger2' : 'Middle1.L',
32 | 'ValveBiped.Bip01_L_Finger21' : 'Middle2.L',
33 | 'ValveBiped.Bip01_L_Finger22' : 'Middle3.L',
34 | 'ValveBiped.Bip01_L_Finger3' : 'Ring1.L',
35 | 'ValveBiped.Bip01_L_Finger31' : 'Ring2.L',
36 | 'ValveBiped.Bip01_L_Finger32' : 'Ring3.L',
37 | 'ValveBiped.Bip01_L_Finger4' : 'Pinky1.L',
38 | 'ValveBiped.Bip01_L_Finger41' : 'Pinky2.L',
39 | 'ValveBiped.Bip01_L_Finger42' : 'Pinky3.L',
40 |
41 | 'ValveBiped.Bip01_R_Finger0' : 'Thumb1.R',
42 | 'ValveBiped.Bip01_R_Finger01' : 'Thumb2.R',
43 | 'ValveBiped.Bip01_R_Finger02' : 'Thumb3.R',
44 | 'ValveBiped.Bip01_R_Finger1' : 'Index1.R',
45 | 'ValveBiped.Bip01_R_Finger11' : 'Index2.R',
46 | 'ValveBiped.Bip01_R_Finger12' : 'Index3.R',
47 | 'ValveBiped.Bip01_R_Finger2' : 'Middle1.R',
48 | 'ValveBiped.Bip01_R_Finger21' : 'Middle2.R',
49 | 'ValveBiped.Bip01_R_Finger22' : 'Middle3.R',
50 | 'ValveBiped.Bip01_R_Finger3' : 'Ring1.R',
51 | 'ValveBiped.Bip01_R_Finger31' : 'Ring2.R',
52 | 'ValveBiped.Bip01_R_Finger32' : 'Ring3.R',
53 | 'ValveBiped.Bip01_R_Finger4' : 'Pinky1.R',
54 | 'ValveBiped.Bip01_R_Finger41' : 'Pinky2.R',
55 | 'ValveBiped.Bip01_R_Finger42' : 'Pinky3.R',
56 |
57 | 'ValveBiped.Bip01_L_Thigh' : 'Thigh_Def.L',
58 | 'ValveBiped.Bip01_R_Thigh' : 'Thigh_Def.R',
59 | 'ValveBiped.Bip01_L_Calf' : 'Knee_Def.L',
60 | 'ValveBiped.Bip01_R_Calf' : 'Knee_Def.R',
61 | 'ValveBiped.Bip01_L_Foot' : 'Foot_Def.L',
62 | 'ValveBiped.Bip01_R_Foot' : 'Foot_Def.R',
63 | 'ValveBiped.Bip01_L_Toe0' : 'Toe_Def.L',
64 | 'ValveBiped.Bip01_R_Toe0' : 'Toe_Def.R'
65 | }
66 |
67 | overwatch = {
68 | "000" : "useless",
69 | "001" : "root",
70 | "38D" : "useless",
71 | "180" : "useless",
72 | "3BE" : "useless",
73 | "181" : "useless",
74 | "07D" : "useless",
75 | "5F0" : "useless",
76 | "07E" : "useless",
77 | "0C1" : "useless",
78 | "0C2" : "useless",
79 | "002" : "useless",
80 | "97F" : "useless",
81 | "985" : "useless",
82 | "052" : "Spine1_Def",
83 | "003" : "useless",
84 | "004" : "useless",
85 | "051" : "Spine2_Def",
86 | "005" : "useless",
87 | "109" : "useless",
88 | "00F" : "Spine3_Def",
89 | "010" : "Neck_Def",
90 | "0EF" : "Collar_Back",
91 | "0EE" : "Collar.R",
92 | "0EC" : "Collar_Front",
93 | "0ED" : "Collar.L",
94 | "050" : "useless",
95 | "034" : "Clavicle_Def.L",
96 | "00D" : "useless",
97 | "031" : "Shoulder_Def.L",
98 | "04D" : "Shoulder_Def.R",
99 | "00E" : "useless",
100 | "032" : "Twist_Arm_6.L",
101 | "04F" : "Twist_Arm_5.L",
102 | "01A" : "Twist_Arm_4.L",
103 | "01B" : "Twist_Arm_3.L",
104 | "030" : "Twist_Arm_2.L",
105 | "01C" : "Hand_Def.L",
106 | "029" : "Thumb1.L",
107 | "02A" : "Thumb2.L",
108 | "02B" : "Thumb3.L",
109 | "0E6" : "Thumb_Adjust_1.L",
110 | "0E4" : "Thumb_Adjust_2.L",
111 | "0DE" : "Thumb_Adjust_3.L",
112 | "0E2" : "Palm_Adjust_1.L",
113 | "835" : "Index1.L",
114 | "01D" : "Index2.L",
115 | "01E" : "Index3.L",
116 | "01F" : "Index4.L",
117 | "837" : "Middle1.L",
118 | "020" : "Middle2.L",
119 | "021" : "Middle3.L",
120 | "022" : "Middle4.L",
121 | "839" : "Ring1.L",
122 | "026" : "Ring2.L",
123 | "027" : "Ring3.L",
124 | "028" : "Ring4.L",
125 | "0E0" : "Pinky1.L",
126 | "023" : "Pinky2.L",
127 | "024" : "Pinky3.L",
128 | "025" : "Pinky4.L",
129 | "0EA" : "Palm_Adjust_2.L",
130 | "036" : "useless",
131 | "00C" : "Clavicle_Def.R",
132 | "037" : "useless",
133 | "04E" : "Twist_Arm_6.R",
134 | "033" : "Twist_Arm_5.R",
135 | "038" : "Twist_Arm_4.R",
136 | "039" : "Twist_Arm_3.R",
137 | "04A" : "useless",
138 | "04C" : "Twist_Arm_2.R",
139 | "03A" : "Hand_Def.R",
140 | "047" : "Thumb1.R",
141 | "0E7" : "Thumb_Adjust_1.R",
142 | "0E5" : "Thumb_Adjust_2.R",
143 | "0DF" : "Thumb_Adjust_3.R",
144 | "048" : "Thumb2.R",
145 | "049" : "Thumb3.R",
146 | "0E3" : "Palm_Adjust_1.R",
147 | "836" : "Index1.R",
148 | "03B" : "Index2.R",
149 | "03C" : "Index3.R",
150 | "03D" : "Index4.R",
151 | "838" : "Middle1.R",
152 | "03E" : "Middle2.R",
153 | "03F" : "Middle3.R",
154 | "040" : "Middle4.R",
155 | "83A" : "Ring1.R",
156 | "044" : "Ring2.R",
157 | "045" : "Ring3.R",
158 | "046" : "Ring4.R",
159 | "0E1" : "Pinky1.R",
160 | "041" : "Pinky2.R",
161 | "042" : "Pinky3.R",
162 | "043" : "Pinky4.R",
163 | "0EB" : "Palm_Adjust_2.R",
164 | "011" : "Head_Def",
165 | "016" : "Eyebrow_Mid",
166 | "017" : "EyeBrow1.L",
167 | "018" : "EyeBrow2.L",
168 | "019" : "EyeBrow3.L",
169 | "385" : "EyeBrow4.L",
170 | "012" : "Head_Top",
171 | "388" : "Nose_Bridge.L",
172 | "39A" : "Eye.L",
173 | "397" : "Eyelid_Top_2_Rot.L",
174 | "396" : "Eyelid_Bot_2_Rot.L",
175 | "007" : "Eyelid_Corner_Outer.L",
176 | "38C" : "Eyelid_Top_3.L",
177 | "38B" : "Eyelid_Top_2.L",
178 | "38A" : "Eyelid_Top_1.L",
179 | "006" : "Eyelid_Corner_Inner.L",
180 | "38D" : "Eyelid_Bot_1.L",
181 | "38E" : "Eyelid_Bot_2.L",
182 | "38F" : "Eyelid_Bot_3.L",
183 | "608" : "Cheek_Upper_Inner.L",
184 | "3A2" : "Cheek_Upper_Middle.L",
185 | "60A" : "Cheek_Upper_Outer.L",
186 | "71D" : "Cheek_Mid_Outer.L",
187 | "39E" : "Cheek_Mid.L",
188 | "3A4" : "Cheek_Mid_Inner.L",
189 | "009" : "Nose.L",
190 | "008" : "Nose_Tip",
191 | "00B" : "useless",
192 | "3BC" : "Jaw",
193 | "3B7" : "Teeth_Top",
194 | "3B8" : "Teeth_Bottom",
195 | "3BB" : "Tongue1",
196 | "3BA" : "Tongue2",
197 | "3B9" : "Tongue3",
198 | "3A0" : "Cheek_Jaw_2.L",
199 | "3A6" : "Cheek_Jaw_1.L",
200 | "60C" : "Cheek_LaughLine.L",
201 | "3AA" : "Lip_Top_Mid",
202 | "3A9" : "Lip_Top_2.L",
203 | "3A8" : "Lip_Top_3.L",
204 | "3B0" : "Lip_Corner.L",
205 | "3AF" : "Lip_Bottom_3.L",
206 | "3AC" : "Lip_Bottom_2.L",
207 | "3AD" : "Chin_Outer.L",
208 | "3AB" : "Lip_Bottom_Mid",
209 | "3B6" : "Chin_Mid",
210 | "015" : "EyeBrow1.R",
211 | "014" : "EyeBrow2.R",
212 | "013" : "EyeBrow3.R",
213 | "384" : "EyeBrow4.R",
214 | "39B" : "Eye.R",
215 | "398" : "Eyelid_Bot_2_Rot.R",
216 | "399" : "Eyelid_Top_2_Rot.R",
217 | "389" : "Nose_Bridge.R",
218 | "386" : "Eyelid_Corner_Outer.R",
219 | "395" : "Eyelid_Top_3.R",
220 | "394" : "Eyelid_Top_2.R",
221 | "393" : "Eyelid_Top_1.R",
222 | "387" : "Eyelid_Corner_Inner.R",
223 | "391" : "Eyelid_Bot_1.R",
224 | "390" : "Eyelid_Bot_2.R",
225 | "392" : "Eyelid_Bot_3.R",
226 | "609" : "Cheek_Upper_Inner.R",
227 | "3A3" : "Cheek_Upper_Middle.R",
228 | "60B" : "Cheek_Upper_Outer.R",
229 | "71E" : "Cheek_Mid_Outer.R",
230 | "39F" : "Cheek_Mid.R",
231 | "3A5" : "Cheek_Mid_Inner.R",
232 | "00A" : "Nose.R",
233 | "60D" : "Cheek_LaughLine.R",
234 | "3A7" : "Cheek_Jaw_1.R",
235 | "3A1" : "Cheek_Jaw_2.R",
236 | "3B4" : "Lip_Top_2.R",
237 | "3B5" : "Lip_Top_3.R",
238 | "3B3" : "Lip_Corner.R",
239 | "3B2" : "Lip_Bottom_3.R",
240 | "3B1" : "Lip_Bottom_2.R",
241 | "3AE" : "Chin_Outer.R",
242 | "056" : "Thigh_Def.L",
243 | "055" : "useless",
244 | "060" : "Thigh_Def.R",
245 | "05F" : "useless",
246 | "057" : "Twist_Leg_4.L",
247 | "061" : "Twist_Leg_4.R",
248 | "058" : "Twist_Leg_3.L",
249 | "062" : "Twist_Leg_3.R",
250 | "05C" : "Knee_Def.L",
251 | "066" : "Knee_Def.R",
252 | "05D" : "Twist_Leg_2.L",
253 | "067" : "Twist_Leg_2.R",
254 | "05E" : "Twist_Leg_1.L",
255 | "068" : "Twist_Leg_1.R",
256 | "05A" : "Foot_Def.L",
257 | "064" : "Foot_Def.R",
258 | "05B" : "Toe_Def.L",
259 | "065" : "Toe_Def.R",
260 | "3BD" : "useless",
261 | "180" : "useless",
262 | "17A" : "BackThing",
263 | "984" : "useless",
264 | "054" : "Hip_Def",
265 | "053" : "useless",
266 | "7B3" : "Hair_1",
267 | "7B4" : "Hair_2",
268 | "7B5" : "Hair_3",
269 | "7B6" : "Hair_4",
270 |
271 | #Dva stuff
272 | "624" : "Twist_Arm_1.L",
273 | "61F" : "Twist_Arm_1.R",
274 | "0E8" : "Hand_Adjust.L",
275 | "0E9" : "Hand_Adjust.R",
276 | "57B" : "Clavicle_Adjust.L",
277 | "57C" : "Clavicle_Adjust.R",
278 | "57E" : "Neck_Def"
279 | }
280 |
281 | sombra_gun = {
282 | "001" : "Root",
283 | "002" : "Base",
284 | "075" : "Back_Part1",
285 | "072" : "Back_Part2",
286 | "073" : "Back_Part3",
287 | "076" : "Back_Part4",
288 | "06B" : "Back_Part5",
289 | "08B" : "Back_Part6",
290 | "6E1" : "Back_Main",
291 | "97E" : "Thing.L",
292 | "97A" : "Thing.R",
293 | "423" : "AmmoDisplayBase",
294 | "0C5" : "AmmoDisplay",
295 | "41F" : "Cable.L",
296 | "098" : "Trigger",
297 | "09F" : "Barrel",
298 | "911" : "Phys1",
299 |
300 | "F78" : "Sight",
301 | }
302 |
303 | dva_mech = {
304 | "001" : "ROOT_Mech",
305 | "3BD" : "useless",
306 | "3BE" : "useless",
307 | "07D" : "useless",
308 | "180" : "useless",
309 | "181" : "useless",
310 | "5F0" : "useless",
311 | "002" : "useless",
312 | "914" : "useless",
313 | "003" : "Upper",
314 | "0C3" : "Boosters_Back.L",
315 | "11F" : "Boosters_Back.L.001",
316 | "0C4" : "Boosters_Back.R",
317 | "121" : "Boosters_Back.R.001",
318 | "00D" : "useless",
319 | "00E" : "useless",
320 | "036" : "useless",
321 | "037" : "useless",
322 | "050" : "Gun.L",
323 | "032" : "Gun.L.001",
324 | "01B" : "Gun.L.002",
325 | "09B" : "Gun.L.003",
326 | "01C" : "Gun.L.004",
327 | "099" : "Gun.L.005",
328 | "8BA" : "Gun.L.006",
329 | "8BC" : "Gun.L.007",
330 | "16F" : "Gun_Barrel_1.L",
331 | "8B9" : "Gun_Barrel_1.L.001",
332 | "182" : "Gun_Barrel_2.L",
333 | "8B7" : "Gun_Barrel_2.L.002",
334 | "8B2" : "Gun_Barrel_3.L",
335 | "8B5" : "Gun_Barrel_3.L.001",
336 | "035" : "Gun.R",
337 | "04E" : "Gun.R.001",
338 | "039" : "Gun.R.002",
339 | "09C" : "Gun.R.003",
340 | "03A" : "Gun.R.004",
341 | "09A" : "Gun.R.005",
342 | "8BB" : "Gun.R.006",
343 | "8BD" : "Gun.R.007",
344 | "170" : "Gun_Barrel_1.R",
345 | "8B8" : "Gun_Barrel_1.R.001",
346 | "183" : "Gun_Barrel_2.R",
347 | "8B6" : "Gun_Barrel_2.R.001",
348 | "8B3" : "Gun_Barrel_3.R",
349 | "8B4" : "Gun_Barrel_3.R.001",
350 | "11E" : "Wing.L",
351 | "13B" : "Wing.L.001",
352 | "120" : "Wing.R",
353 | "13C" : "Wing.R.001",
354 | "0C5" : "Door",
355 | "082" : "Top_Back",
356 | "081" : "Top_Front",
357 | "8FD" : "useless",
358 | "871" : "Seat_Top",
359 | "179" : "Door_Lower",
360 | "7E5" : "Seat_Back",
361 | "052" : "Upper_Base",
362 | "004" : "Seat",
363 | "511" : "Stick.L",
364 | "8E9" : "Stick.L.001",
365 | "8FE" : "Stick.L.002",
366 | "518" : "Stick.R",
367 | "8EA" : "Stick.R.001",
368 | "8FF" : "Stick.R.002",
369 | "054" : "Lower_Base",
370 | "0C2" : "useless",
371 | "0C1" : "useless",
372 | "053" : "Lower",
373 | "436" : "Hip.L",
374 | "055" : "Thigh.L",
375 | "12E" : "Knee.L",
376 | "059" : "Calf.L",
377 | "431" : "Ankle.L",
378 | "42A" : "Foot.L",
379 | "05B" : "Toe_Base.L",
380 | "439" : "Toe_Front.L",
381 | "43D" : "ToeFlap.L",
382 | "11A" : "useless",
383 | "425" : "useless",
384 | "05A" : "Toe_Root.L",
385 | "43B" : "Thumb.L",
386 | "435" : "Hip.R",
387 | "05F" : "Thigh.R",
388 | "12D" : "Knee.R",
389 | "063" : "Calf.R",
390 | "426" : "useless",
391 | "11B" : "useless",
392 | "432" : "Ankle.R",
393 | "429" : "Foot.R",
394 | "064" : "Toe_Root.R",
395 | "43C" : "Thumb.R",
396 | "065" : "Toe_Base.R",
397 | "43A" : "Toe_Front.R",
398 | "43E" : "ToeFlap.R",
399 | "159" : "Gun_Latch.L",
400 | "158" : "Gun_Latch.R"
401 | }
402 |
403 | dva_pistol = {
404 | "001" : "ROOT_Pistol",
405 | "002" : "Grip",
406 | "098" : "Trigger",
407 | "070" : "Sight",
408 | "09F" : "Barrel",
409 | "069" : "Back",
410 | "06B" : "Phys",
411 | "06C" : "Phys.001",
412 | "06D" : "Phys.002",
413 | "06E" : "Phys.003",
414 | }
415 |
416 | elizabeth_bioshock = {
417 | "Pelvis" : "Hip_Def",
418 | "Spine1" : "Spine1_Def",
419 | "Spine2" : "Spine2_Def",
420 | "Spine3" : "Spine3_Def",
421 | "Ribcage" : "Spine4_Def",
422 | "BreathingBone" : "BreathingBone", # Has no equivalent in my rig, but still want to remove the GenericHuman prefix
423 | "Neck" : "Neck_Def",
424 | "Head" : "Head_Def",
425 | "LCollarbone" : "Clavicle_Def.L",
426 | "RCollarbone" : "Clavicle_Def.R",
427 | "LUpperarm1" : "Shoulder_Def.L",
428 | "RUpperarm1" : "Shoulder_Def.R",
429 | "LUpperarm2" : "Twist_Arm_6.L",
430 | "RUpperarm2" : "Twist_Arm_6.R",
431 | "LForearm1" : "Elbow_Def.L",
432 | "RForearm1" : "Elbow_Def.R",
433 | "LForearm2" : "Twist_Arm_3.L",
434 | "RForearm2" : "Twist_Arm_3.R",
435 | "LPalm" : "Hand_Def.L",
436 | "RPalm" : "Hand_Def.R",
437 | "LDigit11" : "Thumb1.L",
438 | "RDigit11" : "Thumb1.R",
439 | "LDigit12" : "Thumb2.L",
440 | "RDigit12" : "Thumb2.R",
441 | "LDigit13" : "Thumb3.L",
442 | "RDigit13" : "Thumb3.R",
443 | "LDigit21" : "Index2.L",
444 | "RDigit21" : "Index2.R",
445 | "LDigit22" : "Index3.L",
446 | "RDigit22" : "Index3.R",
447 | "LDigit23" : "Index4.L",
448 | "RDigit23" : "Index4.R",
449 | "LDigit31" : "Middle2.L",
450 | "RDigit31" : "Middle2.R",
451 | "LDigit32" : "Middle3.L",
452 | "RDigit32" : "Middle3.R",
453 | "LDigit33" : "Middle4.L",
454 | "RDigit33" : "Middle4.R",
455 | "LDigit41" : "Ring2.L",
456 | "RDigit41" : "Ring2.R",
457 | "LDigit42" : "Ring3.L",
458 | "RDigit42" : "Ring3.R",
459 | "LDigit43" : "Ring4.L",
460 | "RDigit43" : "Ring4.R",
461 | "LDigit51" : "Pinky2.L",
462 | "RDigit51" : "Pinky2.R",
463 | "LDigit52" : "Pinky3.L",
464 | "RDigit52" : "Pinky3.R",
465 | "LDigit53" : "Pinky4.L",
466 | "RDigit53" : "Pinky4.R",
467 | "_C_jawJnt" : "Jaw",
468 | "_l_cornerLipJnt" : "Lip_Corner.L",
469 | "_r_cornerLipJnt" : "Lip_Corner.R",
470 | "_l_uppLipJnt" : "Lip_Top_2.L",
471 | "_r_uppLipJnt" : "Lip_Top_2.R",
472 | "_l_loLipJnt" : "Lip_Bottom_2.L",
473 | "_r_loLipJnt" : "Lip_Bottom_2.R",
474 | "_c_uppLipJnt" : "Lip_Top_Mid",
475 | "_c_loLipJnt" : "Lip_Bottom_Mid",
476 | "_C_tongue_a_Jnt" : "Tongue2",
477 | "_C_tongue_b_Jnt" : "Tongue3",
478 | "_l_loCheekJnt" : "Cheek_Jaw_2.L",
479 | "_r_loCheekJnt" : "Cheek_Jaw_2.R",
480 | "_LSquint" : "Cheek_Upper_Middle.L",
481 | "_RSquint" : "Cheek_Upper_Middle.R",
482 | "_l_uppCheekJnt" : "Cheek_Mid_Outer.L",
483 | "_r_uppCheekJnt" : "Cheek_Mid_Outer.R",
484 | "_L_InnerCheek" : "Cheek_Mid_Inner.L",
485 | "_R_InnerCheek" : "Cheek_Mid_Inner.R",
486 | "_l_nostrilJnt" : "Nose.L",
487 | "_r_nostrilJnt" : "Nose.R",
488 | "_l_loLidJnt" : "Eyelid_Bot_2.L",
489 | "_r_loLidJnt" : "Eyelid_Bot_2.R",
490 | "_l_uppLidJnt" : "Eyelid_Top_2.L",
491 | "_r_uppLidJnt" : "Eyelid_Top_2.R",
492 | "_l_EyeJnt" : "Eye.L",
493 | "_r_EyeJnt" : "Eye.R",
494 | "_l_browAJnt" : "EyeBrow1.L",
495 | "_r_browAJnt" : "EyeBrow1.R",
496 | "_l_browBJnt" : "EyeBrow2.L",
497 | "_r_browBJnt" : "EyeBrow2.R",
498 | "_l_browCJnt" : "EyeBrow3.L",
499 | "_r_browCJnt" : "EyeBrow3.R",
500 | "_C_forehead" : "Eyebrow_Mid",
501 | "LThigh" : "Thigh_Def.L",
502 | "RThigh" : "Thigh_Def.R",
503 | "LCalf" : "Knee_Def.L",
504 | "RCalf" : "Knee_Def.R",
505 | "LFoot" : "Foot_Def.L",
506 | "RFoot" : "Foot_Def.R",
507 | "LToe1" : "Toes_Def.L",
508 | "RToe1" : "Toes_Def.R",
509 | "DressBone_FL1" : "Skirt_Front.L",
510 | "DressBone_FL2" : "Skirt_Front.L.001",
511 | "DressBone_FL3" : "Skirt_Front.L.002",
512 | "DressBone_FL4" : "Skirt_Front.L.003",
513 | "DressBone_FL5" : "Skirt_Front.L.004",
514 | "DressBone_FR1" : "Skirt_Front.R",
515 | "DressBone_FR2" : "Skirt_Front.R.001",
516 | "DressBone_FR3" : "Skirt_Front.R.002",
517 | "DressBone_FR4" : "Skirt_Front.R.003",
518 | "DressBone_FR5" : "Skirt_Front.R.004",
519 | "DressBone_L1" : "Skirt.L",
520 | "DressBone_L2" : "Skirt.L.001",
521 | "DressBone_L3" : "Skirt.L.002",
522 | "DressBone_R1" : "Skirt.R",
523 | "DressBone_R2" : "Skirt.R.001",
524 | "DressBone_R3" : "Skirt.R.002",
525 | "DressBone_BL1" : "Skirt_Back.L",
526 | "DressBone_BL2" : "Skirt_Back.L.001",
527 | "DressBone_BL3" : "Skirt_Back.L.002",
528 | "DressBone_BR1" : "Skirt_Back.R",
529 | "DressBone_BR2" : "Skirt_Back.R.001",
530 | "DressBone_BR3" : "Skirt_Back.R.002",
531 | "_LFaceHair01" : "Hair_Front_01",
532 | "_LFaceHair02" : "Hair_Front_01.001",
533 | "_FrontHair01" : "Hair_Front_02",
534 | "_FrontHair02" : "Hair_Front_02.001",
535 | "_RFaceHair01" : "Hair_Front_03",
536 | "_RFaceHair02" : "Hair_Front_03.001",
537 | "_LHair01" : "Hair.L",
538 | "_LHair02" : "Hair.L.001",
539 | "_RHair01" : "Hair.R",
540 | "_RHair02" : "Hair.R.001",
541 | "_BackHair01" : "Hair_Back",
542 | "_BackHair02" : "Hair_Back.001",
543 | "_PonyTail01" : "Hair_Ponytail",
544 | "_PonyTail02" : "Hair_Ponytail.001",
545 | "_PonyTail03" : "Hair_Ponytail.002",
546 | "_C_chinJnt" : "Chin_Mid",
547 | }
548 |
549 | elizabeth_revzin = {
550 | "bip_pelvis" : "Hip_Def",
551 | "bip_spine_0" : "Spine1_Def",
552 | "bip_spine_1" : "Spine2_Def",
553 | "bip_spine_2" : "Spine3_Def",
554 | "bip_spine_3" : "Spine4_Def",
555 | "bip_neck" : "Neck_Def",
556 | "bip_head" : "Head_Def",
557 | "bip_collar_L" : "Clavicle_Def.L",
558 | "bip_collar_R" : "Clavicle_Def.R",
559 | "bip_upperArm_L" : "Shoulder_Def.L",
560 | "bip_upperArm_R" : "Shoulder_Def.R",
561 | "hlp_upperArm_L" : "Twist_Arm_6.L",
562 | "hlp_upperArm_R" : "Twist_Arm_6.R",
563 | "bip_lowerArm_L" : "Elbow_Def.L",
564 | "bip_lowerArm_R" : "Elbow_Def.R",
565 | "hlp_lowerArm_L" : "Twist_Arm_3.L",
566 | "hlp_lowerArm_R" : "Twist_Arm_3.R",
567 | "hlp_chestbreath" : "BreathingBone",
568 | "bip_hand_L" : "Hand_Def.L",
569 | "bip_hand_R" : "Hand_Def.R",
570 | "bip_thumb_0_L" : "Thumb1.L",
571 | "bip_thumb_0_R" : "Thumb1.R",
572 | "bip_thumb_1_L" : "Thumb2.L",
573 | "bip_thumb_1_R" : "Thumb2.R",
574 | "bip_thumb_2_L" : "Thumb3.L",
575 | "bip_thumb_2_R" : "Thumb3.R",
576 | "bip_index_0_L" : "Index2.L",
577 | "bip_index_0_R" : "Index2.R",
578 | "bip_index_1_L" : "Index3.L",
579 | "bip_index_1_R" : "Index3.R",
580 | "bip_index_2_L" : "Index4.L",
581 | "bip_index_2_R" : "Index4.R",
582 | "bip_middle_0_L" : "Middle2.L",
583 | "bip_middle_0_R" : "Middle2.R",
584 | "bip_middle_1_L" : "Middle3.L",
585 | "bip_middle_1_R" : "Middle3.R",
586 | "bip_middle_2_L" : "Middle4.L",
587 | "bip_middle_2_R" : "Middle4.R",
588 | "bip_ring_0_L" : "Ring2.L",
589 | "bip_ring_0_R" : "Ring2.R",
590 | "bip_ring_1_L" : "Ring3.L",
591 | "bip_ring_1_R" : "Ring3.R",
592 | "bip_ring_2_L" : "Ring4.L",
593 | "bip_ring_2_R" : "Ring4.R",
594 | "bip_pinky_0_L" : "Pinky2.L",
595 | "bip_pinky_0_R" : "Pinky2.R",
596 | "bip_pinky_1_L" : "Pinky3.L",
597 | "bip_pinky_1_R" : "Pinky3.R",
598 | "bip_pinky_2_L" : "Pinky4.L",
599 | "bip_pinky_2_R" : "Pinky4.R",
600 | # Revzin's model has no face bones.
601 | "bip_hip_L" : "Thigh_Def.L",
602 | "bip_hip_R" : "Thigh_Def.R",
603 | "bip_knee_L" : "Knee_Def.L",
604 | "bip_knee_R" : "Knee_Def.R",
605 | "bip_foot_L" : "Foot_Def.L",
606 | "bip_foot_R" : "Foot_Def.R",
607 | "bip_toe_L" : "Toes_Def.L",
608 | "bip_toe_R" : "Toes_Def.R",
609 | "prp_dress_l01_1" : "Skirt_Front.L",
610 | "prp_dress_l01_2" : "Skirt_Front.L.001",
611 | "prp_dress_l01_3" : "Skirt_Front.L.002",
612 | "prp_dress_l01_4" : "Skirt_Front.L.003",
613 | "prp_dress_l01_5" : "Skirt_Front.L.004",
614 | "prp_dress_r01_1" : "Skirt_Front.R",
615 | "prp_dress_r01_2" : "Skirt_Front.R.001",
616 | "prp_dress_r01_3" : "Skirt_Front.R.002",
617 | "prp_dress_r01_4" : "Skirt_Front.R.003",
618 | "prp_dress_r01_5" : "Skirt_Front.R.004",
619 | "prp_dress_l02_1" : "Skirt.L",
620 | "prp_dress_l02_2" : "Skirt.L.001",
621 | "prp_dress_l02_3" : "Skirt.L.002",
622 | "prp_dress_r02_1" : "Skirt.R",
623 | "prp_dress_r02_2" : "Skirt.R.001",
624 | "prp_dress_r02_3" : "Skirt.R.002",
625 | "prp_dress_r03_1" : "Skirt_Back.L",
626 | "prp_dress_r03_2" : "Skirt_Back.L.001",
627 | "prp_dress_r03_3" : "Skirt_Back.L.002",
628 | "prp_dress_l03_1" : "Skirt_Back.R",
629 | "prp_dress_l03_2" : "Skirt_Back.R.001",
630 | "prp_dress_l03_3" : "Skirt_Back.R.002",
631 | }
632 |
633 | class RenameSkeletonToMetsRig(bpy.types.Operator):
634 | """ Rename skeletons from various naming conventions to my own. """
635 | bl_idname = "object.rename_skeleton"
636 | bl_label = "Rename Bones To MetsRig Convention"
637 | bl_options = {'REGISTER', 'UNDO'}
638 |
639 | source_convention: EnumProperty(
640 | name="Source Convention",
641 | items=[
642 | ("Overwatch","Overwatch","Overwatch"),
643 | ("Bioshock","Bioshock","Bioshock"),
644 | ("Valve","Valve","Valve"),
645 | ("Revzin","Revzin","Revzin"),
646 | ],
647 | default="Overwatch"
648 | )
649 |
650 | def execute(self, context):
651 | if(type(context.object) != bpy.types.Object or
652 | context.object.type != 'ARMATURE'): return {'CANCELLED'}
653 |
654 | prefix = prefix_ow
655 | name_dict = overwatch
656 | if(self.source_convention=='Bioshock'):
657 | prefix = prefix_bi
658 | name_dict = elizabeth_bioshock
659 | elif(self.source_convention=='Valve'):
660 | prefix = prefix_valve
661 | name_dict = valve
662 | elif(self.source_convention=='Revzin'):
663 | prefix = ""
664 | name_dict = elizabeth_revzin
665 |
666 | for b in context.object.pose.bones:
667 | try:
668 | new_name = name_dict[b.name[len(prefix):]]
669 | print("Renaming " + b.name + " to: " + new_name)
670 | b.name = new_name
671 | except KeyError:
672 | print("Not renaming: " + b.name)
673 | return {"FINISHED"}
674 |
675 | def register():
676 | from bpy.utils import register_class
677 | register_class(RenameSkeletonToMetsRig)
678 |
679 | def unregister():
680 | from bpy.utils import unregister_class
681 | unregister_class(RenameSkeletonToMetsRig)
--------------------------------------------------------------------------------
/MetsTools/smart_weight_transfer.py:
--------------------------------------------------------------------------------
1 | bl_info = {
2 | "name": "Distance Weighted Weight Transfer",
3 | "description": "Smart Transfer Weights operator",
4 | "author": "Mets 3D",
5 | "version": (1, 2),
6 | "blender": (2, 80, 0),
7 | "location": "Search -> Smart Weight Transfer", # TODO: Add to Weights menu.
8 | "category": "Object"
9 | }
10 |
11 | import bpy
12 | import mathutils
13 | from mathutils import Vector
14 | import math
15 | from bpy.props import *
16 |
17 | def build_weight_dict(obj, vgroups=None, mask_vgroup=None, bone_combine_dict=None):
18 | """ Builds and returns a dictionary that maps each vertex index of the object to a list of (vertex group name, weight) tuples for the groups that vertex belongs to.
19 | vgroups: If passed, skip groups that aren't in vgroups.
20 | bone_combine_dict: Can be specified if we want some bones to be merged into others, eg. passing in {'Toe_Main' : ['Toe1', 'Toe2', 'Toe3']} will combine the weights in the listed toe bones into Toe_Main. You would do this when transferring weights from a model of actual feet onto shoes.
21 | """
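# A returned dict might look like: {0: [('Hand_Def.L', 0.75), ('Elbow_Def.L', 0.25)], 5: [('Hand_Def.L', 1.0)]}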
22 |
23 | weight_dict = {} # {vert index : [('vgroup_name', vgroup_value), ...], ...}
24 |
25 | if(vgroups==None):
26 | vgroups = obj.vertex_groups
27 |
28 | for v in obj.data.vertices:
29 | # TODO: instead of looking through all vgroups we should be able to get only the groups that this vert is assigned to via v.groups[0].group which gives the group id which we can use to get the group via Object.vertex_groups[id]
30 | # With this maybe it's useless altogether to save the weights into a dict? idk.
31 | # Although the reason we are doing it this way is because we wanted some bones to be considered the same as others. (eg. toe bones should be considered a single combined bone)
32 | for vg in vgroups:
33 | w = 0
34 | try:
35 | w = vg.weight(v.index)
36 | except:
37 | pass
38 |
39 | # Adding the weights from any sub-vertexgroups defined in bone_combine_dict
40 | if(bone_combine_dict and vg.name in bone_combine_dict):
41 | for sub_vg_name in bone_combine_dict[vg.name]:
42 | sub_vg = obj.vertex_groups.get(sub_vg_name)
43 | if(sub_vg==None): continue
44 | try:
45 | w = w + sub_vg.weight(v.index)
46 | except RuntimeError:
47 | pass
48 |
49 | if(w==0): continue
50 |
51 | # Masking transfer influence
52 | if(mask_vgroup):
53 | try:
54 | multiplier = mask_vgroup.weight(v.index)
55 | w = w * multiplier
56 | except:
57 | pass
58 |
59 | # Create or append entry in the dict.
60 | if(v.index not in weight_dict):
61 | weight_dict[v.index] = [(vg.name, w)]
62 | else:
63 | weight_dict[v.index].append((vg.name, w))
64 |
65 | return weight_dict
66 |
67 | def build_kdtree(obj):
68 | kd = mathutils.kdtree.KDTree(len(obj.data.vertices))
69 | for i, v in enumerate(obj.data.vertices):
70 | kd.insert(v.co, i)
71 | kd.balance()
72 | return kd
73 |
74 | def smart_transfer_weights(obj_from, obj_to, weights, max_verts=30, max_dist=10, dist_multiplier=1000, ):
75 | """ Smart Vertex Weight Transfer.
76 | The number of nearby verts which it searches for depends on how far the nearest vert is. (This is controlled by max_verts, max_dist and dist_multiplier)
77 | This means if a very close vert is found, it won't look for any more verts.
78 | If the nearest vert is quite far away (or dist_multiplier is set high), it will average the influences of a larger number of verts.
79 | The averaging of the influences is also weighted by their distance, so that a vertex which is twice as far away will contribute half as much influence to the final result.
80 | weights: a dictionary of vertex weights that needs to be built with build_weight_dict().
81 | """
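# For example, with dist_multiplier=1000: a nearest vert 0.001 units away gives
# 1 + round((0.001*1000)**2) = 2 source verts, one 0.002 away gives 5, and the
# count is always capped at max_verts.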
82 | kd = build_kdtree(obj_from)
83 |
84 | for v in obj_to.data.vertices:
85 | # Finding the nearest vertex on source object
86 | nearest_co, nearest_idx, nearest_dist = kd.find(v.co)
87 |
88 | # Determine how many verts in total should be checked, based on the distance of the closest vert.
89 | number_of_source_verts = 1 + round( pow( (nearest_dist * dist_multiplier), 2 ) )
90 | number_of_source_verts = max_verts if number_of_source_verts > max_verts else number_of_source_verts
91 |
92 |
93 | # Find the previously calculated amount of nearest verts. Save their index and distance to a list of (index, distance) tuples.
94 | source_verts = []
95 |
96 | for(co, index, dist) in kd.find_n(v.co, number_of_source_verts):
97 | if( (index not in weights) or (dist > max_dist) ): # If the found vert doesn't have any weights OR is too far away
98 | continue
99 | source_verts.append((index, dist))
100 |
101 | # Sort valid verts by distance (least to most distance)
102 | source_verts.sort(key=lambda tup: tup[1])
103 |
104 | # Iterating through the source verts, from closest to furthest, and accumulating our target weight for each vertex group.
105 | vgroup_weights = {} # Dictionary of Vertex Group Name : Weight
106 | for i in range(0, len(source_verts)):
107 | vert = source_verts[i]
108 | # The closest vert's weights are multiplied by the farthest vert's distance, and vice versa. The 2nd closest will use the 2nd farthest, etc.
109 | # Note: The magnitude of the distance vectors doesn't matter because at the end they will be normalized anyways.
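# For example, if the sorted distances are [0.1, 0.2, 0.4], the closest vert's weights
# get scaled by 0.4, the middle one's by 0.2, and the farthest one's by 0.1.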
110 | pair_distance = source_verts[-i-1][1]
111 | for vg_name, vg_weight in weights[vert[0]]:
112 | new_weight = vg_weight * pair_distance
113 | if(vg_name not in vgroup_weights):
114 | vgroup_weights[vg_name] = new_weight
115 | else:
116 | vgroup_weights[vg_name] = vgroup_weights[vg_name] + new_weight
117 |
118 | # The sum is used to normalize the weights. This is important because otherwise the values would depend on object scale, and in the case of very small or very large objects, stuff could get culled.
119 | weights_sum = sum(vgroup_weights.values())
120 |
121 | # Assigning the final, normalized weights of this vertex to the vertex groups.
122 | for vg_avg in vgroup_weights.keys():
123 | target_vg = obj_to.vertex_groups.get(vg_avg)
124 | if(target_vg == None):
125 | target_vg = obj_to.vertex_groups.new(name=vg_avg)
126 | target_vg.add([v.index], vgroup_weights[vg_avg]/weights_sum, 'REPLACE')
127 |
128 | bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
129 |
130 | w3_bone_dict_str = """{
131 | 'Hip_Def' : ['Gens_Root', 'Vagoo_Root', 'Anus_Root', 'Gens_Mid', 'Butt_Mid',
132 | 'Vagoo_Top', 'Vagoo.L', 'Vagoo.R', 'Vagoo_Bottom',
133 | 'Anus_Top', 'Anus_Bottom',
134 | 'Anus.L.004', 'Anus.L.003', 'Anus.L.002', 'Anus.L.001', 'Anus.L',
135 | 'Anus.R', 'Anus.R.001', 'Anus.R.002', 'Anus.R.003', 'Anus.R.004'],
136 |
137 | 'Butt.L' : ['Butt_Top.L', 'Butt_Inner.L', 'Butt_Bot.L', 'Butt_Outer.L'],
138 |
139 | 'Butt.R' : ['Butt_Top.R', 'Butt_Outer.R', 'Butt_Bot.R', 'Butt_Inner.R'],
140 |
141 | 'Breast.L' : ['Breast_Top.L', 'Breast_Outer.L', 'Breast_Inner.L', 'Breast_Bot.L', 'Breast_Nipple.L'],
142 |
143 | 'Breast.R' : ['Breast_Top.R', 'Breast_Inner.R', 'Breast_Outer.R', 'Breast_Bot.R'],
144 |
145 | 'Toe_Def.L' : ['Toe_Thumb1.L', 'Toe_Thumb2.L', 'Toe_Index1.L', 'Toe_Index2.L', 'Toe_Middle1.L', 'Toe_Middle2.L', 'Toe_Ring1.L', 'Toe_Ring2.L', 'Toe_Pinky1.L', 'Toe_Pinky2.L'],
146 |
147 | 'Toe_Def.R' : ['Toe_Thumb1.R', 'Toe_Thumb2.R', 'Toe_Index1.R', 'Toe_Index2.R', 'Toe_Middle1.R', 'Toe_Middle2.R', 'Toe_Ring1.R', 'Toe_Ring2.R', 'Toe_Pinky1.R', 'Toe_Pinky2.R'],
148 |
149 | 'Hand_Def.L' : ['l_thumb_roll', 'l_pinky0', 'l_index_knuckleRoll', 'l_middle_knuckleRoll', 'l_ring_knuckleRoll'],
150 |
151 | 'Hand_Def.R' : ['r_thumb_roll', 'r_pinky0', 'r_index_knuckleRoll', 'r_middle_knuckleRoll', 'r_ring_knuckleRoll'],
152 | }"""
153 |
154 | w3_vgroups = ['Hip_Def', 'Butt_Mid', 'Neck_Def', 'Head_Def', 'Breast.R', 'Breast.L', 'Clavicle_Def.R', 'Clavicle_Adjust.R', 'Clavicle_Def.L', 'Clavicle_Adjust.L', 'Spine3_Def', 'Spine2_Def', 'Spine1_Def', 'Adjust_Knee.R', 'Twist_Leg_2.R', 'Twist_Leg_1.R', 'Foot_Def.R', 'Toes_Def.R', 'Toe_Def.R', 'Butt.R', 'Thigh_Def.R', 'Twist_Leg_3.R', 'Adjust_Thigh_Front.R', 'Adjust_Thigh_Side.R', 'Twist_Leg_4.R', 'Adjust_Knee.L', 'Twist_Leg_2.L', 'Twist_Leg_1.L', 'Foot_Def.L', 'Toes_Def.L', 'Toe_Def.L', 'Butt.L', 'Thigh_Def.L', 'Twist_Leg_3.L', 'Adjust_Thigh_Front.L', 'Adjust_Thigh_Side.L', 'Twist_Leg_4.L', 'Elbow_Def.R', 'Adjust_Elbow_Lower.R', 'Shoulder_Def.R', 'Adjust_Elbow_Upper.R', 'Twist_Arm_5.R', 'Twist_Arm_6.R', 'Twist_Arm_2.R', 'Twist_Arm_1.R', 'Twist_Arm_4.R', 'Twist_Arm_3.R', 'Hand_Def.R', 'Elbow_Def.L', 'Adjust_Elbow_Lower.L', 'Shoulder_Def.L', 'Adjust_Elbow_Upper.L', 'Twist_Arm_5.L', 'Twist_Arm_6.L', 'Twist_Arm_1.L', 'Twist_Arm_2.L', 'Twist_Arm_4.L', 'Twist_Arm_3.L', 'Hand_Def.L']
155 |
156 | class SmartWeightTransferOperator(bpy.types.Operator):
157 | """Transfer weights from active to selected objects based on weighted vert distances."""
158 | bl_idname = "object.smart_weight_transfer"
159 | bl_label = "Smart Transfer Weights"
160 | bl_options = {'REGISTER', 'UNDO'}
161 |
162 | opt_source_vgroups: EnumProperty(name="Source Groups",
163 | items=[("ALL", "All", "All"),
164 | ("SELECTED", "Selected Bones", "Selected Bones"),
165 | ("DEFORM", "Deform Bones", "Deform Bones"),
166 | ],
167 | description="Which vertex groups to transfer from the source object",
168 | default="ALL")
169 |
170 | opt_wipe_originals: BoolProperty(name="Wipe originals",
171 | default=True,
172 | description="Wipe original vertex groups before transferring. Recommended. Does not wipe vertex groups that aren't being transferred in the first place")
173 |
174 | opt_max_verts: IntProperty(name="Max considered verts",
175 | default=5,
176 | description="Increase this if your mesh is very high poly or decrease for very low poly. Setting it to 1 gives the same result as the built-in Transfer Weights operator")
177 |
178 | opt_max_dist: FloatProperty(name="Max distance",
179 | default=1000,
180 | description="Higher values allow weights from further away verts to contribute to the result.")
181 |
182 | opt_dist_multiplier: FloatProperty(name="Smoothness",
183 | default=1000,
184 | description="Higher values will consider more verts based on the distance of the closest vert. Has less effect on verts that are close to the source mesh. If the source and the target mesh are exactly the same, this has no effect. Increasing this after a certain point will have no effect since the maximum allowed verts will be reached before the maximum distance")
185 |
186 | def get_vgroups(self, context):
187 | items = [('None', 'None', 'None')]
188 | for vg in context.object.vertex_groups:
189 | items.append((vg.name, vg.name, vg.name))
190 | return items
191 |
192 | opt_mask_vgroup: EnumProperty(name="Operator Mask",
193 | items=get_vgroups,
194 | description="The operator's effect will be masked by this vertex group, unless 'None'")
195 |
196 | opt_bone_combine_dict: StringProperty(name='Combine Dict',
197 | description="If you want some groups to be considered part of others(eg. to avoid transferring individual toe weights onto shoes), you can enter them here in the form of a valid Python dictionary, where the keys are the parent group name, and values are lists of child group names, eg: {'Toe_Main.L' : ['Toe1.L', 'Toe2.L'], 'Toe_Main.R' : ['Toe1.R', 'Toe2.R']}",
198 | default=w3_bone_dict_str
199 | )
200 |
201 | @classmethod
202 | def poll(cls, context):
203 | return (context.object is not None) and (context.object.mode=='WEIGHT_PAINT')
204 |
205 | def draw(self, context):
206 | operator = self.layout.operator(SmartWeightTransferOperator.bl_idname, text=SmartWeightTransferOperator.bl_label)
207 |
208 | def execute(self, context):
209 | assert len(context.selected_objects) > 1, "At least two objects must be selected. Select the source object last, and enter weight paint mode."
210 |
211 | bone_dict = {}
212 | if(self.opt_bone_combine_dict != ""):
213 | bone_dict = eval(self.opt_bone_combine_dict)
214 |
215 | source_obj = context.object
216 | for o in context.selected_objects:
217 | if(o==source_obj or o.type!='MESH'): continue
218 | bpy.ops.object.mode_set(mode='OBJECT')
219 | bpy.ops.object.select_all(action='DESELECT')
220 |
221 | vgroups = []
222 | error = ""
223 | if(self.opt_source_vgroups == "ALL"):
224 | vgroups = source_obj.vertex_groups
225 | error = "the source has no vertex groups."
226 | elif(self.opt_source_vgroups == "SELECTED"):
227 | assert context.selected_pose_bones, "No selected pose bones to transfer from."
228 | vgroups = [source_obj.vertex_groups.get(b.name) for b in context.selected_pose_bones]
229 | error = "no bones were selected."
230 | elif(self.opt_source_vgroups == "DEFORM"):
231 | vgroups = [source_obj.vertex_groups.get(b.name) for b in context.pose_object.data.bones if b.use_deform]
232 | error = "there are no deform bones"
233 |
234 | # Using hard coded vertex group names because it's easier than selecting all the right bones, I guess? TODO: could turn that hardcoded list into a parameter, just like the bone dict.
235 | # vgroups = [source_obj.vertex_groups.get(vgn) for vgn in w3_vgroups]
236 |
237 | # Clean up
238 | vgroups = [vg for vg in vgroups if vg != None]
239 | assert len(vgroups) > 0, "No transferable Vertex Groups were found, " + error
240 |
241 | # Delete the vertex groups from the destination mesh first...
242 | if(self.opt_wipe_originals):
243 | for vg in vgroups:
244 | if(vg.name in o.vertex_groups):
245 | o.vertex_groups.remove(o.vertex_groups.get(vg.name))
246 |
247 | mask_vgroup = o.vertex_groups.get(self.opt_mask_vgroup)
248 |
249 | weights = build_weight_dict(source_obj, vgroups, mask_vgroup, bone_dict)
250 | smart_transfer_weights(source_obj, o, weights, self.opt_max_verts, self.opt_max_dist, self.opt_dist_multiplier)
251 |
252 | bpy.context.view_layer.objects.active = o
253 | bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
254 |
255 | return { 'FINISHED' }
256 |
257 | def register():
258 | from bpy.utils import register_class
259 | register_class(SmartWeightTransferOperator)
260 | bpy.types.VIEW3D_MT_paint_weight.append(SmartWeightTransferOperator.draw)
261 |
262 | def unregister():
263 | from bpy.utils import unregister_class
264 | unregister_class(SmartWeightTransferOperator)
265 | bpy.types.VIEW3D_MT_paint_weight.remove(SmartWeightTransferOperator.draw)
--------------------------------------------------------------------------------
/MetsTools/utils.py:
--------------------------------------------------------------------------------
1 | # Collection of functions that are either used by other parts of the addon, or random code snippets that I wanted to include but aren't actually used.
2 |
3 | def assign_object_and_material_ids(start=1):
4 | counter = start
5 |
6 | for o in bpy.context.selected_objects:
7 | if(o.type=='MESH'):
8 | o.pass_index = counter
9 | counter = counter + 1
10 |
11 | counter = start
12 | for m in bpy.data.materials:
13 | m.pass_index = counter
14 | counter = counter + 1
15 |
16 | def connect_parent_bones():
17 | # If the active object is an Armature
18 | # For each bone
19 | # If there is only one child
20 | # Move the tail to the child's head
21 | # Set Child's Connected to True
22 |
23 | armature = bpy.context.object
24 | if(armature.type != 'ARMATURE'): return
25 | else:
26 | bpy.ops.object.mode_set(mode="EDIT")
27 | for b in armature.data.edit_bones:
28 | if(len(b.children) == 1):
29 | b.tail = b.children[0].head
30 | #b.children[0].use_connect = True
31 |
32 | def uniform_scale():
33 | for o in bpy.context.selected_objects:
34 | o.dimensions = [1, 1, 1]
35 | o.scale = [min(o.scale), min(o.scale), min(o.scale)]
36 |
37 | def flip_name(from_name):
38 | # based on BLI_string_flip_side_name in https://developer.blender.org/diffusion/B/browse/master/source/blender/blenlib/intern/string_utils.c
39 |
40 | l = len(from_name) # Number of characters from left to right, that we still care about. At first we care about all of them.
41 |
42 | # Handling .### cases
43 | if("." in from_name):
44 | # Make sure there are only digits after the last period
45 | after_last_period = from_name.split(".")[-1]
46 | before_last_period = from_name.replace("."+after_last_period, "")
47 | all_digits = True
48 | for c in after_last_period:
49 | if( c not in "0123456789" ):
50 | all_digits = False
51 | break
52 | # If that is so, then we don't care about the characters after this last period.
53 | if(all_digits):
54 | l = len(before_last_period)
55 |
56 | # Case: Suffix or prefix R r L l separated by . - _
57 | name = from_name[:l]
58 | new_name = name
59 | separators = ".-_"
60 | for s in separators:
61 | # Suffixes
62 | if(s+"L" == name[-2:]):
63 | new_name = name[:-1] + 'R'
64 | break
65 | if(s+"R" == name[-2:]):
66 | new_name = name[:-1] + 'L'
67 | break
68 |
69 | if(s+"l" == name[-2:]):
70 | new_name = name[:-1] + 'r'
71 | break
72 | if(s+"r" == name[-2:]):
73 | new_name = name[:-1] + 'l'
74 | break
75 |
76 | # Prefixes
77 | if("L"+s == name[:2]):
78 | new_name = "R" + name[1:]
79 | break
80 | if("R"+s == name[:2]):
81 | new_name = "L" + name[1:]
82 | break
83 |
84 | if("l"+s == name[:2]):
85 | new_name = "r" + name[1:]
86 | break
87 | if("r"+s == name[:2]):
88 | new_name = "l" + name[1:]
89 | break
90 |
91 | if(new_name != name):
92 | return new_name + from_name[l:]
93 |
94 | # Case: "left" or "right" with any case found anywhere in the string.
95 |
96 | left = ['left', 'Left', 'LEFT']
97 | right = ['right', 'Right', 'RIGHT']
98 |
99 | lists = [left, right, left] # To get the opposite side, we just get lists[i-1]. No duplicate code, yay!
100 |
101 | # Trying to find any left/right string.
102 | for list_idx in range(1, 3):
103 | for side_idx, side in enumerate(lists[list_idx]):
104 | if(side in name):
105 | # If it occurs more than once, only replace the last occurrence.
106 | before_last_side = "".join(name.split(side)[:-1])
107 | after_last_side = name.split(side)[-1]
108 | opp_side = lists[list_idx-1][side_idx]
109 | return before_last_side + opp_side + after_last_side + from_name[l:]
110 |
111 | # If nothing was found, return the original string.
112 | return from_name
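# Examples of what the logic above produces:
#   flip_name("Hand.L")        -> "Hand.R"
#   flip_name("Toe1.L.002")    -> "Toe1.R.002"
#   flip_name("left_eyebrow1") -> "right_eyebrow1"
#   flip_name("Spine1_Def")    -> "Spine1_Def" (no side marker, returned unchanged)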
--------------------------------------------------------------------------------
/MetsTools/weighted_normals.py:
--------------------------------------------------------------------------------
1 | # Originally written for Blender 2.79 by blenderartists.org user "notallowed":
2 | # https://blenderartists.org/t/weighted-normals-calculator/643637
3 |
4 | bl_info = {
5 | "name": "Weighted Normals Calculation",
6 | "description": "Simple operator to calculate weighted normals on the mesh.",
7 | "author": "Simon Lusenc (50keda)",
8 | "version": (1, 3),
9 | "blender": (2, 80, 0),
10 | "location": "3D View > Quick Search",
11 | "category": "Object",
12 | "support": "COMMUNITY"
13 | }
14 |
15 | import bpy, bmesh, array
16 | from mathutils import Vector
17 |
18 | def calc_weighted_normal(bm, vert_index, edge_index, cache):
19 | """Calculates weighted normal for given combination of vertex and edge index.
20 | WARNING: There is no safety check that these two belong together.
21 |
22 | :param bm: bmesh object
23 | :type bm: bmesh
24 | :param vert_index: index of the vertex to calculate normal for
25 | :type vert_index: int
26 | :param edge_index: index of the edge to use for calculation (vertex has to belong to this edge)
27 | :returns: Vector
28 | """
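# Each linked smooth face contributes its normal scaled by (its area / the largest area found),
# e.g. faces with areas 1.0 and 2.0 contribute 0.5 and 1.0 of their normals respectively;
# the summed vector is then normalized.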
29 | normal_hash = str(vert_index) + ":" + str(edge_index)
30 |
31 | if normal_hash in cache:
32 | return cache[normal_hash]
33 |
34 | edge = bm.edges[edge_index]
35 | vert = bm.verts[vert_index]
36 |
37 | selected_faces = []
38 |
39 | # edge.seam = True
40 | # edge.select_set(True)
41 |
42 | for f in edge.link_faces:
43 | if not f.select:
44 | f.select = True
45 | selected_faces.append(f)
46 |
47 | # select linked faces of already selected edges
48 | # until every smooth face around current loop is selected
49 | more_selected = 1
50 | while more_selected > 0:
51 | more_selected = 0
52 | for edge1 in vert.link_edges:
53 | if edge1.smooth and edge1.select:
54 | for f in edge1.link_faces:
55 | if not f.select:
56 | f.select = True
57 | selected_faces.append(f)
58 | more_selected += 1
59 |
60 | # calc areas
61 | max_area = 0
62 | areas = {}
63 | for i, f in enumerate(selected_faces):
64 | area = f.calc_area()
65 | areas[i] = area
66 |
67 | if area > max_area:
68 | max_area = area
69 |
70 | # calc normal
71 | normal = Vector()
72 | for i, f in enumerate(selected_faces):
73 | perc = areas[i] / max_area
74 | f.normal_update()
75 | normal += perc * f.normal
76 |
77 | # also unselect all the faces
78 | f.select = False
79 |
80 | cache[normal_hash] = normal.normalized()
81 | return normal.normalized()
82 |
83 | class WeightNormalsCalculator(bpy.types.Operator):
84 | """Calculate weighted normals for active object."""
85 | bl_idname = "object.calculate_weighted_normals"
86 | bl_label = "Weight Normals"
87 | bl_options = {'REGISTER', 'UNDO'}
88 |
89 | @classmethod
90 | def poll(cls, context):
91 | return context.object and context.object.mode == "OBJECT" and context.object.type == "MESH"
92 |
93 | def execute(self, context):
94 | for obj in context.selected_objects:
95 | if(obj.type!='MESH'): continue
96 | cache = {} # Cache for calculated weighted normals. It stores normals by key: 'vert_index:edge_index'.
97 |
98 | mesh = obj.data
99 |
100 | bm = bmesh.new()
101 | bm.from_mesh(mesh)
102 | bm.verts.ensure_lookup_table()
103 | bm.edges.ensure_lookup_table()
104 |
105 | # unselect everything first
106 | for v in bm.faces:
107 | v.select = False
108 | v.hide = False
109 |
110 | for v in bm.edges:
111 | v.select = False
112 | v.hide = False
113 |
114 | for v in bm.verts:
115 | v.select = False
116 | v.hide = False
117 |
118 | nor_list = [(0,)] * len(mesh.loops)
119 | for f in bm.faces:
120 |
121 | # map both edge indices into vertex (loop has info only about one edge)
122 | verts_edge_map = {}
123 | for e in f.edges:
124 | for v in e.verts:
125 | v_i = v.index
126 | if v_i not in verts_edge_map:
127 | verts_edge_map[v_i] = {e.index: 1}
128 | else:
129 | verts_edge_map[v_i][e.index] = 1
130 |
131 | for curr_loop in f.loops:
132 | edge_keys = verts_edge_map[curr_loop.vert.index].keys()
133 |
134 | # if the current loop's vertex has at least one non-sharp edge around it, calculate the weighted normal from it
135 | for e_i in edge_keys:
136 | if not mesh.edges[e_i].use_edge_sharp:
137 | curr_n = calc_weighted_normal(bm, curr_loop.vert.index, e_i, cache)
138 | nor_list[curr_loop.index] = curr_n
139 | break
140 | else:
141 | nor_list[curr_loop.index] = mesh.loops[curr_loop.index].normal
142 |
143 | bm.free()
144 |
145 | mesh.use_auto_smooth = True
146 | bpy.ops.mesh.customdata_custom_splitnormals_clear()
147 | bpy.ops.mesh.customdata_custom_splitnormals_add()
148 | mesh.normals_split_custom_set(nor_list)
149 | mesh.free_normals_split()
150 | return {'FINISHED'}
151 |
152 | def register():
153 | from bpy.utils import register_class
154 | register_class(WeightNormalsCalculator)
155 |
156 | def unregister():
157 | from bpy.utils import unregister_class
158 | unregister_class(WeightNormalsCalculator)
159 |
160 | if __name__ == '__main__':
161 | register()
--------------------------------------------------------------------------------
/blender_addons.code-workspace:
--------------------------------------------------------------------------------
1 | {
2 | "folders": [
3 | {
4 | "path": "."
5 | },
6 | {
7 | "path": "C:\\Users\\Met\\AppData\\Roaming\\Blender Foundation\\Blender\\2.80\\scripts\\addons\\MetsTools"
8 | },
9 | {
10 | "path": "C:\\Users\\Met\\AppData\\Roaming\\Blender Foundation\\Blender\\2.80\\scripts\\addons\\io_witcher3_fbx"
11 | }
12 | ],
13 | "settings": {}
14 | }
--------------------------------------------------------------------------------
/clean_weight_islands.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | import bmesh
3 | import sys
4 |
5 | # Select linked verts if they are in the given vertex group.
6 | def select_linked_verts(bvert, mesh, group_index):
7 | bvert.select_set(True) # Select starting vert
8 | for be in bvert.link_edges: # For each edge connected to the vert
9 | for bv in be.verts: # For each of the edge's 2 verts
10 | if(not bv.select): # If this vert is not selected yet
11 | for g in mesh.vertices[bv.index].groups: # For each group this vertex belongs to
12 | if(g.group == group_index): # If this vert IS in the group
13 | select_linked_verts(bv, mesh, group_index) # Continue recursion
14 |
15 | def clean_weight_islands(o, groups=None, use_influence=False):
16 | # Removes weight "islands" from the given vertex groups (or all of them) on the object, keeping only the biggest island in each group.
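# An "island" is a set of verts connected by edges, where every vert has some weight in the group.
# The biggest island is the one with the greatest total weight if use_influence is True,
# otherwise the one with the most verts; all other islands get removed from the group.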
17 |
18 | if(groups==None):
19 | groups = o.vertex_groups
20 |
21 | # Saving state
22 | start_object = bpy.context.object
23 | start_mode = bpy.context.object.mode
24 |
25 | # Cleaning 0-weights
26 | bpy.context.view_layer.objects.active = o
27 | bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
28 | bpy.ops.object.vertex_group_clean(group_select_mode='ALL', limit=0)
29 |
30 | mesh = o.data
31 | for group in groups:
32 | print("group: " + group.name)
33 | bpy.ops.object.mode_set(mode='EDIT')
34 | bm = bmesh.from_edit_mesh(mesh)
35 | bm.verts.ensure_lookup_table()
36 |
37 | # Deselecting all verts
38 | bpy.ops.mesh.select_all(action='DESELECT')
39 |
40 | islands = [] # list of integer lists.
41 | checked_verts = []
42 |
43 | # Finding a random vert in the vertex group
44 | for v in mesh.vertices:
45 | if(v.index not in checked_verts):
46 | checked_verts.append(v.index)
47 | for g in v.groups:
48 | if(group.index == g.group): # If we found one
49 | sys.setrecursionlimit(10000)
50 | select_linked_verts(bm.verts[v.index], o.data, group.index)
51 | sys.setrecursionlimit(1000)
52 | island = []
53 | for bv in bm.verts:
54 | if(bv.select == True):
55 | checked_verts.append(bv.index)
56 | island.append(bv.index)
57 | islands.append(island)
58 | bpy.ops.mesh.select_all(action='DESELECT')
59 |
60 | marked_verts = [] # Verts marked for deletion from the vertex group due to not being part of the biggest island
61 |
62 | winning_island = [] # Verts of the current biggest island
63 | max_influence = 0 # Greatest total influence of an island
64 | max_verts = 0 # Greatest number of verts in an island
65 |
66 | for isl in islands:
67 | if(use_influence):
68 | total_influence = 0
69 | for i in isl:
70 | vert = mesh.vertices[i]
71 | for g in vert.groups:
72 | if(g.group == group.index):
73 | total_influence = total_influence + g.weight
74 | if(total_influence > max_influence):
75 | max_influence = total_influence
76 | marked_verts.extend(winning_island)
77 | winning_island = isl
78 | else:
79 | marked_verts.extend(isl)
80 | else:
81 | if(len(isl) > max_verts):
82 | max_verts = len(isl)
83 | marked_verts.extend(winning_island)
84 | winning_island = isl
85 | else:
86 | marked_verts.extend(isl)
87 | bpy.ops.object.mode_set(mode='OBJECT')
88 | group.remove(marked_verts)
89 |
90 |
91 | # Resetting state
92 | bpy.context.view_layer.objects.active = start_object
93 | bpy.ops.object.mode_set(mode=start_mode)
94 |
95 | for o in bpy.context.selected_objects:
96 | clean_weight_islands(o)
--------------------------------------------------------------------------------
/copy_drivers.py:
--------------------------------------------------------------------------------
1 | import bpy, sys
2 | from . import utils
3 |
4 | # Copy all drivers from the active bone to the selected bones.
5 | # Can also switch out driver variable targets and flip desired variable axes on the new drivers.
6 |
7 | # To Mirror drivers, set target_dict to None and enable X and Y flip.
8 |
9 | top_to_bottom_dict = { "CTR-Lip_Top.L" : "CTR-Lip_Bot.L",
10 | "CTR-Lip_Corner_Top.L" : "CTR-Lip_Corner_Bot.L",
11 | "CTR-Lip_Top.M" : "CTR-Lip_Bot.M",
12 | "CTR-Lip_Top.R" : "CTR-Lip_Bot.R",
13 | "CTR-Lip_Corner_Top.R" : "CTR-Lip_Corner_Bot.R"}
14 |
15 | lip_to_laughline_dict = {
16 | "CTR-Lip_Top.L" : "CTR-LaughLine.L.003",
17 | "CTR-Lip_Corner_Top.L" : "CTR-LaughLine.L.002",
18 | "CTR-Lip_Corner_Bot.L" : "CTR-LaughLine.L.001",
19 | "CTR-Lip_Bot.L" : "CTR-LaughLine.L",
20 | "CTR-Lip_Bot.M" : "CTR-LaughLine.M",
21 |
22 | "CTR-Lip_Top.R" : "CTR-LaughLine.R.003",
23 | "CTR-Lip_Corner_Top.R" : "CTR-LaughLine.R.002",
24 | "CTR-Lip_Corner_Bot.R" : "CTR-LaughLine.R.001",
25 | "CTR-Lip_Bot.R" : "CTR-LaughLine.R",
26 | }
27 |
28 | lip_to_eye_bot_l = {
29 | "CTR-Lip_Bot.M" : "CTR-Eyelid_Bot_1.L",
30 | "CTR-Lip_Bot.L" : "CTR-Eyelid_Bot_2.L",
31 | "CTR-Lip_Corner_Bot.L" : "CTR-Eyelid_Bot_3.L",
32 | "CTR-Lip_Corner_Top.L" : "CTR-Eyelid_Bot_4.L",
33 | "CTR-Lip_Top.L" : "CTR-Eyelid_Bot_5.L",
34 | }
35 |
36 | eye_bot_to_top = {
37 | "CTR-Eyelid_Bot_1.L" : "CTR-Eyelid_Top_1.L",
38 | "CTR-Eyelid_Bot_2.L" : "CTR-Eyelid_Top_2.L",
39 | "CTR-Eyelid_Bot_3.L" : "CTR-Eyelid_Top_3.L",
40 | "CTR-Eyelid_Bot_4.L" : "CTR-Eyelid_Top_4.L",
41 | "CTR-Eyelid_Bot_5.L" : "CTR-Eyelid_Top_5.L",
42 | }
43 |
44 | target_dict = None
45 |
46 | flip_x = True
47 | flip_y = True
48 | flip_z = False
49 |
50 | armature = bpy.context.object
51 | bones = armature.pose.bones
52 |
53 | from_bone = bpy.context.active_bone.name # Bone to copy drivers from
54 | to_bones = bpy.context.selected_pose_bones # Bones to copy drivers to
55 |
56 | for to_bone in to_bones:
57 | if(to_bone.name == from_bone):continue
58 |
59 | for d in armature.animation_data.drivers: # Look through every driver on the armature
60 | if('pose.bones["' + from_bone + '"]' in d.data_path): # If the driver belongs to the active bone
61 | ### Copying driver to selected bone...
62 |
63 | # The way drivers on bones work is weird af. You have to create the driver relative to the bone, but you have to read the driver relative to the armature. So d.data_path might look like "pose.bones["bone_name"].bone_property" but when we create a driver we just need the "bone_property" part.
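# e.g. a data_path of 'pose.bones["CTR-Lip_Top.L"].location' becomes just 'location' when re-creating the driver on the bone.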
64 | data_path = d.data_path.split("].")[1]
65 | to_bone.driver_remove(data_path)
66 | new_d = to_bone.driver_add(data_path)
67 |
68 | expression = d.driver.expression
69 |
70 | print("")
71 | print(expression)
72 |
73 | # Copy the variables
74 | for from_var in d.driver.variables:
75 | to_var = new_d.driver.variables.new()
76 | to_var.type = from_var.type
77 | to_var.name = from_var.name
78 | print(to_var.name)
79 | print(from_var.targets[0].transform_type)
80 |
81 | target_bone = from_var.targets[0].bone_target
82 | if(target_dict == None):
83 | target_bone = utils.flip_name(target_bone)
84 | else:
85 | new_target_bone = target_dict.get(target_bone)
86 | if(new_target_bone):
87 | target_bone = new_target_bone
88 |
89 | to_var.targets[0].id = from_var.targets[0].id
90 | to_var.targets[0].bone_target = target_bone
91 | to_var.targets[0].data_path = from_var.targets[0].data_path#.replace(fb, to_bones[i])
92 | to_var.targets[0].transform_type = from_var.targets[0].transform_type
93 | to_var.targets[0].transform_space = from_var.targets[0].transform_space
94 | # TODO: If transform is X Rotation, have a "mirror" option, to invert it in the expression. Better yet, detect if the new_target_bone is the opposite of the original.
95 |
96 | print(from_var.targets[0].transform_type)
97 | if( to_var.targets[0].bone_target and
98 | "SCALE" not in from_var.targets[0].transform_type and
99 | ( (from_var.targets[0].transform_type.endswith("_X") and flip_x) or
100 | (from_var.targets[0].transform_type.endswith("_Y") and flip_y) or
101 | (from_var.targets[0].transform_type.endswith("_Z") and flip_z) )
102 | ):
103 | # This is painful, I know.
104 | if("-"+to_var.name in expression):
105 | expression = expression.replace("-"+to_var.name, "+"+to_var.name)
106 | print(1)
107 | elif("+ "+to_var.name in expression):
108 | expression = expression.replace("+ "+to_var.name, "- "+to_var.name)
109 | print(2)
110 | else:
111 | expression = expression.replace(to_var.name, "-"+to_var.name)
112 | print("3")
113 |
114 | # Copy the expression
115 | new_d.driver.expression = expression
116 | print(expression)
--------------------------------------------------------------------------------
/io_witcher3_fbx/__init__.py:
--------------------------------------------------------------------------------
1 | # Blender Witcher 3 Importer Add-on
2 | # Copyright (C) 2019 MetsSFM
3 | #
4 | # This program is free software: you can redistribute it and/or modify
5 | # it under the terms of the GNU General Public License as published by
6 | # the Free Software Foundation, either version 3 of the License, or
7 | # (at your option) any later version.
8 | #
9 | # This program is distributed in the hope that it will be useful,
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 | # GNU General Public License for more details.
13 | #
14 | # You should have received a copy of the GNU General Public License
15 | # along with this program. If not, see <https://www.gnu.org/licenses/>.
16 |
17 | bl_info = {
18 | "name": "Witcher 3 FBX Import Tools",
19 | "author": "Mets3D",
20 | "version": (1, 0),
21 | "blender": (2, 80, 0),
22 | "location": "File->Import->Witcher 3 FBX",
23 | "description": "For importing Witcher 3 characters that were extracted by wcc_lite.exe.",
24 | "warning": "",
25 | "wiki_url": "",
26 | "tracker_url": "",
27 | "category": "Object"}
28 |
29 | import bpy
30 | from bpy.props import *
31 | from . import import_witcher3_fbx
32 | from . import weighted_normals
33 | from . import cleanup_mesh
34 |
35 | class Witcher3AddonPrefs(bpy.types.AddonPreferences):
36 | # this must match the addon name, use '__package__'
37 | # when defining this in a submodule of a python package.
38 | bl_idname = __package__
39 |
40 | uncook_path: StringProperty(
41 | name="Uncook Path",
42 | subtype='DIR_PATH',
43 | default='E:\\Path_to_your_uncooked_folder\\Uncooked\\',
44 | description="Path to where you uncooked the game using wcc_lite.exe or another tool. Will be searching for .tga textures here."
45 | )
46 |
47 | def draw(self, context):
48 | layout = self.layout
49 | layout.label(text="Witcher 3 FBX Importer settings:")
50 | layout.prop(self, "uncook_path")
51 |
52 | def register():
53 | import_witcher3_fbx.register()
54 | weighted_normals.register()
55 | cleanup_mesh.register()
56 | bpy.utils.register_class(Witcher3AddonPrefs)
57 |
58 | def unregister():
59 | import_witcher3_fbx.unregister()
60 | weighted_normals.unregister()
61 | cleanup_mesh.unregister()
62 | bpy.utils.unregister_class(Witcher3AddonPrefs)
--------------------------------------------------------------------------------
/io_witcher3_fbx/cleanup_mesh.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | from math import pi
3 | import bmesh
4 |
5 | def cleanup_mesh(obj,
6 | remove_doubles=False,
7 | quadrangulate=False,
8 | weight_normals=True,
9 | seams_from_islands=True,
10 | clear_unused_UVs=True,
11 | rename_single_UV=True):
12 |
13 | # Mode management
14 | org_active = bpy.context.object
15 | org_mode = org_active.mode
16 | org_selected = bpy.context.selected_objects[:]
17 | bpy.ops.object.mode_set(mode='OBJECT')
18 | bpy.ops.object.select_all(action='DESELECT')
19 | bpy.context.view_layer.objects.active = obj
20 | bpy.ops.object.mode_set(mode='EDIT')
21 |
22 | # Setting auto-smooth to 180 is necessary so that splitnormals_clear() doesn't mark sharp edges
23 | obj.data.use_auto_smooth = True
24 | obj.data.auto_smooth_angle = pi
25 | bpy.ops.mesh.customdata_custom_splitnormals_clear()
26 |
27 | if(quadrangulate):
28 | bpy.ops.mesh.tris_convert_to_quads(shape_threshold=1.0472, uvs=True, materials=True)
29 |
30 | if(remove_doubles):
31 | bpy.ops.mesh.remove_doubles(threshold=0.0001)
32 | bpy.ops.mesh.mark_sharp(clear=True)
33 |
34 | bpy.ops.object.mode_set(mode='OBJECT')
35 | bpy.context.view_layer.objects.active = obj # Active object needs to be a mesh for calculate_weighted_normals()
36 | if(weight_normals and remove_doubles): # Weight normals only works with remove doubles, otherwise throws ZeroDivisionError.
37 | bpy.ops.object.calculate_weighted_normals()
38 | bpy.ops.object.mode_set(mode='EDIT')
39 |
40 | ### Removing useless UVMaps
41 | if(clear_unused_UVs):
42 | mesh = obj.data
43 | bm = bmesh.from_edit_mesh(mesh)
44 |
45 | for uv_idx in reversed(range(0, len(mesh.uv_layers))): # For each UV layer
46 | delet_this=True
47 | mesh.uv_layers.active_index = uv_idx
48 | bm.faces.ensure_lookup_table()
49 | for f in bm.faces: # For each face
50 | for l in f.loops: # For each loop (i.e. face corner)
51 | if(l[bm.loops.layers.uv.active].uv[0] != 0.0): # If the loop's UVs first vert's x coord is NOT 0
52 | delet_this=False
53 | if(not delet_this): # This UV map is in use; no need to check the remaining faces.
54 | break
55 | if(delet_this):
56 | obj.data.uv_layers.remove(obj.data.uv_layers[uv_idx])
57 |
58 | bmesh.update_edit_mesh(mesh, True)
59 |
60 | # Renaming single UV maps
61 | if(len(mesh.uv_layers)==1 and rename_single_UV):
62 | mesh.uv_layers[0].name = 'UVMap'
63 |
64 | # Seams from islands
65 | if(seams_from_islands):
66 | bpy.ops.uv.seams_from_islands(mark_seams=True, mark_sharp=False)
67 |
68 | # Mode management
69 | bpy.ops.object.mode_set(mode='OBJECT')
70 | for o in org_selected:
71 | o.select_set(True)
72 | bpy.context.view_layer.objects.active = org_active
73 | bpy.ops.object.mode_set(mode=org_mode)
74 |
75 | class CleanUpMesh(bpy.types.Operator):
76 | """Clean up meshes"""
77 | bl_idname = "object.mesh_cleanup"
78 | bl_label = "Clean Up Mesh"
79 | bl_options = {'REGISTER', 'UNDO'}
80 |
81 | remove_doubles: bpy.props.BoolProperty(
82 | name="Remove Doubles",
83 | description="Enable remove doubles",
84 | default=False
85 | )
86 |
87 | quadrangulate: bpy.props.BoolProperty(
88 | name="Tris to Quads",
89 | description="Enable Tris to Quads (UV seams enabled)",
90 | default=False
91 | )
92 |
93 | weight_normals: bpy.props.BoolProperty(
94 | name="Weight Normals",
95 | description="Enable weighted normals",
96 | default=False
97 | )
98 |
99 | seams_from_islands: bpy.props.BoolProperty(
100 | name="Seams from Islands",
101 | description="Create UV seams based on UV islands",
102 | default=False
103 | )
104 |
105 | clear_unused_UVs: bpy.props.BoolProperty(
106 | name="Delete Unused UV Maps",
107 | description="If all UV verts' X coordinate is 0, the UV map will be deleted.",
108 | default=True
109 | )
110 |
111 | rename_single_UV: bpy.props.BoolProperty(
112 | name="Rename Singular UV Maps",
113 | description="If an object is only left with one UV map, rename it to the default name, 'UVMap'.",
114 | default=True
115 | )
116 |
117 |
118 | def execute(self, context):
119 | for o in bpy.context.selected_objects:
120 | cleanup_mesh(o,
121 | self.remove_doubles,
122 | self.quadrangulate,
123 | self.weight_normals,
124 | self.seams_from_islands,
125 | self.clear_unused_UVs,
126 | self.rename_single_UV)
127 | return {'FINISHED'}
128 |
129 | def register():
130 | from bpy.utils import register_class
131 | register_class(CleanUpMesh)
132 |
133 | def unregister():
134 | from bpy.utils import unregister_class
135 | unregister_class(CleanUpMesh)
--------------------------------------------------------------------------------
/io_witcher3_fbx/weighted_normals.py:
--------------------------------------------------------------------------------
1 | # Originally written for Blender 2.79 by blenderartists.org user "notallowed":
2 | # https://blenderartists.org/t/weighted-normals-calculator/643637
3 |
4 | bl_info = {
5 | "name": "Weighted Normals Calculation",
6 | "description": "Simple operator to calculate weighted normals on the mesh.",
7 | "author": "Simon Lusenc (50keda)",
8 | "version": (1, 3),
9 | "blender": (2, 80, 0),
10 | "location": "3D View > Quick Search",
11 | "category": "Object",
12 | "support": "COMMUNITY"
13 | }
14 |
15 | import bpy, bmesh, array
16 | from mathutils import Vector
17 |
18 | class WeightNormalsCalculator(bpy.types.Operator):
19 | """Calculate weighted normals for active object."""
20 | bl_idname = "object.calculate_weighted_normals"
21 | bl_label = "Weight Normals"
22 | bl_options = set()
23 |
24 | cache = {}
25 | """Cache for calculated weighted normals. It stores normals by key: 'vert_index:edge_index'."""
26 |
27 | @staticmethod
28 | def calc_weighted_normal(bm, vert_index, edge_index):
29 | """Calculates weighted normal for given combination of vertex and edge index.
30 | WARNING: There is no safety check that these two belong together.
31 |
32 | :param bm: bmesh object
33 | :type bm: bmesh
34 | :param vert_index: index of the vertex to calculate normal for
35 | :type vert_index: int
36 | :param edge_index: index of the edge to use for calculation (vertex has to belong to this edge)
37 | :returns: Vector
38 | """
39 | normal_hash = str(vert_index) + ":" + str(edge_index)
40 |
41 | if normal_hash in WeightNormalsCalculator.cache:
42 | return WeightNormalsCalculator.cache[normal_hash]
43 |
44 | edge = bm.edges[edge_index]
45 | vert = bm.verts[vert_index]
46 |
47 | selected_faces = []
48 |
49 | # edge.seam = True
50 | # edge.select_set(True)
51 |
52 | for f in edge.link_faces:
53 |
54 | if not f.select:
55 |
56 | f.select = True
57 | selected_faces.append(f)
58 |
59 | # select linked faces of already selected edges
60 | # until every smooth face around current loop is selected
61 | more_selected = 1
62 | while more_selected > 0:
63 |
64 | more_selected = 0
65 | for edge1 in vert.link_edges:
66 |
67 | if edge1.smooth and edge1.select:
68 |
69 | for f in edge1.link_faces:
70 |
71 | if not f.select:
72 |
73 | f.select = True
74 | selected_faces.append(f)
75 |
76 | more_selected += 1
77 |
78 | # calc areas
79 | max_area = 0
80 | areas = {}
81 | for i, f in enumerate(selected_faces):
82 | area = f.calc_area()
83 | areas[i] = area
84 |
85 | if area > max_area:
86 | max_area = area
87 |
88 | # calc normal
89 | normal = Vector()
90 | for i, f in enumerate(selected_faces):
91 | perc = areas[i] / max_area
92 | f.normal_update()
93 | normal += perc * f.normal
94 |
95 | # also unselect all the faces
96 | f.select = False
97 |
98 | WeightNormalsCalculator.cache[normal_hash] = normal.normalized()
99 |
100 | return normal.normalized()
101 |
102 | @classmethod
103 | def poll(cls, context):
104 | return context.object and context.object.mode == "OBJECT" and context.object.type == "MESH"
105 |
106 | def execute(self, context):
107 |
108 | WeightNormalsCalculator.cache = {}
109 |
110 | mesh = context.object.data
111 |
112 | bm = bmesh.new()
113 | bm.from_mesh(mesh)
114 | bm.verts.ensure_lookup_table()
115 | bm.edges.ensure_lookup_table()
116 |
117 | # unselect everything first
118 | for v in bm.faces:
119 | v.select = False
120 | v.hide = False
121 |
122 | for v in bm.edges:
123 | v.select = False
124 | v.hide = False
125 |
126 | for v in bm.verts:
127 | v.select = False
128 | v.hide = False
129 |
130 | nor_list = [(0,)] * len(mesh.loops)
131 | for f in bm.faces:
132 |
133 | # map both edge indices into vertex (loop has info only about one edge)
134 | verts_edge_map = {}
135 | for e in f.edges:
136 | for v in e.verts:
137 |
138 | v_i = v.index
139 |
140 | if v_i not in verts_edge_map:
141 | verts_edge_map[v_i] = {e.index: 1}
142 | else:
143 | verts_edge_map[v_i][e.index] = 1
144 |
145 | for curr_loop in f.loops:
146 |
147 | edge_keys = verts_edge_map[curr_loop.vert.index].keys()
148 |
149 | # if the current loop's vertex has at least one non-sharp edge around it, calculate the weighted normal from it
150 | for e_i in edge_keys:
151 |
152 | if not mesh.edges[e_i].use_edge_sharp:
153 |
154 | curr_n = WeightNormalsCalculator.calc_weighted_normal(bm, curr_loop.vert.index, e_i)
155 | nor_list[curr_loop.index] = curr_n
156 |
157 | break
158 |
159 | else:
160 |
161 | nor_list[curr_loop.index] = mesh.loops[curr_loop.index].normal
162 |
163 | bm.free()
164 |
165 | mesh.use_auto_smooth = True
166 | bpy.ops.mesh.customdata_custom_splitnormals_clear()
167 |
168 | bpy.ops.mesh.customdata_custom_splitnormals_add()
169 | mesh.normals_split_custom_set(nor_list)
170 | mesh.free_normals_split()
171 |
172 | WeightNormalsCalculator.cache.clear()
173 |
174 | return {'FINISHED'}
175 |
176 | def register():
177 | from bpy.utils import register_class
178 | register_class(WeightNormalsCalculator)
179 |
180 | def unregister():
181 | from bpy.utils import unregister_class
182 | unregister_class(WeightNormalsCalculator)
183 |
184 | if __name__ == '__main__':
185 | register()
--------------------------------------------------------------------------------
/io_witcher3_fbx/witcher3_materials.blend:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ganonmaster/Blender-Scripts/14dac68979079501d5d5e979e4733fb42bc20d51/io_witcher3_fbx/witcher3_materials.blend
--------------------------------------------------------------------------------
/merge_w3_skeleton_into_metsrig.py:
--------------------------------------------------------------------------------
1 | import bpy
2 |
3 | # Expects the Witcher3 skeleton to be selected, and MetsRig to be active.
4 | # Expects Witcher3 skeleton to be named "Witcher3_Skeleton_Charname".
5 |
6 | def combine_armatures(armatures, main_armature, face_to_char=False):
7 | # For combining Witcher 3 Skeletons into MetsRig.
8 | # Alternatively, for combining Face skeletons into Character skeletons.
9 |
10 | # Get the character name
11 | # Save the face bones' locations in said face bones' custom properties in the main armature.
12 | # Find non-duplicate bones(hair, clothes).
13 | # Create copies of the non-duplicate bones in the main armature(name, head, tail, parent)
14 | # Re-parent the child meshes and change their armature modifiers' target.
15 |
16 | bpy.ops.object.mode_set(mode='OBJECT')
17 | bpy.ops.object.select_all(action='DESELECT')
18 |
19 | face_bones = [
20 | "nose",
21 | "upper_lip",
22 | "right_nose1",
23 | "right_eyebrow1",
24 | "left_eyebrow1",
25 | "nose_base",
26 | "chin",
27 | "lowwer_lip",
28 | "lowwer_left_lip",
29 | "upper_left_lip",
30 | "upper_right_lip",
31 | "right_nose2",
32 | "left_nose2",
33 | "right_nose3",
34 | "lowwer_right_lip",
35 | "left_forehead",
36 | "right_forehead",
37 | "left_temple",
38 | "left_eyebrow3",
39 | "left_eyebrow2",
40 | "ears",
41 | "left_chick4",
42 | "left_chick3",
43 | "left_mouth1",
44 | "left_mouth2",
45 | "left_mouth_fold1",
46 | "left_mouth4",
47 | "left_nose1",
48 | "upper_left_eyelid_fold",
49 | "left_chick2",
50 | "left_mouth_fold3",
51 | "left_mouth_fold4",
52 | "left_chick1",
53 | "left_mouth_fold2",
54 | "left_mouth3",
55 | "left_corner_lip1",
56 | "left_corner_lip2",
57 | "lowwer_left_eyelid_fold",
58 | "left_nose3",
59 | "lowwer_left_eyelid1",
60 | "upper_left_eyelid1",
61 | "upper_left_eyelid2",
62 | "upper_left_eyelid3",
63 | "lowwer_left_eyelid2",
64 | "lowwer_left_eyelid3",
65 | "right_temple",
66 | "right_eyebrow3",
67 | "right_eyebrow2",
68 | "right_chick4",
69 | "right_chick3",
70 | "right_mouth1",
71 | "right_mouth2",
72 | "right_mouth_fold1",
73 | "right_mouth4",
74 | "upper_right_eyelid_fold",
75 | "right_chick2",
76 | "right_mouth_fold3",
77 | "right_mouth_fold4",
78 | "right_chick1",
79 | "right_mouth_fold2",
80 | "right_mouth3",
81 | "right_corner_lip1",
82 | "right_corner_lip2",
83 | "lowwer_right_eyelid_fold",
84 | "lowwer_right_eyelid1",
85 | "upper_right_eyelid1",
86 | "upper_right_eyelid2",
87 | "upper_right_eyelid3",
88 | "lowwer_right_eyelid2",
89 | "lowwer_right_eyelid3",
90 | "thyroid",
91 | "left_eye",
92 | "right_eye",
93 | "upper_left_eyelash",
94 | "upper_right_eyelash",
95 | "tongue1",
96 | "right_tongue_side",
97 | "tongue2",
98 | "left_tongue_side"
99 | ]
100 |
101 | face_bone_dependents = {
102 | 'left_eye' : ['Eye_LookAt.L'],
103 | 'right_eye' : ['Eye_LookAt.R']
104 | }
105 |
106 | for a in armatures:
107 | # moving "left" and "right" to the beginning of the bone name, rather than the middle.
108 | # This way Blender will recognize that these bones are opposites of each other.
109 | for b in a.pose.bones:
110 | if('left_' in b.name):
111 | new_name = b.name.replace("left_", "")
112 | new_name = "left_" + new_name
113 | b.name = new_name
114 | elif('right_' in b.name):
115 | new_name = b.name.replace("right_", "")
116 | new_name = "right_" + new_name
117 | b.name = new_name
118 |
119 | if(a.type != 'ARMATURE'):
120 | continue
121 | if(a == main_armature):
122 | continue
123 |
124 | # Getting the character name
125 | char_name = ""
126 | if(face_to_char):
127 | assert "Witcher3_Skeleton_" in main_armature.name, "Expected target armature name to contain the string 'Witcher3_Skeleton_'. But it doesn't: " + main_armature.name
128 | char_name = main_armature.name.replace("Witcher3_Skeleton_", "")
129 | else:
130 | assert "Witcher3_Skeleton_" in a.name, "Expected armature name to contain the string 'Witcher3_Skeleton_'. But it doesn't: " + a.name
131 | char_name = a.name.replace("Witcher3_Skeleton_", "")
132 | if(char_name == ''): continue
133 | print("Character name: " + char_name)
134 |
135 | bpy.context.view_layer.objects.active = a
136 | bpy.ops.object.mode_set(mode='EDIT') # We need the head coords from edit_bones.
137 |
138 | # Deleting "Bone" bone (TODO: make sure this works and is fine)
139 | shit = a.data.edit_bones.get('Bone')
140 | if(shit):
141 | a.data.edit_bones.remove(shit)
142 |
143 | # Saving face bones location into custom properties on the main armature's bones...
144 | for fb in face_bones:
145 | bone = a.data.edit_bones.get(fb)
146 | if(bone==None): continue
147 | main_arm_pbone = main_armature.pose.bones.get(fb)
148 | if(not main_arm_pbone): continue
149 | main_arm_pbone[fb+"_"+char_name] = bone.head
150 | # Doing same for dependent bones - TODO make sure this works.
151 | if(fb in face_bone_dependents.keys()):
152 | for d in face_bone_dependents[fb]:
153 | main_armature.pose.bones[d][d+"_"+char_name] = bone.head
154 |
155 | # Finding bones that exist in the main armature, and saving the parent of those that do not.
156 | duplicates = []
157 | parents = {}
158 | for b in a.data.bones:
159 | if(b.name in main_armature.data.bones):
160 | duplicates.append(b.name)
161 | else:
162 | if(b.parent):
163 | parents[b.name] = b.parent.name
164 |
165 | # Deleting the duplicate bones
166 | bpy.context.view_layer.objects.active = a
167 | bpy.ops.object.mode_set(mode='EDIT')
168 | for eb in duplicates:
169 | a.data.edit_bones.remove(a.data.edit_bones.get(eb))
170 | bpy.ops.object.mode_set(mode='OBJECT')
171 |
172 | # Parenting child meshes of this armature to main armature (They need to be enabled and visible since we use bpy.ops - could probably not use bpy.ops)
173 | for o in a.children:
174 | o.select_set(True)
175 | o.modifiers.clear()
176 | main_armature.select_set(True)
177 | bpy.context.view_layer.objects.active = main_armature
178 | bpy.ops.object.parent_set(type='ARMATURE')
179 |
180 | # Joining this armature with the main armature
181 | bpy.ops.object.select_all(action='DESELECT')
182 | a.select_set(True)
183 | main_armature.select_set(True)
184 | bpy.context.view_layer.objects.active = main_armature
185 | bpy.ops.object.join()
186 |
187 | # Parenting the bones back
188 | bpy.context.view_layer.objects.active = main_armature
189 | bpy.ops.object.mode_set(mode='EDIT')
190 | for b in parents.keys():
191 | eb = main_armature.data.edit_bones[b]
192 | parent = parents.get(b)
193 | if(eb.parent == None and parent != None):
194 | eb.parent = main_armature.data.edit_bones.get(parent)
195 | return main_armature
196 |
197 | combine_armatures(bpy.context.selected_objects, bpy.context.object, True)
--------------------------------------------------------------------------------
/mirror_vertex_groups.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | import mathutils
3 | from mathutils import Vector
4 |
5 | # Todo: Currently strictly mirrors from one side to the other. Could do something smart to figure out which side should be mirrored to which side. Or just forget about removing weights idk.
6 |
7 | side_dict = {
8 | 'l_' : 'r_',
9 | 'L_' : 'R_',
10 | '.l' : '.r',
11 | '.L' : '.R',
12 | '_l' : '_r',
13 | '_L' : '_R',
14 | '-l' : '-r',
15 | '-L' : '-R',
16 | 'left_' : 'right_',
17 | 'Left_' : 'Right_',
18 | '_left' : '_right',
19 | '_Left' : '_Right',
20 | }
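# e.g. a group named "l_bicep" mirrors into "r_bicep", and "Thigh_Def.L" into "Thigh_Def.R".
# The first matching key in this dict wins, since find_mirrored_group() returns on the first hit.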
21 |
22 | def flip_side_dict():
23 | global side_dict
24 | new_dict = {}
25 | for s in side_dict.keys():
26 | new_dict[side_dict[s]] = s
27 | side_dict = new_dict
28 |
29 | # Finding vertex group to mirror weights into
30 | def find_mirrored_group(obj, vg, allow_same=True, right_to_left=False, create=True):
31 | if(right_to_left):
32 | flip_side_dict()
33 | for s in side_dict.keys():
34 | if(s in vg.name):
35 | mirrored_name = vg.name.replace(s, side_dict[s])
36 | mirrored_vg = obj.vertex_groups.get(mirrored_name)
37 | if((not mirrored_vg) and create):
38 | return obj.vertex_groups.new(name=mirrored_name)
39 | else:
40 | return mirrored_vg
41 | return vg if allow_same else None
42 |
43 | def mirror_selected_pose_bones(right_to_left=False):
44 | if(not bpy.context.selected_pose_bones): return
45 |
46 | vgroups = []
47 | for b in bpy.context.selected_pose_bones:
48 | 		vg = bpy.context.object.vertex_groups.get(b.name)
49 | if(not vg): continue
50 | vgroups.append(vg)
51 | mirror_vertex_groups(vgroups, right_to_left)
52 |
53 | def mirror_vertex_groups(vgroups=None, right_to_left=False):
54 | obj = bpy.context.object
55 | 	if(obj.type!='MESH' or not vgroups): return
56 |
57 | # Creating KDTree
58 | kd = mathutils.kdtree.KDTree(len(obj.data.vertices))
59 |
60 | for i, v in enumerate(obj.data.vertices):
61 | kd.insert(v.co, i)
62 |
63 | kd.balance()
64 | for vg in vgroups:
65 | 		if(not vg): continue
66 | 		print("Group: " + vg.name)
67 | vg_opp = find_mirrored_group(obj, vg, allow_same=True, right_to_left=right_to_left)
68 | print("Opposite group: " + vg_opp.name)
69 |
70 | for v in obj.data.vertices:
71 | # Flipping coordinates on X axis
72 | mirrored_co = Vector((v.co.x*-1, v.co.y, v.co.z))
73 |
74 | # Finding vertex closest to the flipped coordinates
75 | co, index, dist = kd.find(mirrored_co)
76 |
77 | #if(index == v.index): continue
78 |
79 | side = mirrored_co.x > 0 if right_to_left else mirrored_co.x < 0
80 |
81 | #if(side):
82 | if(True):
83 | try:
84 | # Removing old weights
85 | vg_opp.add(range(index, index+1), 0, 'REPLACE')
86 | # Adding new weights
87 | vg_opp.add(range(index, index+1), vg.weight(v.index), 'REPLACE')
88 | except:
89 | continue
90 |
91 | obj = bpy.context.object
92 | group_names = ['Triss_Masquerade+M-Sleeves1']
93 | groups = [obj.vertex_groups.get(g) for g in group_names]
94 | #mirror_vertex_groups(groups, True)
95 | mirror_selected_pose_bones()
--------------------------------------------------------------------------------
/ue4map-tools/README.md:
--------------------------------------------------------------------------------
1 | # Blender UE4 map import tools
2 |
3 | ## Basic steps
4 |
5 | - Dump the game's models and textures using UE Viewer.
6 | - Export models as GLTF
7 | - Export textures as TGA
8 | - Use the ACL-compatible version of UE Viewer if necessary (https://www.gildor.org/smf/index.php/topic,8304.0.html)
9 | - Save the .umap file as .json using FModel (https://fmodel.app)
10 | - Open `map_mesh_import.py` in Blender's script editor
11 | - Adjust the variables as instructed in the script (see the example below)
12 | - Run the script
13 | - Monitor the Blender system console for errors
14 | - If the models imported correctly, continue with the next script
15 | - Open `map_material_cleanup.py` in the Blender script editor
16 | - Edit the `mat_dir` variable (see the example below)
17 | - Run the script
18 | - Monitor the Blender system console for errors
19 | - If the materials show up in Material Preview mode, clean up any trash remaining in the scene
20 |
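Both scripts are configured by editing a handful of variables at the top of each file rather than through a UI. As a rough illustration of what to adjust (the variable names match the scripts; the paths are placeholders for your own export folders):

```python
# map_mesh_import.py -- configuration at the top of the script
base_dir = r"F:\HogwartsExport\Assets"   # root folder of the assets dumped with UE Viewer
asset_sub_dir = ""                       # optional extra path segment between base_dir and the asset paths
map_json = [                             # one or more .json files exported from .umap files with FModel
    r"F:\HogwartsExport\MapJSON\SUB_ThreeBroomsticks_EXT.json",
]
import_static = True                     # import StaticMeshComponent entities
import_lights = False                    # set to True to also import lights

# map_material_cleanup.py -- configuration at the top of the script
mat_dir = "F:\\HogwartsExport\\Assets\\" # folder searched for .mat and .tga files
```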
21 |
22 | ## Limitations
23 |
24 | - No UI
25 | - The model script will fail silently and continue if a GLTF file is not found
26 |
27 | - Texture file name matching only returns the first file found (see the sketch below this list).
28 | - When multiple files in different directories have the same name, this may result in the wrong image textures being assigned to materials.
29 | - Texture names are read from .mat files.
30 | - If the wrong textures end up in a .mat file's Diffuse and Normal slots, the wrong textures will be assigned.
31 | - The script can't reliably read anything except the Diffuse and Normal slots.
32 | - No consistent naming convention exists for the other texture types. For Hogwarts Legacy, you may find _SRXO and _MRO files for Specular/Metallic, Roughness and Occlusion, but there is no automatic matching for this.
33 |
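The first-match behaviour comes from the `search_directory` helper in `map_material_cleanup.py`, which walks `mat_dir` and returns the first file with a matching name. If the wrong texture keeps being picked up, one possible workaround (a minimal sketch, not part of the scripts; `search_directory_preferring` and `path_hint` are made-up names) is to collect all matches and prefer one whose path contains a hint string:

```python
import os

def search_directory_preferring(root_dir, file_name, path_hint=None):
    # Collect every file with the requested name instead of stopping at the first hit.
    matches = []
    for subdir, dirs, files in os.walk(root_dir):
        if file_name in files:
            matches.append(os.path.join(subdir, file_name))
    if not matches:
        return None
    if path_hint:
        # Prefer a match whose path contains the hint, e.g. part of the material's folder name.
        for match in matches:
            if path_hint.lower() in match.lower():
                return match
    # Otherwise fall back to the original first-match behaviour.
    return matches[0]
```
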
34 | ## Credits
35 | - Aetheras Veras (main script)
36 | - Ganonmaster (refactoring)
37 | - AmmoBlender (testing)
--------------------------------------------------------------------------------
/ue4map-tools/map_material_cleanup.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | import os
3 |
4 | def dedup_materials(material_name_to_replace, replacement_material_name):
5 | # replaces and deletes any duplicate materials in the scene
6 |
7 | materials = bpy.data.materials
8 |
9 | # Get the material to use as replacement
10 | replacement_material = materials.get(replacement_material_name)
11 |
12 | if replacement_material is None:
13 | print(f"Error: Material '{replacement_material_name}' not found.")
14 | return None
15 |
16 | # Iterate over all objects in the scene
17 | for obj in bpy.context.scene.objects:
18 | # Check if the object has a material slot with the material to replace
19 | for i, slot in enumerate(obj.material_slots):
20 | if slot.material is not None and slot.material.name == material_name_to_replace:
21 | # Replace the material with the replacement material
22 | slot.material = replacement_material
23 | print(f"Replaced material in object '{obj.name}', slot {i}.")
24 |
25 |
26 | def search_directory(root_dir, file_name):
27 | # utility function to search for a file in a directory
28 | for subdir, dirs, files in os.walk(root_dir):
29 | for file in files:
30 | if file == file_name:
31 | file_path = os.path.join(subdir, file)
32 | return file_path
33 | return None
34 |
35 | mat_dir = "F:\\HogwartsExport\\Assets\\"
36 |
37 | # Get all materials in the scene
38 | materials = bpy.data.materials
39 |
40 | # Iterate over all materials and print their names
41 | for material in materials:
42 | if 'WorldGridMaterial' in material.name:
43 | #TODO: Also remove objects using this material
44 | bpy.data.materials.remove(material, do_unlink=True)
45 | continue
46 |
47 | # Disable Backface Culling - this will make the material double sided
48 | material.use_backface_culling = False
49 |
50 | mat_name = material.name
51 | split_matname = mat_name.split('.')
52 |
53 | # dedup material
54 | if len(split_matname) > 1:
55 | dedup_materials(material.name, split_matname[0])
56 | bpy.data.materials.remove(material, do_unlink=True)
57 | continue
58 |
59 | mat_name = split_matname[0]
60 |
61 | # Find the .mat file so we can pull the texture names out of it
62 | found_file = search_directory(mat_dir, mat_name + '.mat')
63 | if not found_file:
64 | print('No material found.')
65 | continue
66 |
67 | # TODO: this is ugly, clean it up
68 | diffuse_texturename = ''
69 | normal_texturename = ''
70 | diffuse_texture_path = ''
71 | normal_texture_path = ''
72 | with open(found_file) as mat_file:
73 | lines = mat_file.readlines()
74 |
75 | for line in lines:
76 | if line.startswith('Diffuse') or line.startswith('Normal'):
77 | splitline = line.split("=")
78 | if len(splitline) > 1:
79 | if splitline[0] == 'Diffuse':
80 | diffuse_texturename = splitline[1].strip()
81 | if splitline[0] == 'Normal':
82 | normal_texturename = splitline[1].strip()
83 |
84 | if not diffuse_texturename and not normal_texturename:
85 | print('We have no textures. Skipping.')
86 | continue
87 |
88 | diffuse_texture_path = None
89 | normal_texture_path = None
90 | if diffuse_texturename != '':
91 | diffuse_texture_path = search_directory(mat_dir, diffuse_texturename + '.tga')
92 | if normal_texturename != '':
93 | normal_texture_path = search_directory(mat_dir, normal_texturename + '.tga')
94 |
95 | # This convoluted mess is to set up the textures and connect all the material nodes
96 |
97 | # Create a new Principled BSDF shader node for the material
98 | # TODO: check if this node already exists
99 | shader_node = material.node_tree.nodes.new(type="ShaderNodeBsdfPrincipled")
100 |
101 | if diffuse_texture_path is not None:
102 | print(diffuse_texture_path)
103 | # Set the diffuse map for the material
104 | diffuse_texture = material.node_tree.nodes.new(type="ShaderNodeTexImage")
105 | diffuse_texture.image = bpy.data.images.load(diffuse_texture_path)
106 | material.node_tree.links.new(shader_node.inputs["Base Color"], diffuse_texture.outputs["Color"])
107 |
108 | if normal_texture_path is not None:
109 | # Set the normal map for the material
110 | normal_texture = material.node_tree.nodes.new(type="ShaderNodeTexImage")
111 | normal_texture.image = bpy.data.images.load(normal_texture_path)
112 | normal_map_node = material.node_tree.nodes.new(type="ShaderNodeNormalMap")
113 | normal_map_node.inputs["Strength"].default_value = 1.0
114 | material.node_tree.links.new(shader_node.inputs["Normal"], normal_map_node.outputs["Normal"])
115 |
116 | ##
117 | # Create nodes to invert the green channel
118 |
119 |         separate_color = material.node_tree.nodes.new(type="ShaderNodeSeparateColor")
120 |         invert_node = material.node_tree.nodes.new(type="ShaderNodeInvert")
121 |         combine_color = material.node_tree.nodes.new(type="ShaderNodeCombineColor")
122 |
123 |         # link the remaining separate color outputs to combine color
124 |         material.node_tree.links.new(combine_color.inputs["Red"], separate_color.outputs["Red"])
125 |         material.node_tree.links.new(combine_color.inputs["Blue"], separate_color.outputs["Blue"])
126 |
127 |         # link sep to invert
128 |         material.node_tree.links.new(invert_node.inputs["Color"], separate_color.outputs["Green"])
129 |         # link invert to comb
130 |         material.node_tree.links.new(combine_color.inputs["Green"], invert_node.outputs["Color"])
131 |
132 |         # wire up the rest
133 |         material.node_tree.links.new(separate_color.inputs["Color"], normal_texture.outputs["Color"])
134 |         material.node_tree.links.new(normal_map_node.inputs["Color"], combine_color.outputs["Color"])
135 |
136 | # Get the material output and wire it up to the shader node
137 | material_output = material.node_tree.nodes.get("Material Output")
138 |
139 |     if material_output is None:
140 |         print(f"Error: Material output node not found in material '{material.name}'.")
141 |         continue
142 |     material.node_tree.links.new(shader_node.outputs["BSDF"], material_output.inputs["Surface"])
143 |
144 | print('Done')
145 |
146 |
147 | # Remove objects without materials
148 | # Iterate over all objects in the scene
149 | for obj in bpy.context.scene.objects:
150 | # Check if the object has a material slot in the first position
151 | if len(obj.material_slots) > 0:
152 | # Check if the material slot in the first position is empty
153 | if obj.material_slots[0].material is None:
154 | # Object has no material assigned to the first slot
155 | print(f"Object '{obj.name}' has no material assigned to the first slot.")
156 | bpy.data.objects.remove(obj, do_unlink=True)
157 | else:
158 | # Object has no material slots
159 | print(f"Object '{obj.name}' has no material slots.")
160 |
--------------------------------------------------------------------------------
/ue4map-tools/map_mesh_import.py:
--------------------------------------------------------------------------------
1 | import bpy
2 | import json
3 | import mathutils
4 | from mathutils import Euler
5 | import math
6 | import os
7 |
8 | # This is the base dir that contains all the unpacked assets - unpack them using the latest ACL-compatible build of UE Viewer
9 | base_dir = r"F:\HogwartsExport\Assets"
10 |
11 | # This is a subdirectory where you can insert additional parts of the path to the assets
12 | asset_sub_dir = ""
13 |
14 | # These are the paths to the JSON files that contain the map data - you can extract them from .umap files using FModel.exe
15 | map_json = [
16 |     r'F:\HogwartsExport\MapJSON\SUB_ThreeBroomsticks_EXT.json',
17 |     r'F:\HogwartsExport\MapJSON\SUB_ThreeBroomsticks_FX.json',
18 |     r'F:\HogwartsExport\MapJSON\SUB_ThreeBroomsticks_INT.json',
19 |     r'F:\HogwartsExport\MapJSON\SUB_ThreeBroomsticks_LIGHTS.json',
20 |     r'F:\HogwartsExport\MapJSON\SUB_ThreeBroomsticks_POP.json',
21 |     r'F:\HogwartsExport\MapJSON\SUB_ThreeBroomsticks_TECH.json',
22 | ]
23 |
24 | # importer toggles
25 | import_static = True
26 | import_lights = False # enable to also import lights
27 |
28 | # Import types supported by the script
29 | static_mesh_types = [
30 | 'StaticMeshComponent',
31 | # 'InstancedStaticMeshComponent' # buggy, positions wrong, seems to be used with splines as well
32 | ]
33 | light_types = [
34 | 'SpotLightComponent',
35 | 'AnimatedLightComponent',
36 | 'PointLightComponent'
37 | ]
38 |
39 | def split_object_path(object_path):
40 |     # ObjectPaths end with a period and a digit, which has to be stripped off
41 |     # before the path can be used to locate the exported file.
42 |
43 |     path_parts = object_path.rsplit(".", 1)
44 |
45 |     if len(path_parts) > 1:
46 |         # Splitting from the right keeps any other periods in the path intact.
47 |         return path_parts[0]
48 |
49 |     # Nothing to do
50 |     return object_path
51 |
52 |
53 | class StaticMesh:
54 | entity_name = ""
55 | import_path = ""
56 | pos = [0, 0, 0]
57 | rot = [0, 0, 0]
58 | scale = [1, 1, 1]
59 |
60 | # these are just properties to help with debugging
61 | no_entity = False
62 | no_file = False
63 | no_mesh = False
64 | no_path = False
65 | base_shape = False
66 |
67 |
68 | def __init__(self, json_entity, base_dir):
69 | self.entity_name = json_entity.get("Outer", 'Error')
70 |
71 | props = json_entity.get("Properties", None)
72 | if not props:
73 | print('Invalid Entity: Lacking property')
74 | self.no_entity = True
75 | return None
76 |
77 | if not props.get("StaticMesh", None):
78 | print('Invalid Property: does not contain a static mesh')
79 | self.no_mesh = True
80 | return None
81 |
82 | object_path = props.get("StaticMesh").get("ObjectPath", None)
83 |
84 | if not object_path or object_path == '':
85 | print('Invalid StaticMesh: does not contain ObjectPath.')
86 | self.no_path = True
87 | return None
88 |
89 | if 'BasicShapes' in object_path:
90 | # What is a BasicShape? Do we need these?
91 | print('This is a BasicShape - skipping for now')
92 | self.base_shape = True
93 | return None
94 |
95 | objpath = split_object_path(object_path)
96 | self.import_path = base_dir + asset_sub_dir + objpath + ".gltf"
97 | print('Mesh Path', self.import_path)
98 | self.no_file = not os.path.exists(self.import_path)
99 |
100 | if props.get("RelativeLocation", False):
101 | pos = props.get("RelativeLocation")
102 | self.pos = [pos.get("X")/100,pos.get("Y")/-100,pos.get("Z")/100]
103 |
104 | if props.get("RelativeRotation", False):
105 | rot = props.get("RelativeRotation")
106 | self.rot = [rot.get("Roll"),rot.get("Pitch")*-1,rot.get("Yaw")*-1]
107 |
108 | if props.get("RelativeScale3D", False):
109 | scale = props.get("RelativeScale3D")
110 | self.scale = [scale.get("X", 1),scale.get("Y", 1),scale.get("Z", 1)]
111 |
112 | return None
113 |
114 | @property
115 | def invalid(self):
116 | return self.no_path or self.no_file or self.no_entity or self.base_shape or self.no_mesh
117 |
118 |
119 | def import_staticmesh(self, collection):
120 | if self.invalid:
121 | print('Refusing to import due to failed checks.')
122 | return False
123 | # Import the file and apply transforms
124 | bpy.ops.import_scene.gltf(filepath=self.import_path)
125 | imported_obj = bpy.context.object
126 |
127 | imported_obj.name = self.entity_name
128 | imported_obj.scale = (self.scale[0], self.scale[1], self.scale[2])
129 | imported_obj.location = (self.pos[0], self.pos[1], self.pos[2])
130 | imported_obj.rotation_mode = 'XYZ'
131 | imported_obj.rotation_euler = Euler((math.radians(self.rot[0]), math.radians(self.rot[1]), math.radians(self.rot[2])), 'XYZ')
132 | collection.objects.link(imported_obj)
133 | bpy.context.scene.collection.objects.unlink(imported_obj)
134 |
135 | print('StaticMesh imported:', self.entity_name)
136 | return imported_obj
137 |
138 |
139 | class GameLight:
140 | entity_name = ""
141 | type = ""
142 |
143 | pos = [0, 0, 0]
144 | rot = [0, 0, 0]
145 | scale = [1, 1, 1]
146 |
147 | energy = 1000
148 |
149 | no_entity = False
150 |
151 | def __init__(self, json_entity):
152 | self.entity_name = json_entity.get("Outer", 'Error')
153 |         self.type = json_entity.get("Type", "SpotLightComponent")
154 |
155 | props = json_entity.get("Properties", None)
156 | if not props:
157 | print('Invalid Entity: Lacking property')
158 | self.no_entity = True
159 | return None
160 |
161 | if props.get("RelativeLocation", False):
162 | pos = props.get("RelativeLocation")
163 | self.pos = [pos.get("X")/100,pos.get("Y")/-100,pos.get("Z")/100]
164 |
165 | if props.get("RelativeRotation", False):
166 | rot = props.get("RelativeRotation")
167 | self.rot = [rot.get("Roll"),rot.get("Pitch")*-1,rot.get("Yaw")*-1]
168 |
169 | if props.get("RelativeScale3D", False):
170 | scale = props.get("RelativeScale3D")
171 | self.scale = [scale.get("X", 1),scale.get("Y", 1),scale.get("Z", 1)]
172 |
173 | #TODO: expand this method with more properties for the specific light types
174 | # Problem: I don't know how values for UE lights map to Blender's light types.
175 |
176 | def import_light(self, collection):
177 | if self.no_entity:
178 | print('Refusing to import due to failed checks.')
179 | return False
180 | print('importing light')
181 |         if self.type == 'SpotLightComponent':
182 |             light_data = bpy.data.lights.new(name=self.entity_name, type='SPOT')
183 |         else:
184 |             # PointLightComponent and any unhandled types (e.g. AnimatedLightComponent) fall back to a point light.
185 |             light_data = bpy.data.lights.new(name=self.entity_name, type='POINT')
186 | light_obj = bpy.data.objects.new(name=self.entity_name, object_data=light_data)
187 | light_obj.scale = (self.scale[0], self.scale[1], self.scale[2])
188 | light_obj.location = (self.pos[0], self.pos[1], self.pos[2])
189 | light_obj.rotation_mode = 'XYZ'
190 | light_obj.rotation_euler = Euler((math.radians(self.rot[0]), math.radians(self.rot[1]), math.radians(self.rot[2])), 'XYZ')
191 |         collection.objects.link(light_obj)
192 |         # New objects start unlinked, so linking them to the per-map collection is enough.
193 |
194 |
195 | # SCRIPT STARTS DOING STUFF HERE
196 | for map_path in map_json:
197 |     print('Processing file', map_path)
198 |
199 |     if not os.path.exists(map_path):
200 |         print('File not found, skipping.', map_path)
201 |         continue
202 |
203 |     json_filename = os.path.basename(map_path)
204 |     import_collection = bpy.data.collections.new(json_filename)
205 |
206 |     bpy.context.scene.collection.children.link(import_collection)
207 |
208 |     with open(map_path) as file:
209 | json_object = json.load(file)
210 | print("-------------============================-------------")
211 |
212 | # Handle the different entity types
213 | for entity in json_object:
214 | if not entity.get('Type', None):
215 | continue
216 |
217 | if import_lights and entity.get('Type') in light_types:
218 | print(entity)
219 | light = GameLight(entity)
220 | light.import_light(import_collection)
221 |
222 | if import_static and entity.get('Type') in static_mesh_types:
223 | static_mesh = StaticMesh(entity, base_dir)
224 | # TODO: optimize by instancing certain meshes
225 | static_mesh.import_staticmesh(import_collection)
226 | continue
227 | print('Done.')
--------------------------------------------------------------------------------