├── .gitignore ├── modules ├── ddsconv │ ├── directx │ │ ├── libtexconv.so │ │ ├── texconv.dll │ │ ├── libtexconv.dylib │ │ ├── util.py │ │ ├── dxgi_format.py │ │ ├── texconv.py │ │ └── dds.py │ └── credit.txt ├── gdeflate │ ├── GDeflateWrapper.dll │ ├── libGDeflateWrapper.so │ └── gdeflate.py ├── tex │ ├── enums │ │ ├── game_version_enum.py │ │ ├── legacy_mappings.py │ │ ├── dds_bpps.py │ │ ├── scanline_minima.py │ │ ├── dxgi_format_enum.py │ │ └── tex_format_enum.py │ ├── ui_re_tex_panels.py │ ├── tex_math.py │ ├── format_ops.py │ ├── re_tex_operators.py │ ├── blender_re_tex.py │ ├── re_tex_utils.py │ └── file_re_tex.py ├── fbxskel │ ├── re_fbxskel_propertyGroups.py │ ├── re_fbxskel_operators.py │ ├── blender_re_fbxskel.py │ └── file_re_fbxskel.py ├── mesh │ ├── ui_re_mesh_panels.py │ ├── re_mesh_propertyGroups.py │ └── re_mesh_export_errors.py ├── hashing │ └── mmh3 │ │ └── pymmh3.py ├── dds │ └── file_dds.py ├── mdf │ ├── re_mdf_operators.py │ ├── re_mdf_presets.py │ └── ui_re_mdf_panels.py ├── blender_utils.py └── gen_functions.py └── testMPLYPlot.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | # Spyder project settings 3 | .spyderproject 4 | .spyproject 5 | -------------------------------------------------------------------------------- /modules/ddsconv/directx/libtexconv.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NSACloud/RE-Mesh-Editor/HEAD/modules/ddsconv/directx/libtexconv.so -------------------------------------------------------------------------------- /modules/ddsconv/directx/texconv.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NSACloud/RE-Mesh-Editor/HEAD/modules/ddsconv/directx/texconv.dll -------------------------------------------------------------------------------- /modules/gdeflate/GDeflateWrapper.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NSACloud/RE-Mesh-Editor/HEAD/modules/gdeflate/GDeflateWrapper.dll -------------------------------------------------------------------------------- /modules/gdeflate/libGDeflateWrapper.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NSACloud/RE-Mesh-Editor/HEAD/modules/gdeflate/libGDeflateWrapper.so -------------------------------------------------------------------------------- /modules/ddsconv/credit.txt: -------------------------------------------------------------------------------- 1 | Credit to matyalatte for custom texconv dll and dds loading code 2 | https://github.com/matyalatte/Blender-DDS-Addon -------------------------------------------------------------------------------- /modules/ddsconv/directx/libtexconv.dylib: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NSACloud/RE-Mesh-Editor/HEAD/modules/ddsconv/directx/libtexconv.dylib -------------------------------------------------------------------------------- /modules/tex/enums/game_version_enum.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 4 | gameNameToTexVersionDict = { 5 | "DMC5":11, 6 | "RE2":10, 7 | "RE3":190820018, 8 | "MHR":28, 9 | "MHRSB":28, 10 | "RE8":30, 11 | "RE2RT":34, 12 | "RE3RT":34, 13 | "RE7RT":35, 14 | "RE4":143221013, 15 | "SF6":241101895, 16 | "DD2":760230703, 17 | "KG":231106777, 18 | "DR":240606151, 19 | "MHWILDS":241106027, 20 | "ONI2":240701001, 21 | "PRAG":250813143, 22 | } -------------------------------------------------------------------------------- /modules/ddsconv/directx/util.py: -------------------------------------------------------------------------------- 1 | """Utils for I/O.""" 2 | 3 | import os 4 | import platform 5 | 6 | 7 | def mkdir(directory): 8 | """Make directory.""" 9 | os.makedirs(directory, exist_ok=True) 10 | 11 | 12 | def get_ext(file): 13 | """Get file extension.""" 14 | return file.split('.')[-1].lower() 15 | 16 | 17 | def get_size(f): 18 | pos = f.tell() 19 | f.seek(0, 2) 20 | size = f.tell() 21 | f.seek(pos) 22 | return size 23 | 24 | 25 | def get_os_name(): 26 | return platform.system() 27 | 28 | 29 | def is_windows(): 30 | return get_os_name() == 'Windows' 31 | 32 | 33 | def is_linux(): 34 | return get_os_name() == 'Linux' 35 | 36 | 37 | def is_mac(): 38 | return get_os_name() == 'Darwin' 39 | 40 | 41 | def is_arm(): 42 | return 'arm' in platform.machine().lower() 43 | -------------------------------------------------------------------------------- /modules/tex/enums/legacy_mappings.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Fri Mar 7 17:34:42 2025 4 | 5 | @author: Asterisk 6 | """ 7 | 8 | #DDS To Tex 9 | DXT1 = 827611204 # 0x31545844 10 | DXT2 = 844388420 # 0x32545844 11 | DXT3 = 861165636 # 0x33545844 12 | DXT4 = 877942852 # 0x34545844 13 | DXT5 = 894720068 # 0x35545844 14 | ATI1 = 826889281 15 | ATI2 = 843666497 16 | BC4U = 1429488450 17 | BC4S = 1395934018 18 | BC5U = 1429553986 19 | BC5S = 1395999554 20 | DX10 = 808540228 # 0x30315844 21 | CCCC = 1128481603 22 | NULL = 0 23 | 24 | legacyMapping = { 25 | DXT1: "BC1UNORM", 26 | DXT2: "BC2UNORM", 27 | DXT3: "BC2UNORMSRGB", 28 | DXT4: "BC3UNORM", 29 | DXT5: "BC3UNORMSRGB", 30 | ATI1: "BC4UNORM", 31 | ATI2: "BC5UNORM", 32 | BC4U: "BC4UNORM", 33 | BC4S: "BC4SNORM", 34 | BC5U: "BC5UNORM", 35 | BC5S: "BC5SNORM", 36 | } -------------------------------------------------------------------------------- /modules/fbxskel/re_fbxskel_propertyGroups.py: -------------------------------------------------------------------------------- 1 | #Author: NSA Cloud 2 | import bpy 3 | from bpy.props import (StringProperty, 4 | BoolProperty, 5 | IntProperty, 6 | FloatProperty, 7 | FloatVectorProperty, 8 | EnumProperty, 9 | PointerProperty, 10 | CollectionProperty, 11 | ) 12 | 13 | 14 | 15 | class ToggleStringPropertyGroup(bpy.types.PropertyGroup): 16 | enabled: BoolProperty( 17 | name="", 18 | description = "", 19 | default = True 20 | ) 21 | name: StringProperty( 22 | name="", 23 | description = "", 24 | ) 25 | 26 | 27 | class FBXSKEL_UL_ObjectCheckList(bpy.types.UIList): 28 | 29 | def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index): 30 | layout.prop(item,"enabled") 31 | layout.label(text = item.name) 32 | 33 | def invoke(self, context, event): 34 | return {'PASS_THROUGH'} 35 | -------------------------------------------------------------------------------- /modules/tex/ui_re_tex_panels.py: -------------------------------------------------------------------------------- 1 | import bpy 2 | 3 | from bpy.types import (Panel, 4 | Menu, 5 | Operator, 6 | PropertyGroup, 7 | ) 8 | 9 | 10 | class OBJECT_PT_TexConversionPanel(Panel): 11 | bl_label = "RE Tex Conversion" 12 | bl_idname = "OBJECT_PT_tex_tools_panel" 13 | bl_space_type = "VIEW_3D" 14 | bl_region_type = "UI" 15 | bl_category = "RE Mesh" 16 | bl_context = "objectmode" 17 | 18 | @classmethod 19 | def poll(self,context): 20 | return context is not None and "HIDE_RE_MDF_EDITOR_TAB" not in context.scene 21 | 22 | def draw(self, context): 23 | layout = self.layout 24 | scene = context.scene 25 | re_mdf_toolpanel = scene.re_mdf_toolpanel 26 | row = layout.row() 27 | row.emboss = "PULLDOWN_MENU" 28 | row.label(text="Drag files onto the 3D view to convert.") 29 | layout.operator("re_tex.convert_tex_dds_files") 30 | 31 | layout.label(text = "Convert Image Directory") 32 | layout.prop(re_mdf_toolpanel, "textureDirectory") 33 | layout.operator("re_tex.convert_tex_directory") 34 | #layout.prop(re_mdf_toolpanel, "createStreamingTextures") 35 | layout.prop(re_mdf_toolpanel, "openConvertedFolder") 36 | layout.operator("re_tex.copy_converted_tex") 37 | 38 | -------------------------------------------------------------------------------- /modules/tex/tex_math.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Fri Mar 7 17:35:09 2025 4 | 5 | @author: Asterisk 6 | """ 7 | 8 | #Size of a texture packet in bytes 9 | packetSize = 16 10 | 11 | #Round up Divide 12 | def ruD(x,y): 13 | """ Rounded Up Integer Division 14 | Parameters 15 | ---------- 16 | x : Int 17 | Dividend 18 | y : Int 19 | Divisor 20 | 21 | Returns 22 | ---------- 23 | quotient : Int 24 | Result of operation 25 | """ 26 | return (x+y-1)//y 27 | 28 | #Round up to Next Multiple 29 | def ruNX(x,y): 30 | """ Round Up to Next Multiple 31 | Parameters 32 | ---------- 33 | x : Int 34 | Number to Round Up 35 | y : Int 36 | Target which we want a multiple off greater than x 37 | 38 | Returns 39 | ---------- 40 | x_prime : Int 41 | Smallest value of a such that a*y >= x 42 | """ 43 | return ruD(x,y)*y 44 | 45 | def product(listing): 46 | """ Multiplies all of the list inputs 47 | Parameters 48 | ---------- 49 | listing : List 50 | Inputs to be foldl with (*) 51 | 52 | Returns 53 | ---------- 54 | prod : Int 55 | Result of multiplying all of the list elements sequentially 56 | """ 57 | cur = 1 58 | for element in listing: 59 | cur *= element 60 | return cur 61 | 62 | def bitCount(int32): 63 | """ Counts number of bits set 64 | Parameters 65 | ---------- 66 | int32 : Int 67 | Int to count number of bits set 68 | 69 | Returns 70 | ---------- 71 | sum : Int 72 | Number of Bits set 73 | """ 74 | return sum(((int32 >> i) & 1 for i in range(32))) 75 | 76 | def dotDivide(num,denom): 77 | """ Component-wise Vector pair division 78 | Parameters 79 | ---------- 80 | num : Float [N] 81 | Numerator Vector 82 | denom : Float [N] 83 | Denominator Vector 84 | Returns 85 | ---------- 86 | result : Float [N] 87 | Result vector 88 | """ 89 | return tuple([ruD(vl,vr) for vl,vr in zip(num,denom)]) -------------------------------------------------------------------------------- /modules/tex/enums/dds_bpps.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Fri Mar 7 16:26:14 2025 4 | 5 | @author: Asterisk 6 | """ 7 | 8 | ddsBpps = { 9 | "UNKNOWN":0, 10 | "R32G32B32A32TYPELESS":128, 11 | "R32G32B32A32FLOAT":128, 12 | "R32G32B32A32UINT":128, 13 | "R32G32B32A32SINT":128, 14 | "R32G32B32TYPELESS":96, 15 | "R32G32B32FLOAT":96, 16 | "R32G32B32UINT":96, 17 | "R32G32B32SINT":96, 18 | "R16G16B16A16TYPELESS":64, 19 | "R16G16B16A16FLOAT":64, 20 | "R16G16B16A16UNORM":64, 21 | "R16G16B16A16UINT":64, 22 | "R16G16B16A16SNORM":64, 23 | "R16G16B16A16SINT":64, 24 | "R32G32TYPELESS":64, 25 | "R32G32FLOAT":64, 26 | "R32G32UINT":64, 27 | "R32G32SINT":64, 28 | "R32G8X24TYPELESS":64, 29 | "D32FLOATS8X24UINT":64, 30 | "R32FLOATX8X24TYPELESS":64, 31 | "X32TYPELESSG8X24UINT":64, 32 | "R10G10B10A2TYPELESS":32, 33 | "R10G10B10A2UNORM":32, 34 | "R10G10B10A2UINT":32, 35 | "R11G11B10FLOAT":32, 36 | "R8G8B8A8TYPELESS":32, 37 | "R8G8B8A8UNORM":32, 38 | "R8G8B8A8UNORMSRGB":32, 39 | "R8G8B8A8UINT":32, 40 | "R8G8B8A8SNORM":32, 41 | "R8G8B8A8SINT":32, 42 | "R16G16TYPELESS":32, 43 | "R16G16FLOAT":32, 44 | "R16G16UNORM":32, 45 | "R16G16UINT":32, 46 | "R16G16SNORM":32, 47 | "R16G16SINT":32, 48 | "R32TYPELESS":32, 49 | "D32FLOAT":32, 50 | "R32FLOAT":32, 51 | "R32UINT":32, 52 | "R32SINT":32, 53 | "R24G8TYPELESS":32, 54 | "D24UNORMS8UINT":32, 55 | "R24UNORMX8TYPELESS":32, 56 | "X24TYPELESSG8UINT":32, 57 | "R8G8TYPELESS":16, 58 | "R8G8UNORM":16, 59 | "R8G8UINT":16, 60 | "R8G8SNORM":16, 61 | "R8G8SINT":16, 62 | "R16TYPELESS":16, 63 | "R16FLOAT":16, 64 | "D16UNORM":16, 65 | "R16UNORM":16, 66 | "R16UINT":16, 67 | "R16SNORM":16, 68 | "R16SINT":16, 69 | "R8TYPELESS":8, 70 | "R8UNORM":8, 71 | "R8UINT":8, 72 | "R8SNORM":8, 73 | "R8SINT":8, 74 | "A8UNORM":8, 75 | "R1UNORM":1, 76 | "R9G9B9E5SHAREDEXP":32, 77 | "R8G8B8G8UNORM":16, 78 | "G8R8G8B8UNORM":16, 79 | "B5G6R5UNORM":16, 80 | "B5G5R5A1UNORM":16, 81 | "B8G8R8A8UNORM":32, 82 | "B8G8R8X8UNORM":32, 83 | "B8G8R8A8TYPELESS":32, 84 | "R10G10B10XRBIASA2UNORM":32, 85 | "B8G8R8A8UNORMSRGB":32, 86 | "B8G8R8X8TYPELESS":32, 87 | "B8G8R8X8UNORMSRGB":32, 88 | "AYUV":32, 89 | "Y410":10, 90 | "Y416":16, 91 | "NV12":12, 92 | "P010":10, 93 | "P016":16, 94 | "DXGIFORMAT420OPAQUE":20, 95 | "YUY2":16, 96 | "Y210":10, 97 | "Y216":16, 98 | "NV11":11, 99 | "AI44":44, 100 | "IA44":44, 101 | "P8":8, 102 | "A8P8":16, 103 | "B4G4R4A4UNORM":16, 104 | "P208":8, 105 | "V208":8, 106 | "V408":8, 107 | "BC1TYPELESS":4, 108 | "BC1UNORM":4, 109 | "BC1UNORMSRGB":4, 110 | "BC2TYPELESS":8, 111 | "BC2UNORM":8, 112 | "BC2UNORMSRGB":8, 113 | "BC3TYPELESS":8, 114 | "BC3UNORM":8, 115 | "BC3UNORMSRGB":8, 116 | "BC4TYPELESS":4, 117 | "BC4UNORM":4, 118 | "BC4SNORM":4, 119 | "BC5TYPELESS":8, 120 | "BC5UNORM":8, 121 | "BC5SNORM":8, 122 | "BC6HTYPELESS":8, 123 | "BC6HUF16":8, 124 | "BC6HSF16":8, 125 | "BC7TYPELESS":8, 126 | "BC7UNORM":8, 127 | "BC7UNORMSRGB":8, 128 | } -------------------------------------------------------------------------------- /modules/mesh/ui_re_mesh_panels.py: -------------------------------------------------------------------------------- 1 | import bpy 2 | 3 | from bpy.types import (Panel, 4 | Menu, 5 | Operator, 6 | PropertyGroup, 7 | ) 8 | 9 | 10 | def tag_redraw(context, space_type="PROPERTIES", region_type="WINDOW"): 11 | for window in context.window_manager.windows: 12 | for area in window.screen.areas: 13 | if area.spaces[0].type == space_type: 14 | for region in area.regions: 15 | if region.type == region_type: 16 | region.tag_redraw() 17 | 18 | class OBJECT_PT_MeshObjectModePanel(Panel): 19 | bl_label = "RE Mesh Tools" 20 | bl_idname = "OBJECT_PT_mesh_tools_panel" 21 | bl_space_type = "VIEW_3D" 22 | bl_region_type = "UI" 23 | bl_category = "RE Mesh" 24 | bl_context = "objectmode" 25 | bl_options = {'DEFAULT_CLOSED'} 26 | 27 | @classmethod 28 | def poll(self,context): 29 | return context is not None and "HIDE_RE_MDF_EDITOR_TAB" not in context.scene 30 | 31 | def draw(self, context): 32 | layout = self.layout 33 | scene = context.scene 34 | re_mdf_toolpanel = scene.re_mdf_toolpanel 35 | layout.operator("re_mesh.create_mesh_collection") 36 | layout.operator("re_mesh.rename_meshes") 37 | layout.operator("re_mesh.delete_loose") 38 | #TODO add solve repeated uvs again 39 | #layout.operator("re_mesh.solve_repeated_uvs") 40 | layout.operator("re_mesh.remove_zero_weight_vertex_groups") 41 | layout.operator("re_mesh.limit_total_normalize") 42 | layout.operator("re_mesh.batch_exporter") 43 | 44 | class OBJECT_PT_MeshArmatureToolsPanel(Panel): 45 | bl_label = "Armature Tools" 46 | bl_idname = "OBJECT_PT_mesh_armature_tools_panel" 47 | bl_parent_id = "OBJECT_PT_mesh_tools_panel" # Specify the ID of the parent panel 48 | bl_space_type = "VIEW_3D" 49 | bl_region_type = "UI" 50 | bl_category = "RE Mesh" 51 | bl_options = {'DEFAULT_CLOSED'} 52 | 53 | def draw(self, context): 54 | layout = self.layout 55 | obj = context.active_object 56 | re_mdf_toolpanel = context.scene.re_mdf_toolpanel 57 | layout.operator("re_fbxskel.link_armature_bones") 58 | layout.operator("re_fbxskel.clear_bone_linkages") 59 | class OBJECT_PT_REAssetExtensionPanel(Panel): 60 | bl_label = "RE Asset Extensions" 61 | bl_idname = "OBJECT_PT_re_asset_extension_panel" 62 | bl_space_type = "VIEW_3D" 63 | bl_region_type = "UI" 64 | bl_category = "RE Mesh" 65 | bl_context = "objectmode" 66 | 67 | @classmethod 68 | def poll(self,context): 69 | return context is not None and "HIDE_RE_MDF_EDITOR_TAB" not in context.scene 70 | 71 | def draw(self, context): 72 | layout = self.layout 73 | scene = context.scene 74 | if hasattr(bpy.types, "OBJECT_PT_re_pak_panel"): 75 | try: 76 | layout.operator("re_asset.create_pak_patch") 77 | except: 78 | pass 79 | if hasattr(bpy.types, "RE_ASSET_OT_unpack_mod_pak"): 80 | try: 81 | layout.operator("re_asset.unpack_mod_pak") 82 | except: 83 | pass 84 | 85 | if hasattr(bpy.types, "RE_ASSET_OT_batch_mdf_updater"): 86 | 87 | try: 88 | layout.operator("re_asset.blender_mdf_updater") 89 | layout.operator("re_asset.batch_mdf_updater") 90 | except: 91 | pass 92 | 93 | if hasattr(bpy.types, "RE_ASSET_OT_batch_rsz_updater"): 94 | try: 95 | layout.operator("re_asset.batch_rsz_updater") 96 | except: 97 | pass 98 | else: 99 | layout.label(text="Update RE Asset Library for more options.") 100 | -------------------------------------------------------------------------------- /modules/hashing/mmh3/pymmh3.py: -------------------------------------------------------------------------------- 1 | #murmur3 hash algorithm 2 | #Credit to Darkness for adapting this 3 | def hashUTF8(key, seed=0xffffffff): 4 | key = bytearray(key, 'utf8') 5 | 6 | def fmix(h): 7 | h ^= h >> 16 8 | h = (h * 0x85ebca6b) & 0xFFFFFFFF 9 | h ^= h >> 13 10 | h = (h * 0xc2b2ae35) & 0xFFFFFFFF 11 | h ^= h >> 16 12 | return h 13 | 14 | length = len(key) 15 | nblocks = int(length / 4) 16 | 17 | h1 = seed 18 | 19 | c1 = 0xcc9e2d51 20 | c2 = 0x1b873593 21 | 22 | for block_start in range(0, nblocks * 4, 4): 23 | k1 = key[block_start + 3] << 24 | \ 24 | key[block_start + 2] << 16 | \ 25 | key[block_start + 1] << 8 | \ 26 | key[block_start + 0] 27 | 28 | k1 = (c1 * k1) & 0xFFFFFFFF 29 | k1 = (k1 << 15 | k1 >> 17) & 0xFFFFFFFF 30 | k1 = (c2 * k1) & 0xFFFFFFFF 31 | 32 | h1 ^= k1 33 | h1 = (h1 << 13 | h1 >> 19) & 0xFFFFFFFF 34 | h1 = (h1 * 5 + 0xe6546b64) & 0xFFFFFFFF 35 | 36 | tail_index = nblocks * 4 37 | k1 = 0 38 | tail_size = length & 3 39 | 40 | if tail_size >= 3: 41 | k1 ^= key[tail_index + 2] << 16 42 | if tail_size >= 2: 43 | k1 ^= key[tail_index + 1] << 8 44 | if tail_size >= 1: 45 | k1 ^= key[tail_index + 0] 46 | 47 | if tail_size > 0: 48 | k1 = (k1 * c1) & 0xFFFFFFFF 49 | k1 = (k1 << 15 | k1 >> 17) & 0xFFFFFFFF 50 | k1 = (k1 * c2) & 0xFFFFFFFF 51 | h1 ^= k1 52 | 53 | unsigned_val = fmix(h1 ^ length) 54 | if unsigned_val & 0x80000000 == 0: 55 | return unsigned_val 56 | else: 57 | return -((unsigned_val ^ 0xFFFFFFFF) + 1) & 0xFFFFFFFF 58 | 59 | 60 | def hashUTF16Old(key, seed=0xffffffff): 61 | key_temp = '' 62 | 63 | for char in key: 64 | key_temp += char + '\x00' 65 | 66 | return hashUTF8(key_temp, seed) 67 | 68 | #About 19% faster 69 | def hashUTF16(key, seed=0xffffffff): 70 | key = bytearray(key, 'utf-16le') 71 | 72 | def fmix(h): 73 | h ^= h >> 16 74 | h = (h * 0x85ebca6b) & 0xFFFFFFFF 75 | h ^= h >> 13 76 | h = (h * 0xc2b2ae35) & 0xFFFFFFFF 77 | h ^= h >> 16 78 | return h 79 | 80 | length = len(key) 81 | nblocks = length // 4 82 | 83 | h1 = seed 84 | c1 = 0xcc9e2d51 85 | c2 = 0x1b873593 86 | 87 | for block_start in range(0, nblocks * 4, 4): 88 | k1 = (key[block_start] | 89 | key[block_start + 1] << 8 | 90 | key[block_start + 2] << 16 | 91 | key[block_start + 3] << 24) 92 | 93 | k1 = (k1 * c1) & 0xFFFFFFFF 94 | k1 = ((k1 << 15) | (k1 >> 17)) & 0xFFFFFFFF 95 | k1 = (k1 * c2) & 0xFFFFFFFF 96 | 97 | h1 ^= k1 98 | h1 = ((h1 << 13) | (h1 >> 19)) & 0xFFFFFFFF 99 | h1 = (h1 * 5 + 0xe6546b64) & 0xFFFFFFFF 100 | 101 | tail_index = nblocks * 4 102 | k1 = 0 103 | tail_size = length & 3 104 | 105 | if tail_size >= 3: 106 | k1 ^= key[tail_index + 2] << 16 107 | if tail_size >= 2: 108 | k1 ^= key[tail_index + 1] << 8 109 | if tail_size >= 1: 110 | k1 ^= key[tail_index] 111 | 112 | if tail_size > 0: 113 | k1 = (k1 * c1) & 0xFFFFFFFF 114 | k1 = ((k1 << 15) | (k1 >> 17)) & 0xFFFFFFFF 115 | k1 = (k1 * c2) & 0xFFFFFFFF 116 | h1 ^= k1 117 | 118 | unsigned_val = fmix(h1 ^ length) 119 | 120 | return unsigned_val if unsigned_val & 0x80000000 == 0 else -((unsigned_val ^ 0xFFFFFFFF) + 1) & 0xFFFFFFFF -------------------------------------------------------------------------------- /testMPLYPlot.py: -------------------------------------------------------------------------------- 1 | #Author: NSA Cloud 2 | from modules.mesh.file_re_mesh import readREMesh,writeREMesh,ParsedREMeshToREMesh 3 | from modules.mesh.re_mesh_parse import ParsedREMesh,Skeleton,LODLevel,VisconGroup 4 | 5 | #Mesh plotting script 6 | 7 | 8 | #--- Settings 9 | #RE4 leon body 10 | #INPUT = r"D:\EXTRACT\RE4_EXTRACT\re_chunk_000\natives\STM\_Chainsaw\Character\ch\cha0\cha000\00\cha000_00.mesh.221108797" 11 | 12 | #SF6 streaming stage mesh 13 | #INPUT = r"D:\EXTRACT\SF6_EXTRACT\re_chunk_000\natives\STM\Product\Environment\Props\Resource\sm3X\sm36\sm36_034_crane\sm36_034_crane_00.mesh.230110883" 14 | 15 | 16 | #MHWilds streaming body mesh 17 | #INPUT = r"D:\EXTRACT\MHWILDS_EXTRACT\re_chunk_000\natives\STM\Art\Model\Character\ch03\006\001\2\ch03_006_0012.mesh.241111606" 18 | 19 | #KG MPLY stage mesh 20 | #INPUT = r"D:\EXTRACT\KG_EXTRACT\re_chunk_000\natives\stm\environment\st\st22\st22_001\st22_001.mesh.240306278" 21 | 22 | #MHWilds MPLY totem pole pillar mesh 23 | #INPUT = r"D:\EXTRACT\MHWILDS_EXTRACT\re_chunk_000\natives\STM\Art\Model\StageModel\sm43\sm43_381\sm43_381_00.mesh.241111606" 24 | 25 | #MHWilds MPLY small statue mesh 26 | INPUT = r"D:\EXTRACT\MHWILDS_EXTRACT\re_chunk_000\natives\STM\Art\Model\StageModel\sm42\sm42_350\sm42_350_00.mesh.241111606" 27 | 28 | allLODs = True 29 | allGroups = True 30 | allSubmeshes = True 31 | 32 | maxMeshPlots = 999#Stop plotting if this amount of meshes is reached, plots can take a long time so this limits it 33 | 34 | solid = True#Shading type 35 | 36 | lodIndex = 0 37 | groupIndex = 0 38 | submeshIndex = 0 39 | 40 | #--- 41 | 42 | 43 | from mpl_toolkits.mplot3d import Axes3D 44 | import matplotlib.pyplot as plt 45 | import numpy as np 46 | 47 | reMesh = readREMesh(INPUT) 48 | 49 | 50 | if reMesh.meshBufferHeader != None and len(reMesh.meshBufferHeader.streamingBufferHeaderList) != 0: 51 | for bufferIndex,buffer in enumerate(reMesh.meshBufferHeader.streamingBufferHeaderList): 52 | print(f"Streaming Buffer {bufferIndex}") 53 | for key,value in buffer.__dict__.items(): 54 | if key != "vertexBuffer" and key != "faceBuffer": 55 | print(f"\t{key}:{value}") 56 | for element in buffer.vertexElementList: 57 | for key,value in element.__dict__.items(): 58 | print(f"\t{key}:{value}") 59 | print() 60 | 61 | parseMesh = ParsedREMesh() 62 | parseMesh.ParseREMesh(reMesh) 63 | #print(parseMesh.mainMeshLODList[0].visconGroupList[0].subMeshList[0].faceList) 64 | 65 | if allLODs: 66 | lodList = parseMesh.mainMeshLODList 67 | else: 68 | lodList = [parseMesh.mainMeshLODList[lodIndex]] 69 | plottedMeshes = 0 70 | for lodIndex, lod in enumerate(lodList): 71 | 72 | if allGroups: 73 | groupList = parseMesh.mainMeshLODList[lodIndex].visconGroupList 74 | else: 75 | groupList = [parseMesh.mainMeshLODList[lodIndex].visconGroupList[groupIndex]] 76 | if plottedMeshes >= maxMeshPlots: 77 | break 78 | for groupIndex, group in enumerate(groupList): 79 | if plottedMeshes >= maxMeshPlots: 80 | break 81 | if allSubmeshes: 82 | submeshList = parseMesh.mainMeshLODList[lodIndex].visconGroupList[groupIndex].subMeshList 83 | else: 84 | submeshList = [parseMesh.mainMeshLODList[lodIndex].visconGroupList[groupIndex].subMeshList[submeshIndex]] 85 | for submeshIndex, submesh in enumerate(submeshList): 86 | verts = np.array(submesh.vertexPosList) 87 | 88 | 89 | faces = parseMesh.mainMeshLODList[lodIndex].visconGroupList[groupIndex].subMeshList[submeshIndex].faceList 90 | x = verts[:,0] 91 | y = verts[:,1] 92 | z = verts[:,2] * -1#Flip 93 | 94 | ax = plt.axes(projection = "3d") 95 | 96 | #Z up 97 | ax.plot_trisurf(x, z, y, triangles = faces, edgecolor=[[0,0,0]], linewidth=0.15, alpha= 1.0 if solid else 0.0, shade=solid) 98 | ax.set_title(f"LOD_{lodIndex}_Group_{groupIndex}_Sub_{submeshIndex}") 99 | plt.show() 100 | 101 | plottedMeshes += 1 102 | if plottedMeshes >= maxMeshPlots: 103 | break 104 | print("done") 105 | -------------------------------------------------------------------------------- /modules/tex/format_ops.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Fri Mar 7 18:43:54 2025 4 | 5 | @author: Asterisk 6 | """ 7 | import re 8 | from .enums.legacy_mappings import legacyMapping 9 | from .enums import dxgi_format_enum as dxgienum 10 | from .enums import scanline_minima as scMin 11 | from . import tex_math as tmath 12 | 13 | 14 | AstcRegex = re.compile("(ASTC)([0-9]+)X([0-9]+)(.*)") 15 | BCRegex = re.compile("(BC[0-9]+H?)(.*)") 16 | RGBRegex = re.compile("([RGBAX][0-9]+)?"*5+"(.*)") 17 | RGBChannel = re.compile("([RGBAX])([0-9]+)") 18 | 19 | #Texture packets in dds by standard are 16 bytes long 20 | packetSize = 16 21 | 22 | def getBCBPP(BC): 23 | BC = BC.upper() 24 | #print("FE: "+BC) 25 | if "BC1" in BC: return 8 26 | if "BC2" in BC: return 16 27 | if "BC3" in BC: return 16 28 | if "BC4" in BC: return 8 29 | if "BC5" in BC: return 16 30 | if "BC6H" in BC: return 16 31 | if "BC7" in BC: return 16 32 | 33 | def decomposeRGBFormat(rgb): 34 | channels = [] 35 | bitlen = 0 36 | for g in rgb.groups()[:-1]: 37 | if g: 38 | c,s = RGBChannel.match(str(g)).groups() 39 | channels.append((c,int(s))) 40 | bitlen += int(s) 41 | return bitlen,channels 42 | 43 | 44 | class FormatData(): 45 | """ Container for Format Texel Information 46 | Members: 47 | tx : Int 48 | Number of horizontal pixels per texel. Texel x dimension. 49 | ty : Int 50 | Number of vertical pixels per texel. Texel y dimension. 51 | bitlen : Int 52 | Number of bits a texel occupies. 53 | bytelen : Int 54 | Number of bytes required to store a single texel. 55 | If a texel bitlength is not byte aligned, then it rounds up. 56 | formatBase : Str 57 | Specifies the format family (ASTC, BC, R/G/B/A) 58 | formatColor : 59 | Specifies the color format (Eg: BC1Unorm -> Unorm) 60 | scanlineMinima : 61 | Minimum size in bytes for a capcom scanline of the format. 62 | """ 63 | def __init__(self,formatString): 64 | tx,ty,bl,Bl,fb,fs = _packetSizeData(formatString) 65 | fmin = scanlineMinima(formatString) 66 | self.tx = tx 67 | self.ty = ty 68 | self.bitlen = bl 69 | self.bytelen = Bl 70 | self.formatBase = fb 71 | self.formatColor = fs 72 | self.scanlineMinima = fmin 73 | @property 74 | def texelSize(self): 75 | return self.tx,self.ty 76 | @property 77 | def pixelPerPacket(self): 78 | #(packetSize*8)//bitlen*texelX,texelY 79 | return packetSize//self.bytelen*self.tx,self.ty 80 | 81 | def _packetSizeData(formatString): 82 | '''X Pixel Count, Y Pixel Count, Bitcount, Bytecount''' 83 | astc = AstcRegex.match(formatString) 84 | if astc: 85 | ASTC,bx,by,f = astc.groups() 86 | return bx,by,128,128//8,ASTC,f 87 | bc = BCRegex.match(formatString) 88 | if bc: 89 | BC,f = bc.groups() 90 | lbytes = getBCBPP(BC) 91 | return 4,4,lbytes*8,lbytes,BC,f 92 | rgb = RGBRegex.match(formatString) 93 | if rgb: 94 | bitlen,channels = decomposeRGBFormat(rgb) 95 | bytelen = (bitlen + 7)//8 96 | return 1,1,bitlen,bytelen,channels,rgb.groups()[-1] 97 | 98 | def packetSizeData(formatString): 99 | return FormatData(formatString) 100 | 101 | def scanlineMinima(formatString): 102 | '''X Pixel Count, Y Pixel Count, Bitcount, Bytecount''' 103 | return scMin.formatScanlineMinima.get(formatString,256) 104 | 105 | def buildFormatString(header): 106 | pixelFormat = header.ddpfPixelFormat 107 | fourCC = pixelFormat.dwFourCC 108 | if fourCC == 808540228:#DX10 109 | return dxgienum.DXGIToFormatStringDict[header.dx10Header.dxgiFormat] 110 | elif fourCC in legacyMapping: 111 | return legacyMapping[fourCC].replace("_", "") 112 | else: 113 | Rbc = tmath.bitCount(pixelFormat.dwRBitMask) 114 | Gbc = tmath.bitCount(pixelFormat.dwGBitMask) 115 | Bbc = tmath.bitCount(pixelFormat.dwBBitMask) 116 | Abc = tmath.bitCount(pixelFormat.dwABitMask) 117 | R = (pixelFormat.dwRBitMask, "R", "%d" % Rbc) 118 | G = (pixelFormat.dwGBitMask, "G", "%d" % Gbc) 119 | B = (pixelFormat.dwBBitMask, "B", "%d" % Bbc) 120 | A = (pixelFormat.dwABitMask, "A", "%d" % Abc) 121 | RGBA = ''.join([channel_code + bit_count for mask, 122 | channel_code, bit_count in sorted([R, G, B, A]) if mask]) 123 | knownBits = sum([Rbc, Gbc, Bbc, Abc]) 124 | if knownBits < pixelFormat.dwRGBBitCount: 125 | fill = "A" if RGBA[0] == "R" else "X" 126 | RGBA += "%s%d" % (fill, pixelFormat.dwRGBBitCount - knownBits) 127 | return RGBA+"UNORM" -------------------------------------------------------------------------------- /modules/mesh/re_mesh_propertyGroups.py: -------------------------------------------------------------------------------- 1 | #Author: NSA Cloud 2 | import bpy 3 | import os 4 | from bpy.props import (StringProperty, 5 | BoolProperty, 6 | IntProperty, 7 | FloatProperty, 8 | FloatVectorProperty, 9 | EnumProperty, 10 | PointerProperty, 11 | CollectionProperty, 12 | ) 13 | 14 | 15 | def update_relPathToAbs(self,context): 16 | try: 17 | if "//" in self.path: 18 | #print("updated path") 19 | self.path = os.path.realpath(bpy.path.abspath(self.path)) 20 | except: 21 | pass 22 | if self.path == "" or self.path.count(".") < 2:#Check if path is empty or if number extension is missing 23 | self.invalid = True 24 | else: 25 | self.invalid = False 26 | class ExporterNodePropertyGroup(bpy.types.PropertyGroup): 27 | name: StringProperty( 28 | name="", 29 | description = "", 30 | ) 31 | icon: StringProperty( 32 | name="", 33 | description = "", 34 | ) 35 | enabled: BoolProperty( 36 | name="", 37 | description = "", 38 | default = True, 39 | 40 | ) 41 | show: BoolProperty( 42 | name="", 43 | description = "", 44 | default = True 45 | ) 46 | hasChild: BoolProperty( 47 | name="", 48 | description = "", 49 | default = False 50 | ) 51 | expand: BoolProperty( 52 | name="", 53 | description = "", 54 | default = True 55 | ) 56 | 57 | parentName: StringProperty( 58 | name="", 59 | description = "", 60 | default = "" 61 | ) 62 | hierarchyLevel: IntProperty( 63 | name="", 64 | description = "", 65 | default = 0 66 | ) 67 | exportType: StringProperty( 68 | name="", 69 | description = "", 70 | default = "" 71 | ) 72 | path: StringProperty( 73 | name="", 74 | subtype="FILE_PATH", 75 | description = "Path to where to export the file to", 76 | update = update_relPathToAbs 77 | ) 78 | invalid: BoolProperty( 79 | name="", 80 | description = "", 81 | default = False 82 | ) 83 | 84 | #mesh operator arguments 85 | 86 | exportAllLODs : BoolProperty( 87 | name = "Export All LODs", 88 | description = "Export all LODs. If disabled, only LOD0 will be exported. Note that LODs meshes must be grouped inside a collection for each level and that collection must be contained in another collection. See a mesh with LODs imported for reference on how it should look. A target collection must also be set", 89 | default = True) 90 | exportBlendShapes : BoolProperty( 91 | name = "Export Blend Shapes", 92 | description = "Exports blend shapes from mesh if present", 93 | default = True) 94 | rotate90 : BoolProperty( 95 | name = "Convert Z Up To Y Up", 96 | description = "Rotates objects 90 degrees for export. Leaving this option enabled is recommended", 97 | default = True) 98 | autoSolveRepeatedUVs : BoolProperty( 99 | name = "Auto Solve Repeated UVs", 100 | description = "Splits connected UV islands if present. The mesh format does not allow for multiple uvs assigned to a vertex.\nNOTE: This will modify the object and may slightly increase time taken to export", 101 | default = True) 102 | preserveSharpEdges : BoolProperty( 103 | name = "Split Sharp Edges", 104 | description = "Edge splits all edges marked as sharp to preserve them on the exported mesh.\nNOTE: This will modify the exported mesh", 105 | default = False) 106 | useBlenderMaterialName : BoolProperty( 107 | name = "Use Blender Material Names", 108 | description = "If left unchecked, the exporter will get the material names to be used from the end of each object name. For example, if a mesh is named LOD_0_Group_0_Sub_0__Shirts_Mat, the material name is Shirts_Mat. If this option is enabled, the material name will instead be taken from the first material assigned to the object", 109 | default = False) 110 | preserveBoneMatrices : BoolProperty( 111 | name = "Preserve Bone Matrices", 112 | description = "Export using the original matrices of the imported bones. Note that this option only applies armatures imported with this addon. Any newly added bones will have new matrices calculated", 113 | default = False) 114 | exportBoundingBoxes : BoolProperty( 115 | name = "Export Bounding Boxes", 116 | description = "Exports the original bounding boxes from the \"Import Bounding Boxes\" import option. New bounding boxes will be generated for any bones that do not have them", 117 | default = False) 118 | 119 | 120 | class MESH_UL_REExporterList(bpy.types.UIList): 121 | 122 | def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index): 123 | 124 | row = layout.row() 125 | if not item.hasChild and item.invalid: 126 | row.alert = True 127 | col1 = row.column() 128 | #col1.prop(item,"expand") 129 | col1.alignment = "RIGHT" 130 | col1.label(text=" | "*item.hierarchyLevel if item.hierarchyLevel != 0 else " ") 131 | if not item.hasChild: 132 | col2 = row.column() 133 | col2.prop(item,"enabled") 134 | col3 = row.column() 135 | col3.label(icon = item.icon,text=item.name) 136 | col4 = row.column() 137 | 138 | # Disable double-click to rename 139 | def invoke(self, context, event): 140 | return {'PASS_THROUGH'} 141 | -------------------------------------------------------------------------------- /modules/tex/enums/scanline_minima.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Fri Mar 7 16:59:10 2025 4 | 5 | @author: Asterisk 6 | """ 7 | """ 8 | import re 9 | 10 | def decomposeRGBFormat(rgb): 11 | channels = [] 12 | bitlen = 0 13 | for g in rgb.groups()[:-1]: 14 | if g: 15 | c,s = RGBChannel.match(str(g)).groups() 16 | channels.append((c,int(s))) 17 | bitlen += int(s) 18 | return bitlen,channels 19 | 20 | AstcRegex = re.compile("(ASTC)([0-9]+)X([0-9]+)(.*)") 21 | BCRegex = re.compile("(BC[0-9]+H?)(.*)") 22 | RGBRegex = re.compile("([RGBAX][0-9]+)?"*5+"(.*)") 23 | RGBChannel = re.compile("([RGBAX])([0-9]+)") 24 | def scanlineMinima(formatString): 25 | '''X Pixel Count, Y Pixel Count, Bitcount, Bytecount''' 26 | astc = AstcRegex.match(formatString) 27 | if astc: 28 | return 128//8 29 | bc = BCRegex.match(formatString) 30 | if bc: 31 | return 256 32 | rgb = RGBRegex.match(formatString) 33 | if rgb: 34 | bitlen,channels = decomposeRGBFormat(rgb) 35 | if bitlen > 32: 36 | return 256 37 | if len(channels) < 3: 38 | return 256 39 | else: 40 | return 32 41 | """ 42 | 43 | formatScanlineMinima = {'A8UNORM': 256, 44 | 'ASTC10X10TYPELESS': 16, 45 | 'ASTC10X10UNORM': 16, 46 | 'ASTC10X10UNORMSRGB': 16, 47 | 'ASTC10X5TYPELESS': 16, 48 | 'ASTC10X5UNORM': 16, 49 | 'ASTC10X5UNORMSRGB': 16, 50 | 'ASTC10X6TYPELESS': 16, 51 | 'ASTC10X6UNORM': 16, 52 | 'ASTC10X6UNORMSRGB': 16, 53 | 'ASTC10X8TYPELESS': 16, 54 | 'ASTC10X8UNORM': 16, 55 | 'ASTC10X8UNORMSRGB': 16, 56 | 'ASTC12X10TYPELESS': 16, 57 | 'ASTC12X10UNORM': 16, 58 | 'ASTC12X10UNORMSRGB': 16, 59 | 'ASTC12X12TYPELESS': 16, 60 | 'ASTC12X12UNORM': 16, 61 | 'ASTC12X12UNORMSRGB': 16, 62 | 'ASTC4X4TYPELESS': 16, 63 | 'ASTC4X4UNORM': 16, 64 | 'ASTC4X4UNORMSRGB': 16, 65 | 'ASTC5X4TYPELESS': 16, 66 | 'ASTC5X4UNORM': 16, 67 | 'ASTC5X4UNORMSRGB': 16, 68 | 'ASTC5X5TYPELESS': 16, 69 | 'ASTC5X5UNORM': 16, 70 | 'ASTC5X5UNORMSRGB': 16, 71 | 'ASTC6X5TYPELESS': 16, 72 | 'ASTC6X5UNORM': 16, 73 | 'ASTC6X5UNORMSRGB': 16, 74 | 'ASTC6X6TYPELESS': 16, 75 | 'ASTC6X6UNORM': 16, 76 | 'ASTC6X6UNORMSRGB': 16, 77 | 'ASTC8X5TYPELESS': 16, 78 | 'ASTC8X5UNORM': 16, 79 | 'ASTC8X5UNORMSRGB': 16, 80 | 'ASTC8X6TYPELESS': 16, 81 | 'ASTC8X6UNORM': 16, 82 | 'ASTC8X6UNORMSRGB': 16, 83 | 'ASTC8X8TYPELESS': 16, 84 | 'ASTC8X8UNORM': 16, 85 | 'ASTC8X8UNORMSRGB': 16, 86 | 'B5G5R5A1UNORM': 32, 87 | 'B5G6R5UNORM': 32, 88 | 'B8G8R8A8TYPELESS': 32, 89 | 'B8G8R8A8UNORM': 32, 90 | 'B8G8R8A8UNORMSRGB': 32, 91 | 'B8G8R8X8TYPELESS': 32, 92 | 'B8G8R8X8UNORM': 32, 93 | 'B8G8R8X8UNORMSRGB': 32, 94 | 'BC1TYPELESS': 256, 95 | 'BC1UNORM': 256, 96 | 'BC1UNORMSRGB': 256, 97 | 'BC2TYPELESS': 256, 98 | 'BC2UNORM': 256, 99 | 'BC2UNORMSRGB': 256, 100 | 'BC3TYPELESS': 256, 101 | 'BC3UNORM': 256, 102 | 'BC3UNORMSRGB': 256, 103 | 'BC4SNORM': 256, 104 | 'BC4TYPELESS': 256, 105 | 'BC4UNORM': 256, 106 | 'BC5SNORM': 256, 107 | 'BC5TYPELESS': 256, 108 | 'BC5UNORM': 256, 109 | 'BC6HSF16': 256, 110 | 'BC6HTYPELESS': 256, 111 | 'BC6HUF16': 256, 112 | 'BC7TYPELESS': 256, 113 | 'BC7UNORM': 256, 114 | 'BC7UNORMSRGB': 256, 115 | 'D16UNORM': 256, 116 | 'D24UNORMS8UINT': 256, 117 | 'D32FLOAT': 256, 118 | 'D32FLOATS8X24UINT': 256, 119 | 'FORCEUINT': 256, 120 | 'G8R8G8B8UNORM': 32, 121 | 'R10G10B10A2TYPELESS': 32, 122 | 'R10G10B10A2UINT': 32, 123 | 'R10G10B10A2UNORM': 32, 124 | 'R10G10B10XRBIASA2UNORM': 32, 125 | 'R11G11B10FLOAT': 32, 126 | 'R16FLOAT': 256, 127 | 'R16G16B16A16FLOAT': 256, 128 | 'R16G16B16A16SINT': 256, 129 | 'R16G16B16A16SNORM': 256, 130 | 'R16G16B16A16TYPELESS': 256, 131 | 'R16G16B16A16UINT': 256, 132 | 'R16G16B16A16UNORM': 256, 133 | 'R16G16FLOAT': 256, 134 | 'R16G16SINT': 256, 135 | 'R16G16SNORM': 256, 136 | 'R16G16TYPELESS': 256, 137 | 'R16G16UINT': 256, 138 | 'R16G16UNORM': 256, 139 | 'R16SINT': 256, 140 | 'R16SNORM': 256, 141 | 'R16TYPELESS': 256, 142 | 'R16UINT': 256, 143 | 'R16UNORM': 256, 144 | 'R1UNORM': 256, 145 | 'R24G8TYPELESS': 256, 146 | 'R24UNORMX8TYPELESS': 256, 147 | 'R32FLOAT': 256, 148 | 'R32FLOATX8X24TYPELESS': 256, 149 | 'R32G32B32A32FLOAT': 256, 150 | 'R32G32B32A32SINT': 256, 151 | 'R32G32B32A32TYPELESS': 256, 152 | 'R32G32B32A32UINT': 256, 153 | 'R32G32B32FLOAT': 256, 154 | 'R32G32B32SINT': 256, 155 | 'R32G32B32TYPELESS': 256, 156 | 'R32G32B32UINT': 256, 157 | 'R32G32FLOAT': 256, 158 | 'R32G32SINT': 256, 159 | 'R32G32TYPELESS': 256, 160 | 'R32G32UINT': 256, 161 | 'R32G8X24TYPELESS': 256, 162 | 'R32SINT': 256, 163 | 'R32TYPELESS': 256, 164 | 'R32UINT': 256, 165 | 'R8G8B8A8SINT': 32, 166 | 'R8G8B8A8SNORM': 32, 167 | 'R8G8B8A8TYPELESS': 32, 168 | 'R8G8B8A8UINT': 32, 169 | 'R8G8B8A8UNORM': 32, 170 | 'R8G8B8A8UNORMSRGB': 32, 171 | 'R8G8B8G8UNORM': 32, 172 | 'R8G8SINT': 256, 173 | 'R8G8SNORM': 256, 174 | 'R8G8TYPELESS': 256, 175 | 'R8G8UINT': 256, 176 | 'R8G8UNORM': 256, 177 | 'R8SINT': 256, 178 | 'R8SNORM': 256, 179 | 'R8TYPELESS': 256, 180 | 'R8UINT': 256, 181 | 'R8UNORM': 256, 182 | 'R9G9B9E5SHAREDEXP': 32, 183 | 'VIAEXTENSION': 256, 184 | 'X24TYPELESSG8UINT': 256, 185 | 'X32TYPELESSG8X24UINT': 256} -------------------------------------------------------------------------------- /modules/dds/file_dds.py: -------------------------------------------------------------------------------- 1 | #Author: NSA Cloud 2 | import os 3 | 4 | from ..gen_functions import textColors,raiseWarning,raiseError,getPaddingAmount,read_uint,read_int,read_uint64,read_float,read_short,read_ushort,read_ubyte,read_unicode_string,read_byte,write_uint,write_int,write_uint64,write_float,write_short,write_ushort,write_ubyte,write_unicode_string,write_byte 5 | 6 | 7 | class DX10_Header(): 8 | def __init__(self): 9 | self.dxgiFormat = 0#enum 10 | self.resourceDimension = 0#enum 11 | self.miscFlags = 0 12 | self.arraySize = 0 13 | self.miscFlags2 = 0 14 | 15 | def read(self,file): 16 | self.dxgiFormat = read_uint(file)#enum 17 | self.resourceDimension = read_uint(file)#enum 18 | self.miscFlags = read_uint(file) 19 | self.arraySize = read_uint(file) 20 | self.miscFlags2 = read_uint(file) 21 | 22 | def write(self,file): 23 | write_uint(file, self.dxgiFormat) 24 | write_uint(file, self.resourceDimension) 25 | write_uint(file, self.miscFlags) 26 | write_uint(file, self.arraySize) 27 | write_uint(file, self.miscFlags2) 28 | 29 | class DDS_PixelFormat(): 30 | def __init__(self): 31 | self.dwSize = 32 32 | self.dwFlags = 0#enum 33 | self.dwFourCC = 0#enum 34 | self.dwRGBBitCount = 0 35 | self.dwRBitMask = 0 36 | self.dwGBitMask = 0 37 | self.dwBBitMask = 0 38 | self.dwABitMask = 0 39 | 40 | def read(self,file): 41 | self.dwSize = read_uint(file) 42 | self.dwFlags = read_uint(file)#enum 43 | self.dwFourCC = read_uint(file)#enum 44 | self.dwRGBBitCount = read_uint(file) 45 | self.dwRBitMask = read_uint(file) 46 | self.dwGBitMask = read_uint(file) 47 | self.dwBBitMask = read_uint(file) 48 | self.dwABitMask = read_uint(file) 49 | 50 | def write(self,file): 51 | write_uint(file, self.dwSize) 52 | write_uint(file, self.dwFlags) 53 | write_uint(file, self.dwFourCC) 54 | write_uint(file, self.dwRGBBitCount) 55 | write_uint(file, self.dwRBitMask) 56 | write_uint(file, self.dwGBitMask) 57 | write_uint(file, self.dwBBitMask) 58 | write_uint(file, self.dwABitMask) 59 | 60 | class DDSHeader(): 61 | def __init__(self): 62 | self.magic = 542327876 63 | self.dwSize = 0 64 | self.dwFlags = 0#enum 65 | self.dwHeight = 0 66 | self.dwWidth = 0 67 | self.dwPitchOrLinearSize = 0 68 | self.dwDepth = 0 69 | self.dwMipMapCount = 0 70 | self.dwReserved1 = [0]*11 71 | self.ddpfPixelFormat = DDS_PixelFormat() 72 | self.ddsCaps1 = 0#enum 73 | self.ddsCaps2 = 0#enum 74 | self.ddsCaps3 = 0 75 | self.ddsCaps4 = 0 76 | self.dwReserved2 = 0 77 | self.dx10Header = None 78 | 79 | def read(self,file): 80 | self.magic = read_uint(file) 81 | if self.magic != 542327876: 82 | raiseError("File is not a dds file.") 83 | self.dwSize = read_uint(file) 84 | self.dwFlags = read_uint(file)#enum 85 | self.dwHeight = read_uint(file) 86 | self.dwWidth = read_uint(file) 87 | self.dwPitchOrLinearSize = read_uint(file) 88 | self.dwDepth = read_uint(file) 89 | self.dwMipMapCount = read_uint(file) 90 | self.dwReserved1 = [] 91 | for i in range(11): 92 | self.dwReserved1.append(read_uint(file)) 93 | self.ddpfPixelFormat.read(file) 94 | self.ddsCaps1 = read_uint(file)#enum 95 | self.ddsCaps2 = read_uint(file)#enum 96 | self.ddsCaps3 = read_uint(file) 97 | self.ddsCaps4 = read_uint(file) 98 | self.dwReserved2 = read_uint(file) 99 | if self.ddpfPixelFormat.dwFourCC == 808540228:#DX10 100 | self.dx10Header = DX10_Header() 101 | self.dx10Header.read(file) 102 | 103 | def write(self,file): 104 | write_uint(file, self.magic) 105 | write_uint(file, self.dwSize) 106 | write_uint(file, self.dwFlags) 107 | write_uint(file, self.dwHeight) 108 | write_uint(file, self.dwWidth) 109 | write_uint(file, self.dwPitchOrLinearSize) 110 | write_uint(file, self.dwDepth) 111 | write_uint(file, self.dwMipMapCount) 112 | for entry in self.dwReserved1: 113 | write_uint(file,entry) 114 | self.ddpfPixelFormat.write(file) 115 | write_uint(file, self.ddsCaps1) 116 | write_uint(file, self.ddsCaps2) 117 | write_uint(file, self.ddsCaps3) 118 | write_uint(file, self.ddsCaps4) 119 | write_uint(file, self.dwReserved2) 120 | if self.ddpfPixelFormat.dwFourCC == 808540228 and self.dx10Header != None:#DX10 121 | self.dx10Header.write(file) 122 | class DDS(): 123 | def __init__(self): 124 | self.header = DDSHeader() 125 | self.data = bytes() 126 | def read(self,file): 127 | self.header.read(file) 128 | self.data = file.read() 129 | 130 | def write(self,file): 131 | self.header.write(file); 132 | file.write(self.data); 133 | class DDSFile: 134 | def __init__(self): 135 | self.dds = DDS() 136 | def read(self,filePath): 137 | #print("Opening " + filePath) 138 | try: 139 | file = open(filePath,"rb") 140 | except: 141 | raiseError("Failed to open " + filePath) 142 | self.dds.read(file) 143 | file.close() 144 | 145 | def write(self,filePath): 146 | os.makedirs(os.path.dirname(filePath),exist_ok = True) 147 | print("Writing " + filePath) 148 | try: 149 | file = open(filePath,"wb") 150 | self.dds.write(file) 151 | except Exception as err: 152 | raiseError("Failed to write " + filePath + str(err)) 153 | file.close() 154 | 155 | def getDDSHeader(ddsPath): 156 | try: 157 | file = open(ddsPath,"rb") 158 | except Exception as err: 159 | raiseError("Failed to open " + ddsPath + str(err)) 160 | header = DDSHeader() 161 | header.read(file) 162 | file.close() 163 | return header -------------------------------------------------------------------------------- /modules/tex/re_tex_operators.py: -------------------------------------------------------------------------------- 1 | #Author: NSA Cloud 2 | import bpy 3 | import os 4 | 5 | 6 | from ..blender_utils import showErrorMessageBox,showMessageBox 7 | from bpy.types import Operator,OperatorFileListElement 8 | from bpy_extras.io_utils import ImportHelper 9 | from bpy.props import StringProperty,CollectionProperty 10 | from .file_re_tex import getTexVersionFromGameName 11 | from .blender_re_tex import convertTexDDSList 12 | from ..mdf.file_re_mdf import getMDFVersionToGameName 13 | from .re_tex_utils import DDSToTex 14 | import shutil 15 | #from .re_tex_utils import 16 | 17 | 18 | 19 | class WM_OT_ConvertDDSTexFile(Operator,ImportHelper): 20 | bl_label = "Convert DDS/Tex Files" 21 | bl_idname = "re_tex.convert_tex_dds_files" 22 | bl_description = "Opens a window to select textures to convert. Selected .dds files will be converted to .tex and tex files will be converted to dds.\nIf you are using Blender 4.1 or higher, you can drag .tex or .dds files into the 3D view to convert them" 23 | filter_glob: StringProperty(default="*.dds;*.tex.*", options={'HIDDEN'}) 24 | files : CollectionProperty( 25 | name="File Path", 26 | type=OperatorFileListElement, 27 | ) 28 | directory : StringProperty( 29 | subtype='DIR_PATH', 30 | options = {"SKIP_SAVE"} 31 | ) 32 | def execute(self, context): 33 | fileList = [file.name for file in self.files] 34 | successCount,failCount = convertTexDDSList(fileNameList = fileList,inDir = self.directory, outDir = self.directory, gameName = bpy.context.scene.re_mdf_toolpanel.activeGame,createStreamingTex=False) 35 | showMessageBox(f"Converted {str(successCount)} textures.",title = "Texture Conversion") 36 | self.report({"INFO"},f"Converted {str(successCount)} textures.") 37 | 38 | return {"FINISHED"} 39 | def invoke(self, context, event): 40 | if self.directory: 41 | return self.execute(context) 42 | context.window_manager.fileselect_add(self) 43 | return {'RUNNING_MODAL'} 44 | 45 | class WM_OT_ConvertFolderToTex(Operator): 46 | bl_label = "Convert Directory to Tex" 47 | bl_idname = "re_tex.convert_tex_directory" 48 | bl_description = "Converts all .dds files in the chosen directory to .tex\nConverted files will be saved inside a folder called \"converted\"\nSave DDS files with compression settings set to BC7 sRGB for albedo/color textures and BC7 Linear for anything else" 49 | def execute(self, context): 50 | texVersion = 28 51 | gameName = bpy.context.scene.re_mdf_toolpanel.activeGame 52 | if gameName != -1 and getTexVersionFromGameName(gameName) != -1: 53 | texVersion = getTexVersionFromGameName(gameName) 54 | #TODO Add support for other image formats, should be doable with texconv 55 | #Also add streaming texture generation, should also be doable with texconv's resize option 56 | texDir = os.path.realpath(bpy.context.scene.re_mdf_toolpanel.textureDirectory) 57 | convertedDir = os.path.join(texDir,"converted") 58 | if os.path.isdir(texDir): 59 | otherImageConversionList = [] 60 | ddsConversionList = [] 61 | 62 | for entry in os.scandir(texDir): 63 | if entry.is_file() and entry.name.lower().endswith(".dds"): 64 | ddsConversionList.append(entry.name) 65 | 66 | if ddsConversionList != []: 67 | successCount,failCount = convertTexDDSList(fileNameList = ddsConversionList,inDir = texDir, outDir = convertedDir, gameName = bpy.context.scene.re_mdf_toolpanel.activeGame,createStreamingTex=False) 68 | self.report({"INFO"},f"Converted {str(successCount)} textures") 69 | if bpy.context.scene.re_mdf_toolpanel.openConvertedFolder: 70 | os.startfile(convertedDir) 71 | else: 72 | showErrorMessageBox("No .dds files in provided directory") 73 | else: 74 | showErrorMessageBox("Provided Image Directory is not a directory or does not exist") 75 | return {"FINISHED"} 76 | 77 | class WM_OT_CopyConvertedTextures(Operator): 78 | bl_label = "Copy Converted Tex Files" 79 | bl_idname = "re_tex.copy_converted_tex" 80 | bl_options = {'UNDO'} 81 | bl_description = "Copies the textures in the converted tex folder into the specified Mod Natives Directory.\nThe textures are placed at the paths set in the active MDF collection" 82 | def execute(self, context): 83 | texDir = os.path.realpath(bpy.context.scene.re_mdf_toolpanel.textureDirectory) 84 | modDir = os.path.realpath(bpy.context.scene.re_mdf_toolpanel.modDirectory) 85 | convertedDir = os.path.join(texDir,"converted") 86 | mdfCollection = bpy.context.scene.re_mdf_toolpanel.mdfCollection 87 | pathDict = {} 88 | copyCount = 0 89 | if mdfCollection != None and os.path.exists(modDir): 90 | for obj in mdfCollection.all_objects: 91 | if obj.get("~TYPE") == "RE_MDF_MATERIAL": 92 | for textureBinding in obj.re_mdf_material.textureBindingList_items: 93 | pathDict[os.path.split(textureBinding.path)[1]] = textureBinding.path 94 | if os.path.isdir(convertedDir): 95 | for entry in os.scandir(convertedDir): 96 | if entry.is_file() and os.path.splitext(entry.name)[0] in pathDict: 97 | path = os.path.join(convertedDir,entry.name) 98 | outPath = os.path.realpath(os.path.join(modDir,pathDict[os.path.splitext(entry.name)[0]]+os.path.splitext(entry.name)[1])) 99 | os.makedirs(os.path.split(outPath)[0],exist_ok = True) 100 | shutil.copyfile(path, outPath) 101 | print(f"Copied {os.path.split(path)[1]} to {outPath}") 102 | copyCount += 1 103 | self.report({"INFO"},f"Copied {str(copyCount)} textures to mod directory") 104 | else: 105 | self.report({"ERROR"},f"Texture directory does not exist") 106 | 107 | return {"FINISHED"} -------------------------------------------------------------------------------- /modules/fbxskel/re_fbxskel_operators.py: -------------------------------------------------------------------------------- 1 | #Author: NSA Cloud 2 | import bpy 3 | 4 | from bpy.types import Operator 5 | 6 | from ..blender_utils import showMessageBox 7 | from .re_fbxskel_propertyGroups import ToggleStringPropertyGroup 8 | EXTRACT_WINDOW_SIZE = 400 9 | SPLIT_FACTOR = .45 10 | 11 | def linkArmatures(mainArmatureObj,linkArmaturesList): 12 | armatureLinkCount = 0 13 | for armatureObj in linkArmaturesList: 14 | linkCount = 0 15 | for bone in armatureObj.pose.bones: 16 | if bone.name in mainArmatureObj.data.bones: 17 | if "BoneLinkage" in bone.constraints: 18 | constraint = bone.constraints["BoneLinkage"] 19 | else: 20 | constraint = bone.constraints.new("COPY_TRANSFORMS") 21 | constraint.name = "BoneLinkage" 22 | constraint.target = mainArmatureObj 23 | constraint.subtarget = bone.name 24 | linkCount += 1 25 | if linkCount != 0: 26 | armatureLinkCount += 1 27 | print(f"Linked {linkCount} bones on {armatureObj.name} to {mainArmatureObj.name}.") 28 | return armatureLinkCount 29 | def update_checkAllArmatures(self, context): 30 | if self.checkAllArmatures == True: 31 | for item in self.armatureList_items: 32 | item.enabled = True 33 | self.checkAllArmatures = False 34 | def update_uncheckAllArmatures(self, context): 35 | if self.uncheckAllArmatures == True: 36 | for item in self.armatureList_items: 37 | item.enabled = False 38 | self.uncheckAllArmatures = False 39 | 40 | class WM_OT_LinkArmatureBones(Operator): 41 | bl_label = "Link Armature Bones" 42 | bl_idname = "re_fbxskel.link_armature_bones" 43 | bl_description = "Link bones from different armatures to a main armature. This copies all transforms on the main armature to all chosen armatures.\nThe intended use is for constraining bones to an FBXSkel armature with an animation applied.\nAn armature must be selected" 44 | bl_options = {'INTERNAL'} 45 | 46 | targetArmature : bpy.props.StringProperty( 47 | name = "Main Armature", 48 | description = "", 49 | default = "", 50 | options = {"HIDDEN"}) 51 | 52 | armatureList_items: bpy.props.CollectionProperty(type = ToggleStringPropertyGroup) 53 | armatureList_index: bpy.props.IntProperty(name="") 54 | 55 | checkAllArmatures : bpy.props.BoolProperty( 56 | name = "Check All Armatures", 57 | description = "Select all armatures to be linked", 58 | default = False, 59 | update = update_checkAllArmatures 60 | ) 61 | uncheckAllArmatures : bpy.props.BoolProperty( 62 | name = "Uncheck All Armatures", 63 | description = "Deselect all armatures to be linked", 64 | default = False, 65 | update = update_uncheckAllArmatures 66 | ) 67 | 68 | def execute(self, context): 69 | mainArmature = context.active_object 70 | 71 | armatureLinkList = [] 72 | for item in self.armatureList_items: 73 | if item.enabled: 74 | if item.name in bpy.data.objects: 75 | obj = bpy.data.objects[item.name] 76 | if obj.type == "ARMATURE" and mainArmature != obj: 77 | armatureLinkList.append(obj) 78 | 79 | armatureLinkCount = linkArmatures(mainArmature,armatureLinkList) 80 | self.report({"INFO"},f"Linked {armatureLinkCount} armatures to the selected armature.") 81 | return {'FINISHED'} 82 | @classmethod 83 | def poll(self,context): 84 | return bpy.context.active_object is not None and bpy.context.active_object.type == "ARMATURE" 85 | 86 | def invoke(self, context, event): 87 | region = bpy.context.region 88 | centerX = region.width // 2 89 | centerY = region.height 90 | 91 | #currentX = event.mouse_region_X 92 | #currentY = event.mouse_region_Y 93 | 94 | armatureObjList = sorted([obj for obj in bpy.data.objects if obj.type == "ARMATURE" and obj != context.active_object],key = lambda obj: obj.name) 95 | self.armatureList_items.clear() 96 | for entry in armatureObjList: 97 | item = self.armatureList_items.add() 98 | item.name = entry.name 99 | 100 | 101 | #Move cursor to center so extract window is at the center of the window 102 | context.window.cursor_warp(centerX,centerY) 103 | 104 | return context.window_manager.invoke_props_dialog(self,width = EXTRACT_WINDOW_SIZE,confirm_text = "Link Main Armature") 105 | 106 | 107 | def draw(self,context): 108 | layout = self.layout 109 | rowCount = 8 110 | uifontscale = 9 * context.preferences.view.ui_scale 111 | max_label_width = int((EXTRACT_WINDOW_SIZE*(1-SPLIT_FACTOR)*(2-SPLIT_FACTOR)) // uifontscale) 112 | layout.label(text=f"Main Armature: {context.active_object.name}") 113 | col = layout.column() 114 | 115 | 116 | col.label(text = f"Armature Count: {str(len(self.armatureList_items))}") 117 | row = col.row() 118 | row.alignment = "RIGHT" 119 | row.prop(self,"checkAllArmatures",icon="CHECKMARK", icon_only=True) 120 | row.prop(self,"uncheckAllArmatures",icon="X", icon_only=True) 121 | col.template_list( 122 | listtype_name = "FBXSKEL_UL_ObjectCheckList", 123 | list_id = "armatureList", 124 | dataptr = self, 125 | propname = "armatureList_items", 126 | active_dataptr = self, 127 | active_propname = "armatureList_index", 128 | rows = rowCount, 129 | type='DEFAULT' 130 | ) 131 | layout.label(text=f"All checked armatures will be constrained to main armature.") 132 | class WM_OT_ClearBoneLinkages(Operator): 133 | bl_label = "Clear Bone Linkages" 134 | bl_description = "Removes all bone linkages on the selected armatures.\n" 135 | bl_idname = "re_fbxskel.clear_bone_linkages" 136 | @classmethod 137 | def poll(self,context): 138 | return bpy.context.active_object is not None and bpy.context.active_object.type == "ARMATURE" 139 | def execute(self, context): 140 | for obj in context.selected_objects: 141 | if obj.type == "ARMATURE": 142 | for bone in obj.pose.bones: 143 | if "BoneLinkage" in bone.constraints: 144 | constraint = bone.constraints["BoneLinkage"] 145 | bone.constraints.remove(constraint) 146 | self.report({"INFO"},"Cleared bone linkages on selected armatures.") 147 | return {'FINISHED'} 148 | -------------------------------------------------------------------------------- /modules/tex/enums/dxgi_format_enum.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | formatStringToDXGIDict = { 4 | "UNKNOWN":0, 5 | "R32G32B32A32TYPELESS":1, 6 | "R32G32B32A32FLOAT":2, 7 | "R32G32B32A32UINT":3, 8 | "R32G32B32A32SINT":4, 9 | "R32G32B32TYPELESS":5, 10 | "R32G32B32FLOAT":6, 11 | "R32G32B32UINT":7, 12 | "R32G32B32SINT":8, 13 | "R16G16B16A16TYPELESS":9, 14 | "R16G16B16A16FLOAT":10, 15 | "R16G16B16A16UNORM":11, 16 | "R16G16B16A16UINT":12, 17 | "R16G16B16A16SNORM":13, 18 | "R16G16B16A16SINT":14, 19 | "R32G32TYPELESS":15, 20 | "R32G32FLOAT":16, 21 | "R32G32UINT":17, 22 | "R32G32SINT":18, 23 | "R32G8X24TYPELESS":19, 24 | "D32FLOATS8X24UINT":20, 25 | "R32FLOATX8X24TYPELESS":21, 26 | "X32TYPELESSG8X24UINT":22, 27 | "R10G10B10A2TYPELESS":23, 28 | "R10G10B10A2UNORM":24, 29 | "R10G10B10A2UINT":25, 30 | "R11G11B10FLOAT":26, 31 | "R8G8B8A8TYPELESS":27, 32 | "R8G8B8A8UNORM":28, 33 | "R8G8B8A8UNORMSRGB":29, 34 | "R8G8B8A8UINT":30, 35 | "R8G8B8A8SNORM":31, 36 | "R8G8B8A8SINT":32, 37 | "R16G16TYPELESS":33, 38 | "R16G16FLOAT":34, 39 | "R16G16UNORM":35, 40 | "R16G16UINT":36, 41 | "R16G16SNORM":37, 42 | "R16G16SINT":38, 43 | "R32TYPELESS":39, 44 | "D32FLOAT":40, 45 | "R32FLOAT":41, 46 | "R32UINT":42, 47 | "R32SINT":43, 48 | "R24G8TYPELESS":44, 49 | "D24UNORMS8UINT":45, 50 | "R24UNORMX8TYPELESS":46, 51 | "X24TYPELESSG8UINT":47, 52 | "R8G8TYPELESS":48, 53 | "R8G8UNORM":49, 54 | "R8G8UINT":50, 55 | "R8G8SNORM":51, 56 | "R8G8SINT":52, 57 | "R16TYPELESS":53, 58 | "R16FLOAT":54, 59 | "D16UNORM":55, 60 | "R16UNORM":56, 61 | "R16UINT":57, 62 | "R16SNORM":58, 63 | "R16SINT":59, 64 | "R8TYPELESS":60, 65 | "R8UNORM":61, 66 | "R8UINT":62, 67 | "R8SNORM":63, 68 | "R8SINT":64, 69 | "A8UNORM":65, 70 | "R1UNORM":66, 71 | "R9G9B9E5SHAREDEXP":67, 72 | "R8G8B8G8UNORM":68, 73 | "G8R8G8B8UNORM":69, 74 | "BC1TYPELESS":70, 75 | "BC1UNORM":71, 76 | "BC1UNORMSRGB":72, 77 | "BC2TYPELESS":73, 78 | "BC2UNORM":74, 79 | "BC2UNORMSRGB":75, 80 | "BC3TYPELESS":76, 81 | "BC3UNORM":77, 82 | "BC3UNORMSRGB":78, 83 | "BC4TYPELESS":79, 84 | "BC4UNORM":80, 85 | "BC4SNORM":81, 86 | "BC5TYPELESS":82, 87 | "BC5UNORM":83, 88 | "BC5SNORM":84, 89 | "B5G6R5UNORM":85, 90 | "B5G5R5A1UNORM":86, 91 | "B8G8R8A8UNORM":87, 92 | "B8G8R8X8UNORM":88, 93 | "R10G10B10XRBIASA2UNORM":89, 94 | "B8G8R8A8TYPELESS":90, 95 | "B8G8R8A8UNORMSRGB":91, 96 | "B8G8R8X8TYPELESS":92, 97 | "B8G8R8X8UNORMSRGB":93, 98 | "BC6HTYPELESS":94, 99 | "BC6HUF16":95, 100 | "BC6HSF16":96, 101 | "BC7TYPELESS":97, 102 | "BC7UNORM":98, 103 | "BC7UNORMSRGB":99, 104 | "AYUV":100, 105 | "Y410":101, 106 | "Y416":102, 107 | "NV12":103, 108 | "P010":104, 109 | "P016":105, 110 | "DXGIFORMAT420OPAQUE":106, 111 | "YUY2":107, 112 | "Y210":108, 113 | "Y216":109, 114 | "NV11":110, 115 | "AI44":111, 116 | "IA44":112, 117 | "P8":113, 118 | "A8P8":114, 119 | "B4G4R4A4UNORM":115, 120 | "P208":130, 121 | "V208":131, 122 | "V408":132, 123 | "FORCEUINT":0xffffffff, 124 | } 125 | DXGIToFormatStringDict = { 126 | 1: 'R32G32B32A32TYPELESS', 127 | 2: 'R32G32B32A32FLOAT', 128 | 3: 'R32G32B32A32UINT', 129 | 4: 'R32G32B32A32SINT', 130 | 5: 'R32G32B32TYPELESS', 131 | 6: 'R32G32B32FLOAT', 132 | 7: 'R32G32B32UINT', 133 | 8: 'R32G32B32SINT', 134 | 9: 'R16G16B16A16TYPELESS', 135 | 10: 'R16G16B16A16FLOAT', 136 | 11: 'R16G16B16A16UNORM', 137 | 12: 'R16G16B16A16UINT', 138 | 13: 'R16G16B16A16SNORM', 139 | 14: 'R16G16B16A16SINT', 140 | 15: 'R32G32TYPELESS', 141 | 16: 'R32G32FLOAT', 142 | 17: 'R32G32UINT', 143 | 18: 'R32G32SINT', 144 | 19: 'R32G8X24TYPELESS', 145 | 20: 'D32FLOATS8X24UINT', 146 | 21: 'R32FLOATX8X24TYPELESS', 147 | 22: 'X32TYPELESSG8X24UINT', 148 | 23: 'R10G10B10A2TYPELESS', 149 | 24: 'R10G10B10A2UNORM', 150 | 25: 'R10G10B10A2UINT', 151 | 26: 'R11G11B10FLOAT', 152 | 27: 'R8G8B8A8TYPELESS', 153 | 28: 'R8G8B8A8UNORM', 154 | 29: 'R8G8B8A8UNORMSRGB', 155 | 30: 'R8G8B8A8UINT', 156 | 31: 'R8G8B8A8SNORM', 157 | 32: 'R8G8B8A8SINT', 158 | 33: 'R16G16TYPELESS', 159 | 34: 'R16G16FLOAT', 160 | 35: 'R16G16UNORM', 161 | 36: 'R16G16UINT', 162 | 37: 'R16G16SNORM', 163 | 38: 'R16G16SINT', 164 | 39: 'R32TYPELESS', 165 | 40: 'D32FLOAT', 166 | 41: 'R32FLOAT', 167 | 42: 'R32UINT', 168 | 43: 'R32SINT', 169 | 44: 'R24G8TYPELESS', 170 | 45: 'D24UNORMS8UINT', 171 | 46: 'R24UNORMX8TYPELESS', 172 | 47: 'X24TYPELESSG8UINT', 173 | 48: 'R8G8TYPELESS', 174 | 49: 'R8G8UNORM', 175 | 50: 'R8G8UINT', 176 | 51: 'R8G8SNORM', 177 | 52: 'R8G8SINT', 178 | 53: 'R16TYPELESS', 179 | 54: 'R16FLOAT', 180 | 55: 'D16UNORM', 181 | 56: 'R16UNORM', 182 | 57: 'R16UINT', 183 | 58: 'R16SNORM', 184 | 59: 'R16SINT', 185 | 60: 'R8TYPELESS', 186 | 61: 'R8UNORM', 187 | 62: 'R8UINT', 188 | 63: 'R8SNORM', 189 | 64: 'R8SINT', 190 | 65: 'A8UNORM', 191 | 66: 'R1UNORM', 192 | 67: 'R9G9B9E5SHAREDEXP', 193 | 68: 'R8G8B8G8UNORM', 194 | 69: 'G8R8G8B8UNORM', 195 | 70: 'BC1TYPELESS', 196 | 71: 'BC1UNORM', 197 | 72: 'BC1UNORMSRGB', 198 | 73: 'BC2TYPELESS', 199 | 74: 'BC2UNORM', 200 | 75: 'BC2UNORMSRGB', 201 | 76: 'BC3TYPELESS', 202 | 77: 'BC3UNORM', 203 | 78: 'BC3UNORMSRGB', 204 | 79: 'BC4TYPELESS', 205 | 80: 'BC4UNORM', 206 | 81: 'BC4SNORM', 207 | 82: 'BC5TYPELESS', 208 | 83: 'BC5UNORM', 209 | 84: 'BC5SNORM', 210 | 85: 'B5G6R5UNORM', 211 | 86: 'B5G5R5A1UNORM', 212 | 87: 'B8G8R8A8UNORM', 213 | 88: 'B8G8R8X8UNORM', 214 | 89: 'R10G10B10XRBIASA2UNORM', 215 | 90: 'B8G8R8A8TYPELESS', 216 | 91: 'B8G8R8A8UNORMSRGB', 217 | 92: 'B8G8R8X8TYPELESS', 218 | 93: 'B8G8R8X8UNORMSRGB', 219 | 94: 'BC6HTYPELESS', 220 | 95: 'BC6HUF16', 221 | 96: 'BC6HSF16', 222 | 97: 'BC7TYPELESS', 223 | 98: 'BC7UNORM', 224 | 99: 'BC7UNORMSRGB', 225 | 100: 'AYUV', 226 | 101: 'Y410', 227 | 102: 'Y416', 228 | 103: 'NV12', 229 | 104: 'P010', 230 | 105: 'P016', 231 | 106: 'DXGIFORMAT420OPAQUE', 232 | 107: 'YUY2', 233 | 108: 'Y210', 234 | 109: 'Y216', 235 | 110: 'NV11', 236 | 111: 'AI44', 237 | 112: 'IA44', 238 | 113: 'P8', 239 | 114: 'A8P8', 240 | 115: 'B4G4R4A4UNORM', 241 | 130: 'P208', 242 | 131: 'V208', 243 | 132: 'V408', 244 | 4294967295: 'FORCEUINT'} 245 | -------------------------------------------------------------------------------- /modules/tex/blender_re_tex.py: -------------------------------------------------------------------------------- 1 | #Author: NSA Cloud 2 | import bpy 3 | import os 4 | from..gen_functions import raiseWarning,wildCardFileSearchList 5 | from .file_re_tex import getTexVersionFromGameName 6 | from .re_tex_utils import DDSToTex,convertTexFileToDDS 7 | DELETE_DDS = True 8 | BLENDER_MAX_IMAGE_ARRAY_SIZE = 16#Blender can't handle much more mix nodes than this 9 | def loadTex(texPath,outputPath,texConv,reloadCachedTextures,useDDS): 10 | ddsPath = os.path.splitext(outputPath)[0]+".dds" 11 | if useDDS: 12 | outputPath = ddsPath 13 | blenderImageList = None 14 | if not reloadCachedTextures and os.path.isfile(outputPath): 15 | blenderImageList = [bpy.data.images.load(outputPath,check_existing = True)] 16 | 17 | 18 | if blenderImageList == None: 19 | blenderImageList = [] 20 | #Check if there's an array texture extracted 21 | foundArrayTexture = False 22 | if not reloadCachedTextures: 23 | resultList = wildCardFileSearchList(os.path.splitext(outputPath)[0]+ " #ARRAY_*") 24 | #print("test") 25 | #print(resultList) 26 | for result in sorted(resultList): 27 | if result.endswith(".dds" if useDDS else ".tif"): 28 | foundArrayTexture = True 29 | #print(f"Found existing array texture {result}") 30 | blenderImageList.append(bpy.data.images.load(result,check_existing = True)) 31 | 32 | 33 | if not foundArrayTexture:#Convert array tex 34 | texInfo = convertTexFileToDDS(texPath, ddsPath) 35 | if texInfo["arrayNum"] > BLENDER_MAX_IMAGE_ARRAY_SIZE: 36 | arrayMaxExceeded = True 37 | print(f"Array size of {os.path.split(texPath)[1]} exceeds the max limit of {str(BLENDER_MAX_IMAGE_ARRAY_SIZE)} images, importing only the first array image.") 38 | else: 39 | arrayMaxExceeded = False 40 | if texInfo["isArray"] and not arrayMaxExceeded: 41 | digitCount = 2 42 | if texInfo["arrayNum"] > 99: 43 | digitCount = 3 44 | elif texInfo["arrayNum"] > 999:#Highest possible image count is technically 4095 but I'm going to pretend nobody will try to make something that unholy 45 | digitCount = 4 46 | #print("TEX ARRAY FOUND") 47 | newDDSPathRoot = os.path.splitext(ddsPath)[0]+ " #ARRAY_" 48 | print(f"Converting array texture: {texPath}") 49 | for i in range(texInfo["arrayNum"]): 50 | newDDSPath = f"{newDDSPathRoot}{str(i).zfill(digitCount)}.dds" 51 | newOutputPath = f"{newDDSPathRoot}{str(i).zfill(digitCount)}.tif" 52 | 53 | if not useDDS: 54 | texConv.convert_to_tif(newDDSPath,out = os.path.dirname(newOutputPath),verbose=False) 55 | else: 56 | newOutputPath = newDDSPath 57 | 58 | 59 | if os.path.isfile(newOutputPath): 60 | blenderImageList.append(bpy.data.images.load(newOutputPath,check_existing = not reloadCachedTextures)) 61 | if not useDDS: 62 | try: 63 | os.remove(newDDSPath) 64 | except: 65 | raiseWarning("Could not delete temporary dds file: {newDDSPath}") 66 | else:#Convert single image tex 67 | if arrayMaxExceeded: 68 | digitCount = 2 69 | if texInfo["arrayNum"] > 99: 70 | digitCount = 3 71 | elif texInfo["arrayNum"] > 999:#Highest possible image count is technically 4095 but I'm going to pretend nobody will try to make something that unholy 72 | digitCount = 4 73 | #print("TEX ARRAY FOUND") 74 | ddsPath = f"{os.path.splitext(ddsPath)[0]} #ARRAY_{str(0)*digitCount}.dds" 75 | if not useDDS: 76 | texConv.convert_to_tif(ddsPath,out = os.path.dirname(outputPath),verbose=False) 77 | if os.path.isfile(outputPath): 78 | blenderImageList = [bpy.data.images.load(outputPath,check_existing = not reloadCachedTextures)] 79 | #print(blenderImageList) 80 | if not useDDS: 81 | try: 82 | os.remove(ddsPath) 83 | except: 84 | raiseWarning("Could not delete temporary dds file: {ddsPath}") 85 | return blenderImageList 86 | 87 | supportedImageExtensionsSet = set([".png",".tga",".tif"])#Not implemented yet 88 | 89 | def convertTexDDSList (fileNameList,inDir,outDir,gameName,createStreamingTex = False): 90 | ddsConversionList = [] 91 | ddsArrayConversionDict = {} 92 | texConversionList = [] 93 | texVersion = 28 94 | 95 | conversionCount = 0 96 | failCount = 0 97 | 98 | #gameName = bpy.context.scene.re_mdf_toolpanel.activeGame 99 | if gameName != -1 and getTexVersionFromGameName(gameName) != -1: 100 | texVersion = getTexVersionFromGameName(gameName) 101 | for fileName in fileNameList: 102 | fullPath = os.path.join(inDir,fileName) 103 | if os.path.isfile(fullPath): 104 | if fileName.lower().endswith(".dds"): 105 | if " #ARRAY_" in fileName: 106 | split = fileName.split(" #ARRAY_") 107 | if split[0] in ddsArrayConversionDict: 108 | ddsArrayConversionDict[split[0]].append(os.path.join(os.path.join(inDir,fileName))) 109 | else: 110 | ddsArrayConversionDict[split[0]] = [os.path.join(os.path.join(inDir,fileName))] 111 | else: 112 | path = os.path.join(inDir,fileName) 113 | ddsConversionList.append(path) 114 | print(str(path)) 115 | elif ".tex." in fileName.lower(): 116 | path = os.path.join(inDir,fileName) 117 | texConversionList.append(path) 118 | elif os.path.splitext(fileName)[1] in supportedImageExtensionsSet: 119 | pass#TODO 120 | 121 | if ddsConversionList != [] or ddsArrayConversionDict != {}: 122 | os.makedirs(outDir,exist_ok = True) 123 | 124 | #Single Texture Conversion 125 | for ddsPath in ddsConversionList: 126 | texPath = os.path.join(outDir,os.path.splitext(os.path.split(ddsPath)[1])[0])+f".tex.{str(texVersion)}" 127 | print(str(texPath)) 128 | DDSToTex([ddsPath],texVersion,texPath,streamingFlag = False)#TODO Streaming 129 | conversionCount += 1 130 | 131 | 132 | #Array Texture Conversion 133 | for key in ddsArrayConversionDict.keys(): 134 | ddsPathList = sorted(ddsArrayConversionDict[key]) 135 | #print(key) 136 | #print(ddsPathList) 137 | texPath = os.path.join(outDir,key+f".tex.{str(texVersion)}") 138 | DDSToTex(ddsPathList,texVersion,texPath,streamingFlag = False)#TODO Streaming 139 | conversionCount += 1 140 | 141 | if texConversionList != []: 142 | os.makedirs(outDir,exist_ok = True) 143 | for texPath in texConversionList: 144 | try: 145 | convertTexFileToDDS(texPath,texPath.split(".tex.")[0]+".dds") 146 | conversionCount += 1 147 | except Exception as err: 148 | print(f"Failed to convert {texPath} - {str(err)}") 149 | failCount += 1 150 | return (conversionCount,failCount) -------------------------------------------------------------------------------- /modules/ddsconv/directx/dxgi_format.py: -------------------------------------------------------------------------------- 1 | '''Constants for DXGI formats 2 | 3 | Notes: 4 | - Official document for DXGI formats 5 | https://docs.microsoft.com/en-us/windows/win32/api/dxgiformat/ne-dxgiformat-dxgi_format 6 | - Official repo for DDS 7 | https://github.com/microsoft/DirectXTex 8 | ''' 9 | from enum import IntEnum 10 | 11 | 12 | class DXGI_FORMAT(IntEnum): 13 | """Enum for DDS format.""" 14 | UNKNOWN = 0 15 | R32G32B32A32_TYPELESS = 1 16 | R32G32B32A32_FLOAT = 2 17 | R32G32B32A32_UINT = 3 18 | R32G32B32A32_SINT = 4 19 | R32G32B32_TYPELESS = 5 20 | R32G32B32_FLOAT = 6 21 | R32G32B32_UINT = 7 22 | R32G32B32_SINT = 8 23 | R16G16B16A16_TYPELESS = 9 24 | R16G16B16A16_FLOAT = 10 25 | R16G16B16A16_UNORM = 11 26 | R16G16B16A16_UINT = 12 27 | R16G16B16A16_SNORM = 13 28 | R16G16B16A16_SINT = 14 29 | R32G32_TYPELESS = 15 30 | R32G32_FLOAT = 16 31 | R32G32_UINT = 17 32 | R32G32_SINT = 18 33 | R32G8X24_TYPELESS = 19 34 | D32_FLOAT_S8X24_UINT = 20 35 | R32_FLOAT_X8X24_TYPELESS = 21 36 | X32_TYPELESS_G8X24_UINT = 22 37 | R10G10B10A2_TYPELESS = 23 38 | R10G10B10A2_UNORM = 24 39 | R10G10B10A2_UINT = 25 40 | R11G11B10_FLOAT = 26 41 | R8G8B8A8_TYPELESS = 27 42 | R8G8B8A8_UNORM = 28 43 | R8G8B8A8_UNORM_SRGB = 29 44 | R8G8B8A8_UINT = 30 45 | R8G8B8A8_SNORM = 31 46 | R8G8B8A8_SINT = 32 47 | R16G16_TYPELESS = 33 48 | R16G16_FLOAT = 34 49 | R16G16_UNORM = 35 50 | R16G16_UINT = 36 51 | R16G16_SNORM = 37 52 | R16G16_SINT = 38 53 | R32_TYPELESS = 39 54 | D32_FLOAT = 40 55 | R32_FLOAT = 41 56 | R32_UINT = 42 57 | R32_SINT = 43 58 | R24G8_TYPELESS = 44 59 | D24_UNORM_S8_UINT = 45 60 | R24_UNORM_X8_TYPELESS = 46 61 | X24_TYPELESS_G8_UINT = 47 62 | R8G8_TYPELESS = 48 63 | R8G8_UNORM = 49 64 | R8G8_UINT = 50 65 | R8G8_SNORM = 51 66 | R8G8_SINT = 52 67 | R16_TYPELESS = 53 68 | R16_FLOAT = 54 69 | D16_UNORM = 55 70 | R16_UNORM = 56 71 | R16_UINT = 57 72 | R16_SNORM = 58 73 | R16_SINT = 59 74 | R8_TYPELESS = 60 75 | R8_UNORM = 61 76 | R8_UINT = 62 77 | R8_SNORM = 63 78 | R8_SINT = 64 79 | A8_UNORM = 65 80 | R1_UNORM = 66 81 | R9G9B9E5_SHAREDEXP = 67 82 | R8G8_B8G8_UNORM = 68 83 | G8R8_G8B8_UNORM = 69 84 | BC1_TYPELESS = 70 85 | BC1_UNORM = 71 86 | BC1_UNORM_SRGB = 72 87 | BC2_TYPELESS = 73 88 | BC2_UNORM = 74 89 | BC2_UNORM_SRGB = 75 90 | BC3_TYPELESS = 76 91 | BC3_UNORM = 77 92 | BC3_UNORM_SRGB = 78 93 | BC4_TYPELESS = 79 94 | BC4_UNORM = 80 95 | BC4_SNORM = 81 96 | BC5_TYPELESS = 82 97 | BC5_UNORM = 83 98 | BC5_SNORM = 84 99 | B5G6R5_UNORM = 85 100 | B5G5R5A1_UNORM = 86 101 | B8G8R8A8_UNORM = 87 102 | B8G8R8X8_UNORM = 88 103 | R10G10B10_XR_BIAS_A2_UNORM = 89 104 | B8G8R8A8_TYPELESS = 90 105 | B8G8R8A8_UNORM_SRGB = 91 106 | B8G8R8X8_TYPELESS = 92 107 | B8G8R8X8_UNORM_SRGB = 93 108 | BC6H_TYPELESS = 94 109 | BC6H_UF16 = 95 110 | BC6H_SF16 = 96 111 | BC7_TYPELESS = 97 112 | BC7_UNORM = 98 113 | BC7_UNORM_SRGB = 99 114 | AYUV = 100 115 | Y410 = 101 116 | Y416 = 102 117 | NV12 = 103 118 | P010 = 104 119 | P016 = 105 120 | OPAQUE_420 = 106 121 | YUY2 = 107 122 | Y210 = 108 123 | Y216 = 109 124 | NV11 = 110 125 | AI44 = 111 126 | IA44 = 112 127 | P8 = 113 128 | A8P8 = 114 129 | B4G4R4A4_UNORM = 115 130 | P208 = 130 131 | V208 = 131 132 | V408 = 132 133 | A4B4G4R4_UNORM = 191 134 | 135 | # non-official formats 136 | ASTC_4X4_TYPELESS = 133 137 | ASTC_4X4_UNORM = 134 138 | 139 | @classmethod 140 | def is_valid_format(cls, fmt_name): 141 | return fmt_name in cls._member_names_ 142 | 143 | @staticmethod 144 | def get_signed(fmt): 145 | name = fmt.name 146 | name_split = name.split("_") 147 | num_type = name_split[-1] 148 | 149 | new_num_types = { 150 | "UNORM": "SNORM", 151 | "UINT": "SINT" 152 | } 153 | 154 | if num_type in new_num_types: 155 | name = "_".join(name_split[:-1] + new_num_types[num_type]) 156 | else: 157 | return fmt 158 | 159 | if DXGI_FORMAT.is_valid_format(name): 160 | return DXGI_FORMAT[name] 161 | else: 162 | return fmt 163 | 164 | 165 | def int_to_byte(n): 166 | return n.to_bytes(1, byteorder="little") 167 | 168 | 169 | # For detecting DXGI from fourCC 170 | FOURCC_TO_DXGI = [ 171 | [[b'DXT1'], DXGI_FORMAT.BC1_UNORM], 172 | [[b'DXT2', b'DXT3'], DXGI_FORMAT.BC2_UNORM], 173 | [[b'DXT4', b'DXT5'], DXGI_FORMAT.BC3_UNORM], 174 | [[b'ATI1', b'BC4U', b'3DC1'], DXGI_FORMAT.BC4_UNORM], 175 | [[b'ATI2', b'BC5U', b'3DC2'], DXGI_FORMAT.BC5_UNORM], 176 | [[b'BC4S'], DXGI_FORMAT.BC4_SNORM], 177 | [[b'BC5S'], DXGI_FORMAT.BC5_SNORM], 178 | [[b'BC6H'], DXGI_FORMAT.BC6H_UF16], 179 | [[b'BC7L', b'BC7'], DXGI_FORMAT.BC7_UNORM], 180 | [[b'RGBG'], DXGI_FORMAT.R8G8_B8G8_UNORM], 181 | [[b'GRGB'], DXGI_FORMAT.G8R8_G8B8_UNORM], 182 | [[b'YUY2', b'UYVY'], DXGI_FORMAT.YUY2], 183 | [[int_to_byte(36)], DXGI_FORMAT.R16G16B16A16_UNORM], 184 | [[int_to_byte(110)], DXGI_FORMAT.R16G16B16A16_SNORM], 185 | [[int_to_byte(111)], DXGI_FORMAT.R16_FLOAT], 186 | [[int_to_byte(112)], DXGI_FORMAT.R16G16_FLOAT], 187 | [[int_to_byte(113)], DXGI_FORMAT.R16G16B16A16_FLOAT], 188 | [[int_to_byte(114)], DXGI_FORMAT.R32_FLOAT], 189 | [[int_to_byte(115)], DXGI_FORMAT.R32G32_FLOAT], 190 | [[int_to_byte(116)], DXGI_FORMAT.R32G32B32A32_FLOAT] 191 | ] 192 | 193 | 194 | # Used to detect DXGI format from DDS_PIXELFORMAT 195 | BITMASK_TO_DXGI = [ 196 | [[0x00ff0000, 0x0000ff00, 0x000000ff, 0xff000000], DXGI_FORMAT.B8G8R8A8_UNORM], 197 | [[0x00ff0000, 0x0000ff00, 0x000000ff, 0], DXGI_FORMAT.B8G8R8X8_UNORM], 198 | [[0x000000ff, 0x0000ff00, 0x00ff0000, 0xff000000], DXGI_FORMAT.R8G8B8A8_UNORM], 199 | [[0x3ff00000, 0x000ffc00, 0x000003ff, 0xc0000000], DXGI_FORMAT.R10G10B10A2_UNORM], 200 | [[0x0000ffff, 0xffff0000, 0, 0], DXGI_FORMAT.R16G16_UNORM], 201 | [[0xffffffff, 0, 0, 0], DXGI_FORMAT.R32_FLOAT], 202 | [[0x7c00, 0x03e0, 0x001f, 0x8000], DXGI_FORMAT.B5G5R5A1_UNORM], 203 | [[0xf800, 0x07e0, 0x001f, 0], DXGI_FORMAT.B5G6R5_UNORM], 204 | [[0x0f00, 0x00f0, 0x000f, 0xf000], DXGI_FORMAT.B4G4R4A4_UNORM], 205 | [[0x00ff, 0, 0, 0xff00], DXGI_FORMAT.R8G8_UNORM], 206 | [[0xffff, 0, 0, 0], DXGI_FORMAT.R16_UNORM], 207 | [[0xff, 0, 0, 0], DXGI_FORMAT.R8_UNORM], 208 | [[0, 0, 0, 0xff], DXGI_FORMAT.A8_UNORM] 209 | ] 210 | -------------------------------------------------------------------------------- /modules/fbxskel/blender_re_fbxskel.py: -------------------------------------------------------------------------------- 1 | import os 2 | import bpy 3 | from mathutils import Vector,Quaternion,Matrix 4 | from math import radians 5 | from ..blender_utils import showMessageBox 6 | from ..gen_functions import textColors,raiseWarning,splitNativesPath 7 | from .file_re_fbxskel import readFBXSkel,writeFBXSkel,FBXSkelFile,BoneEntry 8 | 9 | rotate90Matrix = Matrix.Rotation(radians(90.0), 4, 'X') 10 | rotateNeg90Matrix = Matrix.Rotation(radians(-90.0), 4, 'X') 11 | #FBXSKEL IMPORT 12 | 13 | def importFBXSkelFile(filePath): 14 | 15 | fbxSkelFile = readFBXSkel(filePath) 16 | try: 17 | fbxSkelVersion = int(os.path.splitext(filePath)[1].replace(".","")) 18 | except: 19 | print("Unable to parse fbxskel version number in file path.") 20 | fbxSkelVersion = 7 21 | bpy.context.scene["REMeshLastImportedFBXSkelVersion"] = fbxSkelVersion 22 | fileName = os.path.splitext(os.path.split(filePath)[1])[0] 23 | armatureData = bpy.data.armatures.new(fileName) 24 | armatureObj = bpy.data.objects.new(fileName, armatureData) 25 | 26 | try: 27 | split = splitNativesPath(filePath) 28 | if split != None: 29 | assetPath = os.path.splitext(split[1])[0].replace(os.sep,"/") 30 | armatureObj["~ASSETPATH"] = assetPath#Used to determine where to export automatically 31 | except: 32 | print("Failed to set asset path from file path, file is likely not in a natives folder.") 33 | 34 | armatureObj.show_in_front = True 35 | #armatureObj.display_type = "WIRE"#Change display type to make it visually different from a mesh armature 36 | armatureData.display_type = "STICK"#Change display type to make it visually different from a mesh armature 37 | bpy.context.scene.collection.objects.link(armatureObj) 38 | bpy.context.view_layer.objects.active = armatureObj 39 | bpy.ops.object.mode_set(mode='EDIT') 40 | boneIndexDict = {} 41 | boneNameIndexDict = {index: bone.boneName for index, bone in enumerate(fbxSkelFile.boneEntryList)} 42 | boneParentList = [] 43 | 44 | for bone in fbxSkelFile.boneEntryList: 45 | boneName = bone.boneName 46 | editBone = armatureData.edit_bones.new(boneName) 47 | editBone.head = (0.0, 0.0, 0.0) 48 | editBone.tail = (0.0, 0.1, 0.0) 49 | editBone.length = .03 50 | 51 | pos = Vector(bone.translation) 52 | rot = Quaternion((bone.rotation[3],bone.rotation[0],bone.rotation[1],bone.rotation[2])) 53 | scale = Vector(bone.scale) 54 | 55 | editBone.matrix = editBone.matrix @ Matrix.LocRotScale(pos,rot,scale) 56 | 57 | if bone.parentIndex != -1: 58 | boneParentName = boneNameIndexDict[bone.parentIndex] 59 | boneParentList.append((editBone,boneParentName))#Set bone parents after all bones have been imported 60 | 61 | #Assign bone parents 62 | 63 | for editBone,parentBoneName in boneParentList: 64 | editBone.parent = armatureData.edit_bones[parentBoneName] 65 | editBone.matrix = editBone.parent.matrix @ editBone.matrix 66 | 67 | #armatureData.transform(armatureObj.matrix_world @ rotate90Matrix) 68 | bpy.ops.object.mode_set(mode='OBJECT') 69 | #Set color 70 | for bone in armatureData.bones: 71 | bone.color.palette = "THEME01"#Change bone color to make it visually different from a mesh armature 72 | prevSelection = bpy.context.selected_objects 73 | for obj in prevSelection: 74 | obj.select_set(False) 75 | 76 | armatureObj.matrix_world = armatureObj.matrix_world @ rotate90Matrix 77 | armatureObj.select_set(True) 78 | bpy.ops.object.transform_apply(location = False,rotation = True,scale = False) 79 | for bone in fbxSkelFile.boneEntryList:#Apply scale to pose bones since scale can't be applied to edit bones 80 | armaturePoseBone = armatureObj.pose.bones[bone.boneName] 81 | armaturePoseBone.scale = bone.scale 82 | armaturePoseBone["useSegmentScaling"] = bone.segmentScaling 83 | armatureObj.select_set(False) 84 | 85 | for obj in prevSelection: 86 | obj.select_set(True) 87 | 88 | bpy.context.view_layer.objects.active = armatureObj 89 | 90 | 91 | 92 | return armatureObj 93 | 94 | 95 | #FBXSKEL EXPORT 96 | 97 | 98 | def exportFBXSkelFile(filepath,targetArmature,usePose=True): 99 | fbxSkelFile = FBXSkelFile() 100 | armatureObj = bpy.data.objects.get(targetArmature,None) 101 | if armatureObj != None and armatureObj.type == "ARMATURE": 102 | print(f"Armature: {armatureObj.name}") 103 | exportArmatureData = armatureObj.data.copy() 104 | exportArmatureData.transform(rotateNeg90Matrix @ armatureObj.matrix_world) 105 | boneIndexDict = {bone.name: index for index, bone in enumerate(armatureObj.data.bones)} 106 | #print(boneIndexDict) 107 | for bone in exportArmatureData.bones: 108 | parsedBone = BoneEntry() 109 | #Get hierarchy 110 | parsedBone.boneName = bone.name 111 | parsedBone.boneIndex = boneIndexDict[bone.name] 112 | 113 | 114 | if bone.name.startswith("L_") : 115 | if "R"+bone.name[1::] in armatureObj.data.bones: 116 | parsedBone.boneIndex = boneIndexDict["R"+bone.name[1::]] 117 | 118 | elif bone.name.startswith("R_"): 119 | if "L"+bone.name[1::] in armatureObj.data.bones: 120 | parsedBone.boneIndex = boneIndexDict["L"+bone.name[1::]] 121 | 122 | elif bone.name.endswith("_L"): 123 | if bone.name[:-1]+"R" in armatureObj.data.bones: 124 | parsedBone.boneIndex = boneIndexDict[bone.name[:-1]+"R"] 125 | elif bone.name.endswith("_R"): 126 | if bone.name[:-1]+"L" in armatureObj.data.bones: 127 | parsedBone.boneIndex = boneIndexDict[bone.name[:-1]+"L"] 128 | 129 | 130 | 131 | if bone.parent != None: 132 | parsedBone.parentIndex = boneIndexDict[bone.parent.name] 133 | for childBone in bone.parent.children: 134 | if childBone.name != bone.name and boneIndexDict[bone.name] < boneIndexDict[childBone.name]: 135 | parsedBone.nextSiblingIndex = boneIndexDict[childBone.name] 136 | break 137 | else: 138 | parsedBone.parentIndex = -1 139 | 140 | if armatureObj.pose.bones[bone.name].get("useSegmentScaling",None): 141 | parsedBone.segmentScaling = armatureObj.pose.bones[bone.name].get("useSegmentScaling") 142 | #Get matrices 143 | 144 | if bone.parent != None: 145 | mat = exportArmatureData.bones[bone.parent.name].matrix_local.inverted() @ exportArmatureData.bones[bone.name].matrix_local 146 | else: 147 | mat = armatureObj.matrix_world @ exportArmatureData.bones[bone.name].matrix_local 148 | if usePose: 149 | mat = mat @ armatureObj.pose.bones[bone.name].matrix_basis 150 | transform,rotation,scale = mat.decompose() 151 | parsedBone.translation = transform 152 | parsedBone.rotation = (rotation[1],rotation[2],rotation[3],rotation[0]) 153 | parsedBone.scale = scale 154 | fbxSkelFile.boneEntryList.append(parsedBone) 155 | 156 | if exportArmatureData != None: 157 | bpy.data.armatures.remove(exportArmatureData) 158 | writeFBXSkel(fbxSkelFile, filepath) 159 | return True -------------------------------------------------------------------------------- /modules/tex/enums/tex_format_enum.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | texFormatToDXGIStringDict = { 4 | 1:"R32G32B32A32TYPELESS", 5 | 2:"R32G32B32A32FLOAT", 6 | 3:"R32G32B32A32UINT", 7 | 4:"R32G32B32A32SINT", 8 | 5:"R32G32B32TYPELESS", 9 | 6:"R32G32B32FLOAT", 10 | 7:"R32G32B32UINT", 11 | 8:"R32G32B32SINT", 12 | 9:"R16G16B16A16TYPELESS", 13 | 0XA:"R16G16B16A16FLOAT", 14 | 0XB:"R16G16B16A16UNORM", 15 | 0XC:"R16G16B16A16UINT", 16 | 0XD:"R16G16B16A16SNORM", 17 | 0XE:"R16G16B16A16SINT", 18 | 0XF:"R32G32TYPELESS", 19 | 0X10:"R32G32FLOAT", 20 | 0X11:"R32G32UINT", 21 | 0X12:"R32G32SINT", 22 | 0X13:"R32G8X24TYPELESS", 23 | 0X14:"D32FLOATS8X24UINT", 24 | 0X15:"R32FLOATX8X24TYPELESS", 25 | 0X16:"X32TYPELESSG8X24UINT", 26 | 0X17:"R10G10B10A2TYPELESS", 27 | 0X18:"R10G10B10A2UNORM", 28 | 0X19:"R10G10B10A2UINT", 29 | 0X1A:"R11G11B10FLOAT", 30 | 0X1B:"R8G8B8A8TYPELESS", 31 | 0X1C:"R8G8B8A8UNORM", 32 | 0X1D:"R8G8B8A8UNORMSRGB", 33 | 0X1E:"R8G8B8A8UINT", 34 | 0X1F:"R8G8B8A8SNORM", 35 | 0X20:"R8G8B8A8SINT", 36 | 0X21:"R16G16TYPELESS", 37 | 0X22:"R16G16FLOAT", 38 | 0X23:"R16G16UNORM", 39 | 0X24:"R16G16UINT", 40 | 0X25:"R16G16SNORM", 41 | 0X26:"R16G16SINT", 42 | 0X27:"R32TYPELESS", 43 | 0X28:"D32FLOAT", 44 | 0X29:"R32FLOAT", 45 | 0X2A:"R32UINT", 46 | 0X2B:"R32SINT", 47 | 0X2C:"R24G8TYPELESS", 48 | 0X2D:"D24UNORMS8UINT", 49 | 0X2E:"R24UNORMX8TYPELESS", 50 | 0X2F:"X24TYPELESSG8UINT", 51 | 0X30:"R8G8TYPELESS", 52 | 0X31:"R8G8UNORM", 53 | 0X32:"R8G8UINT", 54 | 0X33:"R8G8SNORM", 55 | 0X34:"R8G8SINT", 56 | 0X35:"R16TYPELESS", 57 | 0X36:"R16FLOAT", 58 | 0X37:"D16UNORM", 59 | 0X38:"R16UNORM", 60 | 0X39:"R16UINT", 61 | 0X3A:"R16SNORM", 62 | 0X3B:"R16SINT", 63 | 0X3C:"R8TYPELESS", 64 | 0X3D:"R8UNORM", 65 | 0X3E:"R8UINT", 66 | 0X3F:"R8SNORM", 67 | 0X40:"R8SINT", 68 | 0X41:"A8UNORM", 69 | 0X42:"R1UNORM", 70 | 0X43:"R9G9B9E5SHAREDEXP", 71 | 0X44:"R8G8B8G8UNORM", 72 | 0X45:"G8R8G8B8UNORM", 73 | 0X46:"BC1TYPELESS", 74 | 0X47:"BC1UNORM", 75 | 0X48:"BC1UNORMSRGB", 76 | 0X49:"BC2TYPELESS", 77 | 0X4A:"BC2UNORM", 78 | 0X4B:"BC2UNORMSRGB", 79 | 0X4C:"BC3TYPELESS", 80 | 0X4D:"BC3UNORM", 81 | 0X4E:"BC3UNORMSRGB", 82 | 0X4F:"BC4TYPELESS", 83 | 0X50:"BC4UNORM", 84 | 0X51:"BC4SNORM", 85 | 0X52:"BC5TYPELESS", 86 | 0X53:"BC5UNORM", 87 | 0X54:"BC5SNORM", 88 | 0X55:"B5G6R5UNORM", 89 | 0X56:"B5G5R5A1UNORM", 90 | 0X57:"B8G8R8A8UNORM", 91 | 0X58:"B8G8R8X8UNORM", 92 | 0X59:"R10G10B10XRBIASA2UNORM", 93 | 0X5A:"B8G8R8A8TYPELESS", 94 | 0X5B:"B8G8R8A8UNORMSRGB", 95 | 0X5C:"B8G8R8X8TYPELESS", 96 | 0X5D:"B8G8R8X8UNORMSRGB", 97 | 0X5E:"BC6HTYPELESS", 98 | 0X5F:"BC6HUF16", 99 | 0X60:"BC6HSF16", 100 | 0X61:"BC7TYPELESS", 101 | 0X62:"BC7UNORM", 102 | 0X63:"BC7UNORMSRGB", 103 | 0X400:"VIAEXTENSION", 104 | 0X401:"ASTC4X4TYPELESS", 105 | 0X402:"ASTC4X4UNORM", 106 | 0X403:"ASTC4X4UNORMSRGB", 107 | 0X404:"ASTC5X4TYPELESS", 108 | 0X405:"ASTC5X4UNORM", 109 | 0X406:"ASTC5X4UNORMSRGB", 110 | 0X407:"ASTC5X5TYPELESS", 111 | 0X408:"ASTC5X5UNORM", 112 | 0X409:"ASTC5X5UNORMSRGB", 113 | 0X40A:"ASTC6X5TYPELESS", 114 | 0X40B:"ASTC6X5UNORM", 115 | 0X40C:"ASTC6X5UNORMSRGB", 116 | 0X40D:"ASTC6X6TYPELESS", 117 | 0X40E:"ASTC6X6UNORM", 118 | 0X40F:"ASTC6X6UNORMSRGB", 119 | 0X410:"ASTC8X5TYPELESS", 120 | 0X411:"ASTC8X5UNORM", 121 | 0X412:"ASTC8X5UNORMSRGB", 122 | 0X413:"ASTC8X6TYPELESS", 123 | 0X414:"ASTC8X6UNORM", 124 | 0X415:"ASTC8X6UNORMSRGB", 125 | 0X416:"ASTC8X8TYPELESS", 126 | 0X417:"ASTC8X8UNORM", 127 | 0X418:"ASTC8X8UNORMSRGB", 128 | 0X419:"ASTC10X5TYPELESS", 129 | 0X41A:"ASTC10X5UNORM", 130 | 0X41B:"ASTC10X5UNORMSRGB", 131 | 0X41C:"ASTC10X6TYPELESS", 132 | 0X41D:"ASTC10X6UNORM", 133 | 0X41E:"ASTC10X6UNORMSRGB", 134 | 0X41F:"ASTC10X8TYPELESS", 135 | 0X420:"ASTC10X8UNORM", 136 | 0X421:"ASTC10X8UNORMSRGB", 137 | 0X422:"ASTC10X10TYPELESS", 138 | 0X423:"ASTC10X10UNORM", 139 | 0X424:"ASTC10X10UNORMSRGB", 140 | 0X425:"ASTC12X10TYPELESS", 141 | 0X426:"ASTC12X10UNORM", 142 | 0X427:"ASTC12X10UNORMSRGB", 143 | 0X428:"ASTC12X12TYPELESS", 144 | 0X429:"ASTC12X12UNORM", 145 | 0X42A:"ASTC12X12UNORMSRGB", 146 | 0X7FFFFFFF:"FORCEUINT" 147 | } 148 | formatStringToTexFormatDict = { 149 | 'R32G32B32A32TYPELESS': 1, 150 | 'R32G32B32A32FLOAT': 2, 151 | 'R32G32B32A32UINT': 3, 152 | 'R32G32B32A32SINT': 4, 153 | 'R32G32B32TYPELESS': 5, 154 | 'R32G32B32FLOAT': 6, 155 | 'R32G32B32UINT': 7, 156 | 'R32G32B32SINT': 8, 157 | 'R16G16B16A16TYPELESS': 9, 158 | 'R16G16B16A16FLOAT': 10, 159 | 'R16G16B16A16UNORM': 11, 160 | 'R16G16B16A16UINT': 12, 161 | 'R16G16B16A16SNORM': 13, 162 | 'R16G16B16A16SINT': 14, 163 | 'R32G32TYPELESS': 15, 164 | 'R32G32FLOAT': 16, 165 | 'R32G32UINT': 17, 166 | 'R32G32SINT': 18, 167 | 'R32G8X24TYPELESS': 19, 168 | 'D32FLOATS8X24UINT': 20, 169 | 'R32FLOATX8X24TYPELESS': 21, 170 | 'X32TYPELESSG8X24UINT': 22, 171 | 'R10G10B10A2TYPELESS': 23, 172 | 'R10G10B10A2UNORM': 24, 173 | 'R10G10B10A2UINT': 25, 174 | 'R11G11B10FLOAT': 26, 175 | 'R8G8B8A8TYPELESS': 27, 176 | 'R8G8B8A8UNORM': 28, 177 | 'R8G8B8A8UNORMSRGB': 29, 178 | 'R8G8B8A8UINT': 30, 179 | 'R8G8B8A8SNORM': 31, 180 | 'R8G8B8A8SINT': 32, 181 | 'R16G16TYPELESS': 33, 182 | 'R16G16FLOAT': 34, 183 | 'R16G16UNORM': 35, 184 | 'R16G16UINT': 36, 185 | 'R16G16SNORM': 37, 186 | 'R16G16SINT': 38, 187 | 'R32TYPELESS': 39, 188 | 'D32FLOAT': 40, 189 | 'R32FLOAT': 41, 190 | 'R32UINT': 42, 191 | 'R32SINT': 43, 192 | 'R24G8TYPELESS': 44, 193 | 'D24UNORMS8UINT': 45, 194 | 'R24UNORMX8TYPELESS': 46, 195 | 'X24TYPELESSG8UINT': 47, 196 | 'R8G8TYPELESS': 48, 197 | 'R8G8UNORM': 49, 198 | 'R8G8UINT': 50, 199 | 'R8G8SNORM': 51, 200 | 'R8G8SINT': 52, 201 | 'R16TYPELESS': 53, 202 | 'R16FLOAT': 54, 203 | 'D16UNORM': 55, 204 | 'R16UNORM': 56, 205 | 'R16UINT': 57, 206 | 'R16SNORM': 58, 207 | 'R16SINT': 59, 208 | 'R8TYPELESS': 60, 209 | 'R8UNORM': 61, 210 | 'R8UINT': 62, 211 | 'R8SNORM': 63, 212 | 'R8SINT': 64, 213 | 'A8UNORM': 65, 214 | 'R1UNORM': 66, 215 | 'R9G9B9E5SHAREDEXP': 67, 216 | 'R8G8B8G8UNORM': 68, 217 | 'G8R8G8B8UNORM': 69, 218 | 'BC1TYPELESS': 70, 219 | 'BC1UNORM': 71, 220 | 'BC1UNORMSRGB': 72, 221 | 'BC2TYPELESS': 73, 222 | 'BC2UNORM': 74, 223 | 'BC2UNORMSRGB': 75, 224 | 'BC3TYPELESS': 76, 225 | 'BC3UNORM': 77, 226 | 'BC3UNORMSRGB': 78, 227 | 'BC4TYPELESS': 79, 228 | 'BC4UNORM': 80, 229 | 'BC4SNORM': 81, 230 | 'BC5TYPELESS': 82, 231 | 'BC5UNORM': 83, 232 | 'BC5SNORM': 84, 233 | 'B5G6R5UNORM': 85, 234 | 'B5G5R5A1UNORM': 86, 235 | 'B8G8R8A8UNORM': 87, 236 | 'B8G8R8X8UNORM': 88, 237 | 'R10G10B10XRBIASA2UNORM': 89, 238 | 'B8G8R8A8TYPELESS': 90, 239 | 'B8G8R8A8UNORMSRGB': 91, 240 | 'B8G8R8X8TYPELESS': 92, 241 | 'B8G8R8X8UNORMSRGB': 93, 242 | 'BC6HTYPELESS': 94, 243 | 'BC6HUF16': 95, 244 | 'BC6HSF16': 96, 245 | 'BC7TYPELESS': 97, 246 | 'BC7UNORM': 98, 247 | 'BC7UNORMSRGB': 99, 248 | 'VIAEXTENSION': 1024, 249 | 'ASTC4X4TYPELESS': 1025, 250 | 'ASTC4X4UNORM': 1026, 251 | 'ASTC4X4UNORMSRGB': 1027, 252 | 'ASTC5X4TYPELESS': 1028, 253 | 'ASTC5X4UNORM': 1029, 254 | 'ASTC5X4UNORMSRGB': 1030, 255 | 'ASTC5X5TYPELESS': 1031, 256 | 'ASTC5X5UNORM': 1032, 257 | 'ASTC5X5UNORMSRGB': 1033, 258 | 'ASTC6X5TYPELESS': 1034, 259 | 'ASTC6X5UNORM': 1035, 260 | 'ASTC6X5UNORMSRGB': 1036, 261 | 'ASTC6X6TYPELESS': 1037, 262 | 'ASTC6X6UNORM': 1038, 263 | 'ASTC6X6UNORMSRGB': 1039, 264 | 'ASTC8X5TYPELESS': 1040, 265 | 'ASTC8X5UNORM': 1041, 266 | } -------------------------------------------------------------------------------- /modules/fbxskel/file_re_fbxskel.py: -------------------------------------------------------------------------------- 1 | #Author: NSA Cloud 2 | import os 3 | 4 | from ..gen_functions import textColors,raiseWarning,raiseError,getPaddingAmount,read_uint,read_int,read_uint64,read_float,read_short,read_ushort,read_ubyte,read_unicode_string,read_byte,write_uint,write_int,write_uint64,write_float,write_short,write_ushort,write_ubyte,write_unicode_string,write_byte 5 | from ..hashing.mmh3.pymmh3 import hashUTF16 6 | 7 | DEBUG_MODE = False 8 | class SIZEDATA(): 9 | def __init__(self): 10 | self.HEADER_SIZE = 48 11 | self.BONE_ENTRY_SIZE = 64 12 | self.HASH_ENTRY_SIZE = 8 13 | self.PROPERTY_ENTRY_SIZE = 24 14 | self.PROPERTY_VALUE_SIZE = 4 15 | 16 | def debugprint(string): 17 | if DEBUG_MODE: 18 | print(string) 19 | class FBXSkelHeader(): 20 | def __init__(self): 21 | self.version = 5 22 | self.magic = 1852599155#skln 23 | self.boneOffset = 48 24 | self.hashOffset = 0 25 | self.boneCount = 0 26 | def read(self,file): 27 | self.version = read_uint(file) 28 | self.magic = read_uint(file) 29 | if self.magic != 1852599155: 30 | raiseError("File is not an FBXSkel file.") 31 | file.seek(8,1) 32 | self.boneOffset = read_uint(file) 33 | file.seek(4,1) 34 | self.hashOffset = read_uint(file) 35 | file.seek(4,1) 36 | self.boneCount = read_uint(file) 37 | #self.reserved = read_uint64(file) 38 | def write(self,file): 39 | write_uint(file,self.version) 40 | write_uint(file,self.magic) 41 | file.seek(8,1) 42 | write_uint(file,self.boneOffset) 43 | file.seek(4,1) 44 | write_uint(file,self.hashOffset) 45 | file.seek(4,1) 46 | write_uint(file,self.boneCount) 47 | #write_uint64(file,self.reserved) 48 | def __str__(self): 49 | return str(self.__class__) + ": " + str(self.__dict__) 50 | 51 | class BoneEntry(): 52 | def __init__(self): 53 | self.boneNameOffset = 0 54 | self.boneName = "BONE_NAME" 55 | self.boneMMH3Hash = 0 56 | self.parentIndex = 0 57 | self.boneIndex = 0 58 | self.translation = (0.0,0.0,0.0) 59 | self.rotation = (0.0,0.0,0.0,1.0) 60 | self.scale = (1.0,1.0,1.0) 61 | self.segmentScaling = 0 62 | def read(self,file,version): 63 | self.boneNameOffset = read_uint64(file) 64 | debugprint(self.boneNameOffset) 65 | currentPos = file.tell() 66 | file.seek(self.boneNameOffset) 67 | self.boneName = read_unicode_string(file) 68 | file.seek(currentPos) 69 | self.boneMMH3Hash = read_uint(file) 70 | self.parentIndex = read_short(file) 71 | self.boneIndex = read_short(file) 72 | if version >= 5: 73 | self.rotation = (read_float(file),read_float(file),read_float(file),read_float(file)) 74 | self.translation = (read_float(file),read_float(file),read_float(file)) 75 | self.scale = (read_float(file),read_float(file),read_float(file)) 76 | self.segmentScaling = read_uint(file) 77 | file.seek(4,1) 78 | else: 79 | self.translation = (read_float(file),read_float(file),read_float(file)) 80 | file.seek(4,1) 81 | self.rotation = (read_float(file),read_float(file),read_float(file),read_float(file)) 82 | self.scale = (read_float(file),read_float(file),read_float(file)) 83 | file.seek(4,1) 84 | 85 | def write(self,file,version): 86 | write_uint64(file,self.boneNameOffset) 87 | write_uint(file,self.boneMMH3Hash) 88 | write_short(file,self.parentIndex) 89 | write_short(file,self.boneIndex) 90 | if version >= 5: 91 | write_float(file,self.rotation[0]) 92 | write_float(file,self.rotation[1]) 93 | write_float(file,self.rotation[2]) 94 | write_float(file,self.rotation[3]) 95 | write_float(file,self.translation[0]) 96 | write_float(file,self.translation[1]) 97 | write_float(file,self.translation[2]) 98 | write_float(file,self.scale[0]) 99 | write_float(file,self.scale[1]) 100 | write_float(file,self.scale[2]) 101 | write_uint(file,self.segmentScaling) 102 | file.seek(4,1) 103 | else: 104 | write_float(file,self.translation[0]) 105 | write_float(file,self.translation[1]) 106 | write_float(file,self.translation[2]) 107 | file.seek(4,1) 108 | write_float(file,self.rotation[0]) 109 | write_float(file,self.rotation[1]) 110 | write_float(file,self.rotation[2]) 111 | write_float(file,self.rotation[3]) 112 | write_float(file,self.scale[0]) 113 | write_float(file,self.scale[1]) 114 | write_float(file,self.scale[2]) 115 | file.seek(4,1) 116 | 117 | def __str__(self): 118 | return str(self.__class__) + ": " + str(self.__dict__) 119 | 120 | class HashEntry(): 121 | def __init__(self): 122 | self.mmh3Hash = 0 123 | self.boneIndex = 0 124 | def read(self,file): 125 | self.mmh3Hash = read_uint(file) 126 | self.boneIndex = read_uint(file) 127 | def write(self,file): 128 | write_uint(file,self.mmh3Hash) 129 | write_uint(file,self.boneIndex) 130 | 131 | def __str__(self): 132 | return str(self.__class__) + ": " + str(self.__dict__) 133 | 134 | class FBXSkelFile(): 135 | def __init__(self): 136 | self.sizeData = SIZEDATA() 137 | self.header = FBXSkelHeader() 138 | self.boneEntryList = [] 139 | self.boneHashList = []#Sorted by hash value 140 | self.stringList = []#Used during writing 141 | def read(self,file): 142 | self.header.read(file) 143 | debugprint(self.header) 144 | file.seek(self.header.boneOffset) 145 | for i in range(0,self.header.boneCount): 146 | entry = BoneEntry() 147 | entry.read(file,self.header.version) 148 | debugprint(entry) 149 | self.boneEntryList.append(entry) 150 | file.seek(self.header.hashOffset) 151 | for i in range(0,self.header.boneCount): 152 | entry = HashEntry() 153 | entry.read(file) 154 | self.boneHashList.append(entry) 155 | 156 | def gatherStrings(self): 157 | stringOffsetDict = {} 158 | currentStringOffset = 0 159 | for bone in self.boneEntryList: 160 | if stringOffsetDict.get(bone.boneName,None) == None: 161 | stringOffsetDict[bone.boneName] = currentStringOffset 162 | currentStringOffset += len(bone.boneName)*2+2 163 | return stringOffsetDict 164 | def recalculateHashesAndOffsets(self,stringOffsetDict): 165 | self.header.boneCount = len(self.boneEntryList) 166 | 167 | boneEntriesSize = self.sizeData.BONE_ENTRY_SIZE * len(self.boneEntryList) 168 | self.header.hashOffset = self.sizeData.HEADER_SIZE + boneEntriesSize 169 | 170 | stringTableOffset = self.header.hashOffset + (self.header.boneCount * self.sizeData.HASH_ENTRY_SIZE) 171 | for bone in self.boneEntryList: 172 | bone.boneMMH3Hash = hashUTF16(bone.boneName) 173 | bone.boneNameOffset = stringOffsetDict[bone.boneName] + stringTableOffset 174 | self.boneHashList = [] 175 | for index,boneEntry in enumerate(self.boneEntryList): 176 | hashEntry = HashEntry() 177 | hashEntry.mmh3Hash = boneEntry.boneMMH3Hash 178 | hashEntry.boneIndex = index 179 | self.boneHashList.append(hashEntry) 180 | self.boneHashList.sort(key=lambda x: x.mmh3Hash) 181 | 182 | def write(self,file,version): 183 | self.header.version = version 184 | stringOffsetDict = self.gatherStrings() 185 | self.recalculateHashesAndOffsets(stringOffsetDict) 186 | self.header.write(file) 187 | file.seek(self.header.boneOffset) 188 | print("Writing Bone Entries") 189 | for boneEntry in self.boneEntryList: 190 | debugprint(boneEntry) 191 | boneEntry.write(file,self.header.version) 192 | #Loop to write texture entries 193 | print("Writing Bone Hashes") 194 | for hashEntry in self.boneHashList: 195 | hashEntry.write(file) 196 | #Loop to write property headers 197 | print("Writing Bone Strings") 198 | for boneEntry in self.boneEntryList: 199 | write_unicode_string(file, boneEntry.boneName) 200 | def readFBXSkel(filepath): 201 | print(textColors.OKCYAN + "__________________________________\nFBXSkel read started." + textColors.ENDC) 202 | print("Opening " + filepath) 203 | try: 204 | file = open(filepath,"rb") 205 | except: 206 | raiseError("Failed to open " + filepath) 207 | fbxSkelFile = FBXSkelFile() 208 | fbxSkelFile.read(file) 209 | file.close() 210 | print(textColors.OKGREEN + "__________________________________\nFBXSkel read finished." + textColors.ENDC) 211 | return fbxSkelFile 212 | def writeFBXSkel(fbxSkelFile,filepath): 213 | print(textColors.OKCYAN + "__________________________________\nFBXSkel write started." + textColors.ENDC) 214 | print("Opening " + filepath) 215 | try: 216 | file = open(filepath,"wb") 217 | except: 218 | raiseError("Failed to open " + filepath) 219 | try: 220 | version = int(os.path.splitext(filepath)[1].replace(".","")) 221 | except: 222 | raiseWarning("No number extension found on FBXSkel file, defaulting to version 5") 223 | version = 5 224 | fbxSkelFile.write(file,version) 225 | file.close() 226 | print(textColors.OKGREEN + "__________________________________\nFBXSkel write finished." + textColors.ENDC) 227 | -------------------------------------------------------------------------------- /modules/mdf/re_mdf_operators.py: -------------------------------------------------------------------------------- 1 | #Author: NSA Cloud 2 | import bpy 3 | import os 4 | 5 | from bpy.types import Operator 6 | from ..gen_functions import raiseWarning 7 | 8 | from .blender_re_mdf import createEmpty,reindexMaterials,createMDFCollection,checkNameUsage,buildMDF 9 | from .blender_re_mesh_mdf import importMDF 10 | from ..blender_utils import showErrorMessageBox 11 | from .file_re_mdf import getMDFVersionToGameName 12 | from .re_mdf_presets import saveAsPreset,readPresetJSON 13 | from .ui_re_mdf_panels import tag_redraw 14 | 15 | PRESET_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.split(os.path.abspath(__file__))[0])),"Presets") 16 | class WM_OT_NewMDFHeader(Operator): 17 | bl_label = "Create MDF Collection" 18 | bl_idname = "re_mdf.create_mdf_collection" 19 | bl_options = {'UNDO'} 20 | bl_description = "Create an MDF collection for putting materials into.\nNOTE: The name of the collection is not important, you can rename it if you want to" 21 | collectionName : bpy.props.StringProperty(name = "MDF Name", 22 | description = "The name of the newly created mdf collection.\nUse the same name as the mesh file", 23 | default = "newMDF" 24 | ) 25 | def execute(self, context): 26 | if self.collectionName.strip() != "": 27 | createMDFCollection(self.collectionName.strip()+".mdf2") 28 | self.report({"INFO"},"Created new RE MDF collection.") 29 | return {'FINISHED'} 30 | else: 31 | self.report({"ERROR"},"Invalid MDF collection name.") 32 | return {'CANCELLED'} 33 | 34 | def invoke(self,context,event): 35 | return context.window_manager.invoke_props_dialog(self) 36 | class WM_OT_ReindexMaterials(Operator): 37 | bl_label = "Reindex Materials" 38 | bl_description = "Reorders the materials and sets their names to the name set in the custom properties. This is done automatically upon exporting" 39 | bl_idname = "re_mdf.reindex_materials" 40 | 41 | def execute(self, context): 42 | reindexMaterials(bpy.context.scene.re_mdf_toolpanel.mdfCollection.name) 43 | self.report({"INFO"},"Reindexed materials.") 44 | return {'FINISHED'} 45 | 46 | class WM_OT_AddPresetMaterial(Operator): 47 | bl_label = "Add Preset Material" 48 | bl_description = "Add a new material to the file and configure it to the selected preset" 49 | bl_idname = "re_mdf.add_preset_material" 50 | bl_options = {'UNDO'} 51 | def execute(self, context): 52 | enumValue = bpy.context.scene.re_mdf_toolpanel.materialPresets 53 | 54 | if enumValue != "": 55 | finished = readPresetJSON(os.path.join(PRESET_DIR,enumValue)) 56 | else: 57 | finished = False 58 | tag_redraw(bpy.context) 59 | if finished: 60 | self.report({"INFO"},"Added preset material.") 61 | return {'FINISHED'} 62 | else: 63 | return {'CANCELLED'} 64 | class WM_OT_ApplyMDFToMeshCollection(Operator): 65 | bl_label = "Apply Active MDF" 66 | bl_description = "Applies the Active MDF Collection to the specified Mesh Collection.\nThis will remove all materials on the mesh and rebuild them using the active MDF.\nTextures will be fetched from the chunk path set in the addon preferences" 67 | bl_idname = "re_mdf.apply_mdf" 68 | 69 | def execute(self, context): 70 | #reindexMaterials() 71 | mdfCollection = bpy.context.scene.re_mdf_toolpanel.mdfCollection 72 | meshCollection = bpy.context.scene.re_mdf_toolpanel.meshCollection 73 | 74 | 75 | 76 | modDir = os.path.realpath(bpy.context.scene.re_mdf_toolpanel.modDirectory) 77 | #removedMaterialSet = set() 78 | if mdfCollection != None and meshCollection != None and os.path.isdir(modDir): 79 | mdfFile = buildMDF(mdfCollection.name) 80 | meshMaterialDict = dict() 81 | for obj in meshCollection.all_objects: 82 | if obj.type == "MESH" and not obj.get("MeshExportExclude"): 83 | materialName = None 84 | #Fix UV map naming so materials work properly on non RE meshes 85 | if len(obj.data.uv_layers) > 0: 86 | obj.data.uv_layers[0].name = "UVMap0" 87 | if len(obj.data.uv_layers) > 1: 88 | obj.data.uv_layers[1].name = "UVMap1" 89 | if "__" in obj.name: 90 | materialName = obj.name.split("__",1)[1].split(".")[0] 91 | for material in obj.data.materials: 92 | if material.name.split(".")[0] == materialName: 93 | meshMaterialDict[materialName] = material 94 | #removedMaterialSet.add(material) 95 | obj.data.materials.clear() 96 | if materialName not in meshMaterialDict: 97 | if materialName != None: 98 | newMat = bpy.data.materials.new(name=materialName) 99 | newMat.use_nodes = True 100 | obj.data.materials.append(newMat) 101 | meshMaterialDict[materialName] = newMat 102 | else: 103 | raiseWarning(f"No material in mesh name, cannot apply materials: {obj.name}") 104 | else: 105 | obj.data.materials.append(meshMaterialDict[materialName]) 106 | """ 107 | #If the removed materials have no more users, remove them 108 | for material in removedMaterialSet: 109 | if material.users == 0: 110 | print(f"Removed {material.name}") 111 | bpy.data.materials.remove(material) 112 | """ 113 | importMDF(mdfFile, meshMaterialDict,bpy.context.scene.re_mdf_toolpanel.loadUnusedTextures,bpy.context.scene.re_mdf_toolpanel.loadUnusedProps,bpy.context.scene.re_mdf_toolpanel.useBackfaceCulling,bpy.context.scene.re_mdf_toolpanel.reloadCachedTextures,chunkPath = modDir,gameName = bpy.context.scene.re_mdf_toolpanel.activeGame,arrangeNodes = True) 114 | self.report({"INFO"},"Applied MDF to mesh collection.") 115 | else: 116 | self.report({"ERROR"},"Invalid mesh or MDF collection.") 117 | return {'FINISHED'} 118 | class WM_OT_OpenPresetFolder(Operator): 119 | bl_label = "Open Preset Folder" 120 | bl_description = "Opens the preset folder in File Explorer" 121 | bl_idname = "re_mdf.open_preset_folder" 122 | 123 | def execute(self, context): 124 | os.startfile(PRESET_DIR) 125 | return {'FINISHED'} 126 | 127 | 128 | class WM_OT_SavePreset(Operator): 129 | bl_label = "Save Selected As Preset" 130 | bl_idname = "re_mdf.save_selected_as_preset" 131 | bl_context = "objectmode" 132 | bl_description = "Save the selected material object as a preset for easy reuse and sharing. Presets can be accessed using the Open Preset Folder button" 133 | presetName : bpy.props.StringProperty(name = "Enter Preset Name",default = "newPreset") 134 | 135 | @classmethod 136 | def poll(self,context): 137 | return context.active_object is not None 138 | 139 | def execute(self, context): 140 | gameName = bpy.context.scene.re_mdf_toolpanel.activeGame 141 | finished = saveAsPreset(context.active_object, self.presetName,gameName) 142 | if finished: 143 | self.report({"INFO"},"Saved preset.") 144 | return {'FINISHED'} 145 | else: 146 | return {'CANCELLED'} 147 | def invoke(self,context,event): 148 | return context.window_manager.invoke_props_dialog(self) 149 | 150 | return {'FINISHED'} 151 | 152 | def update_findValueCount(self, context): 153 | if context.active_object.get("~TYPE") == "RE_MDF_MATERIAL": 154 | material = context.active_object.re_mdf_material 155 | replaceCount = 0 156 | for entry in material.textureBindingList_items: 157 | replaceCount += entry.path.count(self.findValue) 158 | self.instanceCount = replaceCount 159 | class WM_OT_FindReplaceTextureBindings(Operator): 160 | bl_label = "Find and Replace" 161 | bl_idname = "re_mdf.replace_texture_bindings" 162 | bl_context = "objectmode" 163 | bl_description = "Find and replace specified strings inside texture paths" 164 | findValue : bpy.props.StringProperty(name = "Find",default = "ch02_001_0002",options = {"TEXTEDIT_UPDATE"},update = update_findValueCount) 165 | replaceValue : bpy.props.StringProperty(name = "Replace With",default = "") 166 | instanceCount : bpy.props.IntProperty(name = "Count",default = 0) 167 | @classmethod 168 | def poll(self,context): 169 | return context.active_object is not None 170 | 171 | def execute(self, context): 172 | replaceCount = 0 173 | if context.active_object.get("~TYPE") == "RE_MDF_MATERIAL": 174 | material = context.active_object.re_mdf_material 175 | for entry in material.textureBindingList_items: 176 | replaceCount += entry.path.count(self.findValue) 177 | entry.path = entry.path.replace(self.findValue,self.replaceValue) 178 | 179 | self.report({"INFO"},f"Replaced {replaceCount} instances of \"{self.findValue}\"") 180 | return {'FINISHED'} 181 | else: 182 | return {'CANCELLED'} 183 | 184 | def draw(self,context): 185 | layout = self.layout 186 | layout.prop(self,"findValue") 187 | layout.label(text=f"{self.instanceCount} instances found.") 188 | layout.prop(self,"replaceValue") 189 | 190 | def invoke(self,context,event): 191 | return context.window_manager.invoke_props_dialog(self) 192 | 193 | return {'FINISHED'} 194 | -------------------------------------------------------------------------------- /modules/blender_utils.py: -------------------------------------------------------------------------------- 1 | #Author: NSA Cloud 2 | import bpy 3 | 4 | from .gen_functions import textColors 5 | import os 6 | from collections import OrderedDict 7 | from itertools import repeat 8 | def showMessageBox(message = "", title = "Message Box", icon = 'INFO'): 9 | 10 | def draw(self, context): 11 | self.layout.label(text = message) 12 | 13 | bpy.context.window_manager.popup_menu(draw, title = title, icon = icon) 14 | 15 | def showErrorMessageBox(message): 16 | print(textColors.FAIL + "ERROR: " + message + textColors.ENDC) 17 | showMessageBox(message,title = "Error", icon = "ERROR") 18 | 19 | class ContextExecuterOverride: 20 | def __init__(self, window, screen, area, region): 21 | self.window, self.screen, self.area, self.region = window, screen, area, region 22 | self.legacy = not hasattr(bpy.context, "temp_override") 23 | if self.legacy: 24 | self.context = bpy.context.copy() 25 | self.context['window'] = window 26 | self.context['screen'] = screen 27 | self.context['area'] = area 28 | self.context['region'] = region 29 | else: 30 | self.context = bpy.context.temp_override(window=window, screen=screen, area=area, region=region) 31 | 32 | def __enter__(self): 33 | if not self.legacy: 34 | self.context.__enter__() 35 | return self 36 | 37 | def __exit__(self, exc_type, exc_value, traceback): 38 | if not self.legacy: 39 | self.context.__exit__(self, exc_type, exc_value, traceback) 40 | return self 41 | 42 | class ContextScriptExecuter(): 43 | 44 | def __init__(self, area_type, ui_type=None, script=None): 45 | self.area_type = area_type 46 | self.ui_type = ui_type if ui_type else area_type 47 | self.script = script 48 | 49 | def script_content(self, override): 50 | self.script(override) 51 | 52 | def execute_script(self): 53 | window = bpy.context.window 54 | screen = window.screen 55 | areas = [area for area in screen.areas if area.type == self.area_type] 56 | area = areas[0] if len(areas) else screen.areas[0] 57 | prev_ui_type = area.ui_type 58 | area.ui_type = self.ui_type 59 | regions = [region for region in area.regions if region.type == 'WINDOW'] 60 | region = regions[0] if len(regions) else None 61 | with ContextExecuterOverride(window=window, screen=screen, area=area, region=region) as override: 62 | self.script_content(override) 63 | area.ui_type = prev_ui_type 64 | 65 | def outlinerShowObject(objName): 66 | if objName in bpy.data.objects: 67 | obj = bpy.data.objects[objName] 68 | bpy.context.view_layer.objects.active = obj 69 | ContextScriptExecuter( 70 | area_type='OUTLINER', 71 | script=lambda override: ( 72 | bpy.ops.outliner.show_active(override.context) 73 | if override.legacy 74 | else bpy.ops.outliner.show_active() 75 | ) 76 | ).execute_script() 77 | 78 | 79 | 80 | def operator_exists(idname): 81 | from bpy.ops import op_as_string 82 | try: 83 | op_as_string(idname) 84 | return True 85 | except: 86 | return False 87 | 88 | #-------------------------------- 89 | #Node arrange by JuhaW 90 | #https://github.com/blender/blender-addons/blob/main/node_arrange.py 91 | class values(): 92 | average_y = 0 93 | x_last = 0 94 | margin_x = 100 95 | mat_name = "" 96 | margin_y = 20 97 | 98 | def outputnode_search(ntree): # return node/None 99 | 100 | outputnodes = [] 101 | for node in ntree.nodes: 102 | if not node.outputs: 103 | for input in node.inputs: 104 | if input.is_linked: 105 | outputnodes.append(node) 106 | break 107 | 108 | if not outputnodes: 109 | print("No output node found") 110 | return None 111 | return outputnodes 112 | 113 | def nodes_odd(ntree, nodelist): 114 | 115 | nodes = ntree.nodes 116 | for i in nodes: 117 | i.select = False 118 | 119 | a = [x for x in nodes if x not in nodelist] 120 | # print ("odd nodes:",a) 121 | for i in a: 122 | i.select = True 123 | 124 | def nodes_arrange(nodelist, level, ntree): 125 | 126 | parents = [] 127 | for node in nodelist: 128 | parents.append(node.parent) 129 | node.parent = None 130 | ntree.nodes.update() 131 | 132 | 133 | #print ("nodes arrange def") 134 | # node x positions 135 | 136 | widthmax = max([x.dimensions.x for x in nodelist]) 137 | xpos = values.x_last - (widthmax + values.margin_x) if level != 0 else 0 138 | #print ("nodelist, xpos", nodelist,xpos) 139 | values.x_last = xpos 140 | 141 | # node y positions 142 | x = 0 143 | y = 0 144 | 145 | for node in nodelist: 146 | 147 | if node.hide: 148 | hidey = (node.dimensions.y / 2) - 8 149 | y = y - hidey 150 | else: 151 | hidey = 0 152 | 153 | node.location.y = y 154 | y = y - values.margin_y - node.dimensions.y + hidey 155 | 156 | node.location.x = xpos #if node.type != "FRAME" else xpos + 1200 157 | 158 | y = y + values.margin_y 159 | 160 | center = (0 + y) / 2 161 | values.average_y = center - values.average_y 162 | 163 | #for node in nodelist: 164 | 165 | #node.location.y -= values.average_y 166 | 167 | for i, node in enumerate(nodelist): 168 | node.parent = parents[i] 169 | 170 | def nodes_iterate(ntree, arrange=True): 171 | 172 | nodeoutput = outputnode_search(ntree) 173 | if nodeoutput is None: 174 | #print ("nodeoutput is None") 175 | return None 176 | a = [] 177 | a.append([]) 178 | for i in nodeoutput: 179 | a[0].append(i) 180 | 181 | 182 | level = 0 183 | 184 | while a[level]: 185 | a.append([]) 186 | 187 | for node in a[level]: 188 | inputlist = [i for i in node.inputs if i.is_linked] 189 | 190 | if inputlist: 191 | 192 | for input in inputlist: 193 | for nlinks in input.links: 194 | node1 = nlinks.from_node 195 | a[level + 1].append(node1) 196 | 197 | else: 198 | pass 199 | 200 | level += 1 201 | 202 | del a[level] 203 | level -= 1 204 | 205 | #remove duplicate nodes at the same level, first wins 206 | for x, nodes in enumerate(a): 207 | a[x] = list(OrderedDict(zip(a[x], repeat(None)))) 208 | 209 | #remove duplicate nodes in all levels, last wins 210 | top = level 211 | for row1 in range(top, 1, -1): 212 | for col1 in a[row1]: 213 | for row2 in range(row1-1, 0, -1): 214 | for col2 in a[row2]: 215 | if col1 == col2: 216 | a[row2].remove(col2) 217 | break 218 | 219 | """ 220 | for x, i in enumerate(a): 221 | print (x) 222 | for j in i: 223 | print (j) 224 | #print() 225 | """ 226 | """ 227 | #add node frames to nodelist 228 | frames = [] 229 | print ("Frames:") 230 | print ("level:", level) 231 | print ("a:",a) 232 | for row in range(level, 0, -1): 233 | 234 | for i, node in enumerate(a[row]): 235 | if node.parent: 236 | print ("Frame found:", node.parent, node) 237 | #if frame already added to the list ? 238 | frame = node.parent 239 | #remove node 240 | del a[row][i] 241 | if frame not in frames: 242 | frames.append(frame) 243 | #add frame to the same place than node was 244 | a[row].insert(i, frame) 245 | 246 | pprint.pprint(a) 247 | """ 248 | #return None 249 | ######################################## 250 | 251 | 252 | 253 | if not arrange: 254 | nodelist = [j for i in a for j in i] 255 | nodes_odd(ntree, nodelist=nodelist) 256 | return None 257 | 258 | ######################################## 259 | 260 | levelmax = level + 1 261 | level = 0 262 | values.x_last = 0 263 | 264 | while level < levelmax: 265 | 266 | values.average_y = 0 267 | nodes = [x for x in a[level]] 268 | #print ("level, nodes:", level, nodes) 269 | nodes_arrange(nodes, level, ntree) 270 | 271 | level = level + 1 272 | 273 | return None 274 | 275 | def nodes_center(ntree): 276 | 277 | bboxminx = [] 278 | bboxmaxx = [] 279 | bboxmaxy = [] 280 | bboxminy = [] 281 | 282 | for node in ntree.nodes: 283 | if not node.parent: 284 | bboxminx.append(node.location.x) 285 | bboxmaxx.append(node.location.x + node.dimensions.x) 286 | bboxmaxy.append(node.location.y) 287 | bboxminy.append(node.location.y - node.dimensions.y) 288 | 289 | # print ("bboxminy:",bboxminy) 290 | bboxminx = min(bboxminx) 291 | bboxmaxx = max(bboxmaxx) 292 | bboxminy = min(bboxminy) 293 | bboxmaxy = max(bboxmaxy) 294 | center_x = (bboxminx + bboxmaxx) / 2 295 | center_y = (bboxminy + bboxmaxy) / 2 296 | ''' 297 | print ("minx:",bboxminx) 298 | print ("maxx:",bboxmaxx) 299 | print ("miny:",bboxminy) 300 | print ("maxy:",bboxmaxy) 301 | 302 | print ("bboxes:", bboxminx, bboxmaxx, bboxmaxy, bboxminy) 303 | print ("center x:",center_x) 304 | print ("center y:",center_y) 305 | ''' 306 | 307 | x = 0 308 | y = 0 309 | 310 | for node in ntree.nodes: 311 | 312 | if not node.parent: 313 | node.location.x -= center_x 314 | node.location.y += -center_y 315 | 316 | def arrangeNodeTree(ntree,margin_x = 100,margin_y = 20,centerNodes = True): 317 | 318 | #TODO Fix, blender doesn't initialize node dimensions unless the shader editor is open 319 | 320 | values.margin_x = margin_x 321 | values.margin_y = margin_y 322 | 323 | ntree.nodes.update() 324 | #first arrange nodegroups 325 | n_groups = [] 326 | for i in ntree.nodes: 327 | if i.type == 'GROUP': 328 | n_groups.append(i) 329 | 330 | while n_groups: 331 | j = n_groups.pop(0) 332 | nodes_iterate(j.node_tree) 333 | for i in j.node_tree.nodes: 334 | if i.type == 'GROUP': 335 | n_groups.append(i) 336 | 337 | nodes_iterate(ntree) 338 | 339 | # arrange nodes + this center nodes together 340 | if centerNodes: 341 | nodes_center(ntree) 342 | #-------------------------------- -------------------------------------------------------------------------------- /modules/gdeflate/gdeflate.py: -------------------------------------------------------------------------------- 1 | import ctypes 2 | from ctypes import c_bool, c_uint8, c_uint32, c_uint64, POINTER, byref 3 | from pathlib import Path 4 | from typing import Union, Optional 5 | from enum import IntEnum 6 | import platform 7 | 8 | class GDeflateCompressionLevel(IntEnum): 9 | """ 10 | GDeflate compression levels that map to DirectStorage compression levels. 11 | """ 12 | FASTEST = 1 # Maps to DSTORAGE_COMPRESSION_FASTEST 13 | DEFAULT = 9 # Maps to DSTORAGE_COMPRESSION_DEFAULT 14 | BEST_RATIO = 12 # Maps to DSTORAGE_COMPRESSION_BEST_RATIO 15 | 16 | class GDeflateFlags (IntEnum): 17 | """ 18 | GDeflate compression flags 19 | """ 20 | COMPRESS_SINGLE_THREAD = 0x200 21 | 22 | class GDeflateError(Exception): 23 | """Custom exception for GDeflate-related errors""" 24 | pass 25 | 26 | 27 | def is_windows(): 28 | return platform.system() == 'Windows' 29 | 30 | 31 | def is_linux(): 32 | return platform.system() == 'Linux' 33 | 34 | 35 | def is_mac(): 36 | return platform.system() == 'Darwin' 37 | 38 | class GDeflate: 39 | """ 40 | A python interface for the GDeflate wrapper library. 41 | 42 | Args: 43 | dll_path (Union[str, Path], optional): Path to the GDeflate DLL. 44 | Defaults to "GDeflateWrapper-x86_64.dll" in the current directory. 45 | 46 | Raises: 47 | GDeflateError: If the DLL cannot be loaded or if compression/decompression fails 48 | """ 49 | 50 | # Expose compression levels as class attributes 51 | FASTEST = GDeflateCompressionLevel.FASTEST 52 | DEFAULT = GDeflateCompressionLevel.DEFAULT 53 | BEST_RATIO = GDeflateCompressionLevel.BEST_RATIO 54 | 55 | def __init__(self, dll_path: Union[str, Path] = None): 56 | if dll_path is None: 57 | # Try to find the DLL next to the .py file first 58 | module_dir = Path(__file__).parent.absolute() 59 | 60 | if is_windows(): 61 | dll_name = "GDeflateWrapper.dll" 62 | elif is_linux(): 63 | dll_name = "libGDeflateWrapper.so" 64 | #elif is_mac(): 65 | #Maybe TODO 66 | else: 67 | raise RuntimeError(f'This OS ({platform.system()}) is unsupported.') 68 | possible_paths = [ 69 | module_dir / dll_name, # Next to .py file 70 | Path.cwd() / dll_name, # Current working directory 71 | dll_name, # System PATH 72 | ] 73 | 74 | for path in possible_paths: 75 | try: 76 | self._dll = ctypes.CDLL(str(path)) 77 | break 78 | except OSError: 79 | continue 80 | else: 81 | raise GDeflateError( 82 | f"Could not find {dll_name} in any of these locations:\n" + 83 | "\n".join(f"- {p}" for p in possible_paths) 84 | ) 85 | else: 86 | try: 87 | self._dll = ctypes.CDLL(str(dll_path)) 88 | except OSError as e: 89 | raise GDeflateError(f"Failed to load GDeflate DLL from {dll_path}: {e}") 90 | 91 | # bool gdeflate_get_uncompressed_size( 92 | # uint8_t* input, 93 | # uint64_t input_size, 94 | # uint64_t* uncompressed_size); 95 | self._get_uncompressed_size_func = self._dll.gdeflate_get_uncompressed_size 96 | self._get_uncompressed_size_func.argtypes = [ 97 | POINTER(c_uint8), # input 98 | c_uint64, # input_size 99 | POINTER(c_uint64) # uncompressed_size 100 | ] 101 | self._get_uncompressed_size_func.restype = c_bool 102 | 103 | # uint64_t gdeflate_get_compress_bound(uint64_t size) 104 | self._get_compress_bound = self._dll.gdeflate_get_compress_bound 105 | self._get_compress_bound.argtypes = [ 106 | c_uint64, # input_size 107 | ] 108 | self._get_compress_bound.restype = c_uint64 109 | 110 | # bool gdeflate_decompress( 111 | # uint8_t* output, 112 | # uint64_t output_size, 113 | # uint8_t* input, 114 | # uint64_t input_size, 115 | # uint32_t num_workers); 116 | self._decompress_func = self._dll.gdeflate_decompress 117 | self._decompress_func.argtypes = [ 118 | POINTER(c_uint8), # output 119 | c_uint64, # output_size 120 | POINTER(c_uint8), # input 121 | c_uint64, # input_size 122 | c_uint32 # num_workers 123 | ] 124 | self._decompress_func.restype = c_bool 125 | 126 | # bool gdeflate_compress( 127 | # uint8_t* output, 128 | # uint64_t* output_size, 129 | # uint8_t* input, 130 | # uint64_t input_size, 131 | # uint32_t level, 132 | # uint32_t flags); 133 | self._compress_func = self._dll.gdeflate_compress 134 | self._compress_func.argtypes = [ 135 | POINTER(c_uint8), # output 136 | POINTER(c_uint64), # output_size 137 | POINTER(c_uint8), # input 138 | c_uint64, # input_size 139 | c_uint32, # level 140 | c_uint32 # flags 141 | ] 142 | self._compress_func.restype = c_bool 143 | 144 | def get_uncompressed_size(self, compressed_data: Union[bytes, bytearray]) -> int: 145 | """ 146 | Get the uncompressed size of compressed data. 147 | 148 | Args: 149 | compressed_data: The compressed data as bytes or bytearray 150 | 151 | Returns: 152 | int: The size of the data when uncompressed 153 | 154 | Raises: 155 | GDeflateError: If the size calculation fails 156 | """ 157 | input_array = (c_uint8 * len(compressed_data))(*compressed_data) 158 | uncompressed_size = c_uint64(0) 159 | 160 | success = self._get_uncompressed_size_func( 161 | input_array, 162 | c_uint64(len(compressed_data)), 163 | byref(uncompressed_size) 164 | ) 165 | 166 | if not success: 167 | raise GDeflateError("Failed to get uncompressed size") 168 | 169 | return uncompressed_size.value 170 | 171 | def decompress(self, 172 | compressed_data: Union[bytes, bytearray], 173 | num_workers: int = 1) -> bytes: 174 | """ 175 | Decompress GDeflate-compressed data. 176 | 177 | Args: 178 | compressed_data: The compressed data as bytes or bytearray 179 | num_workers: Number of worker threads to use (default: 1) 180 | 181 | Returns: 182 | bytes: The decompressed data 183 | 184 | Raises: 185 | GDeflateError: If decompression fails 186 | """ 187 | # Get the uncompressed size first 188 | output_size = self.get_uncompressed_size(compressed_data) 189 | 190 | # Prepare input and output buffers 191 | input_array = (c_uint8 * len(compressed_data))(*compressed_data) 192 | output_array = (c_uint8 * output_size)() 193 | 194 | success = self._decompress_func( 195 | output_array, 196 | c_uint64(output_size), 197 | input_array, 198 | c_uint64(len(compressed_data)), 199 | c_uint32(num_workers) 200 | ) 201 | 202 | if not success: 203 | raise GDeflateError("Decompression failed") 204 | 205 | return bytes(output_array) 206 | 207 | def compress(self, 208 | data: Union[bytes, bytearray], 209 | level: Union[int, GDeflateCompressionLevel] = GDeflateCompressionLevel.DEFAULT, 210 | flags: int = 0) -> bytes: 211 | """ 212 | Compress data using GDeflate. 213 | 214 | Args: 215 | data: The data to compress as bytes or bytearray 216 | level: Compression level (default: DEFAULT). Use GDeflateCompressionLevel enum or class constants 217 | flags: Compression flags (default: 0) 218 | 219 | Returns: 220 | bytes: The compressed data 221 | 222 | Raises: 223 | GDeflateError: If compression fails 224 | """ 225 | 226 | # Get size of output buffer to allocate, 227 | # small inputs _can_ compress to be larger than the input buffer. 228 | bounded_output_size = self._get_compress_bound(c_uint64(len(data))) 229 | 230 | # Allocate input/output buffers and output size var. 231 | output_size = c_uint64(bounded_output_size) 232 | output_array = (c_uint8 * bounded_output_size)() 233 | input_array = (c_uint8 * len(data))(*data) 234 | 235 | success = self._compress_func( 236 | output_array, 237 | byref(output_size), 238 | input_array, 239 | c_uint64(len(data)), 240 | c_uint32(int(level)), # Convert enum to int if needed 241 | c_uint32(flags) 242 | ) 243 | 244 | if not success: 245 | raise GDeflateError("Compression failed") 246 | 247 | # Return only the actual compressed bytes 248 | return bytes(output_array[:output_size.value]) -------------------------------------------------------------------------------- /modules/mdf/re_mdf_presets.py: -------------------------------------------------------------------------------- 1 | #Author: NSA Cloud 2 | import json 3 | import os 4 | import re 5 | import bpy 6 | 7 | from ..gen_functions import textColors,raiseWarning 8 | from ..blender_utils import showErrorMessageBox 9 | from .file_re_mdf import getMDFVersionToGameName 10 | from .blender_re_mdf import createEmpty, checkNameUsage 11 | 12 | from .blender_re_mdf import boolPropertySet,colorPropertySet 13 | def findHeaderObj(): 14 | if bpy.data.collections.get("MDFData",None) != None: 15 | objList = bpy.data.collections["MDFData"].all_objects 16 | headerList = [obj for obj in objList if obj.get("~TYPE",None) == "RE_MDF_HEADER"] 17 | if len(headerList) >= 1: 18 | return headerList[0] 19 | else: 20 | return None 21 | 22 | PRESET_VERSION = 5#To be changed when there are changes to material variables 23 | PRESET_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.split(os.path.abspath(__file__))[0])),"Presets") 24 | def saveAsPreset(activeObj,presetName,gameName): 25 | folderPath = os.path.join(PRESET_DIR,gameName) 26 | if activeObj != None: 27 | MDFObjType = activeObj.get("~TYPE",None) 28 | 29 | if not re.search(r'^[\w,\s-]+\.[A-Za-z]{3}$',presetName) and not ".." in presetName:#Check that the preset name contains no invalid characters for a file name 30 | presetDict = {} 31 | materialJSONDict = {} 32 | if MDFObjType == "RE_MDF_MATERIAL": 33 | materialJSONDict["presetType"] = "RE_MDF_MATERIAL" 34 | materialJSONDict["MDFVersion"] = bpy.context.scene.re_mdf_toolpanel.activeGame 35 | materialJSONDict["presetVersion"] = PRESET_VERSION 36 | 37 | materialJSONDict["Material Header"] = { 38 | "Material Name":activeObj.re_mdf_material.materialName, 39 | "Master Material Path":activeObj.re_mdf_material.mmtrPath, 40 | "Material Shader Type":activeObj.re_mdf_material.shaderType, 41 | } 42 | 43 | materialJSONDict["Flags"] = { 44 | "Ver32Unknown":activeObj.re_mdf_material.flags.ver32Unknown, 45 | "Ver32Unknown1":activeObj.re_mdf_material.flags.ver32Unknown1, 46 | "Ver32Unknown2":activeObj.re_mdf_material.flags.ver32Unknown2, 47 | "FlagBitFlag":activeObj.re_mdf_material.flags.flagIntValue, 48 | "FlagBitFlagB":activeObj.re_mdf_material.flags.flagIntValueB, 49 | "ShaderLODNum":activeObj.re_mdf_material.flags.shaderLODNum, 50 | "BakeTextureArraySize":activeObj.re_mdf_material.flags.bakeTextureArraySize, 51 | } 52 | 53 | materialJSONDict["Property List"] = [] 54 | for prop in activeObj.re_mdf_material.propertyList_items: 55 | 56 | if prop.data_type == "VEC4": 57 | value = list(prop.float_vector_value) 58 | elif prop.data_type == "COLOR": 59 | value = list(prop.color_value) 60 | 61 | elif prop.data_type == "BOOL": 62 | if prop.bool_value: 63 | value = 1.0 64 | else: 65 | value = 0.0 66 | else:#float 67 | value = prop.float_value 68 | 69 | if value.__class__.__name__ == "IDPropertyArray": 70 | value = value.to_list() 71 | #print(value) 72 | propDict = {"Property Name":prop.prop_name,"Data Type":prop.data_type,"Value":value,"Padding":prop.padding} 73 | materialJSONDict["Property List"].append(propDict) 74 | 75 | materialJSONDict["Texture Bindings"] = [] 76 | for binding in activeObj.re_mdf_material.textureBindingList_items: 77 | bindingDict = {"Texture Type":binding.textureType,"Texture Path":binding.path} 78 | materialJSONDict["Texture Bindings"].append(bindingDict) 79 | materialJSONDict["MMTRS Data"] = [] 80 | for item in activeObj.re_mdf_material.mmtrsData_items: 81 | materialJSONDict["MMTRS Data"].append(str(item.indexString)) 82 | 83 | materialJSONDict["GPBF Data"] = [] 84 | for item in activeObj.re_mdf_material.gpbfData_items: 85 | materialJSONDict["GPBF Data"].append(str(item.gpbfDataString)) 86 | else: 87 | showErrorMessageBox("Selected object can not be made into a preset.") 88 | 89 | if materialJSONDict != {}: 90 | 91 | 92 | jsonPath = os.path.join(PRESET_DIR,folderPath,presetName+".json") 93 | try: 94 | os.makedirs(os.path.split(jsonPath)[0]) 95 | except: 96 | pass 97 | with open(jsonPath, 'w', encoding='utf-8') as f: 98 | json.dump(materialJSONDict, f, ensure_ascii=False, indent=4) 99 | print(textColors.OKGREEN+"Saved preset to " + str(jsonPath) + textColors.ENDC) 100 | return True 101 | else: 102 | showErrorMessageBox("Invalid preset file name. ") 103 | else: 104 | showErrorMessageBox("A material object must be selected when saving a preset.") 105 | 106 | 107 | def readPresetJSON(filepath): 108 | mdfCollection = bpy.context.scene.re_mdf_toolpanel.mdfCollection 109 | if mdfCollection != None: 110 | try: 111 | with open(filepath) as jsonFile: 112 | materialJSONDict = json.load(jsonFile) 113 | if materialJSONDict["presetVersion"] > PRESET_VERSION: 114 | showErrorMessageBox("Preset was created in a newer version and cannot be used. Update to the latest version of the MDF editor.") 115 | return False 116 | 117 | except Exception as err: 118 | showErrorMessageBox("Failed to read json file. \n" + str(err)) 119 | return False 120 | 121 | if materialJSONDict["presetType"] != "RE_MDF_MATERIAL": 122 | showErrorMessageBox("Preset type is not supported") 123 | return False 124 | 125 | print("Adding preset material " + materialJSONDict["Material Header"]["Material Name"]) 126 | MDFVersion = materialJSONDict.get("MDFVersion",None) 127 | 128 | currentIndex = 0 129 | subName = "Material " + str(currentIndex).zfill(2) 130 | while(checkNameUsage(subName,checkSubString=True,objList = mdfCollection.all_objects)): 131 | currentIndex +=1 132 | subName = "Material " + str(currentIndex).zfill(2) 133 | RNADict = {"~TYPE":{"description":"For internal use. Do not change"}} 134 | name = subName + " ("+materialJSONDict["Material Header"]["Material Name"]+")" 135 | materialObj = createEmpty(name,[("~TYPE","RE_MDF_MATERIAL")],None,mdfCollection) 136 | 137 | if "MDFVersion" in materialJSONDict: 138 | if str(materialJSONDict["MDFVersion"]).isdigit():#Pre Version 4 Presets 139 | gameName = getMDFVersionToGameName(int(materialJSONDict["MDFVersion"])) 140 | if gameName == -1: 141 | gameName = "Unknown" 142 | else: 143 | gameName = str(materialJSONDict["MDFVersion"]) 144 | materialObj.re_mdf_material.gameName = gameName 145 | 146 | materialObj.re_mdf_material.materialName = materialJSONDict["Material Header"]["Material Name"] 147 | materialObj.re_mdf_material.shaderType = materialJSONDict["Material Header"]["Material Shader Type"] 148 | materialObj.re_mdf_material.mmtrPath = materialJSONDict["Material Header"]["Master Material Path"] 149 | materialObj.re_mdf_material.flags.ver32Unknown = materialJSONDict["Flags"]["Ver32Unknown"] 150 | try: 151 | materialObj.re_mdf_material.flags.ver32Unknown1 = materialJSONDict["Flags"]["Ver32Unknown1"] 152 | materialObj.re_mdf_material.flags.ver32Unknown2 = materialJSONDict["Flags"]["Ver32Unknown2"] 153 | except: 154 | pass 155 | materialObj.re_mdf_material.flags.flagIntValue = materialJSONDict["Flags"]["FlagBitFlag"] 156 | try: 157 | materialObj.re_mdf_material.flags.flagIntValueB = materialJSONDict["Flags"]["FlagBitFlagB"] 158 | materialObj.re_mdf_material.flags.shaderLODNum = materialJSONDict["Flags"]["ShaderLODNum"] 159 | materialObj.re_mdf_material.flags.bakeTextureArraySize = materialJSONDict["Flags"]["BakeTextureArraySize"] 160 | except: 161 | pass 162 | for propEntry in materialJSONDict["Property List"]: 163 | prop = materialObj.re_mdf_material.propertyList_items.add() 164 | prop.prop_name = propEntry["Property Name"] 165 | prop.data_type = propEntry["Data Type"] 166 | try: 167 | prop.padding = propEntry["Padding"] 168 | prop.frontPadding = propEntry["FrontPadding"] 169 | except: 170 | pass 171 | if prop.data_type == "VEC4": 172 | prop.float_vector_value = propEntry["Value"] 173 | elif prop.data_type == "COLOR": 174 | prop.color_value = propEntry["Value"] 175 | 176 | elif prop.data_type == "BOOL": 177 | prop.bool_value = propEntry["Value"] == 1.0 178 | else:#float 179 | prop.float_value = propEntry["Value"] 180 | 181 | for bindingEntry in materialJSONDict["Texture Bindings"]: 182 | binding = materialObj.re_mdf_material.textureBindingList_items.add() 183 | binding.textureType = bindingEntry["Texture Type"] 184 | binding.path = bindingEntry["Texture Path"] 185 | 186 | if "MMTRS Data" in materialJSONDict: 187 | for indexString in materialJSONDict["MMTRS Data"]: 188 | item = materialObj.re_mdf_material.mmtrsData_items.add() 189 | item.indexString = indexString 190 | if "GPBF Data" in materialJSONDict: 191 | for gpbfDataString in materialJSONDict["GPBF Data"]: 192 | item = materialObj.re_mdf_material.gpbfData_items.add() 193 | item.gpbfDataString = gpbfDataString 194 | bpy.context.view_layer.objects.active = materialObj 195 | else: 196 | showErrorMessageBox("The active MDF collection must be set.") 197 | return True 198 | def reloadPresets(folderPath): 199 | presetList = [] 200 | relPathStart = os.path.join(PRESET_DIR,folderPath) 201 | #print(relPathStart) 202 | if os.path.exists(relPathStart): 203 | for entry in os.scandir(relPathStart): 204 | if entry.name.endswith(".json") and entry.is_file(): 205 | presetList.append((os.path.relpath(os.path.join(relPathStart,entry),start = PRESET_DIR),os.path.splitext(entry.name)[0],"")) 206 | #print("Loading " + folderPath + " presets...") 207 | #print("DEBUG:" + str(presetList)+"\n")#debug 208 | return presetList -------------------------------------------------------------------------------- /modules/gen_functions.py: -------------------------------------------------------------------------------- 1 | #Author: NSA Cloud 2 | #V6 3 | import os 4 | import struct 5 | import glob 6 | from pathlib import Path 7 | import platform 8 | #---General Functions---# 9 | os.system("color")#Enable console colors 10 | class textColors: 11 | HEADER = '\033[95m' 12 | OKBLUE = '\033[94m' 13 | OKCYAN = '\033[96m' 14 | OKGREEN = '\033[92m' 15 | WARNING = '\033[93m' 16 | FAIL = '\033[91m' 17 | ENDC = '\033[0m' 18 | BOLD = '\033[1m' 19 | UNDERLINE = '\033[4m' 20 | 21 | # read unsigned byte from file 22 | def read_ubyte(file_object, endian = '<'): 23 | data = struct.unpack(endian+'B', file_object.read(1))[0] 24 | return data 25 | # read signed byte from file 26 | def read_byte(file_object, endian = '<'): 27 | data = struct.unpack(endian+'b', file_object.read(1))[0] 28 | return data 29 | # read signed short from file 30 | def read_short(file_object, endian = '<'): 31 | data = struct.unpack(endian+'h', file_object.read(2))[0] 32 | return data 33 | # read unsigned short from file 34 | def read_ushort(file_object, endian = '<'): 35 | data = struct.unpack(endian+'H', file_object.read(2))[0] 36 | return data 37 | 38 | # read unsigned integer from filel 39 | def read_uint(file_object, endian = '<'): 40 | data = struct.unpack(endian+'I', file_object.read(4))[0] 41 | return data 42 | 43 | # read signed integer from file 44 | def read_int(file_object, endian = '<'): 45 | data = struct.unpack(endian+'i', file_object.read(4))[0] 46 | return data 47 | # read unsigned long integer from file 48 | def read_uint64(file_object, endian = '<'): 49 | data = struct.unpack(endian+'Q', file_object.read(8))[0] 50 | return data 51 | # read signed long integer from file 52 | def read_int64(file_object, endian = '<'): 53 | data = struct.unpack(endian+'q', file_object.read(8))[0] 54 | return data 55 | # read floating point number from file 56 | def read_float(file_object, endian = '<'): 57 | data = struct.unpack(endian+'f', file_object.read(4))[0] 58 | return data 59 | # read double from file 60 | def read_double(file_object, endian = '<'): 61 | data = struct.unpack(endian+'d', file_object.read(8))[0] 62 | return data 63 | #read null terminated string from file 64 | def read_string(file_object): 65 | data =''.join(iter(lambda: file_object.read(1).decode('ascii'), '\x00')) 66 | return data 67 | def read_unicode_string(file_object):#Reads unicode string from file into utf-8 string 68 | wchar = file_object.read(2) 69 | byteString = wchar 70 | while wchar != b'\x00\x00': 71 | wchar = file_object.read(2) 72 | byteString += wchar 73 | unicodeString = byteString.decode("utf-16le").replace('\x00', '') 74 | return unicodeString 75 | # write unsigned byte to file 76 | def write_ubyte(file_object,input, endian = '<'): 77 | data = struct.pack(endian+'B', input) 78 | file_object.write(data) 79 | # write signed byte to file 80 | def write_byte(file_object,input, endian = '<'): 81 | data = struct.pack(endian+'b', input) 82 | file_object.write(data) 83 | # write signed short to file 84 | def write_short(file_object,input, endian = '<'): 85 | data = struct.pack(endian+'h', input) 86 | file_object.write(data) 87 | 88 | # write unsigned short to file 89 | def write_ushort(file_object,input, endian = '<'): 90 | data = struct.pack(endian+'H', input) 91 | file_object.write(data) 92 | 93 | # write unsigned integer to file 94 | def write_uint(file_object,input, endian = '<'): 95 | data = struct.pack(endian+'I', input) 96 | file_object.write(data) 97 | 98 | # write signed integer to file 99 | def write_int(file_object,input, endian = '<'): 100 | data = struct.pack(endian+'i', input) 101 | file_object.write(data) 102 | 103 | # write unsigned long integer to file 104 | def write_uint64(file_object,input, endian = '<'): 105 | data = struct.pack(endian+'Q', input) 106 | file_object.write(data) 107 | # write unsigned long integer to file 108 | def write_int64(file_object,input, endian = '<'): 109 | data = struct.pack(endian+'q', input) 110 | file_object.write(data) 111 | # write floating point number to file 112 | def write_float(file_object,input, endian = '<'): 113 | data = struct.pack(endian+'f', input) 114 | file_object.write(data) 115 | # write double to file 116 | def write_double(file_object,input, endian = '<'): 117 | data = struct.pack(endian+'d', input) 118 | file_object.write(data) 119 | #write null terminated string to file 120 | def write_string(file_object,input): 121 | input += '\x00' 122 | data = bytes(input, 'utf-8') 123 | file_object.write(data) 124 | def write_unicode_string(file_object,input):#Writes utf-8 string as utf-16 125 | data = input.encode('UTF-16LE') + b'\x00\x00'#Little endian utf16 126 | file_object.write(data) 127 | def getPaddingAmount(currentPos,alignment): 128 | padding = (currentPos*-1)%alignment 129 | return padding 130 | #bitflag operations 131 | def getBit(bitFlag, index):#Index starting from rightmost bit 132 | return bool((bitFlag >> index) & 1) 133 | def setBit(bitFlag, index): 134 | return bitFlag | (1 << index) 135 | def unsetBit(bitFlag, index): 136 | return bitFlag & ~(1 << index) 137 | def raiseError(error,errorCode = 999): 138 | 139 | try: 140 | raise Exception() 141 | except Exception: 142 | print(textColors.FAIL + "ERROR: " + error + textColors.ENDC) 143 | 144 | def raiseWarning(warning): 145 | print(textColors.WARNING + "WARNING: " + warning + textColors.ENDC) 146 | 147 | def getByteSection(byteArray,offset,size): 148 | data = byteArray[offset:(offset+size)] 149 | return data 150 | def removeByteSection(byteArray,offset,size):#removes specified amount of bytes from byte array at offset 151 | del byteArray[offset:(offset+size)]#Deletes directly from the array passed to it 152 | def insertByteSection(byteArray,offset,input):#inserts bytes into bytearray at offset 153 | byteArray[offset:offset] = input 154 | 155 | def dictString(dictionary):#Return string of dictionary contents 156 | outputString ="" 157 | for key,value in dictionary.items(): 158 | outputString +=str(key)+": "+str(value)+"\n" 159 | return outputString 160 | def unsignedToSigned(uintValue): 161 | intValue = uintValue & ((1 << 32) - 1) 162 | intValue = (intValue & ((1 << 31) - 1)) - (intValue & (1 << 31)) 163 | return intValue 164 | def signedToUnsigned(intValue): 165 | return intValue & 0xffffffff 166 | 167 | def getPaddedPos(currentPos,alignment): 168 | paddedPos = ((currentPos*-1)%alignment)+currentPos 169 | return paddedPos 170 | 171 | def getFolderSize(path='.'): 172 | total = 0 173 | try: 174 | for entry in os.scandir(path): 175 | if entry.is_file(): 176 | total += entry.stat().st_size 177 | elif entry.is_dir(): 178 | total += getFolderSize(entry.path) 179 | except: 180 | total = -1 181 | return total 182 | 183 | def formatByteSize(num, suffix="B"): 184 | for unit in ("", "K", "M", "G", "T", "P", "E", "Z"): 185 | if abs(num) < 1024.0: 186 | return f"{num:3.1f}{unit}{suffix}" 187 | num /= 1024.0 188 | return f"{num:.1f}Yi{suffix}" 189 | 190 | def wildCardFileSearch(wildCardFilePath):#Returns first file found matching wildcard, none if not found 191 | search = glob.glob(wildCardFilePath) 192 | if search == []: 193 | search = [None] 194 | return search[0] 195 | 196 | def wildCardFileSearchList(wildCardFilePath):#Returns all files matching wildcard 197 | search = glob.glob(wildCardFilePath) 198 | return search 199 | 200 | def splitNativesPath(filePath):#Splits file path of RE Engine natives/platform folder, returns none if there's no natives folder 201 | path = Path(filePath) 202 | parts = path.parts 203 | try: 204 | nativesIndex = parts.index("natives") 205 | rootPath = str(Path(*parts[:nativesIndex+2]))#stage\m01\a02\m01a02_iwa.mesh.2109148288 206 | nativesPath = str(Path(*parts[nativesIndex+2::]))#F:\MHR_EXTRACT\extract\re_chunk_000\natives\STM 207 | return (rootPath,nativesPath) 208 | except: 209 | return None 210 | 211 | def getAdjacentFileVersion(rootPath,fileType): 212 | fileVersion = -1 213 | search = wildCardFileSearch(os.path.join(glob.escape(rootPath),"*"+fileType+"*")) 214 | if search != None: 215 | versionExtension = os.path.splitext(search)[1][1::] 216 | if versionExtension.isdigit(): 217 | fileVersion = int(versionExtension) 218 | return fileVersion 219 | 220 | def progressBar(iterable, prefix = '', suffix = '', decimals = 1, length = 100, fill = '█', printEnd = "\r"): 221 | """ 222 | Call in a loop to create terminal progress bar 223 | @params: 224 | iterable - Required : iterable object (Iterable) 225 | prefix - Optional : prefix string (Str) 226 | suffix - Optional : suffix string (Str) 227 | decimals - Optional : positive number of decimals in percent complete (Int) 228 | length - Optional : character length of bar (Int) 229 | fill - Optional : bar fill character (Str) 230 | printEnd - Optional : end character (e.g. "\r", "\r\n") (Str) 231 | """ 232 | total = len(iterable) 233 | # Progress Bar Printing Function 234 | def printProgressBar (iteration): 235 | percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total))) 236 | filledLength = int(length * iteration // total) 237 | bar = fill * filledLength + '-' * (length - filledLength) 238 | print(f'\r{prefix} |{bar}| {percent}% {suffix}', end = printEnd) 239 | # Initial Call 240 | printProgressBar(0) 241 | # Update Progress Bar 242 | for i, item in enumerate(iterable): 243 | yield item 244 | printProgressBar(i + 1) 245 | # Print New Line on Complete 246 | print() 247 | 248 | 249 | IS_WINDOWS = platform.system() == 'Windows' 250 | def resolvePath(pathString): 251 | if IS_WINDOWS: 252 | return pathString 253 | else:#Fix issues related to case sensitive paths on linux, doesn't matter on windows 254 | newPath = pathString.replace("/",os.sep).replace("\\",os.sep) 255 | if not os.path.isfile(newPath):#Lower case the path in case the pak list is lowercased 256 | newPath = newPath.lower() 257 | return newPath 258 | 259 | def splitInt64(value):#Takes int64 and converts to 2 int32's 260 | return struct.unpack("ii", value.to_bytes(8, "little", signed=False)) 261 | 262 | def concatInt(a, b):#Combines two int values into a int64 263 | return (a << 32) | b -------------------------------------------------------------------------------- /modules/mdf/ui_re_mdf_panels.py: -------------------------------------------------------------------------------- 1 | import bpy 2 | 3 | from bpy.types import (Panel, 4 | Menu, 5 | Operator, 6 | PropertyGroup, 7 | ) 8 | 9 | 10 | def tag_redraw(context, space_type="PROPERTIES", region_type="WINDOW"): 11 | for window in context.window_manager.windows: 12 | for area in window.screen.areas: 13 | if area.spaces[0].type == space_type: 14 | for region in area.regions: 15 | if region.type == region_type: 16 | region.tag_redraw() 17 | 18 | 19 | class OBJECT_PT_MDFObjectModePanel(Panel): 20 | bl_label = "RE MDF Tools" 21 | bl_idname = "OBJECT_PT_mdf_tools_panel" 22 | bl_space_type = "VIEW_3D" 23 | bl_region_type = "UI" 24 | bl_category = "RE Mesh" 25 | bl_context = "objectmode" 26 | 27 | @classmethod 28 | def poll(self,context): 29 | return context is not None and "HIDE_RE_MDF_EDITOR_TAB" not in context.scene 30 | 31 | def draw(self, context): 32 | layout = self.layout 33 | scene = context.scene 34 | re_mdf_toolpanel = scene.re_mdf_toolpanel 35 | layout.operator("re_mdf.create_mdf_collection") 36 | layout.label(text = "Active MDF Collection") 37 | layout.prop_search(re_mdf_toolpanel, "mdfCollection",bpy.data,"collections",icon = "COLLECTION_COLOR_05") 38 | layout.label(text = "Active Game") 39 | layout.prop(re_mdf_toolpanel, "activeGame") 40 | layout.operator("re_mdf.reindex_materials") 41 | 42 | 43 | 44 | class OBJECT_PT_MDFMaterialPresetPanel(Panel): 45 | bl_label = "MDF Material Presets" 46 | bl_idname = "OBJECT_PT_mdf_material_preset_panel" 47 | bl_parent_id = "OBJECT_PT_mdf_tools_panel" # Specify the ID of the parent panel 48 | bl_space_type = "VIEW_3D" 49 | bl_region_type = "UI" 50 | bl_category = "RE Mesh" 51 | bl_options = {'DEFAULT_CLOSED'} 52 | 53 | def draw(self, context): 54 | layout = self.layout 55 | obj = context.active_object 56 | re_mdf_toolpanel = context.scene.re_mdf_toolpanel 57 | row = layout.row() 58 | row.emboss = "PULLDOWN_MENU" 59 | row.label(text="Premade MDF materials.") 60 | split = layout.split(factor=0.025)#Indent list slightly to make it more clear it's a part of a sub panel 61 | col1 = split.column() 62 | col2 = split.column() 63 | col2.prop(re_mdf_toolpanel, "materialPresets") 64 | col2.operator("re_mdf.add_preset_material") 65 | 66 | col2.operator("re_mdf.save_selected_as_preset") 67 | col2.operator("re_mdf.open_preset_folder") 68 | 69 | class OBJECT_PT_MDFMaterialPreviewPanel(Panel): 70 | bl_label = "MDF Preview" 71 | bl_idname = "OBJECT_PT_mdf_material_preview_panel" 72 | bl_parent_id = "OBJECT_PT_mdf_tools_panel" # Specify the ID of the parent panel 73 | bl_space_type = "VIEW_3D" 74 | bl_region_type = "UI" 75 | bl_category = "RE Mesh" 76 | bl_options = {'DEFAULT_CLOSED'} 77 | 78 | def draw(self, context): 79 | layout = self.layout 80 | obj = context.active_object 81 | re_mdf_toolpanel = context.scene.re_mdf_toolpanel 82 | row = layout.row() 83 | row.emboss = "PULLDOWN_MENU" 84 | row.label(text="View MDF materials in Blender.") 85 | split = layout.split(factor=0.025)#Indent list slightly to make it more clear it's a part of a sub panel 86 | col1 = split.column() 87 | col2 = split.column() 88 | 89 | 90 | col2.label(text = "Mesh Collection") 91 | col2.prop_search(re_mdf_toolpanel, "meshCollection",bpy.data,"collections",icon = "COLLECTION_COLOR_01") 92 | col2.label(text = "Mod Directory") 93 | col2.prop(re_mdf_toolpanel, "modDirectory") 94 | col2.operator("re_mdf.apply_mdf") 95 | 96 | class OBJECT_PT_MDFMaterialLoadSettingsPanel(Panel): 97 | bl_label = "MDF Load Settings" 98 | bl_idname = "OBJECT_PT_mdf_material_load_settings_panel" 99 | bl_parent_id = "OBJECT_PT_mdf_material_preview_panel" # Specify the ID of the parent panel 100 | bl_space_type = "VIEW_3D" 101 | bl_region_type = "UI" 102 | bl_category = "RE Mesh" 103 | bl_options = {'DEFAULT_CLOSED'} 104 | 105 | def draw(self, context): 106 | layout = self.layout 107 | obj = context.active_object 108 | re_mdf_toolpanel = context.scene.re_mdf_toolpanel 109 | split = layout.split(factor=0.025)#Indent list slightly to make it more clear it's a part of a sub panel 110 | col1 = split.column() 111 | col2 = split.column() 112 | col2.alignment='RIGHT' 113 | col2.prop(re_mdf_toolpanel,"reloadCachedTextures") 114 | col2.prop(re_mdf_toolpanel, "loadUnusedTextures") 115 | col2.prop(re_mdf_toolpanel, "loadUnusedProps") 116 | col2.prop(re_mdf_toolpanel, "useBackfaceCulling") 117 | 118 | 119 | class OBJECT_PT_MDFMaterialPanel(Panel): 120 | bl_label = "RE MDF Material Settings" 121 | bl_idname = "OBJECT_PT_mdf_material_panel" 122 | bl_space_type = "PROPERTIES" 123 | bl_region_type = "WINDOW" 124 | bl_category = "RE MDF Material Settings" 125 | bl_context = "object" 126 | 127 | 128 | @classmethod 129 | def poll(self,context): 130 | 131 | return context and context.object.mode == "OBJECT" and context.active_object.get("~TYPE",None) == "RE_MDF_MATERIAL" and not "HIDE_RE_MDF_EDITOR_PANEL" in context.scene 132 | 133 | def draw(self, context): 134 | layout = self.layout 135 | obj = context.active_object 136 | re_mdf_material = obj.re_mdf_material 137 | 138 | split = layout.split(factor=0.025) 139 | col1 = split.column() 140 | col2 = split.column() 141 | col2.label(text = f"{re_mdf_material.gameName} Material") 142 | split = layout.split(factor=0.01) 143 | col3 = split.column() 144 | col4 = split.column() 145 | col4.alignment='RIGHT' 146 | col4.use_property_split = True 147 | 148 | col4.prop(re_mdf_material, "materialName") 149 | col4.prop(re_mdf_material, "mmtrPath") 150 | col4.prop(re_mdf_material, "shaderType") 151 | col4.prop(re_mdf_material, "linkedMaterial") 152 | 153 | class OBJECT_PT_MDFFlagsPanel(Panel): 154 | bl_label = "Flags" 155 | bl_idname = "OBJECT_PT_mdf_material_flags_panel" 156 | bl_parent_id = "OBJECT_PT_mdf_material_panel" # Specify the ID of the parent panel 157 | bl_space_type = 'PROPERTIES' 158 | bl_region_type = 'WINDOW' 159 | bl_options = {'DEFAULT_CLOSED'} 160 | 161 | def draw(self, context): 162 | layout = self.layout 163 | obj = context.active_object 164 | flags = obj.re_mdf_material.flags 165 | split = layout.split(factor=0.025)#Indent list slightly to make it more clear it's a part of a sub panel 166 | col1 = split.column() 167 | col2 = split.column() 168 | col2.alignment='RIGHT' 169 | col2.prop(flags, "hideMaterialInGame") 170 | col2.prop(flags, "shaderLODNum") 171 | col2.prop(flags, "bakeTextureArraySize") 172 | col2.separator() 173 | 174 | col2.prop(flags,"flagIntValue") 175 | col2.prop(flags, "flagIntValueB") 176 | col2.separator() 177 | col2.separator(type = "LINE") 178 | 179 | col2.prop(flags,"BaseTwoSideEnable") 180 | col2.prop(flags,"BaseAlphaTestEnable") 181 | col2.prop(flags,"ShadowCastDisable") 182 | col2.prop(flags,"VertexShaderUsed") 183 | col2.prop(flags,"EmissiveUsed") 184 | col2.prop(flags,"TessellationEnable") 185 | col2.prop(flags,"EnableIgnoreDepth") 186 | col2.prop(flags,"AlphaMaskUsed") 187 | col2.prop(flags,"ForcedTwoSideEnable") 188 | col2.prop(flags,"TwoSideEnable") 189 | col2.prop(flags,"TransparentZPostPassEnable") 190 | col2.prop(flags,"TessFactor") 191 | col2.prop(flags,"PhongFactor") 192 | col2.prop(flags,"RoughTransparentEnable") 193 | col2.prop(flags,"ForcedAlphaTestEnable") 194 | col2.prop(flags,"AlphaTestEnable") 195 | col2.prop(flags,"SSSProfileUsed") 196 | col2.prop(flags,"EnableStencilPriority") 197 | col2.prop(flags,"RequireDualQuaternion") 198 | col2.prop(flags,"PixelDepthOffsetUsed") 199 | col2.prop(flags,"NoRayTracing") 200 | 201 | col2.separator() 202 | col2.separator(type = "LINE") 203 | col2.prop(flags,"TransparentDistortionEnable") 204 | col2.prop(flags,"AlphaUsed") 205 | col2.prop(flags,"BakeTextureUseSecondaryUV") 206 | col2.prop(flags,"ForwardPrepassEnabled") 207 | col2.prop(flags,"ForcedAlphaTestEnableShadow") 208 | col2.prop(flags,"TessellationZPrepassDisable") 209 | col2.prop(flags,"DitheredLodTransitionEnable") 210 | col2.prop(flags,"reserved0") 211 | col2.prop(flags,"TransparentPriorityBias") 212 | col2.prop(flags,"reserved1") 213 | col2.prop(flags,"reserved2") 214 | 215 | 216 | class OBJECT_PT_MDFMaterialPropertyListPanel(Panel): 217 | bl_label = "Property List" 218 | bl_idname = "OBJECT_PT_mdf_material_proplist_panel" 219 | bl_parent_id = "OBJECT_PT_mdf_material_panel" # Specify the ID of the parent panel 220 | bl_space_type = 'PROPERTIES' 221 | bl_region_type = 'WINDOW' 222 | 223 | def draw(self, context): 224 | layout = self.layout 225 | obj = context.active_object 226 | re_mdf_material = obj.re_mdf_material 227 | 228 | split = layout.split(factor=0.025)#Indent list slightly to make it more clear it's a part of a sub panel 229 | col1 = split.column() 230 | col2 = split.column() 231 | col2.label(text = f"Property Count: {str(len(obj.re_mdf_material.propertyList_items))}") 232 | col2.template_list( 233 | listtype_name = "MESH_UL_MDFPropertyList", 234 | list_id = "", 235 | dataptr = re_mdf_material, 236 | propname = "propertyList_items", 237 | active_dataptr = re_mdf_material, 238 | active_propname = "propertyList_index", 239 | rows = 6, 240 | type='DEFAULT' 241 | ) 242 | 243 | class OBJECT_PT_MDFMaterialTextureBindingListPanel(Panel): 244 | bl_label = "Texture Bindings" 245 | bl_idname = "OBJECT_PT_mdf_material_texturebindinglist_panel" 246 | bl_parent_id = "OBJECT_PT_mdf_material_panel" # Specify the ID of the parent panel 247 | bl_space_type = 'PROPERTIES' 248 | bl_region_type = 'WINDOW' 249 | 250 | def draw(self, context): 251 | layout = self.layout 252 | obj = context.active_object 253 | re_mdf_material = obj.re_mdf_material 254 | split = layout.split(factor=0.025)#Indent list slightly to make it more clear it's a part of a sub panel 255 | col1 = split.column() 256 | col2 = split.column() 257 | col2.label(text = f"Texture Binding Count: {str(len(obj.re_mdf_material.textureBindingList_items))}") 258 | col2.operator("re_mdf.replace_texture_bindings") 259 | col2.template_list( 260 | listtype_name = "MESH_UL_MDFTextureBindingList", 261 | list_id = "", 262 | dataptr = re_mdf_material, 263 | propname = "textureBindingList_items", 264 | active_dataptr = re_mdf_material, 265 | active_propname = "textureBindingList_index", 266 | rows = 6, 267 | type='DEFAULT' 268 | ) 269 | class OBJECT_PT_MDFMaterialMMTRSIndexListPanel(Panel): 270 | bl_label = "MMTRS Data" 271 | bl_idname = "OBJECT_PT_mdf_material_mmtrsindexlist_panel" 272 | bl_parent_id = "OBJECT_PT_mdf_material_panel" # Specify the ID of the parent panel 273 | bl_space_type = 'PROPERTIES' 274 | bl_region_type = 'WINDOW' 275 | bl_options = {'DEFAULT_CLOSED'} 276 | @classmethod 277 | def poll(self,context): 278 | return context.active_object != None and len(context.active_object.re_mdf_material.mmtrsData_items) != 0 279 | 280 | def draw(self, context): 281 | layout = self.layout 282 | obj = context.active_object 283 | re_mdf_material = obj.re_mdf_material 284 | split = layout.split(factor=0.025)#Indent list slightly to make it more clear it's a part of a sub panel 285 | col1 = split.column() 286 | col2 = split.column() 287 | col2.label(text = f"Do not change these unless you know what you're doing.") 288 | col2.label(text = f"Index Count: {str(len(obj.re_mdf_material.mmtrsData_items))}") 289 | col2.template_list( 290 | listtype_name = "MESH_UL_MDFMMTRSDataList", 291 | list_id = "", 292 | dataptr = re_mdf_material, 293 | propname = "mmtrsData_items", 294 | active_dataptr = re_mdf_material, 295 | active_propname = "mmtrsData_index", 296 | rows = 8, 297 | type='DEFAULT' 298 | ) 299 | 300 | class OBJECT_PT_MDFMaterialGPBFDataListPanel(Panel): 301 | bl_label = "GPBF Data" 302 | bl_idname = "OBJECT_PT_mdf_material_gpbfdatalist_panel" 303 | bl_parent_id = "OBJECT_PT_mdf_material_panel" # Specify the ID of the parent panel 304 | bl_space_type = 'PROPERTIES' 305 | bl_region_type = 'WINDOW' 306 | bl_options = {'DEFAULT_CLOSED'} 307 | @classmethod 308 | def poll(self,context): 309 | return context.active_object != None and len(context.active_object.re_mdf_material.gpbfData_items) != 0 310 | 311 | def draw(self, context): 312 | layout = self.layout 313 | obj = context.active_object 314 | re_mdf_material = obj.re_mdf_material 315 | split = layout.split(factor=0.025)#Indent list slightly to make it more clear it's a part of a sub panel 316 | col1 = split.column() 317 | col2 = split.column() 318 | col2.label(text = f"Do not change these unless you know what you're doing.") 319 | col2.label(text = f"Index Count: {str(len(obj.re_mdf_material.gpbfData_items))}") 320 | col2.template_list( 321 | listtype_name = "MESH_UL_MDFGPBFDataList", 322 | list_id = "", 323 | dataptr = re_mdf_material, 324 | propname = "gpbfData_items", 325 | active_dataptr = re_mdf_material, 326 | active_propname = "gpbfData_index", 327 | rows = 3, 328 | type='DEFAULT' 329 | ) -------------------------------------------------------------------------------- /modules/mesh/re_mesh_export_errors.py: -------------------------------------------------------------------------------- 1 | from ..gen_functions import textColors 2 | import bpy 3 | import textwrap 4 | ERROR_WINDOW_SIZE = 750 5 | SPLIT_FACTOR = .35 6 | class REMeshErrorEntry(bpy.types.PropertyGroup): 7 | errorType: bpy.props.StringProperty( 8 | name="", 9 | ) 10 | 11 | errorName: bpy.props.StringProperty( 12 | name="", 13 | ) 14 | errorDescription: bpy.props.StringProperty( 15 | name="", 16 | ) 17 | objectSetString: bpy.props.StringProperty( 18 | name="", 19 | ) 20 | errorCount: bpy.props.IntProperty( 21 | name="", 22 | ) 23 | 24 | 25 | class MESH_UL_REMeshErrorList(bpy.types.UIList): 26 | 27 | def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index): 28 | layout.label(text=f"{item.errorType} ({str(item.errorCount)})") 29 | # Disable double-click to rename 30 | def invoke(self, context, event): 31 | return {'PASS_THROUGH'} 32 | 33 | class WM_OT_ShowREMeshErrorWindow(bpy.types.Operator): 34 | 'Show Export Errors' 35 | bl_idname = 're_mesh.show_export_error_window' 36 | bl_label = 'RE Mesh Export Error' 37 | bl_options = {'REGISTER'} 38 | targetCollection : bpy.props.StringProperty() 39 | armatureName : bpy.props.StringProperty() 40 | errorList_items: bpy.props.CollectionProperty(type = REMeshErrorEntry) 41 | errorList_index: bpy.props.IntProperty(name="") 42 | def execute(self, context): 43 | print("Displayed error window2.") 44 | return {'FINISHED'} 45 | 46 | def invoke(self, context, event): 47 | region = bpy.context.region 48 | centerX = region.width // 2 49 | centerY = region.height 50 | 51 | #currentX = event.mouse_region_X 52 | #currentY = event.mouse_region_Y 53 | 54 | #Move cursor to center so error window is at the center of the window 55 | context.window.cursor_warp(centerX,centerY) 56 | for entry in bpy.context.scene.re_mesh_error_list: 57 | item = self.errorList_items.add() 58 | for key,value in entry.items(): 59 | item[key] = value 60 | return context.window_manager.invoke_props_dialog(self,width = ERROR_WINDOW_SIZE) 61 | 62 | def draw(self, context): 63 | layout = self.layout 64 | rowCount = 2 65 | uifontscale = 9 * context.preferences.view.ui_scale 66 | max_label_width = int((ERROR_WINDOW_SIZE*(1-SPLIT_FACTOR)*(2-SPLIT_FACTOR)) // uifontscale) 67 | layout.label(text=f"The mesh has {len(self.errorList_items)} {'issues' if len(self.errorList_items) > 1 else 'issue'} that must be fixed before it can be exported.",icon = "ERROR") 68 | row = layout.row() 69 | row.label(text=f"Target Collection: {self.targetCollection}") 70 | row.label(text=f"Target Armature: {self.armatureName}") 71 | 72 | row = layout.row().separator() 73 | split = layout.split(factor = SPLIT_FACTOR)#Indent list slightly to make it more clear it's a part of a sub panel 74 | col1 = split.column() 75 | col2 = split.column() 76 | 77 | """ 78 | layout.prop(self,"currentError") 79 | row.label(text =f"Error {str(self.currentError)} / {str(len(bpy.context.scene.re_mesh_error_list))}") 80 | if self.currentError <= len(bpy.context.scene.re_mesh_error_list): 81 | """ 82 | 83 | if len(self.errorList_items) != 0: 84 | item = self.errorList_items[self.errorList_index] 85 | col2.label(text = f"Error Info: {item.errorType}") 86 | box = col2.box() 87 | row = box.row() 88 | for line in item.errorDescription.splitlines(): 89 | line = line.strip() 90 | for chunk in textwrap.wrap(line,width=max_label_width): 91 | box.label(text=chunk) 92 | rowCount+=1 93 | row = layout.row() 94 | if item.objectSetString != "": 95 | for line in item.objectSetString.splitlines(): 96 | line = line.strip() 97 | for chunk in textwrap.wrap(line,width=max_label_width): 98 | box.label(text=chunk) 99 | rowCount+=1 100 | col1.label(text = f"Error Count: {str(len(self.errorList_items))}") 101 | col1.template_list( 102 | listtype_name = "MESH_UL_REMeshErrorList", 103 | list_id = "", 104 | dataptr = self, 105 | propname = "errorList_items", 106 | active_dataptr = self, 107 | active_propname = "errorList_index", 108 | rows = rowCount, 109 | type='DEFAULT' 110 | ) 111 | def addErrorToDict(errorDict,errorType,objectName): 112 | if errorType in errorDict: 113 | errorDict[errorType]["count"] += 1 114 | if objectName != None: 115 | errorDict[errorType]["objectSet"].add(objectName) 116 | 117 | else: 118 | if objectName != None: 119 | errorDict[errorType] = {"count":1,"objectSet":set([objectName])} 120 | else: 121 | errorDict[errorType] = {"count":1,"objectSet":set()} 122 | 123 | errorInfoDict = { 124 | "NoMeshesInCollection":"""No Meshes In Collection 125 | No meshes were found in the target collection. 126 | 127 | HOW TO FIX: 128 | _______________ 129 | 130 | Specify a target collection in the export options that contains meshes, or leave it blank and move your meshes and armature into the Scene Collection. 131 | """, 132 | 133 | "MoreThanOneArmature":"""More Than One Armature 134 | More than one armature was found in the target collection. Only one armature can be present in a mesh collection. 135 | 136 | HOW TO FIX: 137 | _______________ 138 | 139 | Move the extra armature into another collection or delete it. 140 | """, 141 | "NoMaterialOnSubMesh":"""No Material On Sub Mesh 142 | A mesh has no material assigned to it. All meshes must have one material assigned to them. 143 | 144 | HOW TO FIX: 145 | _______________ 146 | 147 | Specify an MDF material name on the end of the object name separated by two underscores. See example, here the material name is "pl1000_Body_Mat". 148 | 149 | Example Object Name: LOD_0_Group_0_Sub_0__pl1000_Body_Mat 150 | """, 151 | 152 | "MoreThanOneMaterialOnSubMesh":"""More Than One Material On Sub Mesh 153 | A mesh has more than one material assigned to it. All meshes must have only one material assigned to them. 154 | 155 | HOW TO FIX: 156 | _______________ 157 | 158 | Select the listed mesh in edit mode, press A to select all vertices. Then press P > Material to split the mesh by it's materials. 159 | """, 160 | 161 | "NoUVMapOnSubMesh":"""No UV Map On Sub Mesh 162 | A mesh has no UV map. All meshes require at least one uv map. 163 | 164 | HOW TO FIX: 165 | _______________ 166 | 167 | Create a UV map. 168 | """, 169 | 170 | "NoVerticesOnSubMesh":"""No Vertices On Sub Mesh 171 | A mesh has no vertices. All meshes must have at least 3 vertices and 1 face. 172 | 173 | HOW TO FIX: 174 | _______________ 175 | 176 | Delete the listed mesh. 177 | """, 178 | 179 | "NoFacesOnSubMesh":"""No Faces On Sub Mesh 180 | A mesh has no faces. All meshes must have at least 3 vertices and 1 face. 181 | 182 | HOW TO FIX: 183 | _______________ 184 | 185 | Delete the listed mesh. 186 | """, 187 | 188 | "LooseVerticesOnSubMesh":"""Loose Vertices On Sub Mesh 189 | A mesh has loose vertices with no faces assigned. 190 | 191 | HOW TO FIX: 192 | _______________ 193 | 194 | Select the listed mesh in edit mode, press A to select all vertices. In the menu bar at the top, select > Mesh > Clean Up > Delete Loose. 195 | 196 | OR 197 | 198 | Press the "Delete Loose Geometry" button in the RE Mesh tab to delete loose vertices on all meshes. 199 | """, 200 | 201 | "NonTriangulatedFace":"""Non Triangulated Faces 202 | A mesh has non triangulated faces. All faces must be triangulated. 203 | 204 | HOW TO FIX: 205 | _______________ 206 | 207 | Select the listed mesh in edit mode, press A to select all vertices. Press Ctrl + T to triangulate faces. 208 | 209 | 210 | """, 211 | "MultipleUVsAssignedToVertex":"""Multiple UVs Assigned To Vertex 212 | A mesh has multiple uvs assigned to a single vertex. 213 | 214 | HOW TO FIX: 215 | _______________ 216 | 217 | Select the listed mesh in edit mode, switch to edge select mode. 218 | Press A to select all edges,then press F3 to search for and select "Seams From Islands". 219 | Select an edge that is marked red on the mesh, then press Shift > G > Seam. 220 | Press V to rip the vertices and then press Esc so that the vertices stay in the same location. 221 | 222 | OR 223 | 224 | Check the "Auto Solve Repeated UVs" box when exporting the mesh. 225 | """, 226 | "MaxVerticesExceeded":"""Max Vertices Exceeded On Sub Mesh 227 | A mesh exceeded the limit of 4294967295 vertices. 228 | 229 | HOW TO FIX: 230 | _______________ 231 | 232 | Separate parts of the mesh into more sub meshes. 233 | 234 | OR 235 | 236 | Use the decimate modifier to reduce mesh quality. 237 | """, 238 | "MaxFacesExceeded":"""Max Faces Exceeded On Sub Mesh 239 | A mesh exceeded the limit of 4294967295 faces. 240 | 241 | HOW TO FIX: 242 | _______________ 243 | 244 | Reconsider the life choices that led you to decide to try to export a mesh with 4 million faces. 245 | """, 246 | 247 | "MaxWeightsPerVertexExceeded":"""Max Weights Per Vertex Exceeded On Sub Mesh 248 | A vertex has more the maximum of 8 (or 6 for SF6) weights assigned to it. 249 | 250 | HOW TO FIX: 251 | _______________ 252 | 253 | Limit total weights to 8 in weight paint mode and normalize all weights from the Weights menu. 254 | 255 | OR 256 | 257 | Use the "Limit Total and Normalize All Weights" button in the RE Mesh tab. 258 | 259 | """, 260 | 261 | 262 | "ExtendedMaxWeightsPerVertexExceeded":"""Extended Max Weights Per Vertex Exceeded On Sub Mesh 263 | A vertex has more the maximum of 16 (or 12 for SF6 and MH Wilds) weights assigned to it. 264 | 265 | HOW TO FIX: 266 | _______________ 267 | 268 | Limit total weights to 16 in weight paint mode and normalize all weights from the Weights menu. 269 | 270 | OR 271 | 272 | Use the "Limit Total and Normalize All Weights" button the RE Mesh tab. 273 | 274 | """, 275 | 276 | "NoBonesOnArmature":"""No Bones on Armature 277 | The armature in the target collection has no bones. 278 | 279 | HOW TO FIX: 280 | _______________ 281 | 282 | Import a valid armature from an existing mesh file. 283 | """, 284 | 285 | "NoArmatureInCollection":"""No Armature In Collection 286 | A mesh has weights but no armature is inside the mesh collection. 287 | 288 | HOW TO FIX: 289 | _______________ 290 | 291 | Move the armature that the mesh is parented to inside the mesh collection. 292 | 293 | You can do this by selecting the armature in the outliner and dragging it onto the mesh collection. 294 | 295 | """, 296 | 297 | "NoWeightsOnMesh":"""No Weights on Sub Mesh 298 | A mesh has an armature, but no weights assigned to bones. 299 | 300 | HOW TO FIX: 301 | _______________ 302 | 303 | Add a new vertex group and weight it to a bone on the armature in weight paint mode. 304 | """, 305 | "MaxWeightedBonesExceeded":"""Max Weighted Bones Exceeded 306 | The mesh exceeded the limit of 256 bones with weights assigned to them. 307 | 308 | HOW TO FIX: 309 | _______________ 310 | 311 | Reduce the amount of chain bones on the armature. 312 | """, 313 | "InvalidWeights":"""Invalid Weighting 314 | A submesh contains invalid weights and can't be exported. 315 | 316 | HOW TO FIX: 317 | _______________ 318 | 319 | Check that the armature is inside the mesh collection. 320 | 321 | Also check that all bones that are being weighted to are on the armature. 322 | 323 | Be sure to limit total weights to 6, normalize all, and remove unweighted vertex groups. 324 | 325 | """, 326 | } 327 | 328 | def printErrorDict(errorDict): 329 | print(f"\n{textColors.FAIL}Unable to export mesh. {len(errorDict)} error(s) were found that need to be fixed.{textColors.ENDC}\n") 330 | for index, errorType in enumerate(sorted(errorDict.keys())): 331 | count = errorDict[errorType]["count"] 332 | objectSet = errorDict[errorType]["objectSet"] 333 | errorInfo = errorInfoDict[errorType] 334 | nameListString = "" 335 | if objectSet: 336 | nameListString = f"\nObjects with this error ({str(len(objectSet))}):\n" 337 | for name in sorted(list(objectSet)): 338 | nameListString += "["+name +"]\n" 339 | print(f"{textColors.FAIL}ERROR ({str(index+1)} / {len(errorDict)}): {str(count)} instance(s) of {errorInfo}{nameListString}\n__________________________________{textColors.ENDC}") 340 | 341 | def showREMeshErrorWindow(targetCollectionName,armatureObj,errorDict): 342 | bpy.types.Scene.re_mesh_error_list = bpy.props.CollectionProperty(type=REMeshErrorEntry) 343 | bpy.context.scene.re_mesh_error_list.clear() 344 | for index, errorType in enumerate(sorted(errorDict.keys())): 345 | item = bpy.context.scene.re_mesh_error_list.add() 346 | item.errorCount = errorDict[errorType]["count"] 347 | errorInfoSplit = errorInfoDict[errorType].split("\n",1) 348 | 349 | item.errorType = errorInfoSplit[0] 350 | item.errorDescription = errorInfoSplit[1] 351 | objectSet = errorDict[errorType]["objectSet"] 352 | errorInfo = errorInfoDict[errorType] 353 | nameListString = "" 354 | if objectSet: 355 | nameListString = f"\nObjects with this error ({str(len(objectSet))}):\n" 356 | for name in sorted(list(objectSet)): 357 | nameListString += "["+name +"]\n" 358 | item.objectSetString = nameListString 359 | if armatureObj != None: 360 | armatureName = armatureObj.name 361 | else: 362 | armatureName = "None" 363 | 364 | bpy.ops.re_mesh.show_export_error_window('INVOKE_DEFAULT',targetCollection = targetCollectionName,armatureName = armatureName) -------------------------------------------------------------------------------- /modules/tex/re_tex_utils.py: -------------------------------------------------------------------------------- 1 | # Author: NSA Cloud & AsteriskAmpersand 2 | # Credit to Asterisk Ampersand, code borrowed from Tex Chopper 3 | from ..dds.file_dds import DDS, DX10_Header, DDSFile, getDDSHeader 4 | 5 | from .file_re_tex import RE_TexFile, MipData, CompressedImageHeader,GDEFLATE_VERSIONS 6 | from ..gen_functions import raiseWarning 7 | from ..gdeflate.gdeflate import GDeflate, GDeflateCompressionLevel, GDeflateError 8 | from ..ddsconv.directx.texconv import Texconv, unload_texconv 9 | from .enums import tex_format_enum as texenum 10 | from .enums import dxgi_format_enum as dxgienum 11 | from .enums.dds_bpps import ddsBpps 12 | from . import tex_math as tmath 13 | from . import format_ops 14 | import os 15 | 16 | VERSION_MHWILDS_BETA = 240701001 17 | VERSION_MHWILDS = 241106027 18 | 19 | 20 | def TexToDDS(tex, imageIndex): 21 | """ Generates a DDS file from the 'imageIndex'th image in the tex file""" 22 | dds = DDS() 23 | dds.header.dwSize = 124 24 | # DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | DDSD_PIXELFORMAT | DDSD_MIPMAPCOUNT | DDSD_LINEARSIZE 25 | dds.header.dwFlags = 0x00000001 | 0x00000002 | 0x00000004 | 0x00001000 | 0x00020000 | 0x00080000 26 | dds.header.dwHeight = tex.header.height 27 | dds.header.dwWidth = tex.header.width 28 | bpps = ddsBpps[texenum.texFormatToDXGIStringDict[tex.header.format]] 29 | dds.header.dwPitchOrLinearSize = ( 30 | dds.header.dwWidth * dds.header.dwHeight * bpps) // 8 31 | #dds.header.dwDepth = tex.header.depth 32 | dds.header.dwDepth = tex.header.depth 33 | dds.header.dwMipMapCount = tex.header.mipCount 34 | dds.header.ddpfPixelFormat.dwSize = 32 35 | dds.header.ddpfPixelFormat.dwFlags = 0x4 # DDPF_FOURCC 36 | dds.header.ddpfPixelFormat.dwFourCC = 808540228 # DX10 37 | dds.header.ddpfPixelFormat.dwRGBBitCount = 0 38 | dds.header.ddpfPixelFormat.dwRBitMask = 0 39 | dds.header.ddpfPixelFormat.dwGBitMask = 0 40 | dds.header.ddpfPixelFormat.dwBBitMask = 0 41 | dds.header.ddpfPixelFormat.dwABitMask = 0 42 | dds.header.ddsCaps1 = 0x00001000 | 0x00400000 # DDSCAPS_TEXTURE | DDSCAPS_MIPMAP 43 | if tex.header.cubemapMarker != 0: 44 | dds.header.ddsCaps1 = dds.header.ddsCaps1 | 0x00000008 # DDSCAPS_COMPLEX 45 | # DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEX | DDSCAPS2_CUBEMAP_NEGATIVEX | DDSCAPS2_CUBEMAP_POSITIVEY | DDSCAPS2_CUBEMAP_NEGATIVEY | DDSCAPS2_CUBEMAP_POSITIVEZ | DDSCAPS2_CUBEMAP_NEGATIVEZ 46 | dds.header.ddsCaps2 = 0x00000200 | 0x00000400 | 0x00000800 | 0x00001000 | 0x00002000 | 0x00004000 | 0x00008000 47 | else: 48 | dds.header.ddsCaps2 = 0 49 | dds.header.ddsCaps3 = 0 50 | dds.header.ddsCaps4 = 0 51 | dds.header.dwReserved2 = 0 52 | 53 | dds.header.dx10Header = DX10_Header() 54 | dds.header.dx10Header.dxgiFormat = dxgienum.formatStringToDXGIDict[ 55 | texenum.texFormatToDXGIStringDict[tex.header.format]] 56 | # D3D10_RESOURCE_DIMENSION_TEXTURE2D 57 | dds.header.dx10Header.resourceDimension = 3 58 | """ Image Arrays are exported as multiple files, hence this is commented out 59 | if tex.header.cubemapMarker != 0: 60 | dds.header.dx10Header.arraySize = tex.header.imageCount // 6 61 | else: 62 | dds.header.dx10Header.arraySize = tex.header.imageCount 63 | """ 64 | dds.header.dx10Header.arraySize = 1 65 | dds.header.dx10Header.miscFlags2 = 0 66 | dds.data = tex.GetTextureData(imageIndex) 67 | return dds 68 | 69 | 70 | def convertTexFileToDDS(texPath, outputPath): 71 | texFile = RE_TexFile() 72 | texFile.read(texPath) 73 | 74 | texInfo = {"isArray": texFile.tex.header.imageCount > 75 | 1, "arrayNum": texFile.tex.header.imageCount} 76 | if texFile.tex.header.imageCount == 1: 77 | ddsFile = DDSFile() 78 | ddsFile.dds = TexToDDS(texFile.tex, 0) 79 | ddsFile.write(outputPath) 80 | else: 81 | digitCount = len(str(texFile.tex.header.imageCount)) 82 | #print("TEX ARRAY FOUND") 83 | newOutPathRoot = os.path.splitext(outputPath)[0] + " #ARRAY_" 84 | for i in range(texFile.tex.header.imageCount): 85 | newOutputPath = f"{newOutPathRoot}{str(i).zfill(digitCount)}.dds" 86 | ddsFile = DDSFile() 87 | ddsFile.dds = TexToDDS(texFile.tex, i) 88 | ddsFile.write(newOutputPath) 89 | #print(f"Wrote {newOutputPath}") 90 | return texInfo 91 | 92 | 93 | def makeTexHeader(texVersion, ddsHeader, imageCount): 94 | newTexFile = RE_TexFile() 95 | texHeader = newTexFile.tex.header 96 | texHeader.version = texVersion 97 | texHeader.width = ddsHeader.dwWidth 98 | texHeader.height = ddsHeader.dwHeight 99 | texHeader.depth = ddsHeader.dwDepth 100 | texHeader.imageCount = imageCount 101 | texHeader.mipCount = ddsHeader.dwMipMapCount # For DMC5/RE2 102 | texHeader.imageMipHeaderSize = ddsHeader.dwMipMapCount << 4 103 | #texHeader.imageCount = (ddsHeader.dwMipMapCount << 12) | imageCount 104 | #print(f"imageCount {imageCount}") 105 | #print(f"dwMipMapCount {ddsHeader.dwMipMapCount}") 106 | #print(f"tex image count {texHeader.imageCount}") 107 | texHeader.formatString = format_ops.buildFormatString(ddsHeader) 108 | texHeader.format = texenum.formatStringToTexFormatDict[texHeader.formatString] 109 | cubemap = (ddsHeader.ddsCaps2 & 0x00000200 != 0)*1 # DDSCAPS2_CUBEMAP 110 | texHeader.cubemapMarker = cubemap * 4 111 | return newTexFile 112 | 113 | 114 | def padding(image, ddsSl, capSl, linecount): 115 | result = b'' 116 | for ix in range(linecount): 117 | imgData = image[ix*ddsSl:(ix+1)*ddsSl] 118 | pad = b'\x00'*(capSl-ddsSl) 119 | result += imgData + pad 120 | return result 121 | 122 | 123 | def packageTextures(ddsHeader, ddsList, compress, formatData): 124 | """Pads and compresses textures aas needed, stores information relative to their size for header generation""" 125 | compressor = GDeflate() 126 | miptex = [] 127 | for dds in ddsList: 128 | offset = 0 129 | mips = [] 130 | minima = formatData.scanlineMinima 131 | #print(f"image {tex}") 132 | for mip in range(ddsHeader.dwMipMapCount): 133 | x, y = tmath.ruD(ddsHeader.dwWidth, 2 ** 134 | mip), tmath.ruD(ddsHeader.dwHeight, 2**mip) 135 | z = tmath.ruD(ddsHeader.dwDepth, 2**mip) 136 | xcount, ycount = tmath.ruD( 137 | x, formatData.tx), tmath.ruD(y, formatData.ty) 138 | mpacketSize = tmath.ruD(format_ops.packetSize, round( 139 | tmath.product(tmath.dotDivide(formatData.pixelPerPacket, formatData.texelSize)))) 140 | # texelSize = packetTexelPacking and mTexelSize = tx,ty 141 | bytelen = xcount*ycount*z*mpacketSize 142 | mipmap = dds.data[offset:offset+bytelen] 143 | capcomScanline = tmath.ruNX(xcount*mpacketSize, minima) 144 | ddsScanline = mpacketSize * xcount 145 | paddedMipmap = padding(mipmap, ddsScanline, 146 | capcomScanline, ycount*z) 147 | uncompressedSize = capcomScanline * ycount 148 | if x * y >= 64 and compress: 149 | try: 150 | compressedPaddedMipmap = compressor.compress( 151 | paddedMipmap, GDeflateCompressionLevel.BEST_RATIO) 152 | s = compressor.get_uncompressed_size(compressedPaddedMipmap) 153 | if len(compressedPaddedMipmap) > s: 154 | compressedPaddedMipmap = paddedMipmap 155 | except GDeflateError: 156 | compressedPaddedMipmap = paddedMipmap 157 | else: 158 | compressedPaddedMipmap = paddedMipmap 159 | mipData = compressedPaddedMipmap, uncompressedSize, len( 160 | compressedPaddedMipmap), capcomScanline 161 | parsel = (mipData, (xcount, ycount)) 162 | mips.append(parsel) 163 | offset += bytelen 164 | #assert len(parsel[0]) == bytelen 165 | miptex.append(mips) 166 | return miptex 167 | 168 | 169 | def storeTextures(ddsHeader, texFile, miptex, compress): 170 | texHeader = texFile.tex.header 171 | texVersion = texHeader.version 172 | imageCount = len(miptex) 173 | mipCount = ddsHeader.dwMipMapCount 174 | mipstride = 0x10 175 | baseHeader = (8 if texVersion >= 28 and texVersion != 190820018 else 0) + 0x20 176 | mipbase = baseHeader + mipstride*imageCount*mipCount 177 | offset = 0x0 178 | mipoffset = 0x0 179 | tex = [] 180 | for texture in miptex: 181 | mips = [] 182 | mipHeaders = [] 183 | for mip, (mipData, texelCount) in enumerate(texture): 184 | mipmap, uncompressedSize, compressedSize, scanlineSize = mipData 185 | tx, ty = texelCount 186 | mips.append(mipmap) 187 | mipEntry = MipData() 188 | mipEntry.mipOffset = mipbase+mipoffset 189 | mipEntry.uncompressedSize = uncompressedSize 190 | mipEntry.scanlineLength = scanlineSize 191 | mipEntry.textureData = mipmap 192 | mipHeaders.append(mipEntry) 193 | if compress: 194 | imageHeader = CompressedImageHeader() 195 | imageHeader.imageOffset = offset 196 | imageHeader.imageSize = compressedSize 197 | texFile.tex.imageHeaderList.append(imageHeader) 198 | offset += compressedSize 199 | mipoffset += uncompressedSize 200 | tex.append(mips) 201 | texFile.tex.imageMipDataList.append(mipHeaders) 202 | return texFile 203 | 204 | 205 | def getTexFileFromDDS(ddsList, texVersion, streamingFlag=False): 206 | ddsHeader = ddsList[0].header 207 | isGDeflate = texVersion in GDEFLATE_VERSIONS 208 | newTexFile = makeTexHeader(texVersion, ddsHeader, len(ddsList)) 209 | formatData = format_ops.packetSizeData(newTexFile.tex.header.formatString) 210 | miptex = packageTextures(ddsHeader, ddsList, isGDeflate, formatData) 211 | return storeTextures(ddsHeader, newTexFile, miptex, isGDeflate) 212 | 213 | 214 | def DDSToTex(ddsPathList, texVersion, outPath, streamingFlag=False): 215 | 216 | if len(ddsPathList) == 1: 217 | ddsFile = DDSFile() 218 | ddsFile.read(ddsPathList[0]) 219 | texFile = getTexFileFromDDS([ddsFile.dds], texVersion, streamingFlag) 220 | texFile.write(outPath) 221 | else: # Array texture 222 | baseHeader = getDDSHeader(ddsPathList[0]) 223 | # Preparse dds files to make sure they have the same height,width,format and mip count as the first 224 | valid = True 225 | fixDDSMipList = [] # Force mip counts to match first dds in array 226 | for ddsPath in ddsPathList: 227 | 228 | currentHeader = getDDSHeader(ddsPath) 229 | if currentHeader.dwWidth != baseHeader.dwWidth: 230 | raiseWarning( 231 | f"{os.path.split(ddsPath)[1]} - Width does not match first array texture.") 232 | valid = False 233 | if currentHeader.dwHeight != baseHeader.dwHeight: 234 | raiseWarning( 235 | f"{os.path.split(ddsPath)[1]} - Height does not match first array texture.") 236 | valid = False 237 | if currentHeader.dwMipMapCount != baseHeader.dwMipMapCount: 238 | raiseWarning( 239 | f"{os.path.split(ddsPath)[1]} - Mipmap count does not match first array texture.") 240 | fixDDSMipList.append(ddsPath) 241 | #valid = False 242 | if currentHeader.dx10Header == None: 243 | raiseWarning( 244 | f"{os.path.split(ddsPath)[1]} - DX10 header is missing, save the DDS file using Photoshop with the Intel DDS plugin.") 245 | valid = False 246 | else: 247 | if baseHeader.dx10Header != None: 248 | if currentHeader.dx10Header.dxgiFormat != baseHeader.dx10Header.dxgiFormat: 249 | raiseWarning( 250 | f"{os.path.split(ddsPath)[1]} - DDS format ({dxgienum.DXGIToFormatStringDict.get(currentHeader.dx10Header.dxgiFormat)}) does not match first array texture ({dxgienum.DXGIToFormatStringDict.get(baseHeader.dx10Header.dxgiFormat)}).") 251 | valid = False 252 | 253 | if valid: 254 | if fixDDSMipList != []: 255 | texConv = Texconv() 256 | for fixPath in fixDDSMipList: 257 | print(f"Fixing mip count on {os.path.split(fixPath)[1]}") 258 | texConv.fix_mip_count(fixPath, os.path.split( 259 | fixPath)[0], baseHeader.dwMipMapCount) 260 | unload_texconv() 261 | 262 | ddsList = [] 263 | for ddsPath in ddsPathList: 264 | ddsFile = DDSFile() 265 | ddsFile.read(ddsPath) 266 | ddsList.append(ddsFile.dds) 267 | 268 | texFile = getTexFileFromDDS(ddsList, texVersion, streamingFlag) 269 | texFile.write(outPath) 270 | -------------------------------------------------------------------------------- /modules/ddsconv/directx/texconv.py: -------------------------------------------------------------------------------- 1 | """Texture converter. 2 | 3 | Notes: 4 | You need to build dll from https://github.com/matyalatte/Texconv-Custom-DLL. 5 | And put the dll in the same directory as texconv.py. 6 | """ 7 | import ctypes 8 | from ctypes.util import find_library 9 | import os 10 | import tempfile 11 | 12 | from .dds import DDSHeader, is_hdr, is_signed 13 | from .dxgi_format import DXGI_FORMAT 14 | from . import util 15 | 16 | DLL = None 17 | 18 | 19 | def get_dll_close_from_lib(lib_name): 20 | """Return dll function to unlaod DLL if the library has it.""" 21 | dlpath = find_library(lib_name) 22 | if dlpath is None: 23 | # DLL not found. 24 | return None 25 | try: 26 | lib = ctypes.CDLL(dlpath) 27 | if hasattr(lib, "dlclose"): 28 | return lib.dlclose 29 | except OSError: 30 | pass 31 | # dlclose not found. 32 | return None 33 | 34 | 35 | def get_dll_close(): 36 | """Get dll function to unload DLL.""" 37 | if util.is_windows(): 38 | return ctypes.windll.kernel32.FreeLibrary 39 | else: 40 | # Search libc, libdl, and libSystem 41 | for lib_name in ["c", "dl", "System"]: 42 | dlclose = get_dll_close_from_lib(lib_name) 43 | if dlclose is not None: 44 | return dlclose 45 | # Failed to find dlclose 46 | return None 47 | 48 | 49 | def unload_texconv(): 50 | global DLL 51 | if DLL is None: 52 | return 53 | 54 | dll_close = get_dll_close() 55 | if dll_close is None: 56 | raise RuntimeError("Failed to unload DLL.") 57 | 58 | handle = DLL._handle 59 | dll_close.argtypes = [ctypes.c_void_p] 60 | dll_close(handle) 61 | DLL = None 62 | 63 | 64 | class Texconv: 65 | """Texture converter.""" 66 | def __init__(self, dll_path=None): 67 | self.load_dll(dll_path=dll_path) 68 | 69 | def load_dll(self, dll_path=None): 70 | global DLL 71 | if DLL is not None: 72 | self.dll = DLL 73 | return 74 | 75 | if dll_path is None: 76 | file_path = os.path.realpath(__file__) 77 | if util.is_windows(): 78 | dll_name = "texconv.dll" 79 | elif util.is_mac(): 80 | dll_name = "libtexconv.dylib" 81 | elif util.is_linux(): 82 | dll_name = "libtexconv.so" 83 | else: 84 | raise RuntimeError(f'This OS ({util.get_os_name()}) is unsupported.') 85 | dirname = os.path.dirname(file_path) 86 | dll_path = os.path.join(dirname, dll_name) 87 | 88 | if util.is_arm(): 89 | raise RuntimeError(f'{dll_name} does NOT support ARM devices') 90 | 91 | if not os.path.exists(dll_path): 92 | raise RuntimeError(f'texconv not found. ({dll_path})') 93 | 94 | self.dll = ctypes.cdll.LoadLibrary(dll_path) 95 | DLL = self.dll 96 | 97 | def unload_dll(self): 98 | unload_texconv() 99 | self.dll = None 100 | 101 | def convert_to_tga(self, file, out=None, cubemap_layout="h-cross", invert_normals=False, verbose=True): 102 | """Convert dds to tga.""" 103 | if self.dll is None: 104 | raise RuntimeError("texconv is unloaded.") 105 | 106 | dds_header = DDSHeader.read_from_file(file) 107 | 108 | if not dds_header.is_supported(): 109 | raise RuntimeError( 110 | f"DDS converter does NOT support {dds_header.get_format_as_str()}.\n" 111 | "Use '.dds' as an export format." 112 | ) 113 | 114 | if dds_header.is_3d() or dds_header.is_array(): 115 | raise RuntimeError("DDS converter does Not support non-2D textures.") 116 | 117 | if dds_header.is_partial_cube(): 118 | raise RuntimeError("Partial cubemaps are unsupported.") 119 | 120 | if verbose: 121 | print(f'DXGI_FORMAT: {dds_header.get_format_as_str()}') 122 | 123 | args = [] 124 | 125 | if dds_header.is_hdr(): 126 | fmt = 'hdr' 127 | if not dds_header.convertible_to_hdr(): 128 | args += ['-f', 'fp32'] 129 | else: 130 | fmt = 'tga' 131 | if not dds_header.convertible_to_tga(): 132 | args += ['-f', 'rgba'] 133 | 134 | if dds_header.is_signed(): 135 | args += '-x2bias' 136 | 137 | if dds_header.is_int(): 138 | msg = f'Int format detected. ({dds_header.get_format_as_str()})\n It might not be converted correctly.' 139 | print(msg) 140 | 141 | if not dds_header.is_cube(): 142 | args += ['-ft', fmt] 143 | 144 | if dds_header.is_bc5(): 145 | if not dds_header.is_signed(): 146 | args += ['-reconstructz'] 147 | if invert_normals: 148 | args += ['-inverty'] 149 | print(args) 150 | out = self.__texconv(file, args, out=out, verbose=verbose) 151 | 152 | name = os.path.join(out, os.path.basename(file)) 153 | name = ".".join(name.split(".")[:-1] + [fmt]) 154 | 155 | if dds_header.is_cube(): 156 | self.__cube_to_image(file, name, args, cubemap_layout=cubemap_layout, verbose=verbose) 157 | 158 | return name 159 | def fix_mip_count(self, file, outDir,mipCount): 160 | """Convert dds to tga.""" 161 | if self.dll is None: 162 | raise RuntimeError("texconv is unloaded.") 163 | 164 | dds_header = DDSHeader.read_from_file(file) 165 | 166 | if not dds_header.is_supported(): 167 | raise RuntimeError( 168 | f"DDS converter does NOT support {dds_header.get_format_as_str()}.\n" 169 | "Use '.dds' as an export format." 170 | ) 171 | 172 | if dds_header.is_3d() or dds_header.is_array(): 173 | raise RuntimeError("DDS converter does Not support non-2D textures.") 174 | 175 | if dds_header.is_partial_cube(): 176 | raise RuntimeError("Partial cubemaps are unsupported.") 177 | 178 | 179 | args = ['-m',str(mipCount),'-ft',"dds"] 180 | 181 | out = self.__texconv(file, args, out=outDir,verbose=False,allow_slow_codec=True) 182 | 183 | 184 | def convert_to_png(self, file, out=None, cubemap_layout="h-cross", invert_normals=False, verbose=True): 185 | #NOTE:Texconv is very tempermental about saving pngs for some reason, it gives access denied errors in appdata where other texture formats don't 186 | #tif is used instead because of this 187 | """Convert dds to png.""" 188 | if self.dll is None: 189 | raise RuntimeError("texconv is unloaded.") 190 | 191 | dds_header = DDSHeader.read_from_file(file) 192 | 193 | if not dds_header.is_supported(): 194 | raise RuntimeError( 195 | f"DDS converter does NOT support {dds_header.get_format_as_str()}.\n" 196 | "Use '.dds' as an export format." 197 | ) 198 | 199 | if dds_header.is_3d() or dds_header.is_array(): 200 | raise RuntimeError("DDS converter does Not support non-2D textures.") 201 | 202 | if dds_header.is_partial_cube(): 203 | raise RuntimeError("Partial cubemaps are unsupported.") 204 | 205 | if verbose: 206 | print(f'DXGI_FORMAT: {dds_header.get_format_as_str()}') 207 | fmt = "png" 208 | args = ['-m','1','-ft',fmt] 209 | if "SRGB" in dds_header.get_format_as_str(): 210 | args += ['-f','R8G8B8A8_UNORM_SRGB'] 211 | else: 212 | args += ['-f','R8G8B8A8_UNORM'] 213 | 214 | out = self.__texconv(file, args, out=out, verbose=verbose) 215 | 216 | name = os.path.join(out, os.path.basename(file)) 217 | name = ".".join(name.split(".")[:-1] + [fmt]) 218 | 219 | if dds_header.is_cube(): 220 | self.__cube_to_image(file, name, args, cubemap_layout=cubemap_layout, verbose=verbose) 221 | 222 | return name 223 | def convert_to_tif(self, file, out=None, cubemap_layout="h-cross", invert_normals=False, verbose=True): 224 | """Convert dds to tif.""" 225 | if self.dll is None: 226 | raise RuntimeError("texconv is unloaded.") 227 | 228 | dds_header = DDSHeader.read_from_file(file) 229 | 230 | if not dds_header.is_supported(): 231 | raise RuntimeError( 232 | f"DDS converter does NOT support {dds_header.get_format_as_str()}.\n" 233 | "Use '.dds' as an export format." 234 | ) 235 | 236 | if dds_header.is_3d() or dds_header.is_array(): 237 | raise RuntimeError("DDS converter does Not support non-2D textures.") 238 | 239 | if dds_header.is_partial_cube(): 240 | raise RuntimeError("Partial cubemaps are unsupported.") 241 | 242 | if verbose: 243 | print(f'DXGI_FORMAT: {dds_header.get_format_as_str()}') 244 | fmt = "tif" 245 | args = ['-m','1','-ft',fmt] 246 | if "SRGB" in dds_header.get_format_as_str(): 247 | args += ['-f','R8G8B8A8_UNORM_SRGB'] 248 | else: 249 | args += ['-f','R8G8B8A8_UNORM'] 250 | 251 | out = self.__texconv(file, args, out=out, verbose=verbose) 252 | 253 | name = os.path.join(out, os.path.basename(file)) 254 | name = ".".join(name.split(".")[:-1] + [fmt]) 255 | 256 | if dds_header.is_cube(): 257 | self.__cube_to_image(file, name, args, cubemap_layout=cubemap_layout, verbose=verbose) 258 | 259 | return name 260 | def convert_to_dds(self, file, dds_fmt, out=None, 261 | invert_normals=False, no_mip=False, 262 | image_filter="LINEAR", 263 | export_as_cubemap=False, 264 | cubemap_layout="h-cross", 265 | verbose=True, allow_slow_codec=False): 266 | """Convert texture to dds.""" 267 | if self.dll is None: 268 | raise RuntimeError("texconv is unloaded.") 269 | 270 | ext = util.get_ext(file) 271 | 272 | if is_hdr(dds_fmt) and ext != 'hdr': 273 | raise RuntimeError(f'Use .hdr for HDR textures. ({file})') 274 | if ('BC6' in dds_fmt or 'BC7' in dds_fmt) and (not util.is_windows()) and (not allow_slow_codec): 275 | raise RuntimeError(f'Can NOT export {dds_fmt} textures on this platform.' 276 | ' Or enable the "Allow Slow Codec" option.') 277 | 278 | if not DXGI_FORMAT.is_valid_format(dds_fmt): 279 | raise RuntimeError(f'Not DXGI format. ({dds_fmt})') 280 | 281 | if verbose: 282 | print(f'DXGI_FORMAT: {dds_fmt}') 283 | 284 | base_name = os.path.basename(file) 285 | base_name = '.'.join(base_name.split('.')[:-1] + ['dds']) 286 | 287 | args = ['-f', dds_fmt] 288 | if no_mip: 289 | args += ['-m', '1'] 290 | if image_filter != "LINEAR": 291 | args += ["-if", image_filter] 292 | 293 | if is_signed(dds_fmt): 294 | args += '-x2bias' 295 | 296 | if "SRGB" in dds_fmt: 297 | args += ['-srgb'] 298 | 299 | if ("BC5" in dds_fmt) and invert_normals: 300 | args += ['-inverty'] 301 | 302 | if export_as_cubemap: 303 | if ext == "hdr": 304 | temp_args = ['-f', 'fp32'] 305 | else: 306 | temp_args = ['-f', 'rgba'] 307 | with tempfile.TemporaryDirectory() as temp_dir: 308 | temp_dds = os.path.join(temp_dir, base_name) 309 | self.__image_to_cube(file, temp_dds, temp_args, cubemap_layout=cubemap_layout, verbose=verbose) 310 | out = self.__texconv(temp_dds, args, out=out, verbose=verbose, allow_slow_codec=allow_slow_codec) 311 | else: 312 | out = self.__texconv(file, args, out=out, verbose=verbose, allow_slow_codec=allow_slow_codec) 313 | name = os.path.join(out, base_name) 314 | return name 315 | 316 | def __texconv(self, file, args, out=None, verbose=True, allow_slow_codec=False): 317 | """Run texconv.""" 318 | if out is not None and isinstance(out, str): 319 | args += ['-o', out] 320 | else: 321 | out = '.' 322 | 323 | if out not in ['.', ''] and not os.path.exists(out): 324 | util.mkdir(out) 325 | 326 | args += ["-y", "--", os.path.normpath(file)] 327 | #print(args) 328 | args_p = [ctypes.c_wchar_p(arg) for arg in args] 329 | args_p = (ctypes.c_wchar_p*len(args_p))(*args_p) 330 | 331 | err_buf = ctypes.create_unicode_buffer(512) 332 | result = self.dll.texconv(len(args), args_p, verbose, False, allow_slow_codec, err_buf, 512) 333 | if result != 0: 334 | raise RuntimeError(err_buf.value) 335 | 336 | return out 337 | 338 | def __cube_to_image(self, file, new_file, args, cubemap_layout="h-cross", verbose=True): 339 | """Generate an image from a cubemap with texassemble.""" 340 | if cubemap_layout.endswith("-fnz"): 341 | cubemap_layout = cubemap_layout[:-4] 342 | args = [cubemap_layout] + args 343 | self.__texassemble(file, new_file, args, verbose=verbose) 344 | 345 | def __image_to_cube(self, file, new_file, args, cubemap_layout="h-cross", verbose=True): 346 | """Generate a cubemap from an image with texassemble.""" 347 | cmd = "cube-from-" + cubemap_layout[0] + cubemap_layout[2] 348 | args = [cmd] + args 349 | self.__texassemble(file, new_file, args, verbose=verbose) 350 | 351 | def __texassemble(self, file, new_file, args, verbose=True): 352 | """Run texassemble.""" 353 | out = os.path.dirname(new_file) 354 | if out not in ['.', ''] and not os.path.exists(out): 355 | util.mkdir(out) 356 | args += ["-y", "-o", new_file, "--", file] 357 | 358 | args_p = [ctypes.c_wchar_p(arg) for arg in args] 359 | args_p = (ctypes.c_wchar_p*len(args_p))(*args_p) 360 | err_buf = ctypes.create_unicode_buffer(512) 361 | result = self.dll.texassemble(len(args), args_p, verbose, False, err_buf, 512) 362 | if result != 0: 363 | raise RuntimeError(err_buf.value) 364 | -------------------------------------------------------------------------------- /modules/tex/file_re_tex.py: -------------------------------------------------------------------------------- 1 | # Author: NSA Cloud & AsteriskAmpersand 2 | from io import BytesIO 3 | from ..gen_functions import raiseError 4 | from ..gen_functions import read_uint, read_int, read_uint64,\ 5 | read_float, read_short, read_ushort, read_ubyte,\ 6 | read_unicode_string, read_byte,\ 7 | write_uint, write_int, write_uint64, write_float,\ 8 | write_short, write_ushort, write_ubyte, write_byte,\ 9 | write_unicode_string 10 | from ..gen_functions import textColors, raiseWarning, getPaddingAmount 11 | from ..gdeflate.gdeflate import GDeflate 12 | from .enums.game_version_enum import gameNameToTexVersionDict 13 | from .format_ops import packetSizeData 14 | from .enums.tex_format_enum import texFormatToDXGIStringDict 15 | from . import tex_math as tmath 16 | 17 | GDEFLATE_VERSIONS = set([ 18 | 240701001,#VERSION_MHWILDS_BETA 19 | 241106027,#VERSION_MHWILDS 20 | 250813143,#VERSION_PRAGDEMO 21 | ]) 22 | 23 | VERSION_MHWILDS_BETA = 240701001 24 | VERSION_MHWILDS = 241106027 25 | 26 | 27 | def getTexVersionFromGameName(gameName): 28 | return gameNameToTexVersionDict.get(gameName, -1) 29 | 30 | 31 | class TexHeader(): 32 | def __init__(self): 33 | self.magic = 5784916 34 | self.version = 0 35 | self.width = 0 36 | self.height = 0 37 | self.depth = 0 38 | self.imageCount = 0 39 | self.imageMipHeaderSize = 0 40 | self.mipCount = 0 # Internal,calcluated from mip header size 41 | self.format = 0 42 | self.swizzleControl = -1 43 | self.cubemapMarker = 0 44 | self.unkn04 = 0 45 | self.unkn05 = 0 46 | self.null0 = 0 47 | # swizzle data,unused 48 | self.swizzleHeightDepth = 0 49 | self.swizzleWidth = 0 50 | self.null1 = 0 51 | self.seven = 0 52 | self.one = 0 53 | 54 | self.packetSizeData = None # Internal, for reading texture data with cursed pitch 55 | 56 | def read(self, file): 57 | self.magic = read_uint(file) 58 | if self.magic != 5784916: 59 | raiseError("File is not a tex file.") 60 | self.version = read_uint(file) 61 | self.width = read_ushort(file) 62 | self.height = read_ushort(file) 63 | self.depth = read_ushort(file) 64 | if self.version > 11 and self.version != 190820018: 65 | self.imageCount = read_ubyte(file) 66 | self.imageMipHeaderSize = read_ubyte(file) 67 | self.mipCount = self.imageMipHeaderSize // 16 68 | else: 69 | self.mipCount = read_ubyte(file) 70 | self.imageCount = read_ubyte(file) 71 | self.format = read_uint(file) 72 | self.swizzleControl = read_int(file) 73 | self.cubemapMarker = read_uint(file) 74 | self.unkn04 = read_ubyte(file) 75 | self.unkn05 = read_ubyte(file) 76 | self.null0 = read_ushort(file) 77 | if self.version > 27 and self.version != 190820018: # Thanks RE3 78 | # swizzle data,unused 79 | self.swizzleHeightDepth = read_ubyte(file) 80 | self.swizzleWidth = read_ubyte(file) 81 | self.null1 = read_ushort(file) 82 | self.seven = read_ushort(file) 83 | self.one = read_ushort(file) 84 | self.formatData = packetSizeData(texFormatToDXGIStringDict.get(self.format, "UNKNOWN")) 85 | #self.reserved = read_uint64(file) 86 | 87 | def write(self, file): 88 | write_uint(file, self.magic) 89 | write_uint(file, self.version) 90 | write_ushort(file, self.width) 91 | write_ushort(file, self.height) 92 | write_ushort(file, self.depth) 93 | if self.version > 11 and self.version != 190820018: 94 | write_ubyte(file, self.imageCount) 95 | write_ubyte(file, self.imageMipHeaderSize) 96 | else: 97 | write_ubyte(file, self.mipCount) 98 | write_ubyte(file, self.imageCount) 99 | 100 | write_uint(file, self.format) 101 | write_int(file, self.swizzleControl) 102 | write_uint(file, self.cubemapMarker) 103 | write_ubyte(file, self.unkn04) 104 | write_ubyte(file, self.unkn05) 105 | write_ushort(file, self.null0) 106 | if self.version > 27 and self.version != 190820018: # Thanks RE3 107 | write_ubyte(file, self.swizzleHeightDepth) 108 | write_ubyte(file, self.swizzleWidth) 109 | write_ushort(file, self.null1) 110 | write_ushort(file, self.seven) 111 | write_ushort(file, self.one) 112 | 113 | 114 | class CompressedImageHeader(): 115 | def __init__(self): 116 | self.imageSize = 0 117 | self.imageOffset = 0 118 | 119 | def read(self, file, expectedMipSize, width, height, ddsBPPs, currentImageDataHeaderOffset, imageDataOffset, texVersion): 120 | self.imageSize = read_uint(file) 121 | self.imageOffset = read_uint(file) 122 | 123 | def write(self, file): 124 | write_uint(file, self.imageSize) 125 | write_uint(file, self.imageOffset) 126 | 127 | 128 | class MipData(): 129 | def __init__(self): 130 | self.mipOffset = 0 131 | self.scanlineLength = 0 132 | self.uncompressedSize = 0 133 | 134 | # WILDS 135 | self.imageSize = 0 136 | self.imageOffset = 0 137 | 138 | self.textureData = None 139 | 140 | def storeTrimmed(self, source, target, scanlineLength, dataLength, endSize): 141 | """ 142 | Parameters 143 | ---------- 144 | source : BytesIO 145 | Raw untrimmed texture data 146 | target : BytesIO 147 | Buffer to write the trimmed texture data to 148 | scanlineLength : int 149 | Length in bytes of scanline as specified by the file 150 | dataLength : int 151 | Length in bytes of a single row of pixels 152 | endSize : int 153 | Final size of the texture as specified by the file 154 | 155 | Raises 156 | ------ 157 | BufferError 158 | If texture is read past expected bounds 159 | """ 160 | currentOffset = 0 161 | seekAmount = scanlineLength - dataLength 162 | #print(f"seekAmount: {seekAmount}") 163 | #print(f"endSize: {endSize}") 164 | #tempFile = open(r"D:\Modding\Monster Hunter Wilds\texDataTest\tempData"+str(currentImageDataHeaderOffset),"wb") 165 | # tempFile.write(mipData.getvalue()) 166 | # tempFile.close() 167 | #print(scanlineLength,dataLength,endSize) 168 | while currentOffset != endSize: 169 | #print("S",currentOffset,endSize) 170 | #print(f"current block offset {file.tell()}") 171 | target.extend(source.read(dataLength)) 172 | source.seek(seekAmount, 1) 173 | currentOffset += scanlineLength 174 | #print("E",currentOffset,endSize) 175 | if currentOffset > endSize: 176 | raise BufferError("Texture Data Read Past Bounds") 177 | #print(f"end block offset {file.tell()}") 178 | #print(f"end mip read offset {file.tell()}") 179 | source.close() 180 | return target 181 | 182 | def calculateLineBytelength(self, formatData, width): 183 | """ 184 | Parameters 185 | ---------- 186 | formatData : FormatData 187 | Information about texture format dimensions 188 | width : Int 189 | Texture width in pixels 190 | 191 | Returns 192 | ---------- 193 | lineBytelength : Int 194 | Number of bytes expected per horizontal pixel line 195 | """ 196 | return tmath.ruD(width,formatData.tx)*formatData.bytelen 197 | 198 | def uncompressGdeflate(self, file, headerOffset, imageOffset): 199 | """ 200 | Parameters 201 | ---------- 202 | file : BytesIO 203 | Tex file being read 204 | headerOffset : Int 205 | Offset from the start of the file to the current Compressed Data Header 206 | imageOffset : Int 207 | Offset form the start of the file to the start of Texture information 208 | 209 | Returns 210 | ---------- 211 | mipData : BytesIO 212 | Uncompressed texture data 213 | """ 214 | 215 | file.seek(headerOffset) 216 | self.imageSize = read_uint(file) 217 | self.imageOffset = read_uint(file) 218 | 219 | file.seek(imageOffset + self.imageOffset) 220 | rawImageData = file.read(self.imageSize) 221 | 222 | # Check if it's GDeflate compressed by the [0x04, 0xFB] header until we figure 223 | # out if there is a flag somewhere else. 224 | if len(rawImageData) >= 2 and rawImageData[0] == 0x04 and rawImageData[1] == 0xFB: 225 | #print("Decompressing MH Wilds texture with GDeflate") 226 | decompressor = GDeflate() 227 | mipData = BytesIO(decompressor.decompress( 228 | rawImageData, num_workers=4)) 229 | 230 | else: 231 | #print("MH Wilds texture without GDeflate header - assuming uncompressed.") 232 | mipData = BytesIO(rawImageData) 233 | 234 | #print(f"Decompressed data size: {endSize}") 235 | #print(f"Specified size: {self.uncompressedSize}") 236 | return mipData 237 | 238 | def read(self, file, expectedMipSize, dimensions, formatData, 239 | currentImageDataHeaderOffset, imageDataOffset, 240 | texVersion): 241 | """ 242 | Parameters 243 | ---------- 244 | file : BytesIO 245 | Tex file being read 246 | expectedMipSize : Int 247 | Expected size in bytes of the final texture buffer (x * y * z * bytesPerPixel) 248 | dimensions : Int[3] 249 | Height, Width and Depth of Image 250 | formatData : FormatData 251 | Information about texture format dimensions 252 | currentImageDataHeaderOffset : Int 253 | Offset from the start of the file to the current Compressed Data Header 254 | imageDataOffset : Int 255 | Offset form the start of the file to the start of Texture information 256 | texVersion : Int 257 | RE Engine Texture format Version 258 | """ 259 | width, height, depth = dimensions 260 | self.mipOffset = read_uint64(file) 261 | self.scanlineLength = read_uint(file) 262 | self.uncompressedSize = read_uint(file) 263 | self.textureData = bytearray() 264 | currentPos = file.tell() 265 | file.seek(self.mipOffset) 266 | #print(f"mip offset {self.mipOffset}") 267 | # print(f"{file.tell()}") 268 | mipData = None # BytesIO for uncompressed texture data 269 | 270 | if texVersion in GDEFLATE_VERSIONS:# or texVersion == VERSION_MHWILDS_BETA: 271 | mipData = self.uncompressGdeflate(file, 272 | currentImageDataHeaderOffset, imageDataOffset) 273 | endSize = mipData.getbuffer().nbytes 274 | else: 275 | mipData = BytesIO(file.read(self.uncompressedSize)) 276 | endSize = self.uncompressedSize 277 | 278 | #print(f"expected mip size: {expectedMipSize}\nactual mip size: {self.uncompressedSize}") 279 | if endSize != expectedMipSize: 280 | lineBytelength = self.calculateLineBytelength(formatData, width) 281 | #print("sclLen: %d, lbLenL %d"%(self.scanlineLength,lineBytelength)) 282 | #print("expMpS: %d, actual: %d"%(expectedMipSize,endSize)) 283 | self.storeTrimmed(mipData, self.textureData, 284 | self.scanlineLength, lineBytelength, 285 | endSize) 286 | else: 287 | if mipData != None: 288 | self.textureData = mipData.getvalue() 289 | mipData.close() 290 | else: 291 | self.textureData = file.read(self.uncompressedSize) 292 | 293 | file.seek(currentPos) 294 | 295 | def write(self, file): 296 | write_uint64(file, self.mipOffset) 297 | write_uint(file, self.scanlineLength) 298 | write_uint(file, self.uncompressedSize) 299 | 300 | 301 | class Tex(): 302 | def __init__(self): 303 | self.header = TexHeader() 304 | self.imageMipDataList = [] 305 | self.imageHeaderList = [] # WILDS 306 | 307 | def read(self, file): 308 | self.header.read(file) 309 | self.imageMipDataList = [] 310 | fmtData = self.header.formatData 311 | currentOverallMipIndex = 0 312 | currentImageDataHeaderOffset = file.tell() + \ 313 | (self.header.mipCount * self.header.imageCount) * \ 314 | 16 # 16 is mip header size 315 | imageDataOffset = currentImageDataHeaderOffset + \ 316 | (self.header.mipCount * self.header.imageCount) * \ 317 | 8 # 8 is compression data header size 318 | for i in range(self.header.imageCount): 319 | imageMipDataListEntry = [] 320 | currentXSize = self.header.width 321 | currentYSize = self.header.height 322 | for j in range(self.header.mipCount): 323 | mipEntry = MipData() 324 | mipX = max(self.header.width >> j, 1) 325 | mipY = max(self.header.height >> j, 1) 326 | mipZ = max(self.header.depth >> j, 1) 327 | expectedMipSize = tmath.ruD(mipY,fmtData.ty) * mipZ * fmtData.bytelen 328 | mipEntry.read(file, expectedMipSize, (mipX, mipY, mipZ), 329 | fmtData, currentImageDataHeaderOffset, 330 | imageDataOffset, self.header.version) 331 | imageMipDataListEntry.append(mipEntry) 332 | currentImageDataHeaderOffset += 8 # 8 is image data header size 333 | self.imageMipDataList.append(imageMipDataListEntry) 334 | 335 | def write(self, file): 336 | self.header.write(file) 337 | # Write mip offsets and sizes 338 | for mipEntryList in self.imageMipDataList: 339 | for mipEntry in mipEntryList: 340 | mipEntry.write(file) 341 | 342 | # Write compressed image headers, wilds 343 | for imageHeader in self.imageHeaderList: 344 | imageHeader.write(file) 345 | 346 | # Write mip data 347 | for mipEntryList in self.imageMipDataList: 348 | for mipEntry in mipEntryList: 349 | file.write(mipEntry.textureData) 350 | 351 | def GetTextureData(self, imageIndex): 352 | byteArray = bytearray() 353 | # for image in self.imageMipDataList: 354 | image = self.imageMipDataList[imageIndex] 355 | for mipData in image: 356 | byteArray.extend(mipData.textureData) 357 | return bytes(byteArray) 358 | 359 | 360 | class RE_TexFile: 361 | def __init__(self): 362 | self.tex = Tex() 363 | 364 | def read(self, filePath): 365 | #print("Opening " + filePath) 366 | try: 367 | file = open(filePath, "rb") 368 | except: 369 | raiseError("Failed to open " + filePath) 370 | self.tex.read(file) 371 | file.close() 372 | 373 | def write(self, filePath): 374 | print("Writing " + filePath) 375 | try: 376 | file = open(filePath, "wb") 377 | except: 378 | raiseError("Failed to open " + filePath) 379 | self.tex.write(file) 380 | file.close() 381 | -------------------------------------------------------------------------------- /modules/ddsconv/directx/dds.py: -------------------------------------------------------------------------------- 1 | """Class for DDS files. 2 | 3 | Notes: 4 | - Official document for DDS header 5 | https://learn.microsoft.com/en-us/windows/win32/direct3ddds/dds-header 6 | - Official repo for DDS 7 | https://github.com/microsoft/DirectXTex 8 | """ 9 | 10 | import ctypes as c 11 | from enum import IntEnum 12 | import os 13 | 14 | from . import util 15 | from .dxgi_format import DXGI_FORMAT, FOURCC_TO_DXGI, BITMASK_TO_DXGI 16 | 17 | 18 | class PF_FLAGS(IntEnum): 19 | '''dwFlags for DDS_PIXELFORMAT''' 20 | # ALPHAPIXELS = 0x00000001 21 | # ALPHA = 0x00000002 22 | FOURCC = 0x00000004 23 | # RGB = 0x00000040 24 | # LUMINANCE = 0x00020000 25 | BUMPDUDV = 0x00080000 26 | 27 | 28 | UNCANONICAL_FOURCC = [ 29 | # fourCC for uncanonical formats (ETC, PVRTC, ATITC, ASTC) 30 | b"ETC", 31 | b"ETC1", 32 | b"ETC2", 33 | b"ET2A", 34 | b"PTC2", 35 | b"PTC4", 36 | b"ATC", 37 | b"ATCA", 38 | b"ATCE", 39 | b"ATCI", 40 | b"AS44", 41 | b"AS55", 42 | b"AS66", 43 | b"AS85", 44 | b"AS86", 45 | b"AS:5" 46 | ] 47 | 48 | 49 | class DDSPixelFormat(c.LittleEndianStructure): 50 | _pack_ = 1 51 | _fields_ = [ 52 | ("size", c.c_uint32), # PfSize == 32 53 | ("flags", c.c_uint32), # PfFlags (if 4 then FourCC is used) 54 | ("fourCC", c.c_char * 4), # FourCC 55 | ("bit_count", c.c_uint32), # Bitcount 56 | ("bit_mask", c.c_uint32 * 4), # Bitmask 57 | ] 58 | 59 | def __init__(self): 60 | super().__init__() 61 | self.size = 32 62 | self.flags = (c.c_uint32)(PF_FLAGS.FOURCC) 63 | self.fourCC = b"DX10" 64 | self.bit_count = (c.c_uint32)(0) 65 | self.bit_mask = (c.c_uint32 * 4)((0) * 4) 66 | 67 | def get_dxgi(self): 68 | '''Similar method as GetDXGIFormat in DirectXTex/DDSTextureLoader/DDSTextureLoader12.cpp''' 69 | 70 | if not self.is_canonical(): 71 | raise RuntimeError(f"Non-standard fourCC detected. ({self.fourCC.decode()})") 72 | 73 | # Try to detect DXGI from fourCC. 74 | if self.flags & PF_FLAGS.FOURCC: 75 | for cc_list, dxgi in FOURCC_TO_DXGI: 76 | if self.fourCC in cc_list: 77 | return dxgi 78 | 79 | # Try to detect DXGI from bit mask. 80 | detected_dxgi = None 81 | for bit_mask, dxgi in BITMASK_TO_DXGI: 82 | if self.is_bit_mask(bit_mask): 83 | detected_dxgi = dxgi 84 | 85 | if detected_dxgi is None: 86 | print("Failed to detect dxgi format. It'll be loaded as B8G8R8A8.") 87 | return DXGI_FORMAT.B8G8R8A8_UNORM 88 | 89 | if self.flags & PF_FLAGS.BUMPDUDV: 90 | # DXGI format should be signed. 91 | return DXGI_FORMAT.get_signed(detected_dxgi) 92 | else: 93 | return detected_dxgi 94 | 95 | def is_bit_mask(self, bit_mask): 96 | for b1, b2 in zip(self.bit_mask, bit_mask): 97 | if b1 != b2: 98 | return False 99 | return True 100 | 101 | def is_canonical(self): 102 | return self.fourCC not in UNCANONICAL_FOURCC 103 | 104 | def is_dx10(self): 105 | return self.fourCC == b"DX10" 106 | 107 | 108 | class DDS_FLAGS(IntEnum): 109 | CAPS = 0x1 110 | HEIGHT = 0x2 111 | WIDTH = 0x4 112 | PITCH = 0x8 # Use "w * h * bpp" for pitch_or_linear_size 113 | PIXELFORMAT = 0x1000 114 | MIPMAPCOUNT = 0x20000 115 | LINEARSIZE = 0x80000 # Use "w * bpp" for pitch_or_linear_size 116 | DEPTH = 0x800000 # For volume textures 117 | DEFAULT = CAPS | HEIGHT | WIDTH | PIXELFORMAT | MIPMAPCOUNT 118 | 119 | @staticmethod 120 | def get_flags(is_compressed, is_3d): 121 | flags = DDS_FLAGS.DEFAULT 122 | if is_compressed: 123 | flags |= DDS_FLAGS.PITCH 124 | else: 125 | flags |= DDS_FLAGS.LINEARSIZE 126 | if is_3d: 127 | flags |= DDS_FLAGS.DEPTH 128 | return flags 129 | 130 | @staticmethod 131 | def has_pitch(flags): 132 | return (flags & DDS_FLAGS.PITCH) > 0 133 | 134 | 135 | class DDS_CAPS(IntEnum): 136 | CUBEMAP = 0x8 # DDSCAPS_COMPLEX 137 | MIPMAP = 0x400008 # DDSCAPS_COMPLEX | DDSCAPS_MIPMAP 138 | REQUIRED = 0x1000 # DDSCAPS_TEXTURE 139 | 140 | @staticmethod 141 | def get_caps(has_mips, is_cube): 142 | caps = DDS_CAPS.REQUIRED 143 | if has_mips: 144 | caps |= DDS_CAPS.MIPMAP 145 | if is_cube: 146 | caps |= DDS_CAPS.CUBEMAP 147 | return caps 148 | 149 | 150 | class DDS_CAPS2(IntEnum): 151 | CUBEMAP = 0x200 152 | CUBEMAP_POSITIVEX = 0x400 153 | CUBEMAP_NEGATIVEX = 0x800 154 | CUBEMAP_POSITIVEY = 0x1000 155 | CUBEMAP_NEGATIVEY = 0x2000 156 | CUBEMAP_POSITIVEZ = 0x4000 157 | CUBEMAP_NEGATIVEZ = 0x8000 158 | CUBEMAP_FULL = 0xFE00 # for cubemap that have all faces 159 | VOLUME = 0x200000 160 | 161 | @staticmethod 162 | def get_caps2(is_cube, is_3d): 163 | caps2 = 0 164 | if is_cube: 165 | caps2 |= DDS_CAPS2.CUBEMAP_FULL 166 | if is_3d: 167 | caps2 |= DDS_CAPS2.VOLUME 168 | return caps2 169 | 170 | @staticmethod 171 | def is_cube(caps2): 172 | return (caps2 & DDS_CAPS2.CUBEMAP) > 0 173 | 174 | @staticmethod 175 | def is_3d(caps2): 176 | return (caps2 & DDS_CAPS2.VOLUME) > 0 177 | 178 | @staticmethod 179 | def is_partial_cube(caps2): 180 | return DDS_CAPS2.is_cube(caps2) and (caps2 != DDS_CAPS2.CUBEMAP_FULL) 181 | 182 | 183 | HDR_SUPPORTED = [ 184 | # Convertible as a decompressed format 185 | "BC6H_TYPELESS", 186 | "BC6H_UF16", 187 | "BC6H_SF16", 188 | 189 | # Directory convertible 190 | "R32G32B32A32_FLOAT", 191 | "R16G16B16A16_FLOAT", 192 | "R32G32B32_FLOAT" 193 | ] 194 | 195 | 196 | TGA_SUPPORTED = [ 197 | # Convertible as a decompressed format 198 | "BC1_TYPELESS", 199 | "BC1_UNORM", 200 | "BC1_UNORM_SRGB", 201 | "BC2_TYPELESS", 202 | "BC2_UNORM", 203 | "BC2_UNORM_SRGB", 204 | "BC3_TYPELESS", 205 | "BC3_UNORM", 206 | "BC3_UNORM_SRGB", 207 | "BC4_TYPELESS", 208 | "BC4_UNORM", 209 | "BC4_SNORM", 210 | "BC7_TYPELESS", 211 | "BC7_UNORM", 212 | "BC7_UNORM_SRGB", 213 | 214 | # Directory convertible 215 | "R8G8B8A8_UNORM", 216 | "R8G8B8A8_UNORM_SRGB", 217 | "B8G8R8A8_UNORM", 218 | "B8G8R8A8_UNORM_SRGB", 219 | "B8G8R8X8_UNORM", 220 | "B8G8R8X8_UNORM_SRGB", 221 | "R8_UNORM", 222 | "A8_UNORM", 223 | "B5G5R5A1_UNORM" 224 | ] 225 | 226 | 227 | class DX10Header(c.LittleEndianStructure): 228 | _pack_ = 1 229 | _fields_ = [ 230 | ("dxgi_format", c.c_uint32), 231 | ("resource_dimension", c.c_uint32), 232 | ("misc_flags", c.c_uint32), 233 | ("array_size", c.c_uint32), 234 | ("misc_flags2", c.c_uint32) 235 | ] 236 | 237 | def get_dxgi(self): 238 | if self.dxgi_format not in [fmt.value for fmt in DXGI_FORMAT]: 239 | raise RuntimeError(f"Unsupported DXGI format detected. ({self.dxgi_format})") 240 | if "ASTC" in DXGI_FORMAT(self.dxgi_format).name: 241 | raise RuntimeError(f"ASTC textures are not supported. ({self.dxgi_format})") 242 | return DXGI_FORMAT(self.dxgi_format) 243 | 244 | def update(self, dxgi_format, is_cube, is_3d, array_size): 245 | self.dxgi_format = int(dxgi_format) 246 | self.resource_dimension = 3 + is_3d 247 | self.misc_flags = 4 * is_cube 248 | self.array_size = array_size 249 | self.misc_flags2 = 0 250 | 251 | def is_array(self): 252 | return self.array_size > 1 253 | 254 | 255 | def is_hdr(name: str): 256 | return 'BC6' in name or 'FLOAT' in name or 'INT' in name 257 | 258 | 259 | def is_signed(name: str): 260 | return 'SNORM' in name or 'SF16' in name 261 | 262 | 263 | def convertible_to_tga(name: str): 264 | return name in TGA_SUPPORTED 265 | 266 | 267 | def convertible_to_hdr(name: str): 268 | return name in HDR_SUPPORTED 269 | 270 | 271 | class DDSHeader(c.LittleEndianStructure): 272 | MAGIC = b'DDS ' 273 | _pack_ = 1 274 | _fields_ = [ 275 | ("magic", c.c_char * 4), # Magic == 'DDS ' 276 | ("head_size", c.c_uint32), # Size == 124 277 | ("flags", c.c_uint32), # DDS_FLAGS 278 | ("height", c.c_uint32), 279 | ("width", c.c_uint32), 280 | ("pitch_or_linear_size", c.c_uint32), # w * h * bpp for compressed, w * bpp for uncompressed 281 | ("depth", c.c_uint32), 282 | ("mipmap_num", c.c_uint32), 283 | ("reserved", c.c_uint32 * 9), # Reserved1 284 | ("tool_name", c.c_char * 4), # Reserved1 285 | ("null", c.c_uint32), # Reserved1 286 | ("pixel_format", DDSPixelFormat), 287 | ("caps", c.c_uint32), # DDS_CAPS 288 | ("caps2", c.c_uint32), # DDS_CAPS2 289 | ("reserved2", c.c_uint32 * 3), # ReservedCpas, Reserved2 290 | ] 291 | 292 | def __init__(self): 293 | super().__init__() 294 | self.magic = DDSHeader.MAGIC 295 | self.head_size = 124 296 | self.mipmap_num = 1 297 | self.pixel_format = DDSPixelFormat() 298 | self.reserved = (c.c_uint32 * 9)((0) * 9) 299 | self.tool_name = b"UEDT" 300 | self.null = 0 301 | self.reserved2 = (c.c_uint32*3)(0, 0, 0) 302 | self.dx10_header = DX10Header() 303 | 304 | self.dxgi_format = DXGI_FORMAT.UNKNOWN 305 | self.byte_per_pixel = 0 306 | 307 | @staticmethod 308 | def read(f): 309 | """Read dds header.""" 310 | head = DDSHeader() 311 | f.readinto(head) 312 | head.mipmap_num += head.mipmap_num == 0 313 | 314 | # DXT10 header 315 | if head.pixel_format.is_dx10(): 316 | f.readinto(head.dx10_header) 317 | head.dxgi_format = head.dx10_header.get_dxgi() 318 | else: 319 | head.dxgi_format = head.pixel_format.get_dxgi() 320 | head.dx10_header.update(head.dxgi_format, head.is_cube(), head.is_3d(), 1) 321 | 322 | # Raise errors for unsupported files 323 | if head.magic != DDSHeader.MAGIC or head.head_size != 124: 324 | raise RuntimeError("Not DDS file.") 325 | if head.dx10_header.resource_dimension == 2: 326 | raise RuntimeError("1D textures are unsupported.") 327 | 328 | return head 329 | 330 | @staticmethod 331 | def read_from_file(file_name): 332 | """Read dds header from a file.""" 333 | with open(file_name, 'rb') as f: 334 | head = DDSHeader.read(f) 335 | return head 336 | 337 | def write(self, f): 338 | f.write(self) 339 | if self.pixel_format.is_dx10(): 340 | f.write(self.dx10_header) 341 | 342 | def update(self, depth, array_size): 343 | self.depth = depth 344 | 345 | has_mips = self.has_mips() 346 | is_3d = self.is_3d() 347 | is_cube = self.is_cube() 348 | bpp = self.get_bpp() 349 | 350 | self.flags = DDS_FLAGS.get_flags(self.is_compressed(), is_3d) 351 | if DDS_FLAGS.has_pitch(self.flags): 352 | self.pitch_or_linear_size = int(self.width * self.height * bpp) 353 | else: 354 | self.pitch_or_linear_size = int(self.width * bpp) 355 | self.caps = DDS_CAPS.get_caps(has_mips, is_cube) 356 | self.caps2 = DDS_CAPS2.get_caps2(is_cube, is_3d) 357 | self.dx10_header.update(self.dxgi_format, is_cube, is_3d, array_size) 358 | self.pixel_format = DDSPixelFormat() 359 | 360 | def get_bpp(self): 361 | bpp = self.pitch_or_linear_size // self.width 362 | if DDS_FLAGS.has_pitch(self.flags): 363 | bpp = bpp // self.height 364 | return bpp 365 | 366 | def is_compressed(self): 367 | dxgi = self.get_format_as_str() 368 | return "BC" in dxgi or "ASTC" in dxgi 369 | 370 | def has_mips(self): 371 | return self.mipmap_num > 1 372 | 373 | def is_cube(self): 374 | return DDS_CAPS2.is_cube(self.caps2) 375 | 376 | def is_3d(self): 377 | return self.depth > 1 378 | 379 | def is_array(self): 380 | return self.dx10_header.is_array() 381 | 382 | def is_hdr(self): 383 | return is_hdr(self.dxgi_format.name) 384 | 385 | def is_bc5(self): 386 | return 'BC5' in self.dxgi_format.name 387 | 388 | def get_format_as_str(self): 389 | return self.dxgi_format.name 390 | 391 | def is_srgb(self): 392 | return 'SRGB' in self.dxgi_format.name 393 | 394 | def is_int(self): 395 | return 'INT' in self.dxgi_format.name 396 | 397 | def is_signed(self): 398 | return is_signed(self.dxgi_format.name) 399 | 400 | def is_canonical(self): 401 | return self.fourCC not in UNCANONICAL_FOURCC 402 | 403 | def is_supported(self): 404 | if self.dxgi_format not in [fmt.value for fmt in DXGI_FORMAT]: 405 | return False 406 | if "ASTC" in DXGI_FORMAT(self.dxgi_format).name: 407 | return False 408 | return True 409 | 410 | def is_partial_cube(self): 411 | return DDS_CAPS2.is_partial_cube(self.caps2) 412 | 413 | def convertible_to_tga(self): 414 | name = self.get_format_as_str() 415 | return convertible_to_tga(name) 416 | 417 | def convertible_to_hdr(self): 418 | name = self.get_format_as_str() 419 | return convertible_to_hdr(name) 420 | 421 | def get_array_size(self): 422 | return self.dx10_header.array_size 423 | 424 | def get_num_slices(self): 425 | return self.get_array_size() * self.depth * (1 + (self.is_cube() * 5)) 426 | 427 | def disassemble(self): 428 | self.update(1, 1) 429 | 430 | def assemble(self, is_array, size): 431 | if is_array: 432 | self.update(1, size) 433 | else: 434 | self.update(size, 1) 435 | 436 | def get_texture_type(self): 437 | if self.is_3d(): 438 | return "volume" 439 | if self.is_cube(): 440 | t = "cube" 441 | else: 442 | t = "2d" 443 | if self.is_array(): 444 | t += "_array" 445 | return t 446 | 447 | 448 | class DDS: 449 | def __init__(self, header, slices=None): 450 | self.header = header 451 | self.slice_bin_list = slices 452 | 453 | @staticmethod 454 | def load(file, verbose=False): 455 | with open(file, 'rb') as f: 456 | header = DDSHeader.read(f) 457 | data_size = util.get_size(f) - f.tell() 458 | num_slices = header.get_num_slices() 459 | slice_size = data_size // num_slices 460 | slices = [f.read(slice_size) for i in range(num_slices)] 461 | return DDS(header, slices) 462 | 463 | def save(self, file): 464 | folder = os.path.dirname(file) 465 | if folder not in ['.', ''] and not os.path.exists(folder): 466 | util.mkdir(folder) 467 | 468 | with open(file, 'wb') as f: 469 | self.header.write(f) 470 | for d in self.slice_bin_list: 471 | f.write(d) 472 | 473 | def is_cube(self): 474 | return self.header.is_cube() 475 | 476 | def get_array_size(self): 477 | return self.header.get_array_size() 478 | 479 | def get_disassembled_dds_list(self): 480 | new_dds_num = self.header.depth * self.get_array_size() 481 | num_slices = 1 + (5 * self.is_cube()) 482 | self.header.disassemble() 483 | dds_list = [] 484 | for i in range(new_dds_num): 485 | dds = DDS( 486 | self.header, 487 | self.slice_bin_list[i * num_slices: (i + 1) * num_slices] 488 | ) 489 | dds_list.append(dds) 490 | return dds_list 491 | 492 | @staticmethod 493 | def assemble(dds_list, is_array=True): 494 | header = dds_list[0].header 495 | header.assemble(is_array, len(dds_list)) 496 | for dds in dds_list[1:]: 497 | if header.dxgi_format != dds.header.dxgi_format: 498 | raise RuntimeError("Failed to assemble dds files. DXGI formats should be the same") 499 | if header.width != dds.header.width or header.height != dds.header.height: 500 | raise RuntimeError("Failed to assemble dds files. Texture sizes should be the same") 501 | slice_bin_list = sum([dds.slice_bin_list for dds in dds_list], []) 502 | return DDS(header, slice_bin_list) 503 | --------------------------------------------------------------------------------