├── .gitignore ├── DsqFile.py ├── DtsShape.py ├── DtsTypes.py ├── FAQ.md ├── LICENSE ├── README.md ├── __init__.py ├── export_dsq.py ├── export_dts.py ├── import_dsq.py ├── import_dts.py ├── import_sequence.py ├── shared_export.py ├── util.py └── write_report.py /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | developer.py -------------------------------------------------------------------------------- /DsqFile.py: -------------------------------------------------------------------------------- 1 | from .DtsTypes import Sequence, Trigger, Vector, Quaternion 2 | from struct import pack, unpack, calcsize 3 | from ctypes import c_byte, c_short, c_int 4 | 5 | def read(fd, fmt): 6 | return unpack(fmt, fd.read(calcsize(fmt))) 7 | 8 | def write(fd, fmt, *values): 9 | fd.write(pack(fmt, *values)) 10 | 11 | def write_quat(fd, q): 12 | write(fd, "4h", 13 | c_short(int(q.x * 32767)).value, 14 | c_short(int(q.y * 32767)).value, 15 | c_short(int(q.z * 32767)).value, 16 | c_short(int(q.w * -32767)).value) 17 | 18 | def write_vec(fd, v): 19 | write(fd, "3f", v.x, v.y, v.z) 20 | 21 | def read_quat(fd): 22 | x, y, z, w = read(fd, "4h") 23 | return Quaternion(( 24 | w / -32767, 25 | x / 32767, 26 | y / 32767, 27 | z / 32767)) 28 | 29 | def read_vec(fd): 30 | return Vector(read(fd, "3f")) 31 | 32 | class DsqFile: 33 | def __init__(self): 34 | self.nodes = [] 35 | self.rotations = [] 36 | self.translations = [] 37 | self.uniform_scales = [] 38 | self.aligned_scales = [] 39 | self.arbitrary_scale_rots = [] 40 | self.arbitrary_scale_factors = [] 41 | self.ground_translations = [] 42 | self.ground_rotations = [] 43 | self.sequences = [] 44 | self.triggers = [] 45 | 46 | def write_dump(self, fd): 47 | def p(s): 48 | fd.write(s + "\n") 49 | 50 | p("# rotations: {}".format(len(self.rotations))) 51 | p("# translations: {}".format(len(self.translations))) 52 | p("# uniform_scales: {}".format(len(self.uniform_scales))) 53 | p("# aligned_scales: {}".format(len(self.aligned_scales))) 54 | p("# arbitrary_scale_rots: {}".format(len(self.arbitrary_scale_rots))) 55 | p("# arbitrary_scale_factors: {}".format(len(self.arbitrary_scale_factors))) 56 | p("# ground_translations: {}".format(len(self.ground_translations))) 57 | p("# ground_rotations: {}".format(len(self.ground_rotations))) 58 | 59 | p("Nodes ({}):".format(len(self.nodes))) 60 | for i, name in enumerate(self.nodes): 61 | p(" {}: {}".format(i, name)) 62 | 63 | p("Sequences ({}):".format(len(self.sequences))) 64 | for i, seq in enumerate(self.sequences): 65 | p(" {}: {}".format(i, seq.name)) 66 | p(" numKeyframes = {}".format(seq.numKeyframes)) 67 | p(" duration = {}".format(seq.duration)) 68 | p(" rotationMatters = {}".format("".join(map(str, map(int, seq.rotationMatters))))) 69 | p(" translationMatters = {}".format("".join(map(str, map(int, seq.translationMatters))))) 70 | p(" scaleMatters = {}".format("".join(map(str, map(int, seq.scaleMatters))))) 71 | 72 | def write_name(self, fd, name): 73 | write(fd, "v24 not supported yet" 141 | 142 | (num_nodes,) = read(fd, " 21: 153 | self.rotations = [read_quat(fd) for i in range(read(fd, " 8: 184 | (num_sjws,) = read(fd, "= len(self.buffer32): 128 | raise EOFError() 129 | 130 | data = self.buffer32[self.tell32] 131 | self.tell32 += 1 132 | return data 133 | 134 | def read16(self): 135 | if self.tell16 >= len(self.buffer16): 136 | raise EOFError() 137 | 138 | data = self.buffer16[self.tell16] 139 | self.tell16 += 1 140 | return data 141 | 142 | def read8(self): 143 | if self.tell8 >= len(self.buffer8): 144 | raise EOFError() 145 | 146 | data = self.buffer8[self.tell8] 147 | self.tell8 += 1 148 | return data 149 | 150 | def read_float(self): 151 | return unpack("f", pack("i", self.read32()))[0] 152 | 153 | def read_string(self): 154 | buf = bytearray() 155 | while True: 156 | byte = self.read8() 157 | if byte == 0: 158 | break 159 | else: 160 | buf.append(byte) 161 | return buf.decode("cp1252") 162 | 163 | def read_vec3(self): 164 | return Vector((self.read_float(), self.read_float(), self.read_float())) 165 | 166 | def read_vec2(self): 167 | return Vector((self.read_float(), self.read_float())) 168 | 169 | def read_box(self): 170 | return Box(self.read_vec3(), self.read_vec3()) 171 | 172 | def read_quat(self): 173 | x = self.read16() / 32767 174 | y = self.read16() / 32767 175 | z = self.read16() / 32767 176 | w = self.read16() / -32767 177 | return Quaternion((w, x, y, z)) 178 | 179 | class DtsShape(object): 180 | def __init__(self): 181 | self.nodes = [] 182 | self.objects = [] 183 | self.decals = [] 184 | self.subshapes = [] 185 | self.iflmaterials = [] 186 | self.materials = [] 187 | self.default_rotations = [] 188 | self.default_translations = [] 189 | self.node_rotations = [] 190 | self.node_translations = [] 191 | self.node_uniform_scales = [] 192 | self.node_aligned_scales = [] 193 | self.node_arbitrary_scale_factors = [] 194 | self.node_arbitrary_scale_rots = [] 195 | self.ground_translations = [] 196 | self.ground_rotations = [] 197 | self.objectstates = [] 198 | self.decalstates = [] 199 | self.triggers = [] 200 | self.detail_levels = [] 201 | self.meshes = [] 202 | self.sequences = [] 203 | self.names = [] 204 | self._names_lookup = {} 205 | 206 | self.smallest_size = 0.0 207 | self.smallest_detail_level = 0 208 | self.radius = 0.0 209 | self.radius_tube = 0.0 210 | self.center = Vector() 211 | self.bounds = Box(Vector(), Vector()) 212 | 213 | def name(self, string): 214 | index = self._names_lookup.get(string.lower()) 215 | 216 | if index == None: 217 | index = len(self.names) 218 | self.names.append(string) 219 | self._names_lookup[string.lower()] = index 220 | 221 | return index 222 | 223 | def name_resolve(self, string): 224 | index = self.name(string) 225 | return (index, self.names[index]) 226 | 227 | def get_world_mat(self, nodeid): 228 | matrix = Matrix() 229 | 230 | while nodeid != -1: 231 | cur = Matrix.Translation(self.default_translations[nodeid]) * self.default_rotations[nodeid].to_matrix() 232 | matrix = cur * matrix 233 | nodeid = self.nodes[nodeid].parent 234 | 235 | return mat 236 | 237 | def verify(self): 238 | assert self.detail_levels 239 | assert self.subshapes 240 | assert len(self.nodes) == len(self.default_translations) 241 | assert len(self.nodes) == len(self.default_rotations) 242 | assert len(self.objects) == len(self.objectstates) 243 | assert len(self.node_arbitrary_scale_factors) == len(self.node_arbitrary_scale_rots) 244 | assert len(self.ground_translations) == len(self.ground_rotations) 245 | 246 | def save(self, fd, dtsVersion=24): 247 | stream = DtsOutputStream(dtsVersion) 248 | 249 | # Header 250 | stream.write32( 251 | len(self.nodes), 252 | len(self.objects), 253 | len(self.decals), 254 | len(self.subshapes), 255 | len(self.iflmaterials), 256 | len(self.node_rotations), 257 | len(self.node_translations), 258 | len(self.node_uniform_scales), 259 | len(self.node_aligned_scales), 260 | len(self.node_arbitrary_scale_factors), 261 | len(self.ground_translations), 262 | len(self.objectstates), 263 | len(self.decalstates), 264 | len(self.triggers), 265 | len(self.detail_levels), 266 | len(self.meshes), 267 | len(self.names), 268 | ) 269 | stream.write_float(self.smallest_size) 270 | stream.write32(self.smallest_detail_level) 271 | 272 | if dtsVersion > 24: 273 | # write morphs 274 | pass 275 | 276 | stream.guard(0) 277 | 278 | # Bounds 279 | stream.write_float(self.radius, self.radius_tube) 280 | stream.write_vec3(self.center) 281 | stream.write_box(self.bounds) 282 | stream.guard(1) 283 | 284 | # Nodes 285 | for node in self.nodes: 286 | node.write(stream) 287 | stream.guard(2) 288 | 289 | # Objects 290 | for obj in self.objects: 291 | obj.write(stream) 292 | stream.guard(3) 293 | 294 | # Decals 295 | for decal in self.decals: 296 | decal.write(stream) 297 | stream.guard(4) 298 | 299 | # IFL materials 300 | for ifl in self.iflmaterials: 301 | ifl.write(stream) 302 | stream.guard(5) 303 | 304 | # Subshapes 305 | for sub in self.subshapes: 306 | stream.write32(sub.firstNode) 307 | for sub in self.subshapes: 308 | stream.write32(sub.firstObject) 309 | for sub in self.subshapes: 310 | stream.write32(sub.firstDecal) 311 | stream.guard(6) 312 | for sub in self.subshapes: 313 | stream.write32(sub.numNodes) 314 | for sub in self.subshapes: 315 | stream.write32(sub.numObjects) 316 | for sub in self.subshapes: 317 | stream.write32(sub.numDecals) 318 | stream.guard(7) 319 | 320 | # Default translations and rotations 321 | assert len(self.default_rotations) == len(self.nodes) 322 | assert len(self.default_translations) == len(self.nodes) 323 | 324 | for i in range(len(self.nodes)): 325 | stream.write_quat(self.default_rotations[i]) 326 | stream.write_vec3(self.default_translations[i]) 327 | 328 | # Animation translations and rotations 329 | for point in self.node_translations: 330 | stream.write_vec3(point) 331 | for quat in self.node_rotations: 332 | stream.write_quat(quat) 333 | stream.guard(8) 334 | 335 | # Default scales 336 | for point in self.node_uniform_scales: 337 | stream.write_float(point) 338 | for point in self.node_aligned_scales: 339 | stream.write_vec3(point) 340 | for point in self.node_arbitrary_scale_factors: 341 | stream.write_vec3(point) 342 | # if dtsVersion >= 26: 343 | for quat in self.node_arbitrary_scale_rots: 344 | stream.write_quat(quat) 345 | stream.guard(9) 346 | 347 | # Ground transformations 348 | assert len(self.ground_translations) == len(self.ground_rotations) 349 | for point in self.ground_translations: 350 | self.write_vec3(point) 351 | for quat in self.ground_rotations: 352 | self.write_quat(quat) 353 | stream.guard(10) 354 | 355 | # Object states 356 | for state in self.objectstates: 357 | state.write(stream) 358 | stream.guard(11) 359 | 360 | # Decal states 361 | for state in self.decalstates: 362 | state.write(stream) 363 | stream.guard(12) 364 | 365 | # Triggers 366 | for trigger in self.triggers: 367 | trigger.write(stream) 368 | stream.guard(13) 369 | 370 | # Detail levels 371 | for lod in self.detail_levels: 372 | lod.write(stream) 373 | stream.guard(14) 374 | 375 | # Meshes 376 | for mesh in self.meshes: 377 | mesh.write(stream) 378 | stream.guard() 379 | 380 | # Names 381 | for name in self.names: 382 | stream.write_string(name) 383 | stream.guard() 384 | 385 | # Finished with the 3-buffer section 386 | stream.flush(fd) 387 | 388 | # Sequences 389 | ws(fd, "= 26: 400 | ws(fd, "i", len(mat.name)) 401 | else: 402 | ws(fd, "b", len(mat.name)) 403 | 404 | fd.write(mat.name.encode("cp1252")) 405 | for mat in self.materials: 406 | ws(fd, "i", mat.flags) 407 | for mat in self.materials: 408 | ws(fd, "i", mat.reflectanceMap) 409 | for mat in self.materials: 410 | ws(fd, "i", mat.bumpMap) 411 | for mat in self.materials: 412 | ws(fd, "i", mat.detailMap) 413 | if dtsVersion == 25: 414 | for mat in self.materials: 415 | fd.write(b"\x00\x00\x00\x00") 416 | for mat in self.materials: 417 | ws(fd, "f", mat.detailScale) 418 | for mat in self.materials: 419 | ws(fd, "f", mat.reflectance) 420 | 421 | def load(self, fd): 422 | stream = DtsInputStream(fd) 423 | 424 | # Header 425 | n_node = stream.read32() 426 | n_object = stream.read32() 427 | n_decal = stream.read32() 428 | n_subshape = stream.read32() 429 | n_ifl = stream.read32() 430 | 431 | if stream.dtsVersion < 22: 432 | n_noderotation = stream.read32() 433 | n_noderotation -= n_node 434 | n_nodetranslation = n_noderotation 435 | n_nodescaleuniform = 0 436 | n_nodescalealigned = 0 437 | n_nodescalearbitrary = 0 438 | else: 439 | n_noderotation = stream.read32() 440 | n_nodetranslation = stream.read32() 441 | n_nodescaleuniform = stream.read32() 442 | n_nodescalealigned = stream.read32() 443 | n_nodescalearbitrary = stream.read32() 444 | 445 | if stream.dtsVersion > 23: 446 | n_groundframe = stream.read32() 447 | else: 448 | n_groundframe = 0 449 | 450 | n_objectstate = stream.read32() 451 | n_decalstate = stream.read32() 452 | n_trigger = stream.read32() 453 | n_detaillevel = stream.read32() 454 | n_mesh = stream.read32() 455 | 456 | if stream.dtsVersion < 23: 457 | n_skin = stream.read32() 458 | else: 459 | n_skin = 0 460 | 461 | n_name = stream.read32() 462 | self.smallest_size = stream.read_float() 463 | self.smallest_detail_level = stream.read32() 464 | stream.guard() 465 | 466 | # Misc geometry properties 467 | self.radius = stream.read_float() 468 | self.radius_tube = stream.read_float() 469 | self.center = stream.read_vec3() 470 | self.bounds = stream.read_box() 471 | stream.guard() 472 | 473 | # Primary data 474 | self.nodes = [Node.read(stream) for i in range(n_node)] 475 | stream.guard() 476 | self.objects = [Object.read(stream) for i in range(n_object)] 477 | stream.guard() 478 | self.decals = [Decal.read(stream) for i in range(n_decal)] 479 | stream.guard() 480 | self.iflmaterials = [IflMaterial.read(stream) for i in range(n_ifl)] 481 | stream.guard() 482 | 483 | # Subshapes 484 | self.subshapes = [Subshape(0, 0, 0, 0, 0, 0) for i in range(n_subshape)] 485 | for i in range(n_subshape): 486 | self.subshapes[i].firstNode = stream.read32() 487 | for i in range(n_subshape): 488 | self.subshapes[i].firstObject = stream.read32() 489 | for i in range(n_subshape): 490 | self.subshapes[i].firstDecal = stream.read32() 491 | stream.guard() 492 | for i in range(n_subshape): 493 | self.subshapes[i].numNodes = stream.read32() 494 | for i in range(n_subshape): 495 | self.subshapes[i].numObjects = stream.read32() 496 | for i in range(n_subshape): 497 | self.subshapes[i].numDecals = stream.read32() 498 | stream.guard() 499 | 500 | # MeshIndexList (obsolete data) 501 | if stream.dtsVersion < 16: 502 | for i in range(stream.read32()): 503 | stream.read32() 504 | 505 | # Default translations and rotations 506 | self.default_rotations = [None] * n_node 507 | self.default_translations = [None] * n_node 508 | 509 | for i in range(n_node): 510 | self.default_rotations[i] = stream.read_quat() 511 | self.default_translations[i] = stream.read_vec3() 512 | 513 | # Animation translations and rotations 514 | self.node_translations = [stream.read_vec3() for i in range(n_nodetranslation)] 515 | self.node_rotations = [stream.read_quat() for i in range(n_noderotation)] 516 | stream.guard() 517 | 518 | # Default scales 519 | if stream.dtsVersion > 21: 520 | self.node_uniform_scales = [stream.read_float() for i in range(n_nodescaleuniform)] 521 | self.node_aligned_scales = [stream.read_vec3() for i in range(n_nodescalealigned)] 522 | self.node_arbitrary_scale_factors = [stream.read_vec3() for i in range(n_nodescalearbitrary)] 523 | self.node_arbitrary_scale_rots = [stream.read_quat() for i in range(n_nodescalearbitrary)] 524 | stream.guard() 525 | else: 526 | self.node_uniform_scales = [None] * n_nodescaleuniform 527 | self.node_aligned_scales = [None] * n_nodescalealigned 528 | self.node_arbitrary_scale_factors = [None] * n_nodescalearbitrary 529 | self.node_arbitrary_scale_rots = [None] * n_nodescalearbitrary 530 | # ??? 531 | # print(stream.dtsVersion) 532 | # print(stream.sequence) 533 | # if stream.dtsVersion > 21: 534 | # what1 = stream.read32() 535 | # what2 = stream.read32() 536 | # what3 = stream.read32() 537 | # stream.guard() 538 | 539 | # Ground transformations 540 | if stream.dtsVersion > 23: 541 | self.ground_translations = [stream.read_vec3() for i in range(n_groundframe)] 542 | self.ground_rotations = [stream.read_quat() for i in range(n_groundframe)] 543 | stream.guard() 544 | else: 545 | self.ground_translations = [None] * n_groundframe 546 | self.ground_rotations = [None] * n_groundframe 547 | 548 | # Object states 549 | self.objectstates = [ObjectState.read(stream) for i in range(n_objectstate)] 550 | stream.guard() 551 | 552 | # Decal states 553 | self.decalstates = [stream.read32() for i in range(n_decalstate)] 554 | stream.guard() 555 | 556 | # Triggers 557 | self.triggers = [Trigger.read(stream) for i in range(n_trigger)] 558 | stream.guard() 559 | 560 | # Detail levels 561 | self.detail_levels = [DetailLevel.read(stream) for i in range(n_detaillevel)] 562 | stream.guard() 563 | 564 | # Meshes 565 | self.meshes = [Mesh.read(stream) for i in range(n_mesh)] 566 | stream.guard() 567 | 568 | # Names 569 | self.names = [None] * n_name 570 | self._names_lookup = {} 571 | 572 | for i in range(n_name): 573 | self.names[i] = stream.read_string() 574 | self._names_lookup[self.names[i]] = i 575 | 576 | stream.guard() 577 | 578 | self.alpha_in = [None] * n_detaillevel 579 | self.alpha_out = [None] * n_detaillevel 580 | 581 | if stream.dtsVersion >= 26: 582 | for i in range(n_detaillevel): 583 | self.alphaIn[i] = stream.read32() 584 | for i in range(n_detaillevel): 585 | self.alphaOut[i] = stream.read32() 586 | 587 | # Done with the tribuffer section 588 | n_sequence = unpack("i", fd.read(4))[0] 589 | self.sequences = [None] * n_sequence 590 | 591 | for i in range(n_sequence): 592 | self.sequences[i] = Sequence.read(fd) 593 | 594 | material_type = unpack("b", fd.read(1))[0] 595 | assert material_type == 0x1 596 | 597 | n_material = unpack("i", fd.read(4))[0] 598 | self.materials = [Material() for i in range(n_material)] 599 | 600 | for i in range(n_material): 601 | if stream.dtsVersion >= 26: 602 | length = unpack("i", fd.read(4))[0] 603 | else: 604 | length = unpack("B", fd.read(1))[0] 605 | 606 | self.materials[i].name = fd.read(length).decode("cp1252") 607 | 608 | for i in range(n_material): 609 | self.materials[i].flags = unpack("I", fd.read(4))[0] 610 | for i in range(n_material): 611 | self.materials[i].reflectanceMap = unpack("i", fd.read(4))[0] 612 | for i in range(n_material): 613 | self.materials[i].bumpMap = unpack("i", fd.read(4))[0] 614 | for i in range(n_material): 615 | self.materials[i].detailMap = unpack("i", fd.read(4))[0] 616 | 617 | if stream.dtsVersion == 25: 618 | for i in range(n_material): 619 | fd.read(4) 620 | 621 | for i in range(n_material): 622 | self.materials[i].detailScale = unpack("f", fd.read(4))[0] 623 | for i in range(n_material): 624 | self.materials[i].reflectance = unpack("f", fd.read(4))[0] 625 | -------------------------------------------------------------------------------- /DtsTypes.py: -------------------------------------------------------------------------------- 1 | # vim: tabstop=8 noexpandtab 2 | 3 | from collections import namedtuple 4 | from struct import pack, unpack 5 | from enum import Enum 6 | 7 | import math 8 | from mathutils import Euler, Matrix, Quaternion, Vector 9 | 10 | def bit(n): 11 | return 1 << n 12 | 13 | class Box: 14 | def __init__(self, min, max): 15 | self.min = min 16 | self.max = max 17 | 18 | def __repr__(self): 19 | return "({}, {})".format(self.min, self.max) 20 | 21 | class Node: 22 | def __init__(self, name, parent=-1): 23 | self.name = name 24 | self.parent = parent 25 | 26 | # Unused 27 | self.firstObject = -1 28 | self.firstChild = -1 29 | self.nextSibling = -1 30 | 31 | def write(self, stream): 32 | stream.write32( 33 | self.name, self.parent, 34 | self.firstObject, self.firstChild, self.nextSibling) 35 | 36 | @classmethod 37 | def read(cls, stream): 38 | obj = cls(stream.read32(), stream.read32()) 39 | obj.firstObject = stream.read32() 40 | obj.firstChild = stream.read32() 41 | obj.nextSibling = stream.read32() 42 | return obj 43 | 44 | class Object: 45 | def __init__(self, name, numMeshes, firstMesh, node): 46 | self.name = name 47 | self.numMeshes = numMeshes 48 | self.firstMesh = firstMesh 49 | self.node = node 50 | 51 | # Unused 52 | self.nextSibling = -1 53 | self.firstDecal = -1 54 | 55 | def write(self, stream): 56 | stream.write32( 57 | self.name, self.numMeshes, self.firstMesh, 58 | self.node, self.nextSibling, self.firstDecal) 59 | 60 | @classmethod 61 | def read(cls, stream): 62 | obj = cls(stream.read32(), stream.read32(), stream.read32(), stream.read32()) 63 | obj.nextSibling = stream.read32() 64 | obj.firstDecal = stream.read32() 65 | return obj 66 | 67 | class IflMaterial: 68 | def __init__(self, name, slot): 69 | self.name = name 70 | self.slot = slot 71 | 72 | # Unused 73 | self.firstFrame = -1 74 | self.time = -1 75 | self.numFrames = -1 76 | 77 | def write(self, stream): 78 | stream.write32( 79 | self.name, self.slot, self.firstFrame, 80 | self.time, self.numFrames) 81 | 82 | @classmethod 83 | def read(cls, stream): 84 | instance = cls( 85 | stream.read32(), stream.read32()) 86 | instance.firstFrame = stream.read32() 87 | instance.time = stream.read32() 88 | instance.numFrames = stream.read32() 89 | return instance 90 | 91 | class Subshape: 92 | def __init__(self, firstNode, firstObject, firstDecal, numNodes, numObjects, numDecals): 93 | self.firstNode = firstNode 94 | self.firstObject = firstObject 95 | self.firstDecal = firstDecal 96 | self.numNodes = numNodes 97 | self.numObjects = numObjects 98 | self.numDecals = numDecals 99 | 100 | class ObjectState: 101 | def __init__(self, vis, frame, matFrame): 102 | self.vis = vis 103 | self.frame = frame 104 | self.matFrame = matFrame 105 | 106 | def write(self, stream): 107 | stream.write_float(self.vis) 108 | stream.write32(self.frame, self.matFrame) 109 | 110 | @classmethod 111 | def read(cls, stream): 112 | return cls(stream.read_float(), stream.read32(), stream.read32()) 113 | 114 | class Trigger: 115 | StateOn = bit(31) 116 | InvertOnReverse = bit(30) 117 | 118 | def __init__(self, state, pos): 119 | self.state = state 120 | self.pos = pos 121 | 122 | def write(self, stream): 123 | stream.write32(self.state) 124 | stream.write_float(self.pos) 125 | 126 | @classmethod 127 | def read(cls, stream): 128 | return cls(stream.read32(), stream.read_float()) 129 | 130 | class DetailLevel: 131 | def __init__(self, name, subshape, objectDetail, size, avgError=-1.0, maxError=-1.0, polyCount=0): 132 | self.name = name 133 | self.subshape = subshape 134 | self.objectDetail = objectDetail 135 | self.size = size 136 | 137 | # Unused 138 | self.avgError = -1.0 139 | self.maxError = -1.0 140 | self.polyCount = 0 141 | 142 | def write(self, stream): 143 | stream.write32(self.name, self.subshape, self.objectDetail) 144 | stream.write_float(self.size, self.avgError, self.maxError) 145 | stream.write32(self.polyCount) 146 | 147 | @classmethod 148 | def read(cls, stream): 149 | obj = cls(stream.read32(), stream.read32(), stream.read32(), stream.read_float()) 150 | obj.avgError = stream.read_float() 151 | obj.maxError = stream.read_float() 152 | obj.polyCount = stream.read32() 153 | return obj 154 | 155 | class Primitive: 156 | Triangles = 0x00000000 157 | Strip = 0x40000000 158 | Fan = 0x80000000 159 | TypeMask = 0xC0000000 160 | Indexed = 0x20000000 161 | NoMaterial = 0x10000000 162 | MaterialMask = 0x0FFFFFFF 163 | 164 | def __init__(self, firstElement, numElements, type): 165 | self.firstElement = firstElement 166 | self.numElements = numElements 167 | self.type = type 168 | 169 | def write(self, stream): 170 | stream.write16(self.firstElement, self.numElements) 171 | stream.write32(self.type) 172 | 173 | @classmethod 174 | def read(cls, stream): 175 | return cls(stream.read16(), stream.read16(), stream.read32()) 176 | 177 | class Mesh: 178 | StandardType = 0 179 | SkinType = 1 180 | DecalType = 2 181 | SortedType = 3 182 | NullType = 4 183 | TypeMask = 7 184 | 185 | TypeName = ["Standard", "Skin", "Decal", "Sorted", "Null"] 186 | 187 | Billboard = bit(31) 188 | HasDetailTexture = bit(30) 189 | BillboardZAxis = bit(29) 190 | UseEncodedNormals = bit(28) 191 | 192 | def __init__(self, mtype): 193 | self.bounds = Box(Vector(), Vector()) 194 | self.center = Vector() 195 | self.radius = 0 196 | self.numFrames = 1 197 | self.numMatFrames = 1 198 | self.vertsPerFrame = 1 199 | self.parent = -1 200 | self.type = mtype 201 | self.verts = [] 202 | self.tverts = [] 203 | self.normals = [] 204 | self.enormals = [] 205 | self.primitives = [] 206 | self.indices = [] 207 | self.mindices = [] 208 | 209 | self.bones = [] 210 | self.influences = [] 211 | 212 | def get_type(self): 213 | return self.type & Mesh.TypeMask 214 | 215 | def get_flags(self, flag=0xFFFFFFFF): 216 | return self.type & flag 217 | 218 | def set_flags(self, flag): 219 | self.type |= flag 220 | 221 | def transformed_verts(self, mat): 222 | return map(lambda vert: mat * vert, self.verts) 223 | 224 | def calculate_bounds_mat(self, mat): 225 | box = Box( 226 | Vector(( 10e30, 10e30, 10e30)), 227 | Vector((-10e30, -10e30, -10e30))) 228 | 229 | for vert in self.transformed_verts(mat): 230 | box.min.x = min(box.min.x, vert.x) 231 | box.min.y = min(box.min.y, vert.y) 232 | box.min.z = min(box.min.z, vert.z) 233 | box.max.x = max(box.max.x, vert.x) 234 | box.max.y = max(box.max.y, vert.y) 235 | box.max.z = max(box.max.z, vert.z) 236 | 237 | return box 238 | 239 | def calculate_radius_mat(self, mat, center): 240 | radius = 0.0 241 | 242 | for vert in self.transformed_verts(mat): 243 | radius = max(radius, (vert - center).length) 244 | 245 | return radius 246 | 247 | def calculate_radius_tube_mat(self, mat, center): 248 | radius = 0 249 | 250 | for vert in self.transformed_verts(mat): 251 | delta = vert - center 252 | radius = max(radius, Vector((delta.x, delta.y)).length) 253 | 254 | return radius 255 | 256 | def write(self, stream): 257 | mtype = self.get_type() 258 | stream.write32(self.type) 259 | 260 | if mtype == Mesh.NullType: 261 | return 262 | 263 | stream.guard() 264 | stream.write32(self.numFrames, self.numMatFrames, self.parent) 265 | stream.write_box(self.bounds) 266 | stream.write_vec3(self.center) 267 | stream.write_float(self.radius) 268 | 269 | # Geometry data 270 | stream.write32(len(self.verts)) 271 | for vert in self.verts: 272 | stream.write_vec3(vert) 273 | stream.write32(len(self.tverts)) 274 | for tvert in self.tverts: 275 | stream.write_vec2(tvert) 276 | 277 | assert len(self.normals) == len(self.verts) 278 | assert len(self.enormals) == len(self.verts) 279 | for normal in self.normals: 280 | stream.write_vec3(normal) 281 | for enormal in self.enormals: 282 | stream.write8(enormal) 283 | 284 | # Primitives and other stuff 285 | stream.write32(len(self.primitives)) 286 | for prim in self.primitives: 287 | prim.write(stream) 288 | 289 | #if stream.dtsVersion >= 25: 290 | stream.write32(len(self.indices)) 291 | stream.write16(*self.indices) 292 | stream.write32(len(self.mindices)) 293 | stream.write16(*self.mindices) 294 | stream.write32(self.vertsPerFrame) 295 | stream.write32(self.get_flags()) 296 | stream.guard() 297 | 298 | if mtype == Mesh.SkinType: 299 | stream.write32(len(self.verts)) 300 | for v in self.verts: 301 | stream.write_vec3(v) 302 | for v in self.normals: 303 | stream.write_vec3(v) 304 | stream.write8(*self.enormals) 305 | 306 | stream.write32(len(self.bones)) 307 | for _, initial_transform in self.bones: 308 | for f in initial_transform: 309 | stream.write_float(f) 310 | 311 | stream.write32(len(self.influences)) 312 | for vertex_index, _, _ in self.influences: 313 | stream.write32(vertex_index) 314 | for _, bone_index, _ in self.influences: 315 | stream.write32(bone_index) 316 | for _, _, weight in self.influences: 317 | stream.write_float(weight) 318 | 319 | stream.write32(len(self.bones)) 320 | for node_index, _ in self.bones: 321 | stream.write32(node_index) 322 | 323 | stream.guard() 324 | elif mtype != Mesh.StandardType: 325 | raise ValueError("cannot write {} mesh".format(mtype)) 326 | 327 | def read_standard_mesh(self, stream): 328 | stream.guard() 329 | 330 | self.numFrames = stream.read32() 331 | self.numMatFrames = stream.read32() 332 | self.parent = stream.read32() 333 | self.bounds = stream.read_box() 334 | self.center = stream.read_vec3() 335 | self.radius = stream.read_float() 336 | 337 | # Geometry data 338 | n_vert = stream.read32() 339 | self.verts = [stream.read_vec3() for i in range(n_vert)] 340 | n_tvert = stream.read32() 341 | self.tverts = [stream.read_vec2() for i in range(n_tvert)] 342 | self.normals = [stream.read_vec3() for i in range(n_vert)] 343 | # TODO: don't read this when not relevant 344 | self.enormals = [stream.read8() for i in range(n_vert)] 345 | 346 | # Primitives and other stuff 347 | self.primitives = [Primitive.read(stream) for i in range(stream.read32())] 348 | self.indices = [stream.read16() for i in range(stream.read32())] 349 | self.mindices = [stream.read16() for i in range(stream.read32())] 350 | self.vertsPerFrame = stream.read32() 351 | self.set_flags(stream.read32()) 352 | 353 | stream.guard() 354 | 355 | def read_skin_mesh(self, stream): 356 | self.read_standard_mesh(stream) 357 | 358 | sz = stream.read32() 359 | _ = [stream.read_vec3() for i in range(sz)] 360 | _ = [stream.read_vec3() for i in range(sz)] 361 | _ = [stream.read8() for i in range(sz)] 362 | 363 | sz = stream.read32() 364 | self.bones = [[None, None] for i in range(sz)] 365 | 366 | for i in range(sz): 367 | initial_transform = [stream.read_float() for i in range(16)] 368 | self.bones[i][1] = initial_transform 369 | 370 | sz = stream.read32() 371 | self.influences = [[None, None, None] for i in range(sz)] 372 | 373 | for i in range(sz): 374 | self.influences[i][0] = stream.read32() 375 | for i in range(sz): 376 | self.influences[i][1] = stream.read32() 377 | for i in range(sz): 378 | self.influences[i][2] = stream.read_float() 379 | 380 | sz = stream.read32() 381 | assert sz == len(self.bones) 382 | 383 | for i in range(sz): 384 | self.bones[i][0] = stream.read32() 385 | 386 | stream.guard() 387 | 388 | @classmethod 389 | def read(cls, stream): 390 | mtype = stream.read32() & Mesh.TypeMask 391 | mesh = cls(mtype) 392 | 393 | if mtype == Mesh.StandardType: 394 | mesh.read_standard_mesh(stream) 395 | elif mtype == Mesh.SkinType: 396 | mesh.read_skin_mesh(stream) 397 | # others here 398 | elif mtype == Mesh.NullType: 399 | pass 400 | else: 401 | raise ValueError("don't know how to read {} mesh".format(mtype)) 402 | 403 | return mesh 404 | 405 | class Material: 406 | SWrap = 0x00000001 407 | TWrap = 0x00000002 408 | Translucent = 0x00000004 409 | Additive = 0x00000008 410 | Subtractive = 0x00000010 411 | SelfIlluminating = 0x00000020 412 | NeverEnvMap = 0x00000040 413 | NoMipMap = 0x00000080 414 | MipMapZeroBorder = 0x00000100 415 | IFLMaterial = 0x08000000 416 | IFLFrame = 0x10000000 417 | DetailMap = 0x20000000 418 | BumpMap = 0x40000000 419 | ReflectanceMap = 0x80000000 420 | AuxiliaryMask = 0xE0000000 421 | 422 | def __init__(self, name="", flags=0, 423 | reflectanceMap=-1, bumpMap=-1, detailMap=-1, 424 | detailScale=1.0, reflectance=0.0): 425 | self.name = name 426 | self.flags = flags 427 | self.reflectanceMap = reflectanceMap 428 | self.bumpMap = bumpMap 429 | self.detailMap = detailMap 430 | self.detailScale = detailScale 431 | self.reflectance = reflectance 432 | 433 | def read_bit_set(fd): 434 | dummy, numWords = unpack("> 5] & (1 << (i & 31))) != 0 for i in range(total)] 438 | 439 | def write_bit_set(fd, bits): 440 | numWords = int(math.ceil(len(bits) / 32.0)) 441 | words = [0] * numWords 442 | 443 | for i, bit in enumerate(bits): 444 | if bit: 445 | words[i >> 5] |= 1 << (i & 31) 446 | 447 | fd.write(pack(" Export -> Torque (.dts)` in the menu or press spacebar and search for `Export DTS` in the quick menu. The steps are the same for DSQ, only the operator is named `Export DSQ`. 6 | 7 | #### How do I import from a DTS file? 8 | 9 | Go to `File -> Import -> Torque (.dts)` in the menu or press spacebar and search for `Import DTS` in the quick menu. The steps are the same for DSQ, only the operator is named `Import DSQ`. 10 | 11 | #### I have colored/textured materials on my model in Blender but the model appears white when I export it. 12 | 13 | Torque uses texture files for materials that are looked up by the name of your material. For a material named "boxTrapOrange", you will need a PNG/JPG file with the same name. For solid colors (diffuse in Blender), you will need a texture as well. You will need to either create these yourself or use the option to generate colored textures on export. 14 | 15 | #### My model is bigger/smaller, rotated wrong or in the wrong place when I export it. 16 | 17 | Select your mesh and press `Ctrl-A` to apply location, rotation or scale. This will apply the mesh transforms to the actual geometry and clear them, which you need to do because DTS does not support them. 18 | 19 | #### How do I make a material transparent? 20 | 21 | Check the "Transparency" setting under material properties. 22 | 23 | #### How do I make a material self illuminating? 24 | 25 | Check the "Shadeless" setting under material properties. 26 | 27 | #### How do I make a transparent material subtractive? 28 | 29 | Add a custom property named "blendMode" and set it to "subtractive" in the material properties. 30 | 31 | ![](http://i.imgur.com/exQ5sqL.png) 32 | 33 | ![](http://i.imgur.com/FXufzLb.png) 34 | 35 | #### How do I create IFL materials? 36 | 37 | Set the following custom properties on the material: 38 | 39 | * `iflName` 40 | * `iflFirstFrame` 41 | * `iflNumFrames` 42 | * `iflTime` 43 | 44 | I have yet to figure out how they work myself, so I can't quite document them yet. 45 | 46 | #### How do I make a material refer to a different texture name than the material name? 47 | 48 | Either use the # suffix (the name "foo#bar" would look for texture "foo") or set the custom property "texture". 49 | 50 | #### My UV mapping is too small/big and doesn't even seem to loop. 51 | 52 | Make sure your texture resolution is a power of 2 (..., 128, 256, 512, 1024, ...). 53 | 54 | #### My UV mapping is distorted/shaped differently. 55 | 56 | The exporter will automatically triangulate all meshes on export, and certain UV mappings you can do on quads (or n-gons) are impossible on triangles. Instead, triangulate the geometry yourself in Blender and do your UV mapping on that to get correct output. 57 | 58 | #### What is the mesh size limit? 59 | 60 | Right now you are limited to 21845 triangles. Explanation: You are limited to 65536 vertex indices and each face (triangle) uses three. `65536 / 3`. 61 | 62 | #### What is the mesh number limit? 63 | 64 | There is effectively no limit, but stay below 256 as anything higher sends invalid node update packets and will have random bad effects on clients. 65 | 66 | #### How do I put a mesh in a detail level/LOD? How do detail levels work in this plugin? 67 | 68 | The exporter will use the object group name as the detail level name if present. If your mesh is named "Col-X" it will put it in "Collision-X". Otherwise it uses "detail32" by default. 69 | 70 | #### How do I create multiple LODs of the same node? 71 | 72 | First of all, each LOD should be a separate mesh, and as stated in the previous question, set the object group name for each. Since Blender does not allow multiple objects with the same name, you will need to change the name. 73 | 74 | For this reason the plugin ignores any part of a name after and including a # character. I recommend using something related to the LOD as the suffix. For example, `horn#32` and `horn#128` for an object named `horn` in `detail32` and `detail128`. 75 | 76 | #### How do I add a collision mesh? 77 | 78 | Add a mesh named "Col-X" where X is a number from 1 to 8(?). It must be in the "Collision-X" detail level, but the exporter will automatically do this for you based on the name. 79 | 80 | #### How do I add a raycast collision mesh? 81 | 82 | Add a mesh named "LOSCol-X" where X is a number from 9 to 16(?). It must be in the "LOS-X" detail level. The exporter should automatically do this for you but that hasn't been implemented yet. 83 | 84 | #### Why are my collision meshes stopping players but not raycasts and projectiles? 85 | 86 | I think it's because your mesh is concave instead of convex. 87 | 88 | #### Can I tell the exporter to always ignore a mesh? 89 | 90 | Set the object group name ('detail level') to "\_\_ignore\_\_". 91 | 92 | #### How do I define the hierarchy of nodes for animations, mount points, muzzle points, etc.? 93 | 94 | Ideally this is where the FAQ would just say "use bones", but this plugin does not support bones. It's basically the biggest remaining feature. 95 | 96 | Regardless, create an empty for each node (visual type does not matter) and parent them to each other in Blender. 97 | 98 | ![](http://i.imgur.com/a89m7Hm.png) 99 | 100 | #### How do I manually set the bounding box of my model? 101 | 102 | Name one of your meshes "bounds". 103 | 104 | #### Does this plugin care about layers? 105 | 106 | No. It will export from every layer. 107 | 108 | ### When I import multiple animations they are messed up. 109 | 110 | This is because the animations are "blending into each other", so to speak; the last keyframes of animations placed earlier in the timeline are staying and affecting later animations. A solution would have to be a root pose keyframe on every node inbetween animated animations, but the importer doesn't do this yet. 111 | 112 | #### How do I define an animation? 113 | 114 | Create a marker at the start frame and end frame of your animation on the timeline and name them appropriately: if your animation should be called "glide" then name them "glide:start" and "glide:end". 115 | 116 | #### Can I use the different interpolation types for keyframe animations? 117 | 118 | Yes. 119 | 120 | #### How is the speed/duration of an animation determined? 121 | 122 | It should have the same length in-game and in Blender. It will evaluate and export the animation at the scene FPS (24 by default). 123 | 124 | #### How do I make an animation cyclic? How do I set the priority of an animations? How do I make an animation blend? 125 | 126 | You need to create a text block (in the Text Editor view) named "Sequences" and list the properties of your animations. Here is an example of the format (in this case, the properties of the default player animations): 127 | 128 | ``` 129 | activate: priority 64, blend 130 | activate2: priority 64, blend 131 | armattack: priority 64, cyclic, blend 132 | armreadyboth: priority 14 133 | armreadyleft: priority 14 134 | armreadyright: priority 14 135 | back: priority 12, cyclic 136 | crouch: priority 20 137 | crouchback: priority 21, cyclic 138 | crouchrun: priority 21, cyclic 139 | crouchside: priority 21, cyclic 140 | death1: priority 128 141 | fall: priority 7 142 | headside: priority 0, blend 143 | headup: priority 0, blend 144 | leftrecoil: priority 64, blend 145 | look: priority 8, blend 146 | plant: priority 64, blend 147 | root: priority 0 148 | rotccw: priority 64, blend 149 | rotcw: priority 64, blend 150 | run: priority 12, cyclic 151 | shiftaway: priority 64, blend 152 | shiftdown: priority 64, blend 153 | shiftleft: priority 64, blend 154 | shiftright: priority 64, blend 155 | shiftto: priority 64, blend 156 | shiftup: priority 64, blend 157 | side: priority 12, cyclic 158 | sit: priority 64 159 | spearready: priority 14, blend 160 | spearthrow: priority 14, blend 161 | standjump: priority 8, blend 162 | talk: priority 0, cyclic, blend 163 | undo: priority 64, blend 164 | wrench: priority 64, blend 165 | ``` 166 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016 Nick Smith 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # io_scene_dts 2 | 3 | A modern Blender plugin for working with DTS/DSQ assets. Includes an importer and an exporter. 4 | 5 | ### Goals 6 | 7 | * Implement import and export of all detail levels, nodes, objects, meshes, materials and sequences from/to DTS/DSQ files. 8 | * Support all versions of the DTS file format from the one used in TGE 1.0 to the one used in the current version of T3D. 9 | * Support the newest version of Blender (at the time of writing, 2.77), unlike the original DTS plugin (2.49b). 10 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | bl_info = { 2 | "name": "Torque DTS format", 3 | "author": "port", 4 | "version": (0, 2, 0), 5 | "blender": (2, 74, 0), 6 | "location": "File > Import-Export", 7 | "description": "Import-Export DTS, Import DTS mesh, UV's, " 8 | "materials and textures", 9 | "warning": "", 10 | "support": 'COMMUNITY', 11 | "category": "Import-Export"} 12 | 13 | if "bpy" in locals(): 14 | import importlib 15 | if "import_dts" in locals(): 16 | importlib.reload(import_dts) 17 | if "import_dsq" in locals(): 18 | importlib.reload(import_dsq) 19 | if "export_dts" in locals(): 20 | importlib.reload(export_dts) 21 | if "export_dsq" in locals(): 22 | importlib.reload(export_dsq) 23 | 24 | is_developer = False 25 | try: 26 | from .developer import is_developer 27 | except ImportError: 28 | pass 29 | 30 | if is_developer: 31 | debug_prop_options = set() 32 | else: 33 | debug_prop_options = {'HIDDEN'} 34 | 35 | import bpy 36 | from bpy.props import (BoolProperty, 37 | FloatProperty, 38 | IntProperty, 39 | StringProperty, 40 | EnumProperty, 41 | PointerProperty, 42 | ) 43 | from bpy_extras.io_utils import (ImportHelper, 44 | ExportHelper, 45 | ) 46 | 47 | class ImportDTS(bpy.types.Operator, ImportHelper): 48 | """Load a Torque DTS File""" 49 | bl_idname = "import_scene.dts" 50 | bl_label = "Import DTS" 51 | bl_options = {'PRESET', 'UNDO'} 52 | 53 | filename_ext = ".dts" 54 | filter_glob = StringProperty( 55 | default="*.dts", 56 | options={'HIDDEN'}, 57 | ) 58 | 59 | reference_keyframe = BoolProperty( 60 | name="Reference keyframe", 61 | description="Set a keyframe with the reference pose for blend animations", 62 | default=True, 63 | ) 64 | 65 | import_sequences = BoolProperty( 66 | name="Import sequences", 67 | description="Automatically add keyframes for embedded sequences", 68 | default=True, 69 | ) 70 | 71 | use_armature = BoolProperty( 72 | name="Experimental: Skeleton as armature", 73 | description="Import bones into an armature instead of empties. Does not work with 'Import sequences'", 74 | default=False, 75 | ) 76 | 77 | debug_report = BoolProperty( 78 | name="Write debug report", 79 | description="Dump out all the information from the DTS to a file", 80 | options=debug_prop_options, 81 | default=False, 82 | ) 83 | 84 | def execute(self, context): 85 | from . import import_dts 86 | 87 | keywords = self.as_keywords(ignore=("filter_glob", "split_mode")) 88 | return import_dts.load(self, context, **keywords) 89 | 90 | class ImportDSQ(bpy.types.Operator, ImportHelper): 91 | """Load a Torque DSQ File""" 92 | bl_idname = "import_scene.dsq" 93 | bl_label = "Import DSQ" 94 | bl_options = {'PRESET', 'UNDO'} 95 | 96 | filename_ext = ".dsq" 97 | filter_glob = StringProperty( 98 | default="*.dsq", 99 | options={'HIDDEN'}, 100 | ) 101 | 102 | debug_report = BoolProperty( 103 | name="Write debug report", 104 | description="Dump out all the information from the DSQ to a file", 105 | options=debug_prop_options, 106 | default=False, 107 | ) 108 | 109 | def execute(self, context): 110 | from . import import_dsq 111 | 112 | keywords = self.as_keywords(ignore=("filter_glob", "split_mode")) 113 | return import_dsq.load(self, context, **keywords) 114 | 115 | class ExportDTS(bpy.types.Operator, ExportHelper): 116 | """Save a Torque DTS File""" 117 | 118 | bl_idname = "export_scene.dts" 119 | bl_label = 'Export DTS' 120 | bl_options = {'PRESET'} 121 | 122 | filename_ext = ".dts" 123 | filter_glob = StringProperty( 124 | default="*.dts", 125 | options={'HIDDEN'}, 126 | ) 127 | 128 | select_object = BoolProperty( 129 | name="Selected objects only", 130 | description="Export selected objects (empties, meshes) only", 131 | default=False, 132 | ) 133 | select_marker = BoolProperty( 134 | name="Selected markers only", 135 | description="Export selected timeline markers only, used for sequences", 136 | default=False, 137 | ) 138 | 139 | blank_material = BoolProperty( 140 | name="Blank material", 141 | description="Add a blank material to meshes with none assigned", 142 | default=True, 143 | ) 144 | 145 | generate_texture = EnumProperty( 146 | name="Generate textures", 147 | description="Automatically generate solid color textures for materials", 148 | default="disabled", 149 | items=( 150 | ("disabled", "Disabled", "Do not generate any textures"), 151 | ("custom-missing", "Custom (if missing)", "Generate textures for non-default material names if not already present"), 152 | ("custom-always", "Custom (always)", "Generate textures for non-default material names"), 153 | ("all-missing", "All (if missing)", "Generate textures for all materials if not already present"), 154 | ("all-always", "All (always)", "Generate textures for all materials")) 155 | ) 156 | 157 | raw_colors = BoolProperty( 158 | name="Use raw material colors", 159 | description="Use raw rgb material colors when generating textures", 160 | default = False, 161 | ) 162 | 163 | dsq_compat = BoolProperty( 164 | name="Export with DSQ compatibility", 165 | description="Use to ensure imported and reexported models work with previously existing DSQ's. Do not enable if you are not reexporting an imported model.", 166 | default=False, 167 | ) 168 | 169 | apply_modifiers = BoolProperty( 170 | name="Apply modifiers", 171 | description="Apply modifiers to meshes", 172 | default=True, 173 | ) 174 | 175 | debug_report = BoolProperty( 176 | name="Write debug report", 177 | description="Dump out all the information from the DTS to a file", 178 | options=debug_prop_options, 179 | default=False, 180 | ) 181 | 182 | check_extension = True 183 | 184 | def execute(self, context): 185 | from . import export_dts 186 | keywords = self.as_keywords(ignore=("check_existing", "filter_glob")) 187 | return export_dts.save(self, context, **keywords) 188 | 189 | class ExportDSQ(bpy.types.Operator, ExportHelper): 190 | """Save many Torque DSQ Files""" 191 | 192 | bl_idname = "export_scene.dsq" 193 | bl_label = 'Export DSQ' 194 | bl_options = {'PRESET'} 195 | 196 | filename_ext = ".dsq" 197 | filter_glob = StringProperty( 198 | default="*.dsq", 199 | options={'HIDDEN'}, 200 | ) 201 | 202 | select_marker = BoolProperty( 203 | name="Selection only", 204 | description="Export selected timeline markers only", 205 | default=False, 206 | ) 207 | 208 | debug_report = BoolProperty( 209 | name="Write debug report", 210 | description="Dump out all the information from the DSQ to a file", 211 | options=debug_prop_options, 212 | default=False, 213 | ) 214 | 215 | check_extension = True 216 | 217 | def execute(self, context): 218 | from . import export_dsq 219 | keywords = self.as_keywords(ignore=("check_existing", "filter_glob")) 220 | return export_dsq.save(self, context, **keywords) 221 | 222 | class SplitMeshIndex(bpy.types.Operator): 223 | """Split a mesh into new meshes limiting the number of indices""" 224 | 225 | bl_idname = "mesh.split_mesh_vindex" 226 | bl_label = "Split mesh by indices" 227 | bl_options = {"REGISTER", "UNDO"} 228 | 229 | def execute(self, context): 230 | limit = 10922 231 | 232 | ob = context.active_object 233 | 234 | if ob is None or ob.type != "MESH": 235 | self.report({"ERROR"}, "Select a mesh object first") 236 | return {"FINISHED"} 237 | 238 | me = ob.data 239 | 240 | out_me = None 241 | out_ob = None 242 | 243 | def split(): 244 | nonlocal out_me 245 | nonlocal out_ob 246 | 247 | if out_me is not None: 248 | out_me.validate() 249 | out_me.update() 250 | 251 | out_me = bpy.data.meshes.new(ob.name) 252 | out_ob = bpy.data.objects.new(ob.name, out_me) 253 | 254 | context.scene.objects.link(out_ob) 255 | 256 | # For now, copy all verts over. See what happens? 257 | out_me.vertices.add(len(me.vertices)) 258 | 259 | for vert, out_vert in zip(me.vertices, out_me.vertices): 260 | out_vert.co = vert.co 261 | out_vert.normal = vert.normal 262 | 263 | split() 264 | 265 | for poly in me.polygons: 266 | if poly.loop_total >= limit: 267 | continue 268 | 269 | if len(out_me.loops) + poly.loop_total > limit: 270 | split() 271 | 272 | loop_start = len(out_me.loops) 273 | out_me.loops.add(poly.loop_total) 274 | 275 | out_me.polygons.add(1) 276 | out_poly = out_me.polygons[-1] 277 | 278 | out_poly.loop_start = loop_start 279 | out_poly.loop_total = poly.loop_total 280 | out_poly.use_smooth = poly.use_smooth 281 | 282 | for loop_index, out_loop_index in zip(poly.loop_indices, out_poly.loop_indices): 283 | loop = me.loops[loop_index] 284 | out_loop = out_me.loops[out_loop_index] 285 | 286 | out_loop.normal = loop.normal 287 | out_loop.vertex_index = loop.vertex_index 288 | 289 | out_me.validate() 290 | out_me.update() 291 | 292 | return {"FINISHED"} 293 | 294 | class HideBlockheadNodes(bpy.types.Operator): 295 | """Set all non-default Blockhead model apparel meshes as hidden""" 296 | 297 | bl_idname = "mesh.hide_blockhead_nodes" 298 | bl_label = "Hide Blockhead nodes on selection" 299 | bl_options = {"REGISTER", "UNDO"} 300 | 301 | blacklist = ( 302 | "copHat", 303 | "knitHat", 304 | "pack", 305 | "quiver", 306 | "femChest", 307 | "epauletsRankB", 308 | "epauletsRankC", 309 | "epauletsRankD", 310 | "epauletsRankA", 311 | "skirtHip", 312 | "skirtTrimRight", 313 | "RHook", 314 | "RarmSlim", 315 | "LHook", 316 | "LarmSlim", 317 | "PointyHelmet", 318 | "Helmet", 319 | "bicorn", 320 | "scoutHat", 321 | "FlareHelmet", 322 | "triPlume", 323 | "plume", 324 | "septPlume", 325 | "tank", 326 | "armor", 327 | "cape", 328 | "Bucket", 329 | "epaulets", 330 | "ShoulderPads", 331 | "Rski", 332 | "Rpeg", 333 | "Lski", 334 | "Lpeg", 335 | "skirtTrimLeft", 336 | "Visor", 337 | ) 338 | 339 | def execute(self, context): 340 | for ob in context.scene.objects: 341 | if ob.select and ob.type == "MESH" and ob.name in self.blacklist: 342 | ob.hide = True 343 | 344 | return {"FINISHED"} 345 | 346 | class TorqueMaterialProperties(bpy.types.PropertyGroup): 347 | blend_mode = EnumProperty( 348 | name="Blend mode", 349 | items=( 350 | ("ADDITIVE", "Additive", "White is white, black is transparent"), 351 | ("SUBTRACTIVE", "Subtractive", "White is black, black is transparent"), 352 | ("NONE", "None", "I don't know how to explain this, try it yourself"), 353 | ), 354 | default="ADDITIVE") 355 | s_wrap = BoolProperty(name="S-Wrap", default=True) 356 | t_wrap = BoolProperty(name="T-Wrap", default=True) 357 | no_mipmaps = BoolProperty(name="No Mipmaps", default=True) 358 | mip_bzero = BoolProperty(name="Mipmap Zero Border", default=False) 359 | use_ifl = BoolProperty(name="IFL") 360 | ifl_name = StringProperty(name="Name") 361 | 362 | class TorqueMaterialPanel(bpy.types.Panel): 363 | bl_idname = "MATERIAL_PT_torque" 364 | bl_label = "Torque" 365 | bl_space_type = 'PROPERTIES' 366 | bl_region_type = 'WINDOW' 367 | bl_context = "material" 368 | bl_options = {'DEFAULT_CLOSED'} 369 | 370 | @classmethod 371 | def poll(cls, context): 372 | return (context.material is not None) 373 | 374 | def draw(self, context): 375 | layout = self.layout 376 | obj = context.material 377 | 378 | sublayout = layout.row() 379 | sublayout.enabled = obj.use_transparency 380 | sublayout.prop(obj.torque_props, "blend_mode", expand=True) 381 | 382 | row = layout.row() 383 | row.prop(obj.torque_props, "use_ifl") 384 | sublayout = row.column() 385 | sublayout.enabled = obj.torque_props.use_ifl 386 | sublayout.prop(obj.torque_props, "ifl_name", text="") 387 | sublayout = layout.column() 388 | sublayout.enabled = obj.torque_props.use_ifl 389 | 390 | row = layout.row() 391 | sublayout = row.column() 392 | sublayout.prop(obj.torque_props, "s_wrap") 393 | sublayout = row.column() 394 | sublayout.prop(obj.torque_props, "t_wrap") 395 | 396 | row = layout.row() 397 | sublayout = row.column() 398 | sublayout.prop(obj.torque_props, "no_mipmaps") 399 | sublayout = row.column() 400 | sublayout.enabled = not obj.torque_props.no_mipmaps 401 | sublayout.prop(obj.torque_props, "mip_bzero") 402 | 403 | def menu_func_import_dts(self, context): 404 | self.layout.operator(ImportDTS.bl_idname, text="Torque (.dts)") 405 | 406 | def menu_func_import_dsq(self, context): 407 | self.layout.operator(ImportDSQ.bl_idname, text="Torque Sequences (.dsq)") 408 | 409 | def menu_func_export_dts(self, context): 410 | self.layout.operator(ExportDTS.bl_idname, text="Torque (.dts)") 411 | 412 | def menu_func_export_dsq(self, context): 413 | self.layout.operator(ExportDSQ.bl_idname, text="Torque Sequences (.dsq)") 414 | 415 | def register(): 416 | bpy.utils.register_module(__name__) 417 | 418 | bpy.types.Material.torque_props = PointerProperty( 419 | type=TorqueMaterialProperties) 420 | 421 | bpy.types.INFO_MT_file_import.append(menu_func_import_dts) 422 | bpy.types.INFO_MT_file_import.append(menu_func_import_dsq) 423 | bpy.types.INFO_MT_file_export.append(menu_func_export_dts) 424 | bpy.types.INFO_MT_file_export.append(menu_func_export_dsq) 425 | 426 | def unregister(): 427 | bpy.utils.unregister_module(__name__) 428 | 429 | del bpy.types.Material.torque_props 430 | 431 | bpy.types.INFO_MT_file_import.remove(menu_func_import_dts) 432 | bpy.types.INFO_MT_file_import.remove(menu_func_import_dsq) 433 | bpy.types.INFO_MT_file_export.remove(menu_func_export_dts) 434 | bpy.types.INFO_MT_file_export.remove(menu_func_export_dsq) 435 | 436 | if __name__ == "__main__": 437 | register() -------------------------------------------------------------------------------- /export_dsq.py: -------------------------------------------------------------------------------- 1 | import bpy, sys 2 | from math import sqrt, pi 3 | from operator import attrgetter 4 | from itertools import groupby 5 | 6 | from .DsqFile import DsqFile 7 | from .DtsTypes import * 8 | from .util import fail, evaluate_all, find_reference, array_from_fcurves, \ 9 | array_from_fcurves_rotation, fcurves_keyframe_in_range, find_reference 10 | from .shared_export import find_seqs 11 | 12 | def save(operator, context, filepath, 13 | select_marker=False, 14 | debug_report=False): 15 | print("Exporting scene to DSQ") 16 | 17 | scene = context.scene 18 | dsq = DsqFile() 19 | 20 | # Find all the sequences to export 21 | sequences, sequence_flags = find_seqs(context.scene, select_marker) 22 | 23 | # Seek to reference frame if present before reading nodes 24 | reference_frame = find_reference(scene) 25 | 26 | if reference_frame is not None: 27 | print("Note: Seeking to reference frame at", reference_frame) 28 | scene.frame_set(reference_frame) 29 | 30 | # Create a DTS node for every armature/empty in the scene 31 | node_ob = {} 32 | node_transform = {} 33 | 34 | def traverse_node(node): 35 | node_ob[node.name] = node 36 | node_transform[node] = node.matrix_local.decompose() 37 | dsq.nodes.append(node.name) 38 | 39 | for child in node.children: 40 | if child.type == "EMPTY": 41 | traverse_node(child) 42 | 43 | for ob in scene.objects: 44 | if ob.type == "EMPTY" and not ob.parent: 45 | traverse_node(ob) 46 | 47 | reference_frame = find_reference(context.scene) 48 | 49 | # NodeOrder backwards compatibility 50 | if "NodeOrder" in bpy.data.texts: 51 | print("Warning: NodeOrder found, using it for backwards compatibility") 52 | order = bpy.data.texts["NodeOrder"].as_string().split("\n") 53 | order_key = {name: i for i, name in enumerate(order)} 54 | else: 55 | order_key = {} 56 | 57 | # Sort by node indices from the DTS 58 | dsq.nodes.sort(key=lambda n: 59 | order_key.get(n, node_ob[n].get("nodeIndex", sys.maxsize))) 60 | 61 | node_index = {node_ob[name]: i for i, name in enumerate(dsq.nodes)} 62 | auto_root_index = None 63 | animated_nodes = [] 64 | 65 | for node in dsq.nodes: 66 | ob = node_ob[node] 67 | data = ob.animation_data 68 | if data and data.action and len(data.action.fcurves): 69 | animated_nodes.append(ob) 70 | 71 | for bobj in scene.objects: 72 | if bobj.type != "MESH" or bobj.name.lower() == "bounds": 73 | continue 74 | 75 | if bobj.users_group and bobj.users_group[0].name == "__ignore__": 76 | continue 77 | 78 | if not bobj.parent: 79 | if auto_root_index is None: 80 | auto_root_index = len(dsq.nodes) 81 | dsq.nodes.append("__auto_root__") 82 | 83 | for name, markers in sequences.items(): 84 | print("Exporting sequence", name) 85 | 86 | if "start" not in markers: 87 | return fail(operator, "Missing start marker for sequence '{}'".format(name)) 88 | 89 | if "end" not in markers: 90 | return fail(operator, "Missing end marker for sequence '{}'".format(name)) 91 | 92 | frame_start = markers["start"].frame 93 | frame_end = markers["end"].frame 94 | frame_range = frame_end - frame_start + 1 95 | 96 | seq = Sequence() 97 | seq.name = name 98 | seq.flags = Sequence.AlignedScale 99 | seq.priority = 1 100 | 101 | seq.toolBegin = frame_start 102 | seq.duration = frame_range * (context.scene.render.fps_base / context.scene.render.fps) 103 | 104 | if name in sequence_flags: 105 | for part in sequence_flags[name]: 106 | flag, *data = part.split(" ", 1) 107 | if data: data = data[0] 108 | 109 | if flag == "priority": 110 | seq.priority = int(data) 111 | elif flag == "cyclic": 112 | seq.flags |= Sequence.Cyclic 113 | elif flag == "blend": 114 | seq.flags |= Sequence.Blend 115 | elif flag == "duration": 116 | seq.duration = float(data) 117 | else: 118 | print("Warning: Unknown flag '{}' (used by sequence '{}')".format(flag, name)) 119 | 120 | seq.numKeyframes = frame_range 121 | seq.firstGroundFrame = len(dsq.ground_translations) 122 | seq.baseRotation = len(dsq.rotations) 123 | seq.baseTranslation = len(dsq.translations) 124 | seq.baseScale = len(dsq.aligned_scales) 125 | seq.baseObjectState = 0 126 | seq.baseDecalState = 0 127 | seq.firstTrigger = len(dsq.triggers) 128 | 129 | seq.rotationMatters = [False] * len(dsq.nodes) 130 | seq.translationMatters = [False] * len(dsq.nodes) 131 | seq.scaleMatters = [False] * len(dsq.nodes) 132 | seq.decalMatters = [False] * len(dsq.nodes) 133 | seq.iflMatters = [False] * len(dsq.nodes) 134 | seq.visMatters = [False] * len(dsq.nodes) 135 | seq.frameMatters = [False] * len(dsq.nodes) 136 | seq.matFrameMatters = [False] * len(dsq.nodes) 137 | 138 | dsq.sequences.append(seq) 139 | 140 | frame_indices = list(range(frame_start, frame_end + 1)) 141 | 142 | # Store all animation data so we don't need to frame_set all over the place 143 | animation_data = {frame: {} for frame in frame_indices} 144 | 145 | for frame in frame_indices: 146 | scene.frame_set(frame) 147 | 148 | for ob in animated_nodes: 149 | animation_data[frame][ob] = ob.matrix_local.decompose() 150 | 151 | for ob in animated_nodes: 152 | index = node_index[ob] 153 | 154 | base_translation, base_rotation, base_scale = node_transform[ob] 155 | 156 | fcurves = ob.animation_data.action.fcurves 157 | 158 | curves_rotation = array_from_fcurves_rotation(fcurves, ob) 159 | curves_translation = array_from_fcurves(fcurves, "location", 3) 160 | curves_scale = array_from_fcurves(fcurves, "scale", 3) 161 | 162 | # Decide what matters by presence of f-curves 163 | if curves_rotation and fcurves_keyframe_in_range(curves_rotation, frame_start, frame_end): 164 | seq.rotationMatters[index] = True 165 | 166 | if curves_translation and fcurves_keyframe_in_range(curves_translation, frame_start, frame_end): 167 | seq.translationMatters[index] = True 168 | 169 | if curves_scale and fcurves_keyframe_in_range(curves_scale, frame_start, frame_end): 170 | seq.scaleMatters[index] = True 171 | 172 | # Write the data where it matters 173 | # This assumes that animated_nodes is in the same order as shape.nodes 174 | for frame in frame_indices: 175 | translation, rotation, scale = animation_data[frame][ob] 176 | 177 | if seq.translationMatters[index]: 178 | if seq.flags & Sequence.Blend: 179 | translation -= base_translation 180 | dsq.translations.append(translation) 181 | 182 | if seq.rotationMatters[index]: 183 | if seq.flags & Sequence.Blend: 184 | rotation = base_rotation.inverted() * rotation 185 | dsq.rotations.append(rotation) 186 | 187 | if seq.scaleMatters[index]: 188 | dsq.aligned_scales.append(scale) 189 | 190 | with open(filepath, "wb") as fd: 191 | dsq.write(fd) 192 | 193 | if debug_report: 194 | with open(filepath + ".txt", "w") as fd: 195 | dsq.write_dump(fd) 196 | 197 | return {"FINISHED"} 198 | -------------------------------------------------------------------------------- /export_dts.py: -------------------------------------------------------------------------------- 1 | import bpy, bmesh, os, sys 2 | from math import sqrt, pi 3 | from operator import attrgetter 4 | from itertools import groupby 5 | 6 | from .DtsShape import DtsShape 7 | from .DtsTypes import * 8 | from .write_report import write_debug_report 9 | from .util import fail, resolve_texture, default_materials, evaluate_all, find_reference, \ 10 | array_from_fcurves, array_from_fcurves_rotation, fcurves_keyframe_in_range 11 | from .shared_export import find_seqs 12 | 13 | import re 14 | # re really isn't necessary. oh well. 15 | re_lod_size = re.compile(r"(-?\d+)$") 16 | common_col_name = re.compile(r"^(LOS)?[cC]ol-?\d+$") 17 | 18 | def undup_name(n): 19 | return n.split("#", 1)[0] 20 | 21 | def linearrgb_to_srgb(c): 22 | if c < 0.0031308: 23 | if c < 0: 24 | return 0 25 | else: 26 | return c * 12.92 27 | else: 28 | return 1.055 * (c ** (1.0 / 2.4)) - 0.055 29 | 30 | def get_vertex_bone(mesh, node): 31 | for bone_index, (node_index, _) in enumerate(mesh.bones): 32 | if node_index == node.index: 33 | return bone_index 34 | 35 | bone_index = len(mesh.bones) 36 | mat = node.bl_ob.matrix_local 37 | 38 | # TODO: Move this conversion to DtsTypes.py 39 | flat_mat = [x for y in mat.row for x in y] 40 | 41 | mesh.bones.append((node.index, flat_mat)) 42 | return bone_index 43 | 44 | def add_vertex_influences(ob, armature, node_lookup, mesh, vert, vertex_index): 45 | influences = [] 46 | total_weight = 0 47 | 48 | for group in vert.groups: 49 | vertex_group = ob.vertex_groups[group.group] 50 | 51 | bone = armature.data.bones.get(vertex_group.name) 52 | if bone is None: 53 | continue 54 | 55 | node = node_lookup.get(bone) 56 | if node is False: 57 | continue 58 | 59 | influences.append((node, group.weight)) 60 | total_weight += group.weight 61 | 62 | if total_weight == 0: 63 | weight_multiplier = 1 64 | else: 65 | weight_multiplier = 1 / total_weight 66 | 67 | for node, weight in influences: 68 | mesh.influences.append(( 69 | vertex_index, 70 | get_vertex_bone(mesh, node), 71 | weight * weight_multiplier)) 72 | 73 | def export_material(mat, shape): 74 | # print("Exporting material", mat.name) 75 | 76 | material_index = len(shape.materials) 77 | flags = 0 78 | 79 | if mat.use_shadeless: 80 | flags |= Material.SelfIlluminating 81 | if mat.use_transparency: 82 | flags |= Material.Translucent 83 | if mat.torque_props.blend_mode == "ADDITIVE": 84 | flags |= Material.Additive 85 | elif mat.torque_props.blend_mode == "SUBTRACTIVE": 86 | flags |= Material.Subtractive 87 | 88 | if mat.torque_props.s_wrap: 89 | flags |= Material.SWrap 90 | if mat.torque_props.t_wrap: 91 | flags |= Material.TWrap 92 | flags |= Material.NeverEnvMap 93 | if mat.torque_props.no_mipmaps: 94 | flags |= Material.NoMipMap 95 | if mat.torque_props.mip_bzero: 96 | flags |= Material.MipMapZeroBorder 97 | 98 | if mat.torque_props.use_ifl: 99 | flags |= Material.IFLMaterial 100 | 101 | # TODO: keep IFL materials in a table by name? 102 | # what would duplicates do? 103 | 104 | ifl_index = len(shape.iflmaterials) 105 | ifl = IflMaterial( 106 | name=shape.name(mat.torque_props.ifl_name), 107 | slot=material_index) 108 | shape.iflmaterials.append(ifl) 109 | 110 | material = Material(name=undup_name(mat.name), flags=flags) 111 | material.bl_mat = mat 112 | 113 | shape.materials.append(material) 114 | 115 | return material_index 116 | 117 | def seq_float_eq(a, b): 118 | return all(abs(i - j) < 0.000001 for i, j in zip(a, b)) 119 | 120 | def export_empty_node(lookup, shape, select_object, ob, parent=-1): 121 | if select_object and not ob.select: 122 | lookup[ob] = False 123 | return 124 | 125 | if "name" in ob: 126 | name = ob["name"] 127 | else: 128 | name = undup_name(ob.name) 129 | 130 | node = Node(shape.name(name), parent) 131 | 132 | node.armature = None 133 | node.bl_ob = ob 134 | node.matrix = ob.matrix_local 135 | 136 | shape.nodes.append(node) 137 | lookup[ob] = node 138 | 139 | for child in ob.children: 140 | if child.type == 'EMPTY': 141 | export_empty_node(lookup, shape, select_object, child, node) 142 | 143 | def export_bones(lookup, shape, armature, bones, parent=-1): 144 | for bone in bones: 145 | node = Node(shape.name(bone.name), parent) 146 | node.bone = bone 147 | node.bl_ob = bone 148 | 149 | mat = bone.matrix_local 150 | 151 | if bone.parent: 152 | mat = bone.parent.matrix_local.inverted() * mat 153 | 154 | node.armature = armature 155 | node.bl_ob = bone 156 | node.matrix = mat 157 | 158 | shape.nodes.append(node) 159 | lookup[bone] = node 160 | export_bones(lookup, shape, armature, bone.children, node) 161 | 162 | def save_nodes(scene, shape, select_object, dsq_compat): 163 | node_lookup = {} 164 | 165 | # Try to create nodes from empties armature bones 166 | for ob in scene.objects: 167 | if ob.parent is not None: 168 | continue 169 | 170 | if ob.type == 'EMPTY': 171 | export_empty_node(node_lookup, shape, select_object, ob) 172 | elif ob.type == 'ARMATURE' and (ob.select or not select_object): 173 | top_bones = filter(lambda b: b.parent is None, ob.data.bones) 174 | export_bones(node_lookup, shape, ob, top_bones) 175 | 176 | # NodeOrder backwards compatibility 177 | if "NodeOrder" in bpy.data.texts: 178 | print("Warning: NodeOrder found, using it for backwards compatibility") 179 | order = bpy.data.texts["NodeOrder"].as_string().split("\n") 180 | order_key = {name: i for i, name in enumerate(order)} 181 | else: 182 | order_key = {} 183 | 184 | # Sort by node indices from the DTS 185 | if dsq_compat: 186 | shape.nodes.sort(key=lambda n: 187 | order_key.get(shape.names[n.name], n.bl_ob.get("nodeIndex", sys.maxsize))) 188 | 189 | for index, node in enumerate(shape.nodes): 190 | if not isinstance(node.parent, int): 191 | if not hasattr(node.parent, "index") and dsq_compat: 192 | node_lookup = {"fail": "DSQ compatibility export failed due to new node structure."} 193 | break 194 | node.parent = node.parent.index 195 | 196 | location, rotation, scale = node.matrix.decompose() 197 | 198 | if not seq_float_eq((1, 1, 1), scale): 199 | print("Warning: '{}' uses scale, which cannot be exported to DTS nodes" 200 | .format(shape.names[node.name])) 201 | 202 | node.index = index 203 | node.matrix_world = Matrix.Translation(location) * rotation.to_matrix().to_4x4() 204 | 205 | if node.parent != -1: 206 | parent = shape.nodes[node.parent] 207 | node.matrix_world = parent.matrix_world * node.matrix_world 208 | 209 | shape.default_translations.append(location) 210 | shape.default_rotations.append(rotation) 211 | 212 | return node_lookup 213 | 214 | def save_meshes(scene, shape, node_lookup, select_object): 215 | scene_lods = {} 216 | scene_objects = {} 217 | 218 | auto_root_index = None 219 | bounds_ob = None 220 | 221 | for bobj in scene.objects: 222 | if bobj.type != "MESH": 223 | continue 224 | 225 | if select_object and not bobj.select: 226 | continue 227 | 228 | if bobj.name.lower() == "bounds": 229 | if bounds_ob: 230 | print("Warning: Multiple 'bounds' objects found - check capitalization") 231 | bounds_ob = bobj 232 | continue 233 | 234 | if "name" in bobj: 235 | name = bobj["name"] 236 | else: 237 | name = undup_name(bobj.name) 238 | 239 | if bobj.users_group: 240 | if len(bobj.users_group) > 1: 241 | print("Warning: Mesh {} is in multiple groups".format(bobj.name)) 242 | 243 | lod_name = bobj.users_group[0].name 244 | elif common_col_name.match(name): 245 | lod_name = "collision-1" 246 | else: 247 | lod_name = "detail32" 248 | 249 | if lod_name == "__ignore__": 250 | continue 251 | 252 | transform_mat = bobj.matrix_local 253 | armature_modifier = None 254 | 255 | # Try to find an armature modifier on the object 256 | for modifier in bobj.modifiers: 257 | if modifier.type != 'ARMATURE': 258 | continue 259 | 260 | armature_modifier = modifier 261 | break 262 | 263 | if armature_modifier is not None: 264 | # Should we do something with the parent here? 265 | # Ignore it for now. 266 | print('NYI: Armature modifier on mesh {}'.format(bobj.name)) 267 | attach_node = None 268 | elif bobj.parent: 269 | if bobj.parent_type == 'BONE': 270 | armature = bobj.parent 271 | bone = armature.data.bones[bobj.parent_bone] 272 | 273 | if bone not in node_lookup: 274 | print('Ignoring mesh {} - parent bone {} not included' 275 | .format(bobj.name, bone.name)) 276 | continue 277 | 278 | node = node_lookup[bone] 279 | attach_node = node.index 280 | 281 | # Compensate for matrix_local pointing to tail, offset to head 282 | # Does this need to use node.matrix somehow? 283 | transform_mat = Matrix.Translation((0, bone.length, 0)) * transform_mat 284 | elif bobj.parent_type == 'OBJECT': 285 | if bobj.parent not in node_lookup: 286 | print("The mesh '{}' has a parent of type '{}' (named '{}'). You can only parent meshes to empties, not other meshes.".format(bobj.name, bobj.parent.type, bobj.parent.name)) 287 | continue 288 | 289 | if node_lookup[bobj.parent] is False: # not selected 290 | continue 291 | 292 | attach_node = node_lookup[bobj.parent].index 293 | else: 294 | print('Warning: Mesh "{}" is using an unsupported parenting type "{}"' 295 | .format(bobj.name, bobj.parent_type)) 296 | attach_node = None 297 | else: 298 | print("Warning: Mesh '{}' has no parent".format(bobj.name)) 299 | attach_node = None 300 | 301 | if attach_node is None: 302 | if auto_root_index is None: 303 | auto_root_index = len(shape.nodes) 304 | 305 | node = Node(shape.name("__auto_root__")) 306 | node.bl_ob = None 307 | node.armature = None 308 | node.index = auto_root_index 309 | node.matrix = Matrix.Identity(4) 310 | node.matrix_world = node.matrix 311 | 312 | shape.nodes.append(node) 313 | shape.default_rotations.append(Quaternion((1, 0, 0, 0))) 314 | shape.default_translations.append(Vector()) 315 | 316 | attach_node = auto_root_index 317 | 318 | lod_name_index, lod_name = shape.name_resolve(lod_name) 319 | 320 | if lod_name not in scene_lods: 321 | match = re_lod_size.search(lod_name) 322 | 323 | if match: 324 | lod_size = int(match.group(1)) 325 | else: 326 | print("Warning: LOD '{}' does not end with a size, assuming size 32".format(lod_name)) 327 | lod_size = 32 # setting? 328 | 329 | print("Creating LOD '{}' (size {})".format(lod_name, lod_size)) 330 | scene_lods[lod_name] = DetailLevel(name=lod_name_index, subshape=0, objectDetail=-1, size=lod_size) 331 | shape.detail_levels.append(scene_lods[lod_name]) 332 | 333 | if name not in scene_objects: 334 | object = Object(shape.name(name), numMeshes=0, firstMesh=0, node=attach_node) 335 | object.has_transparency = False 336 | shape.objects.append(object) 337 | shape.objectstates.append(ObjectState(1.0, 0, 0)) # ff56g: search for a37hm 338 | scene_objects[name] = (object, {}) 339 | 340 | for slot in bobj.material_slots: 341 | if slot.material.use_transparency: 342 | scene_objects[name][0].has_transparency = True 343 | 344 | if lod_name in scene_objects[name][1]: 345 | print("Warning: Multiple objects {} in LOD {}, ignoring...".format(name, lod_name)) 346 | else: 347 | scene_objects[name][1][lod_name] = (bobj, transform_mat, armature_modifier) 348 | 349 | return scene_lods, scene_objects, bounds_ob 350 | 351 | def compute_bounds(shape, bounds_ob): 352 | print("Computing bounds") 353 | 354 | # shape.smallest_size = None 355 | # shape.smallest_detail_level = -1 356 | # 357 | # for i, lod in enumerate(shape.detail_levels): 358 | # if lod.size >= 0 and (shape.smallest_size == None or lod.size < shape.smallest_size): 359 | # shape.smallest_size = lod.size 360 | # shape.smallest_detail_level = i 361 | 362 | shape.bounds = Box( 363 | Vector(( 10e30, 10e30, 10e30)), 364 | Vector((-10e30, -10e30, -10e30))) 365 | 366 | shape.center = Vector() 367 | 368 | shape.radius = 0 369 | shape.radius_tube = 0 370 | 371 | for obj in shape.objects: 372 | for j in range(0, obj.numMeshes): 373 | mesh = shape.meshes[obj.firstMesh + j] 374 | 375 | if mesh.type == Mesh.NullType: 376 | continue 377 | 378 | mat = shape.nodes[obj.node].matrix_world 379 | bounds = mesh.calculate_bounds_mat(mat) 380 | 381 | shape.radius = max(shape.radius, mesh.calculate_radius_mat(mat, shape.center)) 382 | shape.radius_tube = max(shape.radius_tube, mesh.calculate_radius_tube_mat(mat, shape.center)) 383 | 384 | shape.bounds.min.x = min(shape.bounds.min.x, bounds.min.x) 385 | shape.bounds.min.y = min(shape.bounds.min.y, bounds.min.y) 386 | shape.bounds.min.z = min(shape.bounds.min.z, bounds.min.z) 387 | shape.bounds.max.x = max(shape.bounds.max.x, bounds.max.x) 388 | shape.bounds.max.y = max(shape.bounds.max.y, bounds.max.y) 389 | shape.bounds.max.z = max(shape.bounds.max.z, bounds.max.z) 390 | 391 | # Is there a bounds mesh? Use that instead. 392 | if bounds_ob: 393 | shape.bounds = Box(Vector(bounds_ob.bound_box[0]), Vector(bounds_ob.bound_box[6])) 394 | 395 | shape.center = Vector(( 396 | (shape.bounds.min.x + shape.bounds.max.x) / 2, 397 | (shape.bounds.min.y + shape.bounds.max.y) / 2, 398 | (shape.bounds.min.z + shape.bounds.max.z) / 2)) 399 | 400 | def save(operator, context, filepath, 401 | select_object=False, 402 | select_marker=False, 403 | blank_material=True, 404 | generate_texture="disabled", 405 | raw_colors = False, 406 | dsq_compat = False, 407 | apply_modifiers=True, 408 | debug_report=False): 409 | print("Exporting scene to DTS") 410 | 411 | scene = context.scene 412 | active = context.active_object 413 | shape = DtsShape() 414 | 415 | blank_material_index = None 416 | 417 | reference_frame = find_reference(scene) 418 | 419 | if reference_frame is not None: 420 | print("Note: Seeking to reference frame at", reference_frame) 421 | scene.frame_set(reference_frame) 422 | 423 | node_lookup = save_nodes(scene, shape, select_object, dsq_compat) 424 | if "fail" in node_lookup: 425 | return fail(operator, node_lookup["fail"]) 426 | scene_lods, scene_objects, bounds_ob = save_meshes( 427 | scene, shape, node_lookup, select_object) 428 | 429 | # If the shape is empty, add a detail level so it is valid 430 | if not shape.detail_levels: 431 | dl = DetailLevel(name=shape.name('detail1'), subshape=0, objectDetail=-1, size=1) 432 | shape.detail_levels.append(dl) 433 | 434 | # Put objects with transparent materials last 435 | # Note: If this plugin ever needs to do anything with objectstates, 436 | # that needs to be handled properly. a37hm: earch for ff56g 437 | shape.objects.sort(key=lambda object: object.has_transparency) # TODO: attrgetter 438 | 439 | # Sort detail levels 440 | shape.detail_levels.sort(key=attrgetter("size"), reverse=True) 441 | 442 | for i, lod in enumerate(shape.detail_levels): 443 | lod.objectDetail = i # this isn't the right place for this 444 | 445 | print("Adding meshes to objects...") 446 | 447 | material_table = {} 448 | 449 | for object, lods in scene_objects.values(): 450 | object.firstMesh = len(shape.meshes) 451 | 452 | for i, lod in enumerate(reversed(shape.detail_levels)): 453 | if shape.names[lod.name] in lods: 454 | object.numMeshes = len(shape.detail_levels) - i 455 | break 456 | else: 457 | object.numMeshes = 0 458 | continue 459 | 460 | for i in range(object.numMeshes): 461 | lod = shape.detail_levels[i] 462 | lod_name = shape.names[lod.name] 463 | 464 | if lod_name in lods: 465 | print("Exporting mesh '{}' (LOD '{}')".format(shape.names[object.name], lod_name)) 466 | bobj, transform_mat, armature_modifier = lods[lod_name] 467 | 468 | if armature_modifier is None: 469 | mesh_type = Mesh.StandardType 470 | else: 471 | mesh_type = Mesh.SkinType 472 | armature = armature_modifier.object 473 | 474 | ######################### 475 | ### Welcome to complexity 476 | 477 | # Disable the armature modifier so it does not deform the mesh 478 | # when writing it to the DTS file 479 | if armature_modifier is not None: 480 | was_show_render = armature_modifier.show_render 481 | was_show_viewport = armature_modifier.show_viewport 482 | 483 | armature_modifier.show_render = False 484 | armature_modifier.show_viewport = False 485 | 486 | mesh = bobj.to_mesh(scene, apply_modifiers, "PREVIEW") 487 | bm = bmesh.new() 488 | bm.from_mesh(mesh) 489 | bmesh.ops.triangulate(bm, faces=bm.faces) 490 | bm.to_mesh(mesh) 491 | bm.free() 492 | 493 | # Restore the armature modifier 494 | if armature_modifier is not None: 495 | armature_modifier.show_render = was_show_render 496 | armature_modifier.show_viewport = was_show_viewport 497 | 498 | # This is the danger zone 499 | # Data from down here may not stay around! 500 | 501 | dmesh = Mesh(mesh_type) 502 | shape.meshes.append(dmesh) 503 | 504 | dmesh.matrix_world = bobj.matrix_world 505 | 506 | dmesh.bounds = dmesh.calculate_bounds_mat(Matrix()) 507 | #dmesh.center = Vector(( 508 | # (dmesh.bounds.min.x + dmesh.bounds.max.x) / 2, 509 | # (dmesh.bounds.min.y + dmesh.bounds.max.y) / 2, 510 | # (dmesh.bounds.min.z + dmesh.bounds.max.z) / 2)) 511 | dmesh.center = Vector() 512 | dmesh.radius = dmesh.calculate_radius_mat(Matrix(), dmesh.center) 513 | 514 | # Group all materials by their material_index 515 | key = attrgetter("material_index") 516 | grouped_polys = groupby(sorted(mesh.polygons, key=key), key=key) 517 | grouped_polys = tuple(map(lambda t: (t[0], tuple(t[1])), grouped_polys)) 518 | 519 | # Create a primitive from each group 520 | for material_index, polys in grouped_polys: 521 | flags = Primitive.Triangles | Primitive.Indexed 522 | 523 | if mesh.materials: 524 | bmat = mesh.materials[material_index] 525 | 526 | if bmat not in material_table: 527 | material_table[bmat] = export_material(bmat, shape) 528 | 529 | flags |= material_table[bmat] & Primitive.MaterialMask 530 | elif blank_material: 531 | if blank_material_index is None: 532 | blank_material_index = len(shape.materials) 533 | shape.materials.append(Material(name="blank", 534 | flags=Material.SWrap | Material.TWrap | Material.NeverEnvMap)) 535 | 536 | flags |= blank_material_index & Primitive.MaterialMask 537 | else: 538 | flags |= Primitive.NoMaterial 539 | 540 | firstElement = len(dmesh.verts) 541 | 542 | for poly in polys: 543 | if mesh.uv_layers: 544 | uv_layer = mesh.uv_layers[0].data 545 | else: 546 | uv_layer = None 547 | 548 | use_face_normal = not poly.use_smooth 549 | 550 | for vert_index, loop_index in zip(reversed(poly.vertices), reversed(poly.loop_indices)): 551 | vertex_index = len(dmesh.verts) 552 | dmesh.indices.append(len(dmesh.indices)) 553 | 554 | vert = mesh.vertices[vert_index] 555 | 556 | if use_face_normal: 557 | normal = poly.normal 558 | else: 559 | normal = vert.normal 560 | 561 | dmesh.verts.append(transform_mat * vert.co) 562 | dmesh.normals.append((transform_mat.to_3x3() * normal).normalized()) 563 | 564 | dmesh.enormals.append(0) 565 | 566 | if uv_layer: 567 | uv = uv_layer[loop_index].uv 568 | dmesh.tverts.append(Vector((uv.x, 1 - uv.y))) 569 | else: 570 | dmesh.tverts.append(Vector((0, 0))) 571 | 572 | if mesh_type == Mesh.SkinType: 573 | add_vertex_influences(bobj, armature, 574 | node_lookup, dmesh, 575 | vert, vertex_index) 576 | 577 | numElements = len(dmesh.verts) - firstElement 578 | dmesh.primitives.append(Primitive(firstElement, numElements, flags)) 579 | 580 | bpy.data.meshes.remove(mesh) # RIP! 581 | 582 | # ??? ? ?? ???? ??? ? 583 | dmesh.vertsPerFrame = len(dmesh.verts) 584 | 585 | if len(dmesh.indices) >= 65536: 586 | return fail(operator, "The mesh '{}' has too many vertex indices ({} >= 65536)".format(bobj.name, len(dmesh.indices))) 587 | 588 | ### Nobody leaves Hotel California 589 | else: 590 | # print("Adding Null mesh for object {} in LOD {}".format(shape.names[object.name], lod_name)) 591 | shape.meshes.append(Mesh(Mesh.NullType)) 592 | 593 | print("Creating subshape with " + str(len(shape.nodes)) + " nodes and " + str(len(shape.objects)) + " objects") 594 | shape.subshapes.append(Subshape(0, 0, 0, len(shape.nodes), len(shape.objects), 0)) 595 | 596 | # Figure out all the things 597 | compute_bounds(shape, bounds_ob) 598 | 599 | sequences, sequence_flags = find_seqs(context.scene, select_marker) 600 | 601 | for name, markers in sequences.items(): 602 | print("Exporting sequence", name) 603 | 604 | if "start" not in markers: 605 | return fail(operator, "Missing start marker for sequence '{}'".format(name)) 606 | 607 | if "end" not in markers: 608 | return fail(operator, "Missing end marker for sequence '{}'".format(name)) 609 | 610 | frame_start = markers["start"].frame 611 | frame_end = markers["end"].frame 612 | frame_range = frame_end - frame_start + 1 613 | 614 | seq = Sequence() 615 | seq.nameIndex = shape.name(name) 616 | seq.flags = Sequence.AlignedScale 617 | seq.priority = 1 618 | 619 | seq.toolBegin = frame_start 620 | seq.duration = frame_range * (context.scene.render.fps_base / context.scene.render.fps) 621 | 622 | if name in sequence_flags: 623 | for part in sequence_flags[name]: 624 | flag, *data = part.split(" ", 1) 625 | if data: data = data[0] 626 | 627 | if flag == "priority": 628 | seq.priority = int(data) 629 | elif flag == "cyclic": 630 | seq.flags |= Sequence.Cyclic 631 | elif flag == "blend": 632 | seq.flags |= Sequence.Blend 633 | elif flag == "duration": 634 | seq.duration = float(data) 635 | else: 636 | print("Warning: Unknown flag '{}' (used by sequence '{}')".format(flag, name)) 637 | 638 | seq.numKeyframes = frame_range 639 | seq.firstGroundFrame = len(shape.ground_translations) 640 | seq.baseRotation = len(shape.node_rotations) 641 | seq.baseTranslation = len(shape.node_translations) 642 | seq.baseScale = len(shape.node_aligned_scales) 643 | seq.baseObjectState = len(shape.objectstates) 644 | seq.baseDecalState = len(shape.decalstates) 645 | seq.firstTrigger = len(shape.triggers) 646 | 647 | seq.rotationMatters = [False] * len(shape.nodes) 648 | seq.translationMatters = [False] * len(shape.nodes) 649 | seq.scaleMatters = [False] * len(shape.nodes) 650 | seq.decalMatters = [False] * len(shape.nodes) 651 | seq.iflMatters = [False] * len(shape.nodes) 652 | seq.visMatters = [False] * len(shape.nodes) 653 | seq.frameMatters = [False] * len(shape.nodes) 654 | seq.matFrameMatters = [False] * len(shape.nodes) 655 | 656 | shape.sequences.append(seq) 657 | 658 | frame_indices = list(range(frame_start, frame_end + 1)) 659 | 660 | # Store all animation data so we don't need to frame_set all over the place 661 | animation_data = {frame: {} for frame in frame_indices} 662 | 663 | for frame in frame_indices: 664 | scene.frame_set(frame) 665 | 666 | for node in shape.nodes: 667 | if node.armature is not None: 668 | continue 669 | 670 | animation_data[frame][node] = node.matrix.decompose() 671 | 672 | for index, node in enumerate(shape.nodes): 673 | if node.armature is not None: 674 | continue 675 | 676 | ob = node.bl_ob 677 | 678 | if ob is None: 679 | continue 680 | 681 | data = ob.animation_data 682 | 683 | if not data or not data.action or not len(data.action.fcurves): 684 | continue 685 | 686 | base_translation, base_rotation, _ = node.matrix.decompose() 687 | base_scale = Vector((1.0, 1.0, 1.0)) 688 | 689 | fcurves = data.action.fcurves 690 | 691 | curves_rotation = array_from_fcurves_rotation(fcurves, ob) 692 | curves_translation = array_from_fcurves(fcurves, "location", 3) 693 | curves_scale = array_from_fcurves(fcurves, "scale", 3) 694 | 695 | # Decide what matters by presence of f-curves 696 | if curves_rotation and fcurves_keyframe_in_range(curves_rotation, frame_start, frame_end): 697 | seq.rotationMatters[index] = True 698 | 699 | if curves_translation and fcurves_keyframe_in_range(curves_translation, frame_start, frame_end): 700 | seq.translationMatters[index] = True 701 | 702 | if curves_scale and fcurves_keyframe_in_range(curves_scale, frame_start, frame_end): 703 | seq.scaleMatters[index] = True 704 | 705 | # Write the data where it matters 706 | for frame in frame_indices: 707 | translation, rotation, scale = animation_data[frame][node] 708 | 709 | if seq.translationMatters[index]: 710 | if seq.flags & Sequence.Blend: 711 | translation -= base_translation 712 | shape.node_translations.append(translation) 713 | 714 | if seq.rotationMatters[index]: 715 | if seq.flags & Sequence.Blend: 716 | rotation = base_rotation.inverted() * rotation 717 | shape.node_rotations.append(rotation) 718 | 719 | if seq.scaleMatters[index]: 720 | shape.node_aligned_scales.append(scale) 721 | 722 | if debug_report: 723 | print("Writing debug report") 724 | write_debug_report(filepath + ".txt", shape) 725 | 726 | shape.verify() 727 | 728 | with open(filepath, "wb") as fd: 729 | shape.save(fd) 730 | 731 | write_material_textures(generate_texture, filepath, shape, raw_colors) 732 | 733 | return {"FINISHED"} 734 | 735 | def write_material_textures(mode, filepath, shape, raw_colors): 736 | if mode == 'disabled': 737 | return 738 | 739 | f_lookup = mode in ("custom-missing", "all-missing") 740 | f_custom = mode in ("custom-missing", "custom-always") 741 | 742 | for material in shape.materials: 743 | if not hasattr(material, "bl_mat"): 744 | continue 745 | 746 | if f_custom and material.name.lower() in default_materials: 747 | continue 748 | 749 | if f_lookup and resolve_texture(filepath, material.name) is not None: 750 | continue 751 | 752 | bl_mat = material.bl_mat 753 | color = bl_mat.diffuse_color 754 | if not raw_colors: 755 | color = color * bl_mat.diffuse_intensity 756 | color.r = linearrgb_to_srgb(color.r) 757 | color.g = linearrgb_to_srgb(color.g) 758 | color.b = linearrgb_to_srgb(color.b) 759 | 760 | image = bpy.data.images.new(material.name.lower() + "_generated", 16, 16) 761 | image.pixels = (color.r, color.g, color.b, 1.0) * 256 762 | image.filepath_raw = os.path.join(os.path.dirname(filepath), material.name + ".png") 763 | image.file_format = "PNG" 764 | image.save() -------------------------------------------------------------------------------- /import_dsq.py: -------------------------------------------------------------------------------- 1 | import bpy 2 | from math import ceil 3 | 4 | from .DsqFile import DsqFile 5 | from .DtsTypes import Sequence, Quaternion, Vector 6 | from .util import fail, ob_location_curves, ob_scale_curves, ob_rotation_curves, ob_rotation_data, \ 7 | evaluate_all, find_reference 8 | 9 | def get_free_name(name, taken): 10 | if name not in taken: 11 | return name 12 | 13 | suffix = 1 14 | 15 | while True: 16 | name_try = name + "." + str(suffix) 17 | 18 | if name_try not in taken: 19 | return name_try 20 | 21 | suffix += 1 22 | 23 | # action.fcurves.new(data_path, array_index) 24 | # action.fcurves[].keyframe_points.add(number) 25 | # action.fcurves[].keyframe_points[].interpolation = "LINEAR" 26 | # action.fcurves[].keyframe_points[].co 27 | 28 | def load(operator, context, filepath, 29 | debug_report=False): 30 | dsq = DsqFile() 31 | 32 | with open(filepath, "rb") as fd: 33 | dsq.read(fd) 34 | 35 | if debug_report: 36 | with open(filepath + ".txt", "w") as fd: 37 | dsq.write_dump(fd) 38 | 39 | print("Resolving nodes...") 40 | 41 | found_obs = {} 42 | 43 | # Find all our candidate nodes 44 | # DSQ is case-insensitive, that's why we can't just [] lookup 45 | for ob in context.scene.objects: 46 | if ob.type in ("EMPTY", "ARMATURE"): 47 | name = ob.name.lower() 48 | 49 | if name in found_obs: 50 | print("Warning: Nodes with varying capitalization found ('{}', '{}'), ignoring second".format(found_obs[name].name, ob.name)) 51 | continue 52 | 53 | found_obs[name] = ob 54 | 55 | nodes = [None] * len(dsq.nodes) 56 | node_missing = [] 57 | 58 | # Now associate DSQ node indices with Blender objects 59 | for index, name in enumerate(dsq.nodes): 60 | lower = name.lower() 61 | 62 | if lower in found_obs: 63 | nodes[index] = found_obs[lower] 64 | else: 65 | node_missing.append(name) 66 | 67 | if node_missing: 68 | return fail(operator, "The following nodes from the DSQ file could not be found in your scene:\n" + ", ".join(node_missing)) 69 | 70 | # Now, find all the existing sequence names so we can rename duplicates 71 | # Also find out where the last user-defined animation data is 72 | last_frame = 1 73 | scene_sequences = set() 74 | 75 | for marker in context.scene.timeline_markers: 76 | last_frame = max(last_frame, int(ceil(marker.frame + 10))) 77 | 78 | if ":" not in marker.name: 79 | continue 80 | 81 | name, what = marker.name.rsplit(":", 1) 82 | scene_sequences.add(name) 83 | 84 | for action in bpy.data.actions: 85 | last_frame = max(last_frame, int(ceil(action.frame_range[1] + 10))) 86 | 87 | if "Sequences" in bpy.data.texts: 88 | for line in bpy.data.texts["Sequences"].as_string().split("\n"): 89 | line = line.strip() 90 | 91 | if not line or line == "strict" or ":" not in line: 92 | continue 93 | 94 | name, flags = line.split(":", 1) 95 | scene_sequences.add(name) 96 | 97 | sequences_text = [] 98 | reference_frame = find_reference(context.scene) 99 | 100 | # Create Blender keyframes and markers for each sequence 101 | for seq in dsq.sequences: 102 | name = get_free_name(seq.name, scene_sequences) 103 | print("found seq", seq.name, "to", name) 104 | 105 | flags = [] 106 | flags.append("priority {}".format(seq.priority)) 107 | 108 | if seq.flags & Sequence.Cyclic: 109 | flags.append("cyclic") 110 | 111 | if seq.flags & Sequence.Blend: 112 | flags.append("blend") 113 | 114 | flags.append("duration {}".format(seq.duration)) 115 | 116 | if flags: 117 | sequences_text.append(name + ": " + ", ".join(flags)) 118 | 119 | nodesRotation = tuple(map(lambda p: p[0], filter(lambda p: p[1], zip(nodes, seq.rotationMatters)))) 120 | nodesTranslation = tuple(map(lambda p: p[0], filter(lambda p: p[1], zip(nodes, seq.translationMatters)))) 121 | nodesScale = tuple(map(lambda p: p[0], filter(lambda p: p[1], zip(nodes, seq.scaleMatters)))) 122 | 123 | step = 1 124 | 125 | for mattersIndex, ob in enumerate(nodesTranslation): 126 | curves = ob_location_curves(ob) 127 | 128 | for frameIndex in range(seq.numKeyframes): 129 | vec = dsq.translations[seq.baseTranslation + mattersIndex * seq.numKeyframes + frameIndex] 130 | if seq.flags & Sequence.Blend: 131 | if reference_frame is None: 132 | return fail(operator, "Missing 'reference' marker for blend animation '{}'".format(name)) 133 | ref_vec = Vector(evaluate_all(curves, reference_frame)) 134 | vec = ref_vec + vec 135 | 136 | for curve in curves: 137 | curve.keyframe_points.add(1) 138 | key = curve.keyframe_points[-1] 139 | key.interpolation = "LINEAR" 140 | key.co = (last_frame + frameIndex * step, vec[curve.array_index]) 141 | 142 | for mattersIndex, ob in enumerate(nodesRotation): 143 | mode, curves = ob_rotation_curves(ob) 144 | 145 | for frameIndex in range(seq.numKeyframes): 146 | rot = dsq.rotations[seq.baseRotation + mattersIndex * seq.numKeyframes + frameIndex] 147 | if seq.flags & Sequence.Blend: 148 | if reference_frame is None: 149 | return fail(operator, "Missing 'reference' marker for blend animation '{}'".format(name)) 150 | ref_rot = Quaternion(evaluate_all(curves, reference_frame)) 151 | rot = ref_rot * rot 152 | if mode == 'AXIS_ANGLE': 153 | rot = rot.to_axis_angle() 154 | elif mode != 'QUATERNION': 155 | rot = rot.to_euler(mode) 156 | 157 | for curve in curves: 158 | curve.keyframe_points.add(1) 159 | key = curve.keyframe_points[-1] 160 | key.interpolation = "LINEAR" 161 | key.co = (last_frame + frameIndex * step, rot[curve.array_index]) 162 | 163 | for mattersIndex, ob in enumerate(nodesScale): 164 | curves = ob_scale_curves(ob) 165 | 166 | for frameIndex in range(seq.numKeyframes): 167 | index = seq.baseScale + mattersIndex * seq.numKeyframes + frameIndex 168 | 169 | if seq.flags & Sequence.UniformScale: 170 | s = dsq.uniform_scales[index] 171 | scale = s, s, s 172 | elif seq.flags & Sequence.AlignedScale: 173 | scale = dsq.aligned_scales[index] 174 | elif seq.flags & Sequence.ArbitraryScale: 175 | print("Warning: Arbitrary scale animation not implemented") 176 | break 177 | else: 178 | print("Warning: Invalid scale flags found in sequence") 179 | break 180 | 181 | for curve in curves: 182 | curve.keyframe_points.add(1) 183 | key = curve.keyframe_points[-1] 184 | key.interpolation = "LINEAR" 185 | key.co = (last_frame + frameIndex * step, scale[curve.array_index]) 186 | 187 | context.scene.timeline_markers.new(name + ":start", last_frame) 188 | context.scene.timeline_markers.new(name + ":end", last_frame + seq.numKeyframes) 189 | 190 | last_frame += seq.numKeyframes + 10 191 | 192 | if "Sequences" in bpy.data.texts: 193 | sequences_buf = bpy.data.texts["Sequences"] 194 | else: 195 | sequences_buf = bpy.data.texts.new("Sequences") 196 | 197 | if not sequences_buf.as_string(): 198 | sequences_buf.from_string("\n".join(sequences_text)) 199 | else: 200 | sequences_buf.from_string(sequences_buf.as_string() + "\n" + "\n".join(sequences_text)) 201 | 202 | return {"FINISHED"} 203 | -------------------------------------------------------------------------------- /import_dts.py: -------------------------------------------------------------------------------- 1 | import bpy 2 | import os 3 | from bpy_extras.io_utils import unpack_list 4 | 5 | from .DtsShape import DtsShape 6 | from .DtsTypes import * 7 | from .write_report import write_debug_report 8 | from .util import default_materials, resolve_texture, get_rgb_colors, fail, \ 9 | ob_location_curves, ob_scale_curves, ob_rotation_curves, ob_rotation_data, evaluate_all 10 | 11 | import operator 12 | from itertools import zip_longest, count 13 | from functools import reduce 14 | from random import random 15 | 16 | def grouper(iterable, n, fillvalue=None): 17 | "Collect data into fixed-length chunks or blocks" 18 | # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx" 19 | args = [iter(iterable)] * n 20 | return zip_longest(*args, fillvalue=fillvalue) 21 | 22 | def dedup_name(group, name): 23 | if name not in group: 24 | return name 25 | 26 | for suffix in count(2): 27 | new_name = name + "#" + str(suffix) 28 | 29 | if new_name not in group: 30 | return new_name 31 | 32 | def import_material(color_source, dmat, filepath): 33 | bmat = bpy.data.materials.new(dedup_name(bpy.data.materials, dmat.name)) 34 | bmat.diffuse_intensity = 1 35 | 36 | texname = resolve_texture(filepath, dmat.name) 37 | 38 | if texname is not None: 39 | try: 40 | teximg = bpy.data.images.load(texname) 41 | except: 42 | print("Cannot load image", texname) 43 | 44 | texslot = bmat.texture_slots.add() 45 | texslot.use_map_alpha = True 46 | tex = texslot.texture = bpy.data.textures.new(dmat.name, "IMAGE") 47 | tex.image = teximg 48 | 49 | # Try to figure out a diffuse color for solid shading 50 | if teximg.size[0] <= 16 and teximg.size[1] <= 16: 51 | if teximg.use_alpha: 52 | pixels = grouper(teximg.pixels, 4) 53 | else: 54 | pixels = grouper(teximg.pixels, 3) 55 | 56 | color = pixels.__next__() 57 | 58 | for other in pixels: 59 | if other != color: 60 | break 61 | else: 62 | bmat.diffuse_color = color[:3] 63 | elif dmat.name.lower() in default_materials: 64 | bmat.diffuse_color = default_materials[dmat.name.lower()] 65 | else: # give it a random color 66 | bmat.diffuse_color = color_source.__next__() 67 | 68 | if dmat.flags & Material.SelfIlluminating: 69 | bmat.use_shadeless = True 70 | if dmat.flags & Material.Translucent: 71 | bmat.use_transparency = True 72 | 73 | if dmat.flags & Material.Additive: 74 | bmat.torque_props.blend_mode = "ADDITIVE" 75 | elif dmat.flags & Material.Subtractive: 76 | bmat.torque_props.blend_mode = "SUBTRACTIVE" 77 | else: 78 | bmat.torque_props.blend_mode = "NONE" 79 | 80 | if dmat.flags & Material.SWrap: 81 | bmat.torque_props.s_wrap = True 82 | if dmat.flags & Material.TWrap: 83 | bmat.torque_props.t_wraps = True 84 | if dmat.flags & Material.IFLMaterial: 85 | bmat.torque_props.use_ifl = True 86 | 87 | # TODO: MipMapZeroBorder, IFLFrame, DetailMap, BumpMap, ReflectanceMap 88 | # AuxilaryMask? 89 | 90 | return bmat 91 | 92 | class index_pass: 93 | def __getitem__(self, item): 94 | return item 95 | 96 | def create_bmesh(dmesh, materials, shape): 97 | me = bpy.data.meshes.new("Mesh") 98 | 99 | faces = [] 100 | material_indices = {} 101 | 102 | indices_pass = index_pass() 103 | 104 | for prim in dmesh.primitives: 105 | if prim.type & Primitive.Indexed: 106 | indices = dmesh.indices 107 | else: 108 | indices = indices_pass 109 | 110 | dmat = None 111 | 112 | if not (prim.type & Primitive.NoMaterial): 113 | dmat = shape.materials[prim.type & Primitive.MaterialMask] 114 | 115 | if dmat not in material_indices: 116 | material_indices[dmat] = len(me.materials) 117 | me.materials.append(materials[dmat]) 118 | 119 | if prim.type & Primitive.Strip: 120 | even = True 121 | for i in range(prim.firstElement + 2, prim.firstElement + prim.numElements): 122 | if even: 123 | faces.append(((indices[i], indices[i - 1], indices[i - 2]), dmat)) 124 | else: 125 | faces.append(((indices[i - 2], indices[i - 1], indices[i]), dmat)) 126 | even = not even 127 | elif prim.type & Primitive.Fan: 128 | even = True 129 | for i in range(prim.firstElement + 2, prim.firstElement + prim.numElements): 130 | if even: 131 | faces.append(((indices[i], indices[i - 1], indices[0]), dmat)) 132 | else: 133 | faces.append(((indices[0], indices[i - 1], indices[i]), dmat)) 134 | even = not even 135 | else: # Default to Triangle Lists (prim.type & Primitive.Triangles) 136 | for i in range(prim.firstElement + 2, prim.firstElement + prim.numElements, 3): 137 | faces.append(((indices[i], indices[i - 1], indices[i - 2]), dmat)) 138 | 139 | me.vertices.add(len(dmesh.verts)) 140 | me.vertices.foreach_set("co", unpack_list(dmesh.verts)) 141 | me.vertices.foreach_set("normal", unpack_list(dmesh.normals)) 142 | 143 | me.polygons.add(len(faces)) 144 | me.loops.add(len(faces) * 3) 145 | 146 | me.uv_textures.new() 147 | uvs = me.uv_layers[0] 148 | 149 | for i, ((verts, dmat), poly) in enumerate(zip(faces, me.polygons)): 150 | poly.use_smooth = True # DTS geometry is always smooth shaded 151 | poly.loop_total = 3 152 | poly.loop_start = i * 3 153 | 154 | if dmat: 155 | poly.material_index = material_indices[dmat] 156 | 157 | for j, index in zip(poly.loop_indices, verts): 158 | me.loops[j].vertex_index = index 159 | uv = dmesh.tverts[index] 160 | uvs.data[j].uv = (uv.x, 1 - uv.y) 161 | 162 | me.validate() 163 | me.update() 164 | 165 | return me 166 | 167 | def file_base_name(filepath): 168 | return os.path.basename(filepath).rsplit(".", 1)[0] 169 | 170 | def insert_reference(frame, shape_nodes): 171 | for node in shape_nodes: 172 | ob = node.bl_ob 173 | 174 | curves = ob_location_curves(ob) 175 | for curve in curves: 176 | curve.keyframe_points.add(1) 177 | key = curve.keyframe_points[-1] 178 | key.interpolation = "LINEAR" 179 | key.co = (frame, ob.location[curve.array_index]) 180 | 181 | curves = ob_scale_curves(ob) 182 | for curve in curves: 183 | curve.keyframe_points.add(1) 184 | key = curve.keyframe_points[-1] 185 | key.interpolation = "LINEAR" 186 | key.co = (frame, ob.scale[curve.array_index]) 187 | 188 | _, curves = ob_rotation_curves(ob) 189 | rot = ob_rotation_data(ob) 190 | for curve in curves: 191 | curve.keyframe_points.add(1) 192 | key = curve.keyframe_points[-1] 193 | key.interpolation = "LINEAR" 194 | key.co = (frame, rot[curve.array_index]) 195 | 196 | def load(operator, context, filepath, 197 | reference_keyframe=True, 198 | import_sequences=True, 199 | use_armature=False, 200 | debug_report=False): 201 | shape = DtsShape() 202 | 203 | with open(filepath, "rb") as fd: 204 | shape.load(fd) 205 | 206 | if debug_report: 207 | write_debug_report(filepath + ".txt", shape) 208 | with open(filepath + ".pass.dts", "wb") as fd: 209 | shape.save(fd) 210 | 211 | # Create a Blender material for each DTS material 212 | materials = {} 213 | color_source = get_rgb_colors() 214 | 215 | for dmat in shape.materials: 216 | materials[dmat] = import_material(color_source, dmat, filepath) 217 | 218 | # Now assign IFL material properties where needed 219 | for ifl in shape.iflmaterials: 220 | mat = materials[shape.materials[ifl.slot]] 221 | assert mat.torque_props.use_ifl == True 222 | mat.torque_props.ifl_name = shape.names[ifl.name] 223 | 224 | # First load all the nodes into armatures 225 | lod_by_mesh = {} 226 | 227 | for lod in shape.detail_levels: 228 | lod_by_mesh[lod.objectDetail] = lod 229 | 230 | node_obs = [] 231 | node_obs_val = {} 232 | 233 | if use_armature: 234 | root_arm = bpy.data.armatures.new(file_base_name(filepath)) 235 | root_ob = bpy.data.objects.new(root_arm.name, root_arm) 236 | root_ob.show_x_ray = True 237 | 238 | context.scene.objects.link(root_ob) 239 | context.scene.objects.active = root_ob 240 | 241 | # Calculate armature-space matrix, head and tail for each node 242 | for i, node in enumerate(shape.nodes): 243 | node.mat = shape.default_rotations[i].to_matrix() 244 | node.mat = Matrix.Translation(shape.default_translations[i]) * node.mat.to_4x4() 245 | if node.parent != -1: 246 | node.mat = shape.nodes[node.parent].mat * node.mat 247 | # node.head = node.mat.to_translation() 248 | # node.tail = node.head + Vector((0, 0, 0.25)) 249 | # node.tail = node.mat.to_translation() 250 | # node.head = node.tail - Vector((0, 0, 0.25)) 251 | 252 | bpy.ops.object.mode_set(mode="EDIT") 253 | 254 | edit_bone_table = [] 255 | bone_names = [] 256 | 257 | for i, node in enumerate(shape.nodes): 258 | bone = root_arm.edit_bones.new(shape.names[node.name]) 259 | # bone.use_connect = True 260 | # bone.head = node.head 261 | # bone.tail = node.tail 262 | bone.head = (0, 0, -0.25) 263 | bone.tail = (0, 0, 0) 264 | 265 | if node.parent != -1: 266 | bone.parent = edit_bone_table[node.parent] 267 | 268 | bone.matrix = node.mat 269 | bone["nodeIndex"] = i 270 | 271 | edit_bone_table.append(bone) 272 | bone_names.append(bone.name) 273 | 274 | bpy.ops.object.mode_set(mode="OBJECT") 275 | else: 276 | if reference_keyframe: 277 | reference_marker = context.scene.timeline_markers.get("reference") 278 | if reference_marker is None: 279 | reference_frame = 0 280 | context.scene.timeline_markers.new("reference", reference_frame) 281 | else: 282 | reference_frame = reference_marker.frame 283 | else: 284 | reference_frame = None 285 | 286 | # Create an empty for every node 287 | for i, node in enumerate(shape.nodes): 288 | ob = bpy.data.objects.new(dedup_name(bpy.data.objects, shape.names[node.name]), None) 289 | node.bl_ob = ob 290 | ob["nodeIndex"] = i 291 | ob.empty_draw_type = "SINGLE_ARROW" 292 | ob.empty_draw_size = 0.5 293 | 294 | if node.parent != -1: 295 | ob.parent = node_obs[node.parent] 296 | 297 | ob.location = shape.default_translations[i] 298 | ob.rotation_mode = "QUATERNION" 299 | ob.rotation_quaternion = shape.default_rotations[i] 300 | if shape.names[node.name] == "__auto_root__" and ob.rotation_quaternion.magnitude == 0: 301 | ob.rotation_quaternion = (1, 0, 0, 0) 302 | 303 | context.scene.objects.link(ob) 304 | node_obs.append(ob) 305 | node_obs_val[node] = ob 306 | 307 | if reference_keyframe: 308 | insert_reference(reference_frame, shape.nodes) 309 | 310 | # Try animation? 311 | if import_sequences: 312 | globalToolIndex = 10 313 | fps = context.scene.render.fps 314 | 315 | sequences_text = [] 316 | 317 | for seq in shape.sequences: 318 | name = shape.names[seq.nameIndex] 319 | print("Importing sequence", name) 320 | 321 | flags = [] 322 | flags.append("priority {}".format(seq.priority)) 323 | 324 | if seq.flags & Sequence.Cyclic: 325 | flags.append("cyclic") 326 | 327 | if seq.flags & Sequence.Blend: 328 | flags.append("blend") 329 | 330 | flags.append("duration {}".format(seq.duration)) 331 | 332 | if flags: 333 | sequences_text.append(name + ": " + ", ".join(flags)) 334 | 335 | nodesRotation = tuple(map(lambda p: p[0], filter(lambda p: p[1], zip(shape.nodes, seq.rotationMatters)))) 336 | nodesTranslation = tuple(map(lambda p: p[0], filter(lambda p: p[1], zip(shape.nodes, seq.translationMatters)))) 337 | nodesScale = tuple(map(lambda p: p[0], filter(lambda p: p[1], zip(shape.nodes, seq.scaleMatters)))) 338 | 339 | step = 1 340 | 341 | for mattersIndex, node in enumerate(nodesTranslation): 342 | ob = node_obs_val[node] 343 | curves = ob_location_curves(ob) 344 | 345 | for frameIndex in range(seq.numKeyframes): 346 | vec = shape.node_translations[seq.baseTranslation + mattersIndex * seq.numKeyframes + frameIndex] 347 | if seq.flags & Sequence.Blend: 348 | if reference_frame is None: 349 | return fail(operator, "Missing 'reference' marker for blend animation '{}'".format(name)) 350 | ref_vec = Vector(evaluate_all(curves, reference_frame)) 351 | vec = ref_vec + vec 352 | 353 | for curve in curves: 354 | curve.keyframe_points.add(1) 355 | key = curve.keyframe_points[-1] 356 | key.interpolation = "LINEAR" 357 | key.co = ( 358 | globalToolIndex + frameIndex * step, 359 | vec[curve.array_index]) 360 | 361 | for mattersIndex, node in enumerate(nodesRotation): 362 | ob = node_obs_val[node] 363 | mode, curves = ob_rotation_curves(ob) 364 | 365 | for frameIndex in range(seq.numKeyframes): 366 | rot = shape.node_rotations[seq.baseRotation + mattersIndex * seq.numKeyframes + frameIndex] 367 | if seq.flags & Sequence.Blend: 368 | if reference_frame is None: 369 | return fail(operator, "Missing 'reference' marker for blend animation '{}'".format(name)) 370 | ref_rot = Quaternion(evaluate_all(curves, reference_frame)) 371 | rot = ref_rot * rot 372 | if mode == 'AXIS_ANGLE': 373 | rot = rot.to_axis_angle() 374 | elif mode != 'QUATERNION': 375 | rot = rot.to_euler(mode) 376 | 377 | for curve in curves: 378 | curve.keyframe_points.add(1) 379 | key = curve.keyframe_points[-1] 380 | key.interpolation = "LINEAR" 381 | key.co = ( 382 | globalToolIndex + frameIndex * step, 383 | rot[curve.array_index]) 384 | 385 | for mattersIndex, node in enumerate(nodesScale): 386 | ob = node_obs_val[node] 387 | curves = ob_scale_curves(ob) 388 | 389 | for frameIndex in range(seq.numKeyframes): 390 | index = seq.baseScale + mattersIndex * seq.numKeyframes + frameIndex 391 | vec = shape.node_translations[seq.baseTranslation + mattersIndex * seq.numKeyframes + frameIndex] 392 | 393 | if seq.flags & Sequence.UniformScale: 394 | s = shape.node_uniform_scales[index] 395 | vec = (s, s, s) 396 | elif seq.flags & Sequence.AlignedScale: 397 | vec = shape.node_aligned_scales[index] 398 | elif seq.flags & Sequence.ArbitraryScale: 399 | print("Warning: Arbitrary scale animation not implemented") 400 | break 401 | else: 402 | print("Warning: Invalid scale flags found in sequence") 403 | break 404 | 405 | for curve in curves: 406 | curve.keyframe_points.add(1) 407 | key = curve.keyframe_points[-1] 408 | key.interpolation = "LINEAR" 409 | key.co = ( 410 | globalToolIndex + frameIndex * step, 411 | vec[curve.array_index]) 412 | 413 | # Insert a reference frame immediately before the animation 414 | # insert_reference(globalToolIndex - 2, shape.nodes) 415 | 416 | context.scene.timeline_markers.new(name + ":start", globalToolIndex) 417 | context.scene.timeline_markers.new(name + ":end", globalToolIndex + seq.numKeyframes * step - 1) 418 | globalToolIndex += seq.numKeyframes * step + 30 419 | 420 | if "Sequences" in bpy.data.texts: 421 | sequences_buf = bpy.data.texts["Sequences"] 422 | else: 423 | sequences_buf = bpy.data.texts.new("Sequences") 424 | 425 | sequences_buf.from_string("\n".join(sequences_text)) 426 | 427 | # Then put objects in the armatures 428 | for obj in shape.objects: 429 | if obj.node == -1: 430 | print('Warning: Object {} is not attached to a node, ignoring' 431 | .format(shape.names[obj.name])) 432 | continue 433 | 434 | for meshIndex in range(obj.numMeshes): 435 | mesh = shape.meshes[obj.firstMesh + meshIndex] 436 | mtype = mesh.type 437 | 438 | if mtype == Mesh.NullType: 439 | continue 440 | 441 | if mtype != Mesh.StandardType and mtype != Mesh.SkinType: 442 | print('Warning: Mesh #{} of object {} is of unsupported type {}, ignoring'.format( 443 | meshIndex + 1, mtype, shape.names[obj.name])) 444 | continue 445 | 446 | bmesh = create_bmesh(mesh, materials, shape) 447 | bobj = bpy.data.objects.new(dedup_name(bpy.data.objects, shape.names[obj.name]), bmesh) 448 | context.scene.objects.link(bobj) 449 | 450 | add_vertex_groups(mesh, bobj, shape) 451 | 452 | if use_armature: 453 | bobj.parent = root_ob 454 | bobj.parent_bone = bone_names[obj.node] 455 | bobj.parent_type = "BONE" 456 | bobj.matrix_world = shape.nodes[obj.node].mat 457 | 458 | if mtype == Mesh.SkinType: 459 | modifier = bobj.modifiers.new('Armature', 'ARMATURE') 460 | modifier.object = root_ob 461 | else: 462 | bobj.parent = node_obs[obj.node] 463 | 464 | lod_name = shape.names[lod_by_mesh[meshIndex].name] 465 | 466 | if lod_name not in bpy.data.groups: 467 | bpy.data.groups.new(lod_name) 468 | 469 | bpy.data.groups[lod_name].objects.link(bobj) 470 | 471 | # Import a bounds mesh 472 | me = bpy.data.meshes.new("Mesh") 473 | me.vertices.add(8) 474 | me.vertices[0].co = (shape.bounds.min.x, shape.bounds.min.y, shape.bounds.min.z) 475 | me.vertices[1].co = (shape.bounds.max.x, shape.bounds.min.y, shape.bounds.min.z) 476 | me.vertices[2].co = (shape.bounds.max.x, shape.bounds.max.y, shape.bounds.min.z) 477 | me.vertices[3].co = (shape.bounds.min.x, shape.bounds.max.y, shape.bounds.min.z) 478 | me.vertices[4].co = (shape.bounds.min.x, shape.bounds.min.y, shape.bounds.max.z) 479 | me.vertices[5].co = (shape.bounds.max.x, shape.bounds.min.y, shape.bounds.max.z) 480 | me.vertices[6].co = (shape.bounds.max.x, shape.bounds.max.y, shape.bounds.max.z) 481 | me.vertices[7].co = (shape.bounds.min.x, shape.bounds.max.y, shape.bounds.max.z) 482 | me.validate() 483 | me.update() 484 | ob = bpy.data.objects.new("bounds", me) 485 | ob.draw_type = "BOUNDS" 486 | context.scene.objects.link(ob) 487 | 488 | return {"FINISHED"} 489 | 490 | def add_vertex_groups(mesh, ob, shape): 491 | for node, initial_transform in mesh.bones: 492 | # TODO: Handle initial_transform 493 | ob.vertex_groups.new(shape.names[shape.nodes[node].name]) 494 | 495 | for vertex, bone, weight in mesh.influences: 496 | ob.vertex_groups[bone].add((vertex,), weight, 'REPLACE') 497 | -------------------------------------------------------------------------------- /import_sequence.py: -------------------------------------------------------------------------------- 1 | # This file is currently unused 2 | 3 | import bpy 4 | 5 | def import_sequence(is_dsq, shape, seq): 6 | if is_dsq: 7 | name = shape.names[seq.nameIndex] 8 | else: 9 | name = seq.name 10 | 11 | act = bpy.data.actions.new(name) 12 | 13 | flags = ["priority {}".format(seq.priority)] 14 | if seq.flags & Sequence.Cyclic: 15 | flags.append("cyclic") 16 | if seq.flags & Sequence.Blend: 17 | flags.append("blend") 18 | # sequences_text.append(name + ": " + ", ".join(flags)) 19 | 20 | if is_dsq: 21 | nodes = shape.nodes 22 | rotations = shape.rotations 23 | else: 24 | nodes = tuple(map(lambda n: shape.names[n.name], shape.nodes)) 25 | rotations = shape.node_rotations 26 | 27 | if seq.flags & Sequence.UniformScale: 28 | scales = tuple(map(lambda s: (s, s, s), shape.uniform_scales)) 29 | elif seq.flags & Sequence.AlignedScale: 30 | scales = shape.aligned_scales 31 | elif seq.flags & Sequence.ArbitraryScale: 32 | print("Warning: Arbitrary scale animation not implemented") 33 | break 34 | else: 35 | print("Warning: Invalid scale flags found in sequence") 36 | break 37 | 38 | nodes_translation = tuple(map(lambda p: p[0], filter(lambda p: p[1], zip(nodes, seq.translationMatters)))) 39 | nodes_rotation = tuple(map(lambda p: p[0], filter(lambda p: p[1], zip(nodes, seq.rotationMatters)))) 40 | nodes_scale = tuple(map(lambda p: p[0], filter(lambda p: p[1], zip(nodes, seq.scaleMatters)))) 41 | 42 | for matters_index, node_name in enumerate(nodes_translation): 43 | data_path = 'pose.bones["{}"].location'.format(node_name) 44 | fcus = tuple(map(lambda array_index: act.fcurves.new(data_path, array_index), range(3))) 45 | for frame_index in range(seq.numKeyframes): 46 | array = translations[seq.baseTranslation + matters_index * seq.numKeyframes + frame_index] 47 | for array_index, fcu in enumerate(fcus): 48 | fcu.keyframe_points.add(1) 49 | key = fcu.keyframe_points[-1] 50 | key.interpolation = "LINEAR" 51 | key.co = (1 + frame_index, array[array_index]) 52 | 53 | for matters_index, node_name in enumerate(nodes_rotation): 54 | data_path = 'pose.bones["{}"].rotation_quaternion'.format(node_name) 55 | fcus = tuple(map(lambda array_index: act.fcurves.new(data_path, array_index), range(4))) 56 | for frame_index in range(seq.numKeyframes): 57 | array = rotations[seq.baseRotation + matters_index * seq.numKeyframes + frame_index] 58 | for array_index, fcu in enumerate(fcus): 59 | fcu.keyframe_points.add(1) 60 | key = fcu.keyframe_points[-1] 61 | key.interpolation = "LINEAR" 62 | key.co = (1 + frame_index, array[array_index]) 63 | 64 | for matters_index, node_name in enumerate(nodes_scale): 65 | data_path = 'pose.bones["{}"].scale'.format(node_name) 66 | fcus = tuple(map(lambda array_index: act.fcurves.new(data_path, array_index), range(3))) 67 | for frame_index in range(seq.numKeyframes): 68 | array = scales[seq.baseScale + matters_index * seq.numKeyframes + frame_index] 69 | for array_index, fcu in enumerate(fcus): 70 | fcu.keyframe_points.add(1) 71 | key = fcu.keyframe_points[-1] 72 | key.interpolation = "LINEAR" 73 | key.co = (1 + frame_index, array[array_index]) 74 | 75 | # if seq.flags & Sequence.Blend: 76 | # if reference_frame is None: 77 | # return fail(operator, "Missing 'reference' marker for blend animation '{}'".format(name)) 78 | # ref_vec = Vector(evaluate_all(curves, reference_frame)) 79 | # vec = ref_vec + vec 80 | # if seq.flags & Sequence.Blend: 81 | # if reference_frame is None: 82 | # return fail(operator, "Missing 'reference' marker for blend animation '{}'".format(name)) 83 | # ref_rot = Quaternion(evaluate_all(curves, reference_frame)) 84 | # rot = ref_rot * rot 85 | -------------------------------------------------------------------------------- /shared_export.py: -------------------------------------------------------------------------------- 1 | from collections import OrderedDict 2 | import bpy 3 | 4 | def find_seqs(scene, select_marker): 5 | sequences = OrderedDict() 6 | sequence_flags = {} 7 | 8 | if "Sequences" in bpy.data.texts: 9 | for line in bpy.data.texts["Sequences"].as_string().split("\n"): 10 | line = line.strip() 11 | 12 | if not line: 13 | continue 14 | 15 | if ":" not in line: 16 | print("Invalid line in 'Sequences':", line) 17 | continue 18 | 19 | name, flags = line.split(":", 1) 20 | 21 | if name not in sequences: 22 | sequences[name] = {} 23 | 24 | if flags.lstrip(): 25 | flags = tuple(map(lambda f: f.strip(), flags.split(","))) 26 | else: 27 | flags = () 28 | 29 | sequence_flags[name] = flags 30 | 31 | for marker in scene.timeline_markers: 32 | if ":" not in marker.name or (select_marker and not marker.select): 33 | continue 34 | 35 | name, what = marker.name.rsplit(":", 1) 36 | what = what.lower() 37 | 38 | if name not in sequences: 39 | sequences[name] = {} 40 | 41 | if what in sequences[name]: 42 | print("Warning: Got duplicate '{}' marker for sequence '{}' at frame {} (first was at frame {}), ignoring".format(what, name, marker.frame, sequences[name][what].frame)) 43 | continue 44 | 45 | sequences[name][what] = marker 46 | 47 | return sequences, sequence_flags -------------------------------------------------------------------------------- /util.py: -------------------------------------------------------------------------------- 1 | import os 2 | import bpy 3 | from colorsys import hsv_to_rgb 4 | from itertools import count 5 | from fractions import Fraction 6 | 7 | texture_extensions = ("png", "jpg") 8 | 9 | default_materials = { 10 | "black": (0, 0, 0), 11 | "black25": (191, 191, 191), 12 | "black50": (128, 128, 128), 13 | "black75": (64, 64, 64), 14 | "blank": (255, 255, 255), 15 | "blue": (0, 0, 255), 16 | "darkRed": (128, 0, 0), 17 | "gray25": (64, 64, 64), 18 | "gray50": (128, 128, 128), 19 | "gray75": (191, 191, 191), 20 | "green": (26, 128, 64), 21 | "lightBlue": (10, 186, 245), 22 | "lightYellow": (249, 249, 99), 23 | "palegreen": (125, 136, 104), 24 | "red": (213, 0, 0), 25 | "white": (255, 255, 255), 26 | "yellow": (255, 255, 0) 27 | } 28 | 29 | for name, color in default_materials.items(): 30 | default_materials[name] = (color[0] / 255, color[1] / 255, color[2] / 255) 31 | 32 | for key, value in tuple(default_materials.items()): 33 | default_materials[key.lower()] = value 34 | 35 | def resolve_texture(filepath, name): 36 | dirname = os.path.dirname(filepath) 37 | 38 | while True: 39 | texbase = os.path.join(dirname, name) 40 | 41 | for extension in texture_extensions: 42 | texname = texbase + "." + extension 43 | 44 | if os.path.isfile(texname): 45 | return texname 46 | 47 | if os.path.ismount(dirname): 48 | break 49 | 50 | prevdir, dirname = dirname, os.path.dirname(dirname) 51 | 52 | if prevdir == dirname: 53 | break 54 | 55 | def fractions(): 56 | yield 0 57 | 58 | for k in count(): 59 | i = 2 ** k 60 | 61 | for j in range(1, i, 2): 62 | yield j / i 63 | 64 | def get_hsv_colors(): 65 | for h in fractions(): 66 | yield (h, 0.75, 0.75) 67 | 68 | def get_rgb_colors(): 69 | return map(lambda hsv: hsv_to_rgb(*hsv), get_hsv_colors()) 70 | 71 | def action_get_or_new(ob): 72 | if not ob.animation_data: 73 | ob.animation_data_create() 74 | 75 | if ob.animation_data.action: 76 | return ob.animation_data.action 77 | 78 | action = bpy.data.actions.new(ob.name + "Action") 79 | ob.animation_data.action = action 80 | 81 | return action 82 | 83 | def ob_curves_array(ob, data_path, array_count): 84 | action = action_get_or_new(ob) 85 | curves = [None] * array_count 86 | 87 | for curve in action.fcurves: 88 | if curve.data_path != data_path or curve.array_index < 0 or curve.array_index >= array_count: 89 | continue 90 | 91 | if curves[curve.array_index]: 92 | pass # TODO: warn if more than one curve for an array slot 93 | 94 | curves[curve.array_index] = curve 95 | 96 | for index, curve in enumerate(curves): 97 | if curve is None: 98 | curves[index] = action.fcurves.new(data_path, index) 99 | 100 | return curves 101 | 102 | def ob_location_curves(ob): 103 | return ob_curves_array(ob, "location", 3) 104 | 105 | def ob_scale_curves(ob): 106 | return ob_curves_array(ob, "scale", 3) 107 | 108 | def fcurves_path_from_rotation(ob): 109 | if ob.rotation_mode == 'QUATERNION': 110 | return ('rotation_quaternion', 4) 111 | elif ob.rotation_mode == 'AXIS_ANGLE': 112 | return ('rotation_axis_angle', 4) 113 | else: 114 | return ('rotation_euler', 3) 115 | 116 | def ob_rotation_data(ob): 117 | if ob.rotation_mode == 'QUATERNION': 118 | return ob.rotation_quaternion 119 | elif ob.rotation_mode == 'AXIS_ANGLE': 120 | return ob.rotation_axis_angle 121 | else: 122 | return ob.rotation_euler 123 | 124 | def ob_rotation_curves(ob): 125 | data_path, array_count = fcurves_path_from_rotation(ob) 126 | return ob.rotation_mode, ob_curves_array(ob, data_path, array_count) 127 | 128 | def evaluate_all(curves, frame): 129 | return tuple(map(lambda c: c.evaluate(frame), curves)) 130 | 131 | def array_from_fcurves(curves, data_path, array_size): 132 | found = False 133 | array = [None] * array_size 134 | 135 | for curve in curves: 136 | if curve.data_path == data_path and curve.array_index != -1: 137 | array[curve.array_index] = curve 138 | found = True 139 | 140 | if found: 141 | return tuple(array) 142 | 143 | def array_from_fcurves_rotation(curves, ob): 144 | data_path, array_count = fcurves_path_from_rotation(ob) 145 | return array_from_fcurves(curves, data_path, array_count) 146 | 147 | def fcurves_keyframe_in_range(curves, start, end): 148 | for curve in curves: 149 | for keyframe in curve.keyframe_points: 150 | frame = keyframe.co[0] 151 | if frame >= start and frame <= end: 152 | return True 153 | 154 | return False 155 | 156 | def find_reference(scene): 157 | reference_marker = scene.timeline_markers.get("reference") 158 | if reference_marker is not None: 159 | return reference_marker.frame 160 | 161 | def fail(operator, message): 162 | print("Error:", message) 163 | operator.report({"ERROR"}, message) 164 | return {"FINISHED"} 165 | -------------------------------------------------------------------------------- /write_report.py: -------------------------------------------------------------------------------- 1 | from .DtsTypes import * 2 | 3 | def write_debug_report(filepath, shape): 4 | with open(filepath, "w") as fd: 5 | def p(line): 6 | fd.write(line + "\n") 7 | def gn(i): 8 | return shape.names[i] 9 | def ln(table, first, count): 10 | def each(i): 11 | entry = table[i] 12 | if hasattr(entry, "name"): 13 | return str(i) + " -> " + gn(entry.name) 14 | else: 15 | return str(i) 16 | return ", ".join(map(each, range(first, first + count))) 17 | def show_matters(matters): 18 | return ' '.join(map(lambda p: gn(p[0].name), filter(lambda p: p[1], zip(shape.nodes, matters)))) 19 | 20 | p("smallest_size = " + str(shape.smallest_size)) 21 | p("smallest_detail_level = " + str(shape.smallest_detail_level)) 22 | p("radius = " + str(shape.radius)) 23 | p("radius_tube = " + str(shape.radius_tube)) 24 | p("center = " + str(shape.center)) 25 | p("bounds = " + str(shape.bounds)) 26 | # p("Decals (deprecated): " + str(len(shape.decals))) 27 | p("Ground frames: " + str(len(shape.ground_translations))) 28 | # p("Decal states (deprecated): " + str(len(shape.decalstates))) 29 | p("Triggers: " + str(len(shape.triggers))) 30 | 31 | p("Sequence node rotations: " + str(len(shape.node_rotations))) 32 | p("Sequence node translations: " + str(len(shape.node_translations))) 33 | p("Sequence node uniform scales: " + str(len(shape.node_uniform_scales))) 34 | p("Sequence node aligned scales: " + str(len(shape.node_aligned_scales))) 35 | p("Sequence node arbitrary scales: " + str(len(shape.node_arbitrary_scale_factors))) 36 | 37 | p("Detail levels (" + str(len(shape.detail_levels)) + "):") 38 | for i, lod in enumerate(shape.detail_levels): 39 | p(" LOD " + str(i) + " " + gn(lod.name) + " (size " + str(lod.size) + ")") 40 | p(" subshape = " + str(lod.subshape)) 41 | p(" objectDetail = " + str(lod.objectDetail)) 42 | p(" polyCount = " + str(lod.polyCount)) 43 | # p(" avgError (unused) = " + str(lod.avgError)) 44 | # p(" maxError (unused) = " + str(lod.maxError)) 45 | 46 | p("Subshapes (" + str(len(shape.subshapes)) + "):") 47 | for i, sub in enumerate(shape.subshapes): 48 | p(" Subshape " + str(i)) 49 | # p(" firstNode = " + str(sub.firstNode)) 50 | # p(" firstObject = " + str(sub.firstObject)) 51 | # p(" firstDecal (deprecated) = " + str(sub.firstDecal)) 52 | # p(" numNodes = " + str(sub.numNodes)) 53 | # p(" numObjects = " + str(sub.numObjects)) 54 | # p(" numDecals (deprecated) = " + str(sub.numDecals)) 55 | # p(" nodes = " + ln(shape.nodes, sub.firstNode, sub.numNodes)) 56 | # p(" objects = " + ln(shape.objects, sub.firstObject, sub.numObjects)) 57 | p(" nodes = " + ln(shape.nodes, sub.firstNode, sub.numNodes)) 58 | p(" objects = " + ln(shape.objects, sub.firstObject, sub.numObjects)) 59 | # p(" decals (deprecated) = " + ln(shape.decals, sub.firstDecal, sub.numDecals)) 60 | 61 | p("Nodes (" + str(len(shape.nodes)) + "):") 62 | for i, node in enumerate(shape.nodes): 63 | if node.parent == -1: 64 | p(" " + str(i) + " " + gn(node.name)) 65 | else: 66 | p(" " + str(i) + " " + gn(node.name) + " -> " + str(node.parent) + " " + gn(shape.nodes[node.parent].name)) 67 | if i < len(shape.default_translations): 68 | p(" translation = " + str(shape.default_translations[i])) 69 | else: 70 | p(" translation = MISSING!") 71 | if i < len(shape.default_rotations): 72 | p(" rotation = " + str(shape.default_rotations[i])) 73 | else: 74 | p(" rotation = MISSING!") 75 | 76 | # TODO: tell if default transform lists are longer than node list 77 | 78 | p("Object states: " + str(len(shape.objectstates))) 79 | p("Objects (" + str(len(shape.objects)) + "):") 80 | for i, obj in enumerate(shape.objects): 81 | s = " " + str(i) + " " + gn(obj.name) 82 | if obj.node == -1: 83 | s += " NOT ATTACHED!" 84 | else: 85 | s += " in " + str(obj.node) + " (" + gn(shape.nodes[obj.node].name) + ")" 86 | s += ", meshes = " + ln(shape.meshes, obj.firstMesh, obj.numMeshes) 87 | p(s) 88 | 89 | p("Materials (" + str(len(shape.materials)) + "):") 90 | for i, mat in enumerate(shape.materials): 91 | flagNames = ("SWrap", "TWrap", "Translucent", "Additive", "Subtractive", "SelfIlluminating", "NeverEnvMap", "NoMipMap", "MipMapZeroBorder", "IFLMaterial", "IFLFrame", "DetailMap", "BumpMap", "ReflectanceMap", "AuxiliaryMask") 92 | flags = "" 93 | for name in flagNames: 94 | if mat.flags & getattr(Material, name): 95 | flags += " " + name 96 | p(" " + str(i) + " " + mat.name + " (" + str(mat.flags) + flags + ")") 97 | p(" bumpMap = " + str(mat.bumpMap) + ", reflectanceMap = " + str(mat.reflectanceMap) + ", detailMap = " + str(mat.detailMap)) 98 | p(" reflectance = " + str(mat.reflectance) + ", detailScale = " + str(mat.detailScale)) 99 | 100 | p("IFL materials (" + str(len(shape.iflmaterials)) + "):") 101 | for ifl in shape.iflmaterials: 102 | p(" IflMat " + gn(ifl.name)) 103 | if ifl.slot in shape.materials: 104 | mat_name = gn(shape.materials[ifl.slot].name) 105 | else: 106 | mat_name = "" 107 | p(" slot = " + str(ifl.slot) + " " + mat_name + ", time = " + str(ifl.time)) 108 | p(" firstFrame = " + str(ifl.firstFrame) + ", numFrames = " + str(ifl.numFrames)) 109 | 110 | p("Meshes (" + str(len(shape.meshes)) + "):") 111 | for i, mesh in enumerate(shape.meshes): 112 | mtype = mesh.get_type() 113 | p(" Mesh " + str(i) + " - " + Mesh.TypeName[mtype]) 114 | 115 | if mtype == Mesh.NullType: 116 | continue 117 | 118 | p(" flags = " + str(mesh.get_flags())) 119 | p(" bounds = " + str(mesh.bounds)) 120 | p(" center = " + str(mesh.center)) 121 | p(" radius = " + str(mesh.radius)) 122 | # p(" numFrames = " + str(mesh.numFrames)) 123 | # p(" numMatFrames = " + str(mesh.numMatFrames)) 124 | # p(" vertsPerFrame = " + str(mesh.vertsPerFrame)) 125 | # p(" parent (unused?) = " + str(mesh.parent)) 126 | # p(" indices = " + ",".join(map(str, mesh.indices))) 127 | # p(" mindices = " + ",".join(map(str, mesh.mindices))) 128 | p(" + Primitives (" + str(len(mesh.primitives)) + "):") 129 | for prim in mesh.primitives: 130 | flags = "" 131 | if prim.type & Primitive.Triangles: 132 | flags += " Triangles" 133 | if prim.type & Primitive.Strip: 134 | flags += " Strip" 135 | if prim.type & Primitive.Fan: 136 | flags += " Fan" 137 | if flags == "": 138 | flags += " NoExplicitType->Triangles" 139 | if prim.type & Primitive.Indexed: 140 | flags += " Indexed" 141 | if prim.type & Primitive.NoMaterial: 142 | flags += " NoMaterial" 143 | mat = prim.type & Primitive.MaterialMask 144 | flags += " MaterialMask:" + str(mat) 145 | p(" " + str(prim.firstElement) + "->" + str(prim.firstElement + prim.numElements - 1) + " " + str(prim.type) + flags) 146 | p(" + Vertices (" + str(len(mesh.verts)) + "): ") 147 | # for i in range(len(mesh.verts)): 148 | # p(" vert" + str(i) + " " + str(mesh.verts[i]) + " normal " + str(mesh.normals[i]) + " encoded " + str(mesh.enormals[i])) 149 | p(" + Texture coords (" + str(len(mesh.tverts)) + "): ") 150 | # for i in range(len(mesh.tverts)): 151 | # p(" tvert" + str(i) + " " + str(mesh.tverts[i])) 152 | 153 | if mtype == Mesh.SkinType: 154 | p(" + Bones ({})".format(len(mesh.bones))) 155 | for i, (node_index, initial_transform) in enumerate(mesh.bones): 156 | p(" bone{} node={} initial_transform={}".format(i, node_index, initial_transform)) 157 | p(" + Influences ({}): ".format(len(mesh.influences))) 158 | # for vi, bi, w in mesh.influences: 159 | # p 160 | # p(" influence vert{} bone{} weight={}".format(vi, bi, w)) 161 | 162 | p("Sequences (" + str(len(shape.sequences)) + "):") 163 | for i, seq in enumerate(shape.sequences): 164 | p(" " + str(i) + " " + shape.names[seq.nameIndex]) 165 | p(" flags: " + str(seq.flags)) 166 | p(" numKeyframes: " + str(seq.numKeyframes)) 167 | p(" duration: " + str(seq.duration)) 168 | p(" priority: " + str(seq.priority)) 169 | p(" firstGroundFrame: " + str(seq.firstGroundFrame)) 170 | p(" numGroundFrames: " + str(seq.numGroundFrames)) 171 | p(" baseRotation: " + str(seq.baseRotation)) 172 | p(" baseTranslation: " + str(seq.baseTranslation)) 173 | p(" baseScale: " + str(seq.baseScale)) 174 | p(" baseObjectState: " + str(seq.baseObjectState)) 175 | p(" baseDecalState: " + str(seq.baseDecalState)) 176 | p(" firstTrigger: " + str(seq.firstTrigger)) 177 | p(" numTriggers: " + str(seq.numTriggers)) 178 | p(" toolBegin: " + str(seq.toolBegin)) 179 | p(" rotationMatters: " + show_matters(seq.rotationMatters)) 180 | p(" translationMatters: " + show_matters(seq.translationMatters)) 181 | p(" scaleMatters: " + show_matters(seq.scaleMatters)) 182 | p(" decalMatters: " + show_matters(seq.decalMatters)) 183 | p(" iflMatters: " + show_matters(seq.iflMatters)) 184 | p(" visMatters: " + show_matters(seq.visMatters)) 185 | p(" frameMatters: " + show_matters(seq.frameMatters)) 186 | p(" matFrameMatters: " + show_matters(seq.matFrameMatters)) 187 | 188 | p("Names (" + str(len(shape.names)) + "):") 189 | for i, name in enumerate(shape.names): 190 | p(" " + str(i) + " = " + name) 191 | --------------------------------------------------------------------------------