├── .gitignore ├── glob2 ├── __init__.py ├── fnmatch.py ├── compat.py └── impl.py ├── README.md └── openformat-to-obj.py /.gitignore: -------------------------------------------------------------------------------- 1 | /glob2/__pycache__/ 2 | /openformat-to-obj.log 3 | -------------------------------------------------------------------------------- /glob2/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from .impl import * 3 | 4 | 5 | __version__ = (0, 5) 6 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # openformat-to-obj 2 | Converts OPENIV (GTA5) odr files to wavefront obj files. 3 | 4 | Useful for viewing or modifiying GTA5 3D models. 5 | 6 | Requirements 7 | -------------- 8 | - OpenIV 2.8 9 | - Python3 (https://www.python.org/downloads/) 10 | 11 | Usage 12 | -------------- 13 | The script requires 1 argument - a glob or filepath to one or many odr files 14 | For each odr it outputs an obj and a mtl file for easy viewing. 15 | 16 | By default it checks for a pre-existing obj file, if it finds one that it created previously, it will not run the conversion on that particular odr file. 17 | 18 | To overide this behaviour, add the optional argument '-f' or '--force', and it will re-convert those odr files. 19 | 20 | Example 21 | -------------- 22 | python3 openformat-to-obj.py **/*.odr -f 23 | 24 | Or 25 | 26 | python3 openformat-to-obj.py "C:\GTA5Models\prop_shamal_crash.odr" 27 | 28 | ![](http://i.imgur.com/DjOCy7O.png) 29 | -------------------------------------------------------------------------------- /glob2/fnmatch.py: -------------------------------------------------------------------------------- 1 | """Filename matching with shell patterns. 2 | 3 | fnmatch(FILENAME, PATTERN) matches according to the local convention. 4 | fnmatchcase(FILENAME, PATTERN) always takes case in account. 5 | 6 | The functions operate by translating the pattern into a regular 7 | expression. They cache the compiled regular expressions for speed. 8 | 9 | The function translate(PATTERN) returns a regular expression 10 | corresponding to PATTERN. (It does not compile it.) 11 | """ 12 | import os 13 | import posixpath 14 | import re 15 | try: 16 | from functools import lru_cache 17 | except ImportError: 18 | from .compat import lru_cache 19 | 20 | __all__ = ["filter", "fnmatch", "fnmatchcase", "translate"] 21 | 22 | def fnmatch(name, pat): 23 | """Test whether FILENAME matches PATTERN. 24 | 25 | Patterns are Unix shell style: 26 | 27 | * matches everything 28 | ? matches any single character 29 | [seq] matches any character in seq 30 | [!seq] matches any char not in seq 31 | 32 | An initial period in FILENAME is not special. 33 | Both FILENAME and PATTERN are first case-normalized 34 | if the operating system requires it. 35 | If you don't want this, use fnmatchcase(FILENAME, PATTERN). 36 | """ 37 | name = os.path.normcase(name) 38 | pat = os.path.normcase(pat) 39 | return fnmatchcase(name, pat) 40 | 41 | lru_cache(maxsize=256, typed=True) 42 | def _compile_pattern(pat): 43 | if isinstance(pat, bytes): 44 | pat_str = pat.decode('ISO-8859-1') 45 | res_str = translate(pat_str) 46 | res = res_str.encode('ISO-8859-1') 47 | else: 48 | res = translate(pat) 49 | return re.compile(res).match 50 | 51 | def filter(names, pat): 52 | """Return the subset of the list NAMES that match PAT.""" 53 | result = [] 54 | pat = os.path.normcase(pat) 55 | match = _compile_pattern(pat) 56 | if os.path is posixpath: 57 | # normcase on posix is NOP. Optimize it away from the loop. 58 | for name in names: 59 | m = match(name) 60 | if m: 61 | result.append((name, m.groups())) 62 | else: 63 | for name in names: 64 | m = match(os.path.normcase(name)) 65 | if m: 66 | result.append((name, m.groups())) 67 | return result 68 | 69 | def fnmatchcase(name, pat): 70 | """Test whether FILENAME matches PATTERN, including case. 71 | 72 | This is a version of fnmatch() which doesn't case-normalize 73 | its arguments. 74 | """ 75 | match = _compile_pattern(pat) 76 | return match(name) is not None 77 | 78 | 79 | def translate(pat): 80 | """Translate a shell PATTERN to a regular expression. 81 | 82 | There is no way to quote meta-characters. 83 | """ 84 | 85 | i, n = 0, len(pat) 86 | res = '' 87 | while i < n: 88 | c = pat[i] 89 | i = i+1 90 | if c == '*': 91 | res = res + '(.*)' 92 | elif c == '?': 93 | res = res + '(.)' 94 | elif c == '[': 95 | j = i 96 | if j < n and pat[j] == '!': 97 | j = j+1 98 | if j < n and pat[j] == ']': 99 | j = j+1 100 | while j < n and pat[j] != ']': 101 | j = j+1 102 | if j >= n: 103 | res = res + '\\[' 104 | else: 105 | stuff = pat[i:j].replace('\\','\\\\') 106 | i = j+1 107 | if stuff[0] == '!': 108 | stuff = '^' + stuff[1:] 109 | elif stuff[0] == '^': 110 | stuff = '\\' + stuff 111 | res = '%s([%s])' % (res, stuff) 112 | else: 113 | res = res + re.escape(c) 114 | return res + '\Z(?ms)' 115 | -------------------------------------------------------------------------------- /glob2/compat.py: -------------------------------------------------------------------------------- 1 | # Back-port functools.lru_cache to Python 2 (and <= 3.2) 2 | # {{{ http://code.activestate.com/recipes/578078/ (r6) 3 | 4 | from collections import namedtuple 5 | from functools import update_wrapper 6 | from threading import RLock 7 | 8 | _CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"]) 9 | 10 | class _HashedSeq(list): 11 | __slots__ = 'hashvalue' 12 | 13 | def __init__(self, tup, hash=hash): 14 | self[:] = tup 15 | self.hashvalue = hash(tup) 16 | 17 | def __hash__(self): 18 | return self.hashvalue 19 | 20 | def _make_key(args, kwds, typed, 21 | kwd_mark = (object(),), 22 | fasttypes = set((int, str, frozenset, type(None))), 23 | sorted=sorted, tuple=tuple, type=type, len=len): 24 | 'Make a cache key from optionally typed positional and keyword arguments' 25 | key = args 26 | if kwds: 27 | sorted_items = sorted(kwds.items()) 28 | key += kwd_mark 29 | for item in sorted_items: 30 | key += item 31 | if typed: 32 | key += tuple(type(v) for v in args) 33 | if kwds: 34 | key += tuple(type(v) for k, v in sorted_items) 35 | elif len(key) == 1 and type(key[0]) in fasttypes: 36 | return key[0] 37 | return _HashedSeq(key) 38 | 39 | def lru_cache(maxsize=100, typed=False): 40 | """Least-recently-used cache decorator. 41 | 42 | If *maxsize* is set to None, the LRU features are disabled and the cache 43 | can grow without bound. 44 | 45 | If *typed* is True, arguments of different types will be cached separately. 46 | For example, f(3.0) and f(3) will be treated as distinct calls with 47 | distinct results. 48 | 49 | Arguments to the cached function must be hashable. 50 | 51 | View the cache statistics named tuple (hits, misses, maxsize, currsize) with 52 | f.cache_info(). Clear the cache and statistics with f.cache_clear(). 53 | Access the underlying function with f.__wrapped__. 54 | 55 | See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used 56 | 57 | """ 58 | 59 | # Users should only access the lru_cache through its public API: 60 | # cache_info, cache_clear, and f.__wrapped__ 61 | # The internals of the lru_cache are encapsulated for thread safety and 62 | # to allow the implementation to change (including a possible C version). 63 | 64 | def decorating_function(user_function): 65 | 66 | cache = dict() 67 | stats = [0, 0] # make statistics updateable non-locally 68 | HITS, MISSES = 0, 1 # names for the stats fields 69 | make_key = _make_key 70 | cache_get = cache.get # bound method to lookup key or return None 71 | _len = len # localize the global len() function 72 | lock = RLock() # because linkedlist updates aren't threadsafe 73 | root = [] # root of the circular doubly linked list 74 | root[:] = [root, root, None, None] # initialize by pointing to self 75 | nonlocal_root = [root] # make updateable non-locally 76 | PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields 77 | 78 | if maxsize == 0: 79 | 80 | def wrapper(*args, **kwds): 81 | # no caching, just do a statistics update after a successful call 82 | result = user_function(*args, **kwds) 83 | stats[MISSES] += 1 84 | return result 85 | 86 | elif maxsize is None: 87 | 88 | def wrapper(*args, **kwds): 89 | # simple caching without ordering or size limit 90 | key = make_key(args, kwds, typed) 91 | result = cache_get(key, root) # root used here as a unique not-found sentinel 92 | if result is not root: 93 | stats[HITS] += 1 94 | return result 95 | result = user_function(*args, **kwds) 96 | cache[key] = result 97 | stats[MISSES] += 1 98 | return result 99 | 100 | else: 101 | 102 | def wrapper(*args, **kwds): 103 | # size limited caching that tracks accesses by recency 104 | key = make_key(args, kwds, typed) if kwds or typed else args 105 | with lock: 106 | link = cache_get(key) 107 | if link is not None: 108 | # record recent use of the key by moving it to the front of the list 109 | root, = nonlocal_root 110 | link_prev, link_next, key, result = link 111 | link_prev[NEXT] = link_next 112 | link_next[PREV] = link_prev 113 | last = root[PREV] 114 | last[NEXT] = root[PREV] = link 115 | link[PREV] = last 116 | link[NEXT] = root 117 | stats[HITS] += 1 118 | return result 119 | result = user_function(*args, **kwds) 120 | with lock: 121 | root, = nonlocal_root 122 | if key in cache: 123 | # getting here means that this same key was added to the 124 | # cache while the lock was released. since the link 125 | # update is already done, we need only return the 126 | # computed result and update the count of misses. 127 | pass 128 | elif _len(cache) >= maxsize: 129 | # use the old root to store the new key and result 130 | oldroot = root 131 | oldroot[KEY] = key 132 | oldroot[RESULT] = result 133 | # empty the oldest link and make it the new root 134 | root = nonlocal_root[0] = oldroot[NEXT] 135 | oldkey = root[KEY] 136 | oldvalue = root[RESULT] 137 | root[KEY] = root[RESULT] = None 138 | # now update the cache dictionary for the new links 139 | del cache[oldkey] 140 | cache[key] = oldroot 141 | else: 142 | # put result in a new link at the front of the list 143 | last = root[PREV] 144 | link = [last, root, key, result] 145 | last[NEXT] = root[PREV] = cache[key] = link 146 | stats[MISSES] += 1 147 | return result 148 | 149 | def cache_info(): 150 | """Report cache statistics""" 151 | with lock: 152 | return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache)) 153 | 154 | def cache_clear(): 155 | """Clear the cache and cache statistics""" 156 | with lock: 157 | cache.clear() 158 | root = nonlocal_root[0] 159 | root[:] = [root, root, None, None] 160 | stats[:] = [0, 0] 161 | 162 | wrapper.__wrapped__ = user_function 163 | wrapper.cache_info = cache_info 164 | wrapper.cache_clear = cache_clear 165 | return update_wrapper(wrapper, user_function) 166 | 167 | return decorating_function -------------------------------------------------------------------------------- /glob2/impl.py: -------------------------------------------------------------------------------- 1 | """Filename globbing utility.""" 2 | 3 | from __future__ import absolute_import 4 | 5 | import sys 6 | import os 7 | import re 8 | from . import fnmatch 9 | 10 | try: 11 | from itertools import imap 12 | except ImportError: 13 | imap = map 14 | 15 | 16 | class Globber(object): 17 | 18 | listdir = staticmethod(os.listdir) 19 | isdir = staticmethod(os.path.isdir) 20 | islink = staticmethod(os.path.islink) 21 | exists = staticmethod(os.path.lexists) 22 | 23 | def walk(self, top, followlinks=False): 24 | """A simplified version of os.walk (code copied) that uses 25 | ``self.listdir``, and the other local filesystem methods. 26 | 27 | Because we don't care about file/directory distinctions, only 28 | a single list is returned. 29 | """ 30 | try: 31 | names = self.listdir(top) 32 | except os.error as err: 33 | return 34 | 35 | items = [] 36 | for name in names: 37 | items.append(name) 38 | 39 | yield top, items 40 | 41 | for name in items: 42 | new_path = os.path.join(top, name) 43 | if followlinks or not self.islink(new_path): 44 | for x in self.walk(new_path, followlinks): 45 | yield x 46 | 47 | def glob(self, pathname, with_matches=False, include_hidden=False): 48 | """Return a list of paths matching a pathname pattern. 49 | 50 | The pattern may contain simple shell-style wildcards a la 51 | fnmatch. However, unlike fnmatch, filenames starting with a 52 | dot are special cases that are not matched by '*' and '?' 53 | patterns. 54 | 55 | If ``include_hidden`` is True, then files and folders starting with 56 | a dot are also returned. 57 | """ 58 | return list(self.iglob(pathname, with_matches, include_hidden)) 59 | 60 | def iglob(self, pathname, with_matches=False, include_hidden=False): 61 | """Return an iterator which yields the paths matching a pathname 62 | pattern. 63 | 64 | The pattern may contain simple shell-style wildcards a la 65 | fnmatch. However, unlike fnmatch, filenames starting with a 66 | dot are special cases that are not matched by '*' and '?' 67 | patterns. 68 | 69 | If ``with_matches`` is True, then for each matching path 70 | a 2-tuple will be returned; the second element if the tuple 71 | will be a list of the parts of the path that matched the individual 72 | wildcards. 73 | 74 | If ``include_hidden`` is True, then files and folders starting with 75 | a dot are also returned. 76 | """ 77 | result = self._iglob(pathname, include_hidden=include_hidden) 78 | if with_matches: 79 | return result 80 | return imap(lambda s: s[0], result) 81 | 82 | def _iglob(self, pathname, rootcall=True, include_hidden=False): 83 | """Internal implementation that backs :meth:`iglob`. 84 | 85 | ``rootcall`` is required to differentiate between the user's call to 86 | iglob(), and subsequent recursive calls, for the purposes of resolving 87 | certain special cases of ** wildcards. Specifically, "**" is supposed 88 | to include the current directory for purposes of globbing, but the 89 | directory itself should never be returned. So if ** is the lastmost 90 | part of the ``pathname`` given the user to the root call, we want to 91 | ignore the current directory. For this, we need to know which the root 92 | call is. 93 | """ 94 | 95 | # Short-circuit if no glob magic 96 | if not has_magic(pathname): 97 | if self.exists(pathname): 98 | yield pathname, () 99 | return 100 | 101 | # If no directory part is left, assume the working directory 102 | dirname, basename = os.path.split(pathname) 103 | 104 | # If the directory is globbed, recurse to resolve. 105 | # If at this point there is no directory part left, we simply 106 | # continue with dirname="", which will search the current dir. 107 | # `os.path.split()` returns the argument itself as a dirname if it is a 108 | # drive or UNC path. Prevent an infinite recursion if a drive or UNC path 109 | # contains magic characters (i.e. r'\\?\C:'). 110 | if dirname != pathname and has_magic(dirname): 111 | # Note that this may return files, which will be ignored 112 | # later when we try to use them as directories. 113 | # Prefiltering them here would only require more IO ops. 114 | dirs = self._iglob(dirname, False, include_hidden) 115 | else: 116 | dirs = [(dirname, ())] 117 | 118 | # Resolve ``basename`` expr for every directory found 119 | for dirname, dir_groups in dirs: 120 | for name, groups in self.resolve_pattern( 121 | dirname, basename, not rootcall, include_hidden): 122 | yield os.path.join(dirname, name), dir_groups + groups 123 | 124 | def resolve_pattern(self, dirname, pattern, globstar_with_root, include_hidden): 125 | """Apply ``pattern`` (contains no path elements) to the 126 | literal directory in ``dirname``. 127 | 128 | If pattern=='', this will filter for directories. This is 129 | a special case that happens when the user's glob expression ends 130 | with a slash (in which case we only want directories). It simpler 131 | and faster to filter here than in :meth:`_iglob`. 132 | """ 133 | 134 | if sys.version_info[0] == 3: 135 | if isinstance(pattern, bytes): 136 | dirname = bytes(os.curdir, 'ASCII') 137 | else: 138 | if isinstance(pattern, unicode) and not isinstance(dirname, unicode): 139 | dirname = unicode(dirname, sys.getfilesystemencoding() or 140 | sys.getdefaultencoding()) 141 | 142 | # If no magic, short-circuit, only check for existence 143 | if not has_magic(pattern): 144 | if pattern == '': 145 | if self.isdir(dirname): 146 | return [(pattern, ())] 147 | else: 148 | if self.exists(os.path.join(dirname, pattern)): 149 | return [(pattern, ())] 150 | return [] 151 | 152 | if not dirname: 153 | dirname = os.curdir 154 | 155 | try: 156 | if pattern == '**': 157 | # Include the current directory in **, if asked; by adding 158 | # an empty string as opposed to '.', we spare ourselves 159 | # having to deal with os.path.normpath() later. 160 | names = [''] if globstar_with_root else [] 161 | for top, entries in self.walk(dirname): 162 | _mkabs = lambda s: os.path.join(top[len(dirname)+1:], s) 163 | names.extend(map(_mkabs, entries)) 164 | # Reset pattern so that fnmatch(), which does not understand 165 | # ** specifically, will only return a single group match. 166 | pattern = '*' 167 | else: 168 | names = self.listdir(dirname) 169 | except os.error: 170 | return [] 171 | 172 | if not include_hidden and not _ishidden(pattern): 173 | # Remove hidden files, but take care to ensure 174 | # that the empty string we may have added earlier remains. 175 | # Do not filter out the '' that we might have added earlier 176 | names = filter(lambda x: not x or not _ishidden(x), names) 177 | return fnmatch.filter(names, pattern) 178 | 179 | 180 | default_globber = Globber() 181 | glob = default_globber.glob 182 | iglob = default_globber.iglob 183 | del default_globber 184 | 185 | 186 | magic_check = re.compile('[*?[]') 187 | magic_check_bytes = re.compile(b'[*?[]') 188 | 189 | def has_magic(s): 190 | if isinstance(s, bytes): 191 | match = magic_check_bytes.search(s) 192 | else: 193 | match = magic_check.search(s) 194 | return match is not None 195 | 196 | def _ishidden(path): 197 | return path[0] in ('.', b'.'[0]) 198 | -------------------------------------------------------------------------------- /openformat-to-obj.py: -------------------------------------------------------------------------------- 1 | import sys, os, random, re, time, glob2, argparse 2 | import xml.etree.ElementTree as ET 3 | from enum import Enum 4 | 5 | VALID_IMAGE_EXTS = [".png", ".tga", ".dds", ".jpg", ".jpeg"] 6 | VALID_PATH_REGEX = re.compile(r"^[\\\w\-. ]+$") 7 | owd = os.getcwd() 8 | VERSION = "0.13" 9 | OBJ_FIRST_LINE = "# V %s" % VERSION 10 | 11 | class D3DDECLUSAGE(Enum): 12 | D3DDECLUSAGE_POSITION = 0, 13 | D3DDECLUSAGE_BLENDWEIGHT = 1, 14 | D3DDECLUSAGE_BLENDINDICES = 2, 15 | D3DDECLUSAGE_NORMAL = 3, 16 | D3DDECLUSAGE_PSIZE = 4, 17 | D3DDECLUSAGE_TEXCOORD = 5, 18 | D3DDECLUSAGE_TANGENT = 6, 19 | D3DDECLUSAGE_BINORMAL = 7, 20 | D3DDECLUSAGE_TESSFACTOR = 8, 21 | D3DDECLUSAGE_POSITIONT = 9, 22 | D3DDECLUSAGE_COLOR = 10, 23 | D3DDECLUSAGE_FOG = 11, 24 | D3DDECLUSAGE_DEPTH = 12, 25 | D3DDECLUSAGE_SAMPLE = 13 26 | 27 | class D3DCOMPONENT(Enum): 28 | def __init__(self, id, len): 29 | self.id = id 30 | self.len = len 31 | 32 | class D3DDECLTYPE(Enum): 33 | D3DDECLTYPE_FLOAT1 = 0, 34 | D3DDECLTYPE_FLOAT2 = 1, 35 | D3DDECLTYPE_FLOAT3 = 2, 36 | D3DDECLTYPE_FLOAT4 = 3, 37 | D3DDECLTYPE_D3DCOLOR = 4, 38 | D3DDECLTYPE_UBYTE4 = 5, 39 | D3DDECLTYPE_SHORT2 = 6, 40 | D3DDECLTYPE_SHORT4 = 7, 41 | D3DDECLTYPE_UBYTE4N = 8, 42 | D3DDECLTYPE_SHORT2N = 9, 43 | D3DDECLTYPE_SHORT4N = 10, 44 | D3DDECLTYPE_USHORT2N = 11, 45 | D3DDECLTYPE_USHORT4N = 12, 46 | D3DDECLTYPE_UDEC3 = 13, 47 | D3DDECLTYPE_DEC3N = 14, 48 | D3DDECLTYPE_FLOAT16_2 = 15, 49 | D3DDECLTYPE_FLOAT16_4 = 16, 50 | D3DDECLTYPE_UNUSED = 17, 51 | 52 | class Logger(object): 53 | def __init__(self): 54 | self.terminal = sys.stdout 55 | self.log = open(make_path_unique("openformat-to-obj.log"), "a") 56 | 57 | def write(self, message): 58 | self.terminal.write(message) 59 | self.log.write(message) 60 | 61 | def flush(self): 62 | pass 63 | 64 | def make_path_unique(path): 65 | dir = os.path.dirname(path) 66 | file_name_with_ext = os.path.basename(path) 67 | file_name, ext = os.path.splitext(file_name_with_ext) 68 | i = 1 69 | while(True): 70 | if not os.path.exists(path): 71 | return path 72 | path = os.path.join(dir, file_name + " " + str(i) + ext) 73 | i += 1 74 | 75 | def parse_odr(path, force=False): 76 | 77 | os.chdir(owd) 78 | full_path = os.path.realpath(path) 79 | name, ext = os.path.splitext(os.path.basename(full_path)) 80 | dirname = os.path.dirname(full_path) 81 | filename = os.path.basename(full_path) 82 | obj_path = name+".obj" 83 | mtl_path = name+".mtl" 84 | 85 | if ext != ".odr": 86 | print("'%s' is not an ODR file" % path) 87 | return 88 | 89 | os.chdir(dirname) 90 | times = [] 91 | times.append(time.time()) 92 | 93 | if not force: 94 | if os.path.isfile(obj_path): 95 | with open(obj_path, 'r') as f: 96 | first_line = f.readline() 97 | if first_line == OBJ_FIRST_LINE+"\n": 98 | print("Skipping '%s'..." % path) 99 | return 100 | 101 | print("Converting '%s'..." % path) 102 | 103 | odr_data = readfile(full_path) 104 | shader_datas = re.findall(r"Shaders\s+{([\s\S]+?)^\t}", odr_data, re.MULTILINE) 105 | shader_datas = re.findall(r"(.+?\.sps)[\s\S]+?{([\s\S]+?)}", shader_datas[0], re.MULTILINE) 106 | shaders = [] 107 | 108 | for d in shader_datas: 109 | shader_name = d[0].strip() 110 | 111 | diffuse = re.findall(r"DiffuseSampler\s+(.+)", d[1]) 112 | bump = re.findall(r"BumpSampler\s+(.+)", d[1]) 113 | spec = re.findall(r"SpecSampler\s+(.+)", d[1]) 114 | textures = { 115 | "diffuse" : diffuse[0] if len(diffuse)>0 else None, 116 | "bump" : bump[0] if len(bump)>0 else None, 117 | "spec" : spec[0] if len(spec)>0 else None, 118 | } 119 | for k in textures: 120 | otx_path = textures[k] 121 | 122 | if otx_path is None: 123 | continue 124 | 125 | if not VALID_PATH_REGEX.match(otx_path): 126 | print("Sampler '%s' is not a valid path" % otx_path) 127 | continue 128 | 129 | if not os.path.isfile(otx_path): 130 | if os.path.splitext(otx_path)[1] != ".otx": 131 | print("Sampler '%s' did not specify location. Searching..." % otx_path) 132 | 133 | otx_name = os.path.basename(otx_path) 134 | otx_files = [ 135 | os.path.join(otx_path+".otx"), 136 | os.path.join(otx_path, otx_name+".otx"), 137 | os.path.join("..", otx_path, otx_name+".otx"), 138 | ] 139 | otx_files = [p for p in otx_files if os.path.isfile(p)] 140 | 141 | if len(otx_files) == 0: 142 | otx_files = glob2.glob("*/" + otx_name + ".otx") 143 | 144 | if len(otx_files) == 0: 145 | print("Could not find a matching path") 146 | continue 147 | else: 148 | otx_path = otx_files[0] 149 | print("Found '%s'" % otx_path) 150 | 151 | path_split = os.path.split(otx_path) 152 | hidr_path = os.path.join(path_split[0]+"+hidr", *path_split[1:]) 153 | hi_path = os.path.join(path_split[0]+"+hi", *path_split[1:]) 154 | 155 | if os.path.isfile(hidr_path): 156 | otx_path = hidr_path 157 | elif os.path.isfile(hi_path): 158 | otx_path = hi_path 159 | 160 | otx_data = readfile(otx_path) 161 | 162 | image_path = re.findall(r"^\s+Image (.+)$", otx_data, re.MULTILINE)[0] 163 | 164 | textures[k] = os.path.join(os.path.dirname(otx_path), image_path) 165 | 166 | shader = { 167 | "name" : "%s_%d" % (name, len(shaders)), 168 | "shader_name" : shader_name, 169 | "textures" : textures, 170 | "xml" : shader_manager_xml.find("./Shaders/ShaderPreSet[@name='%s']" % shader_name), 171 | } 172 | 173 | shaders.append(shader) 174 | 175 | lod_data = re.findall(r"LodGroup\s+{([\s\S]+?)^\t}", odr_data, re.MULTILINE) 176 | high_lod_data = re.findall(r"High [\s\S]+?{([\s\S]+?)}", lod_data[0], re.MULTILINE) 177 | med_lod_data = re.findall(r"Med [\s\S]+?{([\s\S]+?)}", lod_data[0], re.MULTILINE) 178 | low_lod_data = re.findall(r"Low [\s\S]+?{([\s\S]+?)}", lod_data[0], re.MULTILINE) 179 | vlow_lod_data = re.findall(r"Vlow [\s\S]+?{([\s\S]+?)}", lod_data[0], re.MULTILINE) 180 | 181 | lods = { 182 | "high" : high_lod_data[0] if len(high_lod_data)>0 else None, 183 | "med" : med_lod_data[0] if len(med_lod_data)>0 else None, 184 | "low" : low_lod_data[0] if len(low_lod_data)>0 else None, 185 | "vlow" : vlow_lod_data[0] if len(vlow_lod_data)>0 else None, 186 | } 187 | 188 | obj_data = "" 189 | mtl_data = "" 190 | index_offset = 1 191 | 192 | obj_data += OBJ_FIRST_LINE+"\n" 193 | obj_data += "\n" 194 | 195 | for k in lods: 196 | if lods[k] is None: 197 | continue 198 | 199 | lods[k] = [re.split(r"\s", s.strip())[0] for s in lods[k].splitlines()] 200 | lods[k] = [s for s in lods[k] if s != ""] 201 | 202 | for mesh_path in lods[k]: 203 | 204 | mesh_data = readfile(mesh_path) 205 | 206 | skinned = re.findall(r"Skinned (.+)", mesh_data)[0] 207 | gemoetry_datas = re.findall(r"Geometry\s+?{([\s\S]+?)^\t\t}", mesh_data, re.MULTILINE) 208 | 209 | obj_data += "mtllib %s\n" % mtl_path 210 | obj_data += "\n" 211 | 212 | geom_id = 0 213 | for geom_data in gemoetry_datas: 214 | 215 | geom_id += 1 216 | shader_index = int(re.findall(r"ShaderIndex (.+)", geom_data)[0]) 217 | 218 | indices = re.findall(r"Indices \d+\s+{([\s\S]+?)}", geom_data)[0].strip() 219 | indices = re.split(r"\s+", indices) 220 | vertices = re.findall(r"Vertices \d+\s+{([\s\S]+?)}", geom_data)[0].strip() 221 | vertices = re.split(r"\n+", vertices) 222 | for i,v in enumerate(vertices): 223 | parts = vertices[i].split("/") 224 | parts = [p.strip().split(" ") for p in parts] 225 | vertices[i] = parts 226 | 227 | shader = shaders[shader_index] 228 | item_xml = shader["xml"].find("./VertexDeclarations/Item[@skinned='%s']" % skinned) 229 | elements_xml = item_xml.findall("./Element") 230 | 231 | ci = [0, 0, 0] #component_indices 232 | 233 | for i,e in enumerate(elements_xml): 234 | usage = e.get("usage") 235 | if usage == D3DDECLUSAGE.D3DDECLUSAGE_POSITION.name: 236 | ci[0] = i 237 | elif usage == D3DDECLUSAGE.D3DDECLUSAGE_NORMAL.name: 238 | ci[1] = i 239 | elif usage == D3DDECLUSAGE.D3DDECLUSAGE_TEXCOORD.name: 240 | ci[2] = i 241 | 242 | filtered_vertices = [[v[ci[0]], v[ci[1]], v[ci[2]]] for v in vertices] 243 | 244 | obj_data += "o %s_%s_%s\n" % (name, k, geom_id) 245 | obj_data += "\n" 246 | obj_data += "usemtl %s\n" % shader["name"] 247 | obj_data += "\n" 248 | for v in filtered_vertices: 249 | obj_data += "v "+" ".join(v[0])+"\n" 250 | obj_data += "\n" 251 | for v in filtered_vertices: 252 | obj_data += "vn "+" ".join(v[1])+"\n" 253 | obj_data += "\n" 254 | for v in filtered_vertices: 255 | vt = v[2][:] 256 | vt[1] = str(1-float(vt[1])) 257 | obj_data += "vt "+" ".join(vt)+"\n" 258 | obj_data += "\n" 259 | for i in range(0, len(indices), 3): 260 | obj_data += "f {0}/{0}/{0} {1}/{1}/{1} {2}/{2}/{2} \n".format(int(indices[i])+index_offset, int(indices[i+1])+index_offset, int(indices[i+2])+index_offset) 261 | obj_data += "\n" 262 | 263 | index_offset += len(vertices) 264 | 265 | #----------------------------- 266 | 267 | mtl_data += OBJ_FIRST_LINE+"\n" 268 | mtl_data += "\n" 269 | 270 | for shader in shaders: 271 | 272 | mtl_data += "newmtl %s\n" % shader["name"] 273 | 274 | if shader["textures"]["diffuse"] is not None: 275 | #mtl_data += "map_Ka %s\n" % shader["textures"]["diffuse"] 276 | mtl_data += "map_Kd %s\n" % shader["textures"]["diffuse"] 277 | #mtl_data += "map_Ks %s\n" % shader["textures"]["diffuse"] 278 | 279 | if shader["textures"]["bump"] is not None: 280 | #mtl_data += "bump %s\n" % shader["textures"]["bump"] 281 | #mtl_data += "norm %s\n" % shader["textures"]["bump"] 282 | mtl_data += "map_bump %s\n" % shader["textures"]["bump"] 283 | 284 | if shader["textures"]["spec"] is not None: 285 | mtl_data += "map_Ks %s\n" % shader["textures"]["spec"] 286 | 287 | mtl_data += "\n" 288 | 289 | savefile(obj_path, obj_data) 290 | savefile(mtl_path, mtl_data) 291 | 292 | times.append(time.time()) 293 | 294 | print("Created %s, %s (%.2f s)" % (obj_path, mtl_path, times[-1] - times[0])) 295 | 296 | os.chdir(owd) 297 | 298 | def readfile(path): 299 | with open(path, "r") as file: 300 | return file.read() 301 | 302 | def savefile(path, text): 303 | with open(path, "w") as file: 304 | file.write(text) 305 | 306 | sys.stdout = Logger() 307 | 308 | shader_manager_path = os.path.join(os.path.dirname(__file__), "ShaderManager.xml") 309 | shader_manager_xml = ET.fromstring(readfile(shader_manager_path)) 310 | 311 | parser = argparse.ArgumentParser() 312 | 313 | parser.add_argument("glob", default="*.odr", nargs="?", help="A pattern or name") 314 | parser.add_argument("--force", "-f", default=False, action="store_true", help="Force the converter to reconvert converted files") 315 | 316 | args = parser.parse_args() 317 | 318 | # if os.path.isdir(odr_path): 319 | # paths = os.listdir(odr_path) 320 | # odr_paths = [os.path.join(odr_path, p) for p in paths] 321 | # odr_paths = [p for p in odr_paths if os.path.isfile(p) and os.path.splitext(p)[1] == ".odr"] 322 | # else: 323 | # odr_paths = [odr_path] 324 | 325 | odr_paths = glob2.glob(args.glob) 326 | 327 | if len(odr_paths) == 0: 328 | print("No files matching glob found") 329 | else: 330 | for p in odr_paths: 331 | parse_odr(p, args.force) 332 | print("---------------------------------------------") --------------------------------------------------------------------------------