├── utils ├── shared │ ├── materials │ │ ├── cubemap.py │ │ └── proxies.txt │ ├── icon.ico │ ├── bin │ │ └── vtf2tga │ │ │ ├── 2013 │ │ │ ├── tier0.dll │ │ │ ├── vstdlib.dll │ │ │ ├── vtf2tga.exe │ │ │ └── FileSystem_Stdio.dll │ │ │ └── csgo │ │ │ ├── tier0.dll │ │ │ ├── vstdlib.dll │ │ │ ├── vtf2tga.exe │ │ │ └── filesystem_stdio.dll │ ├── textures │ │ └── vtex_template.kv2 │ ├── import_blacklist.json │ ├── PFM.py │ ├── keyvalues3.py │ ├── empty.vmap.txt │ ├── cstr.py │ ├── modeldoc.py │ ├── keyvalues1.py │ ├── material_proxies.py │ ├── qc.py │ ├── base_utils2.py │ └── cppkeyvalues.py ├── sound_import.py ├── dev │ ├── vsndevts.py │ ├── use_already_existing_particle_textures.py │ ├── .todo.txt │ ├── generate_particle_dict.py │ └── use_hlvr_surfaces.py ├── scenes_import.py ├── elements_import.py ├── vtf_to_tga.py ├── maps_import.py └── scripts_import.py ├── .gitmodules ├── .gitignore ├── requirements.txt ├── LICENCE.md ├── .github └── workflows │ ├── build_app.yml │ └── unittests.yml └── README.md /utils/shared/materials/cubemap.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /utils/shared/icon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kristiker/source1import/HEAD/utils/shared/icon.ico -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "sample"] 2 | path = sample 3 | url = https://github.com/kristiker/source1import 4 | branch = sample 5 | -------------------------------------------------------------------------------- /utils/shared/bin/vtf2tga/2013/tier0.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kristiker/source1import/HEAD/utils/shared/bin/vtf2tga/2013/tier0.dll -------------------------------------------------------------------------------- /utils/shared/bin/vtf2tga/csgo/tier0.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kristiker/source1import/HEAD/utils/shared/bin/vtf2tga/csgo/tier0.dll -------------------------------------------------------------------------------- /utils/shared/bin/vtf2tga/2013/vstdlib.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kristiker/source1import/HEAD/utils/shared/bin/vtf2tga/2013/vstdlib.dll -------------------------------------------------------------------------------- /utils/shared/bin/vtf2tga/2013/vtf2tga.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kristiker/source1import/HEAD/utils/shared/bin/vtf2tga/2013/vtf2tga.exe -------------------------------------------------------------------------------- /utils/shared/bin/vtf2tga/csgo/vstdlib.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kristiker/source1import/HEAD/utils/shared/bin/vtf2tga/csgo/vstdlib.dll -------------------------------------------------------------------------------- /utils/shared/bin/vtf2tga/csgo/vtf2tga.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kristiker/source1import/HEAD/utils/shared/bin/vtf2tga/csgo/vtf2tga.exe -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | \.vscode/ 2 | env*/ 3 | *.pyc 4 | build/ 5 | dist/* 6 | source1import.spec 7 | source1import.json 8 | source1import.exe 9 | *.vtf 10 | -------------------------------------------------------------------------------- /utils/shared/bin/vtf2tga/2013/FileSystem_Stdio.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kristiker/source1import/HEAD/utils/shared/bin/vtf2tga/2013/FileSystem_Stdio.dll -------------------------------------------------------------------------------- /utils/shared/bin/vtf2tga/csgo/filesystem_stdio.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kristiker/source1import/HEAD/utils/shared/bin/vtf2tga/csgo/filesystem_stdio.dll -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # Materials 2 | Pillow 3 | numpy 4 | 5 | # Maps 6 | vdf 7 | dataclassy==0.10.4 8 | bsp_tool==0.3.1 9 | 10 | # Models/qc-import 11 | parsimonious==0.10.0 12 | srctools==2.3.4 13 | 14 | # Needed for installer(s) 15 | Nuitka==2.6.7 16 | pyinstaller==6.12.0 -------------------------------------------------------------------------------- /utils/sound_import.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | print("Copy the files yourself, but please on a new folder named /sounds (not /sound)\n") 4 | input("Press any key to exit...") 5 | 6 | from pathlib import Path 7 | 8 | 9 | def ImportSoundFile(asset_path: Path): 10 | ... 11 | # from s1/game/sound to s2/content/sounds -------------------------------------------------------------------------------- /utils/shared/textures/vtex_template.kv2: -------------------------------------------------------------------------------- 1 | 2 | "CDmeVtex" 3 | { 4 | "m_inputTextureArray" "element_array" 5 | [ 6 | "CDmeInputTexture" 7 | { 8 | "m_name" "string" "0" 9 | "m_fileName" "string" "materials/particle/dry_erase/stamps/example.tga" 10 | "m_colorSpace" "string" "srgb" 11 | "m_typeString" "string" "2D" 12 | } 13 | ] 14 | "m_outputTypeString" "string" "2D" 15 | "m_outputFormat" "string" "DXT5" 16 | "m_textureOutputChannelArray" "element_array" 17 | [ 18 | "CDmeTextureOutputChannel" 19 | { 20 | "m_inputTextureArray" "string_array" 21 | [ 22 | "0" 23 | ] 24 | "m_srcChannels" "string" "rgba" 25 | "m_dstChannels" "string" "rgba" 26 | "m_mipAlgorithm" "CDmeImageProcessor" 27 | { 28 | "m_algorithm" "string" "" 29 | "m_stringArg" "string" "" 30 | "m_vFloat4Arg" "vector4" "0 0 0 0" 31 | } 32 | "m_outputColorSpace" "string" "srgb" 33 | } 34 | ] 35 | } -------------------------------------------------------------------------------- /LICENCE.md: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Kristi (kristiker) 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /utils/dev/vsndevts.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | collected = {} 4 | def collect_dev(k, v): 5 | 6 | v_evauluated = eval(repr(v)) 7 | v_type = type(v_evauluated) 8 | 9 | c = collected.setdefault(k, ([], [])) 10 | if v_type not in c[0]: 11 | c[0].append(v_type) 12 | c[1].append(v_evauluated) 13 | 14 | e_types = [] 15 | for file in Path(r'D:\Games\steamapps\common\Half-Life Alyx\game\csgo\soundevents').glob("*.vsndevts"): 16 | with open(file) as fp: 17 | for line in fp.read().splitlines(): 18 | if len(line) < 3: 19 | continue 20 | if line[:2] == '\t'*2 and line[2].isalpha(): 21 | try: 22 | k, v = tuple(line.strip().split('=')) 23 | except ValueError: 24 | print(line) 25 | if k.strip() == 'event_type': 26 | e_types.append(v.strip()) 27 | continue 28 | collect_dev(k.strip(), v.strip()) 29 | 30 | from collections import Counter 31 | c = Counter(e_types) 32 | print(c.most_common()) 33 | for k, v in collected.items(): 34 | print(k, v[1]) -------------------------------------------------------------------------------- /utils/dev/use_already_existing_particle_textures.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | import vpk 3 | 4 | content = Path(input("Enter your content path, e.g. D:/Games/steamapps/common/Half-Life Alyx/content/hlvr_addons/csgo\n>")) 5 | root_mod = Path(input("Enter root game, e.g. D:/Games/steamapps/common/Half-Life Alyx/game/hlvr\n>")) 6 | 7 | game = content 8 | while True: 9 | game = game.parent 10 | if game.name == "content": 11 | mod = content.relative_to(game) 12 | game = game.parent / "game" / mod 13 | break 14 | 15 | pak = vpk.VPK(root_mod/"pak01_dir.vpk") 16 | already_available_textures = set() 17 | 18 | # find vtexes in pak 19 | for pakked_file, _ in pak.items(): 20 | if not pakked_file.startswith("materials/particle"): 21 | continue 22 | if not pakked_file.endswith(".vtex_c"): 23 | continue 24 | already_available_textures.add(pakked_file) 25 | 26 | # Delete our "overrides" 27 | for tex in already_available_textures: 28 | content_side = (content/tex).with_suffix(".vtex") 29 | game_side = game/tex 30 | if content_side.is_file(): 31 | content_side.unlink() 32 | print("Removed content side", content_side.relative_to(content)) 33 | if game_side.is_file(): 34 | game_side.unlink() 35 | print("Removed game side", tex) 36 | -------------------------------------------------------------------------------- /.github/workflows/build_app.yml: -------------------------------------------------------------------------------- 1 | name: Build source1import.exe 2 | 3 | on: 4 | # Allows you to run this workflow manually from the Actions tab 5 | workflow_dispatch: 6 | 7 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel 8 | jobs: 9 | build: 10 | # The type of runner that the job will run on 11 | runs-on: windows-latest 12 | 13 | # Steps represent a sequence of tasks that will be executed as part of the job 14 | steps: 15 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 16 | - uses: actions/checkout@v3 17 | 18 | - name: Setup Python 3.11 19 | uses: actions/setup-python@v4.2.0 20 | with: 21 | python-version: 3.11.0 22 | 23 | - name: Setup Dependencies 24 | run: pip install -r requirements.txt 25 | 26 | - name: Setup PyInstaller 27 | run: pip install -U pyinstaller 28 | 29 | - name: Build with PyInstaller 30 | run: ./build.bat 31 | 32 | - name: Build with Nuitka 33 | run: ./build_nuitka.bat 34 | 35 | - name: Upload a Build Artifact 36 | uses: actions/upload-artifact@v3.1.0 37 | with: 38 | # Artifact name 39 | name: source1import (latest) 40 | # A file, directory or wildcard pattern that describes what to upload 41 | path: source1import.exe 42 | -------------------------------------------------------------------------------- /utils/shared/import_blacklist.json: -------------------------------------------------------------------------------- 1 | { 2 | "materials": [ 3 | "/models/weapons/customization/stickers/", 4 | "/models/hybridphysx/", 5 | "/debug/", 6 | "/editor/", 7 | "/models/editor/", 8 | "/dev/", 9 | "/engine/additivevertexcolorvertexalpha.vmt", 10 | "/engine/colorcorrection.vmt", 11 | "/engine/depthwritealphatest.vmt", 12 | "/engine/depthwritemodel.vmt", 13 | "/engine/depthwritemodelalphatest.vmt", 14 | "/engine/filmdust.vmt", 15 | "/engine/filmgrain.vmt", 16 | "/engine/framesync.vmt", 17 | "/engine/lightshaft.vmt", 18 | "/engine/modulatesinglecolor.vmt", 19 | "/engine/preloadtexture.vmt", 20 | "/engine/renderdeferredshadow.vmt", 21 | "/engine/renderdeferredsimpleshadow.vmt", 22 | "/engine/shadowbuild.vmt", 23 | "/engine/singlecolor.vmt", 24 | "/engine/vmtview_background.vmt", 25 | "/engine/volumetricfog.vmt", 26 | "/engine/writestencil.vmt", 27 | "/engine/writez.vmt", 28 | "/tools/toolsskyfog.vmt", 29 | "/decals/rendershadow.vmt", 30 | "/decals/rendermodelshadow.vmt", 31 | "/decals/simpleshadow.vmt" 32 | ], 33 | "models": [ 34 | "/hybridphysx/", 35 | "/editor/", 36 | "/player/custom_player/animset", 37 | "/props_wasteland/bridge_railing.mdl" 38 | ], 39 | "particles": [ 40 | "/combineball*" 41 | ], 42 | "scripts": [ 43 | "/surfaceproperties_*", 44 | "/weapon_manifest.txt" 45 | ] 46 | } -------------------------------------------------------------------------------- /.github/workflows/unittests.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [ "master" ] 6 | pull_request: 7 | branches: [ "master" ] 8 | 9 | # Allows you to run this workflow manually from the Actions tab 10 | workflow_dispatch: 11 | 12 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel 13 | jobs: 14 | # This workflow contains a single job called "build" 15 | unit-test: 16 | # The type of runner that the job will run on 17 | runs-on: ubuntu-22.04 18 | 19 | # Steps represent a sequence of tasks that will be executed as part of the job 20 | steps: 21 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it 22 | - uses: actions/checkout@v3 23 | with: 24 | submodules: true 25 | 26 | - name: Setup Python 3.10 27 | uses: actions/setup-python@v4.2.0 28 | with: 29 | python-version: 3.10.6 30 | 31 | # Runs a single command using the runners shell 32 | - name: Dependencies 33 | run: pip install -r requirements.txt 34 | 35 | # Runs a set of commands using the runners shell 36 | - name: Test units 37 | run: | 38 | echo "" > gameinfo.txt 39 | python utils/shared/base_utils2.py -i "$PWD" -e "$PWD" 40 | python utils/shared/cstr.py 41 | python utils/shared/cppkeyvalues.py 42 | python utils/shared/keyvalues3.py 43 | python utils/shared/material_proxies.py 44 | python utils/shared/qc.py 45 | 46 | - name: Check imported files for changes 47 | run: | 48 | python sample/results_update.py 49 | git diff --exit-code --submodule=diff HEAD $REF -- sample 50 | 51 | -------------------------------------------------------------------------------- /utils/shared/PFM.py: -------------------------------------------------------------------------------- 1 | # Taken from https://gist.github.com/aminzabardast/cdddae35c367c611b6fd5efd5d63a326 2 | 3 | import sys, re, numpy as np 4 | 5 | def read_pfm(file): 6 | ''' 7 | Read a PFM file into a Numpy array. Note that it will have 8 | a shape of H x W, not W x H. Returns a tuple containing the 9 | loaded image and the scale factor from the file. 10 | ''' 11 | file = open(file, 'rb') 12 | 13 | color = None 14 | width = None 15 | height = None 16 | scale = None 17 | endian = None 18 | 19 | header = file.readline().rstrip() 20 | if header.decode('ascii') == 'PF': 21 | color = True 22 | elif header.decode('ascii') == 'Pf': 23 | color = False 24 | else: 25 | raise Exception('Not a PFM file.') 26 | 27 | dim_match = re.search(r'(\d+)\s(\d+)', file.readline().decode('ascii')) 28 | if dim_match: 29 | width, height = map(int, dim_match.groups()) 30 | else: 31 | raise Exception('Malformed PFM header.') 32 | 33 | scale = float(file.readline().rstrip()) 34 | if scale < 0: # little-endian 35 | endian = '<' 36 | scale = -scale 37 | else: 38 | endian = '>' # big-endian 39 | 40 | data = np.fromfile(file, endian + 'f') 41 | shape = (height, width, 3) if color else (height, width) 42 | return np.reshape(data, shape), scale, (width, height) 43 | 44 | def write_pfm(file, image, scale=1): 45 | ''' 46 | Write a Numpy array to a PFM file. 47 | ''' 48 | file = open(file, 'wb') 49 | 50 | color = None 51 | 52 | if image.dtype.name != 'float32': 53 | raise Exception('Image dtype must be float32.') 54 | 55 | if len(image.shape) == 3 and image.shape[2] == 3: # color image 56 | color = True 57 | # greyscale 58 | elif len(image.shape) == 2 or len(image.shape) == 3 and image.shape[2] == 1: 59 | color = False 60 | else: 61 | raise Exception( 62 | 'Image must have H x W x 3, H x W x 1 or H x W dimensions.') 63 | 64 | file.write(b'PF\n' if color else b'Pf\n') 65 | file.write(b'%d %d\n' % (image.shape[1], image.shape[0])) 66 | 67 | endian = image.dtype.byteorder 68 | 69 | if endian == '<' or endian == '=' and sys.byteorder == 'little': 70 | scale = -scale 71 | 72 | file.write(b'%f\n' % scale) 73 | 74 | image.tofile(file) 75 | -------------------------------------------------------------------------------- /utils/scenes_import.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from shutil import copyfile 3 | import shared.base_utils2 as sh 4 | 5 | EVERYTHING_TO_ROOT = False 6 | 7 | IN_EXT = '.vcd' 8 | SCENESIMAGE = 'scenes.image' 9 | 10 | scenes_vrman_template = \ 11 | ''' 12 | ResourceManifest_t 13 | { 14 | string name = "Scenes Manifest" 15 | string[] resourceFileNameList = 16 | [ 17 | <> 18 | ] 19 | }''' 20 | 21 | # ORGANIZED Structure 22 | # _root.vcdlist scenes/a.vcd, scenes/b.vcd 23 | # d.vcdlist scenes/d/d5.vcd, scenes/d/test.vcd 24 | # test_magnificient.vcdlist scenes/test/magnificient/test.vcd, scenes/test/magnificient/b.vcd 25 | def main(): 26 | print("Importing Scenes!") 27 | for vcd in sh.collect('scenes', IN_EXT, '', existing=True): 28 | path_parts = vcd.local.relative_to('scenes').parent.parts 29 | if EVERYTHING_TO_ROOT or len(path_parts) == 0: 30 | ImportVCD(vcd, '_root') 31 | else: 32 | ImportVCD(vcd, '_'.join(path_parts)) 33 | 34 | print("Creating scenes.vrman!") 35 | 36 | scenes_vrman = sh.output('scenes/scenes.vrman') 37 | if not scenes_vrman.is_file(): 38 | scenes_vrman.parent.MakeDir() 39 | with open(scenes_vrman, 'w') as fp: 40 | fp.write(scenes_vrman_template.replace("<>", ",\n ".join(f'"scenes/{vcdlist}.vcdlist"' for vcdlist in vcdlist_entries_cache))) 41 | 42 | print("Looks like we are done!") 43 | 44 | vcdlist_entries_cache = {} 45 | 46 | def ImportVCD(vcd_in: Path, vcdlist_name: str): 47 | vcdlist = (sh.output('scenes') / vcdlist_name).with_suffix('.vcdlist') 48 | vcdlist.parent.MakeDir() 49 | vcd_out = sh.output(vcd_in) 50 | vcd_out.parent.MakeDir() 51 | 52 | if not vcd_out.is_file(): 53 | copyfile(vcd_in, vcd_out) 54 | 55 | vcd_local = vcd_out.local.relative_to('scenes').as_posix() 56 | 57 | vcdlist_entries_cache.setdefault(vcdlist_name, []) 58 | 59 | if vcdlist.is_file(): 60 | if not vcdlist_entries_cache.get(vcdlist_name): 61 | with open(vcdlist) as fp: 62 | # TODO: forward slashes, whitelines, etc 63 | vcdlist_entries_cache[vcdlist_name] = fp.read().splitlines() 64 | if vcd_local in vcdlist_entries_cache[vcdlist_name]: 65 | return vcdlist 66 | 67 | with open(vcdlist, 'a') as fp: 68 | fp.write(f'{vcd_local}\n') 69 | vcdlist_entries_cache[vcdlist_name].append(vcd_local) 70 | print(f"+ Appended VCD to {vcdlist.local}: {vcd_local}") 71 | 72 | return vcdlist 73 | 74 | if __name__ == '__main__': 75 | sh.parse_argv() 76 | main() 77 | -------------------------------------------------------------------------------- /utils/shared/materials/proxies.txt: -------------------------------------------------------------------------------- 1 | Proxies 2 | { 3 | Sine 4 | { 5 | sinemin "-10" 6 | sinemax "10" 7 | sineperiod "5" 8 | resultVar "$alpha" 9 | } 10 | } 11 | 12 | ------->> 13 | 14 | DynamicParams 15 | { 16 | g_flOpacityScale "" 17 | #TextureColor "x = 5 + time();\n\nreturn x+x+5;" 18 | #g_flAnimationFrame "random(5, 10)" 19 | } 20 | 21 | : 22 | // Constants 23 | M_PI = 3.14159265358979323846264338327950288; 24 | 25 | // Vars 26 | 27 | // Proxy: Sine 28 | m_Sine_sinemin = 0; 29 | m_Sine_sinemax = 1; 30 | m_Sine_sineperiod = 5; 31 | m_Sine_timeoffset = 0; 32 | 33 | m_Sine_resultVar = ( m_Sine_sinemax - m_Sine_sinemin ) * (( sin( 2.0 * M_PI * (time() - m_Sine_timeoffset) / m_Sine_sineperiod ) * 0.5 ) + 0.5) + m_Sine_sinemin; 34 | 35 | return m_Sine_resultVar; 36 | 37 | _proxy_addOutput = 0 38 | _proxy_loeOutput = 0 39 | _proxy_noiseSignal = 0 40 | _proxy_noiseGate = 0.6 41 | _proxy_zero = 0 42 | _proxy_sinewaveOutput = 0 43 | _proxy_one = 1 44 | 45 | 46 | _proxy_addOutput 47 | 48 | return clamp(0.1, 1, _proxy_addOutput) 49 | 50 | //////////// 51 | 52 | $addOutput 0 53 | $loeOutput 0 54 | $noiseSignal 0 55 | $noiseGate 0.6 56 | $loeOutput 57 | $zero 0 58 | $sinewaveOutput 0 59 | $one 1 60 | 61 | Proxies 62 | { 63 | Clamp // Clamp final output 64 | { 65 | minVal .1 66 | maxVal 1 67 | srcVar1 $addOutput 68 | resultVar $color 69 | } 70 | 71 | Add // Add vars together 72 | { 73 | srcVar1 $sinewaveOutput 74 | srcVar2 $loeOutput 75 | resultVar $addOutput 76 | } 77 | LessOrEqual 78 | { 79 | lessEqualVar "$zero" 80 | greaterVar "$one" 81 | srcVar1 "$noiseSignal" 82 | srcVar2 "$noiseGate" 83 | resultVar $loeOutput 84 | } 85 | GaussianNoise // Base noise signal 86 | { 87 | minVal .1 88 | maxVal 1 89 | halfWidth .5 90 | mean 1 91 | 92 | resultVar "$noiseSignal" 93 | } 94 | 95 | Sine // Base Sine Wave 96 | { 97 | sinemin 0 98 | sinemax 6 99 | sineperiod 5 100 | resultVar $sinewaveOutput 101 | } 102 | } 103 | 104 | loop all, check if any resultvar points to a known variable ($alpha) 105 | - paste relevant code here; if a param points to unknownvariable, loop to find where it is resultVar'd first 106 | - add code for that variable at the top -------------------------------------------------------------------------------- /utils/elements_import.py: -------------------------------------------------------------------------------- 1 | import shared.base_utils2 as sh 2 | import shared.datamodel as dmx 3 | from pathlib import Path 4 | 5 | SHOULD_OVERWRITE = False 6 | KEEP_AS_TEXT = True 7 | 8 | def ImportSFMSession(session_path: Path): 9 | """Update SFM resource references for S2FM.""" 10 | sh.status(f"- Opening {session_path.local}...") 11 | try: 12 | session = dmx.load(session_path) 13 | except Exception: 14 | return print("Error while reading:", session_path.local) 15 | 16 | # Map 17 | for clip in session.find_elements(elemtype='DmeFilmClip'): 18 | clip['mapname'] = clip.get('mapname', '').replace('.bsp', '.vmap') 19 | 20 | # Materials 21 | for overlay in session.find_elements(elemtype='DmeMaterialOverlayFXClip'): 22 | overlay['material'] = overlay.get('material', '').replace('.vmt', '.vmat') 23 | 24 | # Models 25 | for game_model in session.find_elements(elemtype='DmeGameModel'): 26 | game_model['modelName'] = game_model.get('modelName', '').replace('.mdl', '.vmdl') 27 | 28 | # Particles 29 | for game_particle in session.find_elements(elemtype='DmeGameParticleSystem'): 30 | game_particle['particleSystemType'] = sh.RemapTable.get('vpcf', {}).get(game_particle.get('particleSystemType', ''), '') 31 | 32 | # Projected Lights (cookies) 33 | for projected_light in session.find_elements(elemtype='DmeProjectedLight'): 34 | projected_light['texture'] = projected_light.get('texture', '').replace('.vtf', '.vtex') 35 | 36 | # Sounds 37 | for game_sound in session.find_elements(elemtype='DmeGameSound'): 38 | game_sound.name = game_sound.name.replace('\\', '/') 39 | file = Path(game_sound.get('soundname', '')) # 'sounds'/ 40 | if file.name: 41 | game_sound['soundname'] = file.with_suffix('.vsnd').as_posix() 42 | 43 | session_out_path = sh.output(session_path, dest=sh.EXPORT_GAME) 44 | session_out_path.parent.MakeDir() 45 | # text cuz binary won't work right now. 46 | session.write(session_out_path, 'keyvalues2', 4) 47 | print('+ Imported', session_path.local) 48 | return session_out_path 49 | 50 | def main(): 51 | print('Source 2 Filmmaker Session Importer!') 52 | sh.importing = 'elements' 53 | sh.import_context['dest'] = sh.EXPORT_GAME 54 | for session in sh.collect('elements', '.dmx', '.dmx', SHOULD_OVERWRITE, searchPath=sh.src('elements/sessions')): 55 | ImportSFMSession(session) 56 | 57 | if False and not KEEP_AS_TEXT: 58 | # convert to binary through dmxconvert.exe 59 | # won't accept args somehow 60 | sh.eEngineUtils.dmxconvert([ 61 | f'-i {imported.parent}\demo.dmx"', 62 | '-ie', "keyvalues2", 63 | f'-o {imported.parent}\demo.dmx', 64 | "-oe", "binary", 65 | "-of", "sfm_session", 66 | ]) 67 | 68 | print("Looks like we are done!") 69 | 70 | if __name__ == "__main__": 71 | sh.parse_argv() 72 | main() 73 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # source1import 2 | Set of scripts for importing Source 1 assets such as materials, models, and particle effects into Source 2. Inspired by Valve's own import utility also named source1import. 3 | 4 | The main difference is this one is open source so you can customize it (i.e. use different shader sets). 5 | 6 | Based off of [source2utils](https://github.com/AlpyneDreams/source2utils). 7 | 8 | > [!WARNING] 9 | > This tool has a number of disadvantages over the built-in [CS2 Import Scripts](https://github.com/andreaskeller96/cs2-import-scripts). Including: 10 | > * No PBR material conversion. So your textures will look dark and flat. 11 | > * No map converter. 12 | > * Cannot filter assets by map. 13 | 14 | > [!Note] 15 | > However there may be some features you might find useful such as: 16 | > * Support for texture ANIMATION 17 | > * Support for SKYBOX materials 18 | > * Support for material proxies (quite basic, but [this one](https://www.youtube.com/watch?v=g7xpRSqHV5g) for example works) 19 | 20 | ## Usage 21 | #### Download from here: [Releases](https://github.com/kristiker/source1import/releases) 22 | #### Note: 23 | * Make sure to move the entire s1 `models` folder to `content/` **before importing**. 24 | * Make sure to move the entire s1 `sound` folder to `content/` and rename it to `sounds`. No import necessary. 25 | * Make sure to have `gameinfo.txt` present in Import Game. 26 | * Make sure to read [this guide](https://developer.valvesoftware.com/wiki/Half-Life:_Alyx_Workshop_Tools/Importing_Source_1_Maps) for importing map files. 27 | * Materials won't be PBR ready. so you need to use some other post-conversion tool. 28 | 29 | ## Advanced Usage: 30 | ### CLI: 31 | ```bash 32 | cd utils 33 | python scripts_import.py -i "C:/.../Team Fortress 2/tf" -e "D:/Games/steamapps/common/sbox/addons/tf_source2" -b sbox 34 | python particles_import.py -i "C:/.../Portal 2/portal2" -e "C:/.../Half-Life Alyx/game/hlvr_addons/portal2" 35 | python scenes_import.py -i "C:/.../Half-Life Alyx/game/lostcoast" -e hlvr_addons/lostcoast 36 | python models_import.py -i "C:/.../Half-Life Alyx/game/l4d2" -e l4d2_source2 37 | python materials_import.py -i "C:/.../Half-Life Alyx/game/ep2" -e hlvr "materials/skybox" 38 | ``` 39 | * **-i** *\* This should be an absolute path pointing into a source1 game directory containing gameinfo.txt 40 | * **-e** *\* Path to source2 mod/addon folder. \<*modname*\> (short notation also allowed e.g. `-e portal2_imported`, provided the game folders sit next to eachother) 41 | * **-b** *\* Switch to a different branch. Default is `hlvr`. Other branches include `steamvr` `adj` `sbox` `cs2` `dota2`, ordered by magnitude of support. 42 | * **[filter]** Optionally a path at the end can be added as a filter. 43 | ### Requirements (dev): 44 | * [Python](https://www.python.org/downloads/) >= 3.10 45 | * `pip install -r requirements.txt` 46 | ## Results 47 | ### [CS:GO Taser - Streamable](https://streamable.com/eders9) 48 | 49 | 50 | * maps converted via built-in hammer 5 funcionality 51 | -------------------------------------------------------------------------------- /utils/dev/.todo.txt: -------------------------------------------------------------------------------- 1 | 2 | D:\Games\steamapps\common\Half-Life Alyx\game\bin\win64>resourcecompiler.exe -game hlvr -r -i "D:\Games\steamapps\common\Half-Life Alyx\content\csgo_imported\materials\*" 3 | ---- 4 | pano - import svg to vsvg, vtf to vtex, etc 5 | ---- 6 | modeldoc import from old vmdl? cs_mdl_import? 7 | ---- 8 | vr_projected_decals is close to generic with glossiness and stuff, also sheets 9 | 10 | g_flGlossiness "0.000" is inverse of roughness (=255) 11 | 12 | this looks like a s1 imported mat 13 | its used as a default *material* (s2 particle renderers use compiled textures usually) 14 | for render_blob particle renderer (portal2) 15 | D:\Games\steamapps\common\Half-Life Alyx\content\core\materials\dev\roomwalls_white.vmat 16 | 17 | inverted roughness 18 | materials/models/props_lab/monitor02.vmat 19 | 20 | [ W VmtToVmat ]: GetMappingDimensionsForVMT: can't open "materials/WALL_PAPER/HR_WP/HR_WALL_PAPER_D.vmt" 21 | 22 | --- 23 | 24 | sine 25 | { 26 | resultvar "$color[0]" 27 | sineperiod "0.2" 28 | sinemin "0.99" 29 | sinemax "1" 30 | } 31 | sine 32 | { 33 | resultvar "$color[1]" 34 | sineperiod "0.1" 35 | sinemin "0.98" 36 | sinemax "1" 37 | } 38 | sine 39 | { 40 | resultvar "$color[2]" 41 | sineperiod "0.1" 42 | sinemin "0.99" 43 | sinemax "1" 44 | } 45 | texturescroll 46 | { 47 | texturescrollvar "$texture2transform" 48 | texturescrollrate "1" 49 | texturescrollangle "-90" 50 | } 51 | ----- 52 | animatedtexture 53 | { 54 | animatedtexturevar "$normalmap" 55 | animatedtextureframenumvar "$bumpframe" 56 | animatedtextureframerate "30.0" 57 | } 58 | texturescroll 59 | { 60 | texturescrollvar "$bumptransform" 61 | texturescrollrate "0.05" 62 | texturescrollangle "45.0" 63 | } 64 | waterlod 65 | { 66 | } 67 | 68 | ----- 69 | music 70 | 71 | linearramp 72 | { 73 | rate "0.08" 74 | initialvalue "0" 75 | resultvar "$x1" 76 | } 77 | frac 78 | { 79 | srcvar1 "$x1" 80 | resultvar "$x2" 81 | } 82 | lessorequal 83 | { 84 | srcvar1 "$x2" 85 | srcvar2 "$offset2" 86 | lessequalvar "$offset1" 87 | greatervar "$offset2" 88 | resultvar "$offset_temp" 89 | } 90 | lessorequal 91 | { 92 | srcvar1 "$x2" 93 | srcvar2 "$offset3" 94 | lessequalvar "$offset_temp" 95 | greatervar "$offset3" 96 | resultvar "$offset" 97 | } 98 | multiply 99 | { 100 | srcvar1 "$x2" 101 | srcvar2 "$div" 102 | resultvar "$x1" 103 | } 104 | frac 105 | { 106 | srcvar1 "$x1" 107 | resultvar "$x3" 108 | } 109 | subtract 110 | { 111 | srcvar1 "$x3" 112 | srcvar2 "$half" 113 | resultvar "$x2" 114 | } 115 | lessorequal 116 | { 117 | srcvar1 "$x3" 118 | srcvar2 "$half" 119 | lessequalvar "$zero" 120 | greatervar "$x2" 121 | resultvar "$x1" 122 | } 123 | multiply 124 | { 125 | srcvar1 "$x1" 126 | srcvar2 "$rescale" 127 | resultvar "$x2" 128 | } 129 | add 130 | { 131 | srcvar1 "$x2" 132 | srcvar2 "$offset" 133 | resultvar "$scrollpos[0]" 134 | } 135 | texturetransform 136 | { 137 | translatevar "$scrollpos" 138 | resultvar "$basetexturetransform" 139 | } -------------------------------------------------------------------------------- /utils/shared/keyvalues3.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from dataclasses import dataclass 3 | from uuid import UUID 4 | 5 | @dataclass(frozen=True) 6 | class KV3Header: 7 | encoding: str = 'text' 8 | encoding_ver: str = 'e21c7f3c-8a33-41c5-9977-a76d3a32aa0d' 9 | format: str = 'generic' 10 | format_ver: str = '7412167c-06e9-4698-aff2-e63eb59037e7' 11 | _common = '' 12 | def __str__(self): 13 | return self._common % (self.encoding, self.encoding_ver, self.format, self.format_ver) 14 | 15 | @dataclass(frozen=True) 16 | class resource: 17 | path: Path 18 | def __str__(self): 19 | return f'resource:"{self.path.as_posix().lower()}"' 20 | 21 | class KV3File(dict): 22 | def __init__(self, *args, **kwargs): 23 | super().__init__(*args, **kwargs) 24 | self.header = KV3Header(format="source1imported") 25 | 26 | def __str__(self): 27 | kv3 = str(self.header) + '\n' 28 | 29 | def obj_serialize(obj, indent = 1, dictKey = False): 30 | preind = ('\t' * (indent-1)) 31 | ind = ('\t' * indent) 32 | if obj is None: 33 | return 'null' 34 | elif isinstance(obj, bool): 35 | if obj: return 'true' 36 | return 'false' 37 | elif isinstance(obj, str): 38 | return '"' + obj + '"' 39 | elif isinstance(obj, list): 40 | s = '[' 41 | if any(isinstance(item, dict) for item in obj): # TODO: only non numbers 42 | s = f'\n{preind}[\n' 43 | for item in obj: 44 | s += (obj_serialize(item, indent+1) + ',\n') 45 | return s + preind + ']\n' 46 | 47 | return f'[{", ".join((obj_serialize(item, indent+1) for item in obj))}]' 48 | elif isinstance(obj, dict): 49 | s = preind + '{\n' 50 | if dictKey: 51 | s = '\n' + s 52 | for key, value in obj.items(): 53 | #if value == [] or value == "" or value == {}: continue 54 | if not isinstance(key, str): 55 | key = f'"{key}"' 56 | s += ind + f"{key} = {obj_serialize(value, indent+1, dictKey=True)}\n" 57 | return s + preind + '}' 58 | else: # int, float, resource 59 | # round off inaccurate dmx floats 60 | if type(obj) == float: 61 | obj = round(obj, 6) 62 | return str(obj) 63 | 64 | kv3 += obj_serialize(self) 65 | 66 | return kv3 67 | 68 | def ToString(self): 69 | return self.__str__() 70 | 71 | if __name__ == '__main__': 72 | import unittest 73 | class Test_KV3(unittest.TestCase): 74 | default_header = '' 75 | default_s1import_context_header = '' 76 | def test_default_header(self): 77 | self.assertEqual(str(KV3Header()), self.default_header) 78 | 79 | def test_custom_header(self): 80 | header = KV3Header('text2', '123-123', 'generic2', '234-234') 81 | headertext = '' 82 | self.assertEqual(str(header), headertext) 83 | 84 | def test_kv(self): 85 | expect_text = f'{self.default_s1import_context_header}\n{{'+\ 86 | '\n\ta = "asd asd"'+\ 87 | '\n\tb = \n\t{\n\t\t"(2,)" = 3\n\t}'+\ 88 | '\n\tc = ["listed_text1", "listed_text2"]\n}' 89 | self.assertEqual( 90 | KV3File( 91 | a='asd asd', 92 | b={(2,):3}, 93 | c=["listed_text1", "listed_text2"] 94 | ).ToString(), 95 | expect_text 96 | ) 97 | 98 | unittest.main() 99 | -------------------------------------------------------------------------------- /utils/shared/empty.vmap.txt: -------------------------------------------------------------------------------- 1 | 2 | "$prefix_element$" 3 | { 4 | "id" "elementid" "d59a52e6-f2d9-4969-9bcb-e5d5818c663d" 5 | "map_asset_references" "string_array" 6 | [ 7 | ] 8 | } 9 | "CMapRootElement" 10 | { 11 | "id" "elementid" "437e6ff1-6b5d-444e-b1f1-6b10c4604d51" 12 | "isprefab" "bool" "0" 13 | "editorbuild" "int" "8632" 14 | "editorversion" "int" "400" 15 | "showgrid" "bool" "1" 16 | "snaprotationangle" "int" "15" 17 | "gridspacing" "float" "64" 18 | "show3dgrid" "bool" "1" 19 | "itemFile" "string" "" 20 | "defaultcamera" "CStoredCamera" 21 | { 22 | "id" "elementid" "2b61e2f5-a822-4b25-9176-2184e004146d" 23 | "position" "vector3" "0 -1000 1000" 24 | "lookat" "vector3" "0 0 0" 25 | } 26 | 27 | "3dcameras" "CStoredCameras" 28 | { 29 | "id" "elementid" "8d58c8cc-37d5-45e9-b4b9-80c527b6e84a" 30 | "activecamera" "int" "-1" 31 | "cameras" "element_array" 32 | [ 33 | ] 34 | } 35 | 36 | "world" "CMapWorld" 37 | { 38 | "id" "elementid" "8e6560fc-9cd1-42d2-a88e-3357bbf54850" 39 | "origin" "vector3" "0 0 0" 40 | "angles" "qangle" "0 0 0" 41 | "scales" "vector3" "1 1 1" 42 | "nodeID" "int" "1" 43 | "referenceID" "uint64" "0x0" 44 | "children" "element_array" 45 | [ 46 | ] 47 | "editorOnly" "bool" "0" 48 | "force_hidden" "bool" "0" 49 | "transformLocked" "bool" "0" 50 | "variableTargetKeys" "string_array" 51 | [ 52 | ] 53 | "variableNames" "string_array" 54 | [ 55 | ] 56 | "relayPlugData" "DmePlugList" 57 | { 58 | "id" "elementid" "85cca643-bd60-4d96-a41e-ce092125ae2e" 59 | "names" "string_array" 60 | [ 61 | ] 62 | "dataTypes" "int_array" 63 | [ 64 | ] 65 | "plugTypes" "int_array" 66 | [ 67 | ] 68 | "descriptions" "string_array" 69 | [ 70 | ] 71 | } 72 | 73 | "connectionsData" "element_array" 74 | [ 75 | ] 76 | "entity_properties" "EditGameClassProps" 77 | { 78 | "id" "elementid" "cd727b19-eca4-4a87-a349-5d229dbcfc10" 79 | "classname" "string" "worldspawn" 80 | "targetname" "string" "" 81 | "skyname" "string" "sky_day01_01" 82 | "startdark" "string" "0" 83 | "startcolor" "string" "0 0 0" 84 | "pvstype" "string" "10" 85 | "newunit" "string" "0" 86 | "maxpropscreenwidth" "string" "-1" 87 | "minpropscreenwidth" "string" "0" 88 | "vrchaperone" "string" "0" 89 | "vrmovement" "string" "0" 90 | "baked_light_index_min" "string" "0" 91 | "baked_light_index_max" "string" "256" 92 | "max_lightmap_resolution" "string" "0" 93 | "lightmap_queries" "string" "1" 94 | } 95 | 96 | "nextDecalID" "int" "0" 97 | "fixupEntityNames" "bool" "1" 98 | "mapUsageType" "string" "standard" 99 | } 100 | 101 | "visbility" "CVisibilityMgr" 102 | { 103 | "id" "elementid" "16dbc185-b988-45e4-990c-837aece0797a" 104 | "origin" "vector3" "0 0 0" 105 | "angles" "qangle" "0 0 0" 106 | "scales" "vector3" "1 1 1" 107 | "nodeID" "int" "0" 108 | "referenceID" "uint64" "0x0" 109 | "children" "element_array" 110 | [ 111 | ] 112 | "editorOnly" "bool" "0" 113 | "force_hidden" "bool" "0" 114 | "transformLocked" "bool" "0" 115 | "variableTargetKeys" "string_array" 116 | [ 117 | ] 118 | "variableNames" "string_array" 119 | [ 120 | ] 121 | "nodes" "element_array" 122 | [ 123 | ] 124 | "hiddenFlags" "int_array" 125 | [ 126 | ] 127 | } 128 | 129 | "mapVariables" "CMapVariableSet" 130 | { 131 | "id" "elementid" "59c2bb8b-f626-4558-a1cf-f9611338a09c" 132 | "variableNames" "string_array" 133 | [ 134 | ] 135 | "variableValues" "string_array" 136 | [ 137 | ] 138 | "variableTypeNames" "string_array" 139 | [ 140 | ] 141 | "variableTypeParameters" "string_array" 142 | [ 143 | ] 144 | "m_ChoiceGroups" "element_array" 145 | [ 146 | ] 147 | } 148 | 149 | "rootSelectionSet" "CMapSelectionSet" 150 | { 151 | "id" "elementid" "429d4f46-d5f3-4187-9eeb-43011bab2d25" 152 | "children" "element_array" 153 | [ 154 | ] 155 | "selectionSetName" "string" "" 156 | "selectionSetData" "element" "" 157 | } 158 | 159 | "m_ReferencedMeshSnapshots" "element_array" 160 | [ 161 | ] 162 | "m_bIsCordoning" "bool" "0" 163 | "m_bCordonsVisible" "bool" "0" 164 | "nodeInstanceData" "element_array" 165 | [ 166 | ] 167 | } 168 | -------------------------------------------------------------------------------- /utils/shared/cstr.py: -------------------------------------------------------------------------------- 1 | #cstr.py 2 | 3 | from functools import lru_cache 4 | import re 5 | __version__ = '2019.06.06' 6 | 7 | re_base_zero = re.compile(r"(?i)\s*[\+\-]?0(x)?") 8 | 9 | class strtod: 10 | def __init__(self, s, pos: int = 0) -> None: 11 | m = re.match(r'[+-]?\d*[.]?\d*(?:[eE][+-]?\d+)?', s[pos:]) 12 | #if m.group(0) == '': 13 | # raise ValueError('bad float: %s' % s[pos:]) 14 | #self.value = float(m.group(0)) 15 | self.string = s 16 | self.pos = pos 17 | if m: 18 | try: 19 | self.value = float(m.group(0)) 20 | except ValueError: 21 | self.value = None 22 | self.endpos = pos + m.end() 23 | else: 24 | raise ValueError('Cannot convert to float') 25 | 26 | 27 | 28 | @lru_cache(10) 29 | def strtol_re(base): 30 | if not (0 <= base <= 36): 31 | raise ValueError('Expected base between 0 and 36.') 32 | a = r"(?i)\s*(?:[\+\-]?)" 33 | if base == 16: 34 | r = a + r"(?:0x)?[\u0030-\u0039\u0041-\u0046]*" 35 | elif base > 10: 36 | r = (a + "[\u0030-\u0039\u0041-{}]*".format( 37 | chr(ord('A') + base - 10))) 38 | else: 39 | r = (a + "[\u0030-{}]*".format(chr(ord('0') + base - 1))) 40 | return re.compile(r) 41 | 42 | class strtol: 43 | """Small object to store result of conversion of string to int 44 | 45 | Arguments: 46 | s : string to read 47 | base=10 : integer base between 0 and 36 inclusive. 48 | pos=0 : position where to read in the string. 49 | 50 | Strtol members: 51 | value : an integer value parsed in the string. 52 | string : the string that was read. 53 | pos : the position where the integer was parsed. 54 | endpos : the position in the string after the integer. 55 | 56 | Errors: 57 | If no valid conversion could be performed, ValueError is raised. 58 | 59 | see also: 60 | the linux manual of strtol 61 | """ 62 | __slots__ = ('value', 'string', 'pos', 'endpos') 63 | 64 | def __init__(self, s, base=10, pos=0): 65 | self.string = s 66 | self.pos = pos 67 | 68 | if base == 0: 69 | m = re_base_zero.match(s, pos=pos) 70 | if m: 71 | base = 16 if m.group(1) else 8 72 | else: 73 | base = 10 74 | r = strtol_re(base) 75 | #print(r) 76 | #print(s[pos:]) 77 | m = r.match(s, pos=pos) 78 | if m: 79 | try: 80 | self.value = int(m.group(0), base) 81 | except ValueError: 82 | self.value = None 83 | self.endpos = m.end() 84 | else: 85 | raise ValueError('Cannot convert to int') 86 | 87 | if __name__ == '__main__': 88 | import unittest 89 | 90 | class Test_strtod(unittest.TestCase): 91 | def test_1(self): 92 | x = "3.1415913123" 93 | s = strtod(x) 94 | self.assertEqual(s.value, 3.1415913123) 95 | def test_main(self): 96 | x = 'lmao3.1515' 97 | s = strtod(x, pos=4) 98 | self.assertEqual(s.value, 3.1515) 99 | self.assertEqual(s.endpos, 10) 100 | self.assertEqual(s.pos, 4) 101 | self.assertIs(s.string, x) 102 | 103 | class Test_strtol(unittest.TestCase): 104 | def test_base_10(self): 105 | x = 'foo-324bar' 106 | s = strtol(x, pos=3) 107 | self.assertEqual(s.value, -324) 108 | self.assertEqual(s.endpos, 7) 109 | self.assertEqual(s.pos, 3) 110 | self.assertIs(s.string, x) 111 | 112 | def test_base_16(self): 113 | x = ' 324Bbar' 114 | s = strtol(x, base=16) 115 | self.assertEqual(s.value, int('324BBA', 16)) 116 | self.assertEqual(x[s.endpos:], 'r') 117 | 118 | def test_base_16_0x(self): 119 | x = ' -0x324Bbar' 120 | s = strtol(x, base=16) 121 | self.assertEqual(s.value, -int('324BBA', 16)) 122 | self.assertEqual(x[s.endpos:], 'r') 123 | 124 | def test_base_0_0x(self): 125 | x = ' -0x324Bbar' 126 | s = strtol(x, base=0) 127 | self.assertEqual(s.value, -int('324BBA', 16)) 128 | self.assertEqual(x[s.endpos:], 'r') 129 | 130 | def test_base_0(self): 131 | x = ' -324Bbar' 132 | s = strtol(x, base=0) 133 | self.assertEqual(s.value, -324) 134 | self.assertEqual(x[s.endpos:], 'Bbar') 135 | 136 | def test_base_0_octal(self): 137 | x = ' -0324Bbar' 138 | s = strtol(x, base=0) 139 | self.assertEqual(s.value, -int("324", 8)) 140 | self.assertEqual(x[s.endpos:], 'Bbar') 141 | 142 | def test_base_20(self): 143 | x = ' -0324BgGar' 144 | s = strtol(x, base=20) 145 | self.assertEqual(s.value, -int("324BgGa", 20)) 146 | self.assertEqual(x[s.endpos:], 'r') 147 | 148 | #def test_empty_string(self): 149 | # x = '' 150 | # self.assertRaises(ValueError, strtol, x) 151 | 152 | unittest.main() -------------------------------------------------------------------------------- /utils/dev/generate_particle_dict.py: -------------------------------------------------------------------------------- 1 | 2 | from particles_import import pcf_to_vpcf, ObjectP, BoolToSetKV, Discontinued 3 | 4 | global_changes = { 5 | 'initializers':{ 6 | 'm_nAxis': 'm_nComponent', 7 | 'm_bScaleInitialRange': BoolToSetKV('m_nSetMethod', "PARTICLE_SET_SCALE_CURRENT_VALUE"), 8 | 'm_bScaleCurrent': BoolToSetKV('m_nSetMethod', "PARTICLE_SET_SCALE_INITIAL_VALUE"), 9 | #'m_bUseHighestEndCP': Discontinued('m_bUseHighestEndCP', 9) 10 | }, 11 | 'operators':{ 12 | 'm_bScaleInitialRange': BoolToSetKV('m_nSetMethod', "PARTICLE_SET_SCALE_CURRENT_VALUE"), 13 | 'm_bScaleCurrent': BoolToSetKV('m_nSetMethod', "PARTICLE_SET_SCALE_INITIAL_VALUE"), 14 | }, 15 | 'emitters':{}, 16 | 'forces': {}, 17 | 'constraints': {}, 18 | } 19 | 20 | def codesplit(s) -> list: 21 | "str.split(',') but doesnt split inside quotes" 22 | parts = [] 23 | 24 | inquote = False 25 | part = '' 26 | for i, char in enumerate(s, 1): 27 | if char == ',' or i == len(s): 28 | if not inquote: 29 | parts.append(part) 30 | part = '' 31 | continue 32 | if char == '"': 33 | inquote = not inquote 34 | part += char 35 | return parts 36 | 37 | my_ops = {} 38 | def parse_operator(line: str): 39 | parts = line.replace('DEFINE_PARTICLE_OPERATOR', '').lstrip(' \t(').rstrip(');').split(',') 40 | if len(parts) == 3: 41 | key = parts[1].strip().strip('"') 42 | val = parts[0].strip() 43 | my_ops[key] = val 44 | subs.setdefault(val, {}) 45 | 46 | subs = {} 47 | def parse_sub(op, line): 48 | if not line: 49 | return 50 | defines = ( 51 | 'DMXELEMENT_UNPACK_FIELD_USERDATA', 52 | 'DMXELEMENT_UNPACK_FIELD_UTLSTRING_USERDATA', 53 | 'DMXELEMENT_UNPACK_FIELD_UTLSTRING', 54 | 'DMXELEMENT_UNPACK_FIELD_STRING_USERDATA', 55 | 'DMXELEMENT_UNPACK_FIELD_STRING', 56 | 'DMXELEMENT_UNPACK_FIELD', 57 | 'DMXELEMENT_UNPACK_FLTX4', 58 | ) 59 | for define in defines: 60 | if not line.startswith(define): 61 | continue 62 | replaced = line.replace(define, '') 63 | if replaced.strip(' \t(') == replaced: 64 | continue 65 | else: 66 | line = replaced.strip(' \t(') 67 | break 68 | 69 | line = line.lstrip() 70 | 71 | b = codesplit(line)#line.split(',') 72 | 73 | key = b[0].strip().strip('"') 74 | if len(b) < 4: 75 | val = b[2].replace(')', '').strip() 76 | else: 77 | val = b[3].replace(')', '').strip() 78 | if '"' in val: 79 | val = b[2].strip() 80 | 81 | if '.' in val: 82 | o, val = tuple(val.split('.', 1)) 83 | val = ObjectP(o, val) 84 | 85 | val = global_changes[main].get(val, val) 86 | if main == 'forces': 87 | if val[-1] == ']' and val[-3] == '[': 88 | val = val[:-3] + val[-2] 89 | 90 | subs.setdefault(op, {})[key] = val 91 | 92 | main = 'emitters' 93 | f = r'cstrike15_src/particles/builtin_particle_emitters.cpp' 94 | main = 'forces' 95 | f = r'cstrike15_src/particles/builtin_particle_forces.cpp' 96 | main = 'constraints' 97 | f = '/cstrike15_src/particles/builtin_constraints.cpp' 98 | with open(f, 'r') as fp: 99 | unpack = '' 100 | unpack_lines = [] 101 | 102 | op_begin = 'BEGIN_PARTICLE_OPERATOR_UNPACK' 103 | op_end = 'END_PARTICLE_OPERATOR_UNPACK' 104 | if main == 'initializers': 105 | op_begin = 'BEGIN_PARTICLE_INITIALIZER_OPERATOR_UNPACK' 106 | #elif main == 'operators': 107 | # op_begin = 'BEGIN_PARTICLE_OPERATOR_UNPACK' 108 | for line in fp.readlines(): 109 | line = line.strip() 110 | if line.startswith('//'): 111 | continue 112 | if line.startswith('DEFINE_PARTICLE_OPERATOR'): 113 | parse_operator(line) 114 | continue 115 | 116 | if line.startswith(op_begin): 117 | unpack = line.replace(op_begin, '').lstrip(' (').rstrip(') ')#.strip() 118 | continue 119 | elif line.startswith(op_end): 120 | unpack = '' 121 | continue 122 | if unpack: 123 | parse_sub(unpack, line) 124 | out = '' 125 | 126 | def get_subs(op): 127 | rv = '' 128 | #print(subs.keys()) 129 | for k, v in subs[op].items(): 130 | v = f"{v!r}" 131 | if not isinstance(v, str): 132 | v = v.strip("'") 133 | 134 | rv += f"{' '*12}{k!r}: {v},\n" 135 | return rv 136 | 137 | for op in my_ops: 138 | if "'" in op: 139 | out += f"{' '*8}"+f'"{op}"' + f": ('{my_ops[op]}', {'{'}\n{get_subs(my_ops[op])}{' '*8}{'}'}),\n" 140 | else: 141 | out += f"{' '*8}'{op}': ('{my_ops[op]}', {'{'}\n{get_subs(my_ops[op])}{' '*8}{'}'}),\n" 142 | 143 | print(out) 144 | 145 | 146 | def verify_ops(): 147 | addthese = '' 148 | ctx = pcf_to_vpcf.get(main) 149 | if isinstance(ctx, tuple): 150 | ctx = ctx[1] 151 | for k, v in my_ops.items(): 152 | if k not in ctx: 153 | addthese += f"{' '*8}'{k}': '{v}',\n" 154 | else: 155 | if ctx[k] != v: 156 | print(f"Mismatch with `{k}`: '{ctx[k]}' != '{v}'") 157 | if addthese: 158 | print() 159 | print(addthese) 160 | print() 161 | 162 | def verify_subs(): 163 | ctx = pcf_to_vpcf.get(main) 164 | if isinstance(ctx, tuple): 165 | ctx = ctx[1] 166 | for op in subs: 167 | addthese = '' 168 | print(f' ~~~ {op}') 169 | if isinstance(ctx.get(op), tuple): 170 | ctx = ctx.get(op)[1] 171 | for k, v in subs[op].items(): 172 | if k not in ctx: # (k in ctx and ctx[k] == '') 173 | addthese += f"{' '*12}'{k}': '{v}',\n" 174 | #else: 175 | #if ctx[k] != v: 176 | # if not isinstance(ctx[k], str): continue 177 | # print(f"Mismatch with `{k}`: '{ctx[k]}' != '{v}'") 178 | if addthese: 179 | #print() 180 | print(addthese) 181 | print() 182 | 183 | #verify_subs() 184 | #verify_ops() -------------------------------------------------------------------------------- /utils/shared/modeldoc.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, field 2 | from typing import Literal, Type 3 | 4 | @dataclass 5 | class _BaseNode: 6 | _class: str = __name__ 7 | note: str = "" 8 | children: list["_Node"] = field(default_factory=list) 9 | 10 | def __post_init__(self): 11 | self._class = self.__class__.__name__.replace("_", " ") 12 | 13 | def add_nodes(self, *nodes: "_BaseNode"): 14 | for node in nodes: 15 | self.children.append(node) 16 | 17 | def with_nodes(self, *nodes: "_BaseNode"): 18 | self.add_nodes(*nodes) 19 | return self 20 | 21 | def find_by_class_bfs(self, cls: Type["_BaseNode"]) -> "_BaseNode": 22 | """Breadth first search node by class.""" 23 | for child in self.children: 24 | if isinstance(child, cls): 25 | return child 26 | for child in self.children: 27 | result = child.find_by_class_bfs(cls) 28 | if result is not None: 29 | return result 30 | 31 | def find_by_name_dfs(self, name: str, depth=-1) -> "_BaseNode": 32 | """Depth first search node by name.""" 33 | for child in self.children: 34 | if child.name == name: 35 | return child 36 | if depth == 0: 37 | break 38 | result = child.find_by_name_dfs(name, depth-1) 39 | if result is not None: 40 | return result 41 | 42 | @dataclass 43 | class _Node(_BaseNode): 44 | "Node with _class, note, name, and children" 45 | name: str = "" 46 | 47 | class resourcepath(str): 48 | "string path to a resource" 49 | class namelink(str): 50 | "string link to another node" 51 | 52 | mdBaseLists: list[Type[_BaseNode]] = [] 53 | def containerof(*node_types): 54 | def inner(cls): 55 | # cls is supposed to contain *node_types 56 | mdBaseLists.append(cls) 57 | cls._childtypes = node_types 58 | return cls 59 | return inner 60 | 61 | class ModelDoc: 62 | 63 | @dataclass 64 | class RootNode(_BaseNode): 65 | model_archetype: str = "" 66 | primary_associated_entity: str = "" 67 | anim_graph_name: str = "" 68 | base_model_name: str = "" 69 | 70 | class Folder(_Node): 71 | pass 72 | 73 | @dataclass 74 | class RenderMeshFile(_Node): 75 | filename: resourcepath = "" 76 | import_translation: list[float] = field(default_factory=lambda: [0, 0, 0]) 77 | import_rotation: list[float] = field(default_factory=lambda: [0, 0, 0]) 78 | import_scale: float = 1.0 79 | align_origin_x_type: str = "None" 80 | align_origin_y_type: str = "None" 81 | align_origin_z_type: str = "None" 82 | parent_bone: namelink = "" 83 | import_filter: dict = field( 84 | default_factory=lambda:dict( 85 | exclude_by_default = False, 86 | exception_list = [ ] 87 | ) 88 | ) 89 | 90 | @dataclass 91 | class AnimFile(_Node): 92 | activity_name: str = "" 93 | activity_weight: int = 1 94 | weight_list_name: str = "" 95 | fade_in_time: float = 0.2 96 | fade_out_time: float = 0.2 97 | looping: bool = False 98 | delta: bool = False 99 | worldSpace: bool = False 100 | hidden: bool = False 101 | anim_markup_ordered: bool = False 102 | disable_compression: bool = False 103 | source_filename: resourcepath = "" 104 | start_frame: int = -1 105 | end_frame: int = -1 106 | framerate: int = -1.0 107 | take: int = 0 108 | reverse: bool = False 109 | anim_hold_list: list[dict[Literal["frame"] | Literal["frame_count"], int]] = field(default_factory=list) 110 | 111 | @dataclass 112 | class AnimEvent(_Node): 113 | event_class: str = "AE_EMPTY" 114 | event_frame: int = 0 115 | event_keys: dict = field(default_factory=dict) 116 | "custom keyvalues differ between event classes" 117 | 118 | @dataclass 119 | class PhysicsHullFile(_Node): 120 | parent_bone: str = "" 121 | surface_prop: str = "default" 122 | collision_tags: str = "solid" 123 | recenter_on_parent_bone: bool = False 124 | offset_origin = [ 0.0, 0.0, 0.0 ] 125 | offset_angles = [ 0.0, 0.0, 0.0 ] 126 | align_origin_x_type: str = "None" 127 | align_origin_y_type: str = "None" 128 | align_origin_z_type: str = "None" 129 | filename: resourcepath = "" 130 | import_scale: float = 1.0 131 | faceMergeAngle: float = 10.0 132 | maxHullVertices: int = 0 133 | import_mode: str = "SingleHull" 134 | optimization_algorithm: str = "QEM" 135 | import_filter: dict = field( 136 | default_factory=lambda:dict( 137 | exclude_by_default = False, 138 | exception_list = [ ] 139 | ) 140 | ) 141 | 142 | @dataclass 143 | class BodyGroupChoice(_Node): 144 | meshes: list[namelink] = field(default_factory=list) # list of names of meshes 145 | 146 | @dataclass 147 | class LODGroup(_Node): 148 | switch_threshold: float = 0.0 149 | meshes: list[namelink] = field(default_factory=list) # list of names of meshes 150 | 151 | @dataclass 152 | class Attachment(_Node): 153 | parent_bone: namelink = "" 154 | relative_origin: list[float] = field(default_factory=lambda: [0, 0, 0]) 155 | relative_angles: list[float] = field(default_factory=lambda: [0, 0, 0]) 156 | weight: float = 1.0 157 | ignore_rotation: bool = False 158 | 159 | @dataclass 160 | class GenericGameData(_Node): 161 | game_class: str = "" 162 | game_keys: dict = field(default_factory=dict) 163 | 164 | 165 | @dataclass 166 | class Bone(_Node): 167 | origin: list[float] = field(default_factory=lambda: [0, 0, 0]) 168 | angles: list[float] = field(default_factory=lambda: [0, 0, 0]) 169 | do_not_discard: bool = True 170 | 171 | @dataclass 172 | class WeightList(_Node): 173 | default_weight: float = 0.0 174 | weights: list[dict[Literal["bone"] | Literal["weight"], namelink | float]] = field(default_factory=list) 175 | master_morph_weight: float = -1.0 176 | morph_weights: list["unknown"] = field(default_factory=list) 177 | 178 | @dataclass 179 | class DefaultMaterialGroup(_Node): 180 | remaps: list[dict[Literal["from"] | Literal["to"], resourcepath]] = field(default_factory=list) 181 | use_global_default: bool = False 182 | global_default_material: resourcepath = "" 183 | 184 | @dataclass 185 | class MaterialGroup(_Node): 186 | remaps: list[dict[Literal["from"] | Literal["to"], resourcepath]] = field(default_factory=list) 187 | 188 | @dataclass 189 | class Bounds_Hull(_Node): 190 | """ 191 | If this node is present, set the model hull bounds explicitly. Normally the model hull bounds is derived from the physics hull bounds at model load time. 192 | """ 193 | mins: list[float] = field(default_factory=lambda: [ -1.0, -1.0, 0.0 ]) 194 | maxs: list[float] = field(default_factory=lambda: [ 1.0, 1.0, 1.0 ]) 195 | 196 | @dataclass 197 | class Bounds_View(_Node): 198 | """ 199 | If this node is present, set the model view bounds explicitly. Normally the model view bounds is derived from the render mesh bounds at model load time. 200 | """ 201 | mins: list[float] = field(default_factory=lambda: [ -1.0, -1.0, 0.0 ]) 202 | maxs: list[float] = field(default_factory=lambda: [ 1.0, 1.0, 1.0 ]) 203 | 204 | @dataclass 205 | class Prefab(_Node): 206 | target_file: resourcepath = "" 207 | 208 | @staticmethod 209 | def get_container(node_type: Type[_Node]): 210 | for basecontainer in mdBaseLists: 211 | if node_type in basecontainer._childtypes: 212 | return basecontainer 213 | 214 | #@containerof(BodyGroupChoice) 215 | @dataclass 216 | class BodyGroup(_Node): 217 | name: str = "" 218 | hidden_in_tools: bool = False 219 | 220 | @containerof(BodyGroup) 221 | class BodyGroupList(_BaseNode): pass 222 | 223 | @containerof(LODGroup) 224 | class LODGroupList(_BaseNode): pass 225 | 226 | @containerof(RenderMeshFile) 227 | class RenderMeshList(_BaseNode): pass 228 | 229 | @containerof(AnimFile) 230 | @dataclass 231 | class AnimationList(_BaseNode): 232 | default_root_bone_name: str = "" 233 | 234 | @containerof(Bone) 235 | class Skeleton(_BaseNode): pass 236 | 237 | @containerof(WeightList) 238 | class WeightListList(_BaseNode): pass 239 | 240 | @containerof(DefaultMaterialGroup, MaterialGroup) 241 | class MaterialGroupList(_BaseNode): pass 242 | 243 | @containerof(PhysicsHullFile) 244 | class PhysicsShapeList(_BaseNode): pass 245 | 246 | @containerof(Attachment) 247 | class AttachmentList(_BaseNode): pass 248 | 249 | @containerof(GenericGameData) 250 | class GameDataList(_BaseNode): pass 251 | 252 | @containerof(Bounds_Hull, Bounds_View) 253 | class ModelDataList(_BaseNode): pass 254 | 255 | @containerof(Prefab) 256 | class PrefabList(_BaseNode): pass 257 | -------------------------------------------------------------------------------- /utils/vtf_to_tga.py: -------------------------------------------------------------------------------- 1 | import os, re 2 | import subprocess 3 | import threading, multiprocessing 4 | import shutil 5 | from pathlib import Path 6 | import shared.base_utils2 as sh 7 | 8 | # https://developer.valvesoftware.com/wiki/VTF2TGA 9 | # Runs vtf2tga.exe on every vtf file 10 | # Same thing as `VTFCmd.exe -folder "\materials\*.vtf" -recurse` 11 | 12 | OVERWRITE = False 13 | IGNORE_WORLD_CUBEMAPS = True 14 | 15 | MULTITHREAD = True 16 | 17 | currentDir = Path(__file__).parent #os.getcwd() 18 | 19 | IN_EXT = ".vtf" 20 | VTEX_PARAMS_EXT = ".txt" 21 | 22 | OUT_EXT_LIST = [ 23 | '.tga', # LDR 24 | '.pfm', # HDR 25 | ] 26 | 27 | OUT_NAME_ENDS = [ 28 | "", # default 29 | "up", # CUBEMAP FACE UP (+ dn, lf, rt, ft, bk) 30 | "000", # GIF FRAME 0 (+ 001, 002, 003, ...) 31 | "_z000",# DEPTH SLICE 0 (+ _z001, _z002, _z003, ...) 32 | #"sph", # SPHEREMAP (Redundant) 33 | 34 | #"bk", # CUBEMAP up face 35 | #"dn", # CUBEMAP dn face 36 | #"ft", # CUBEMAP ft face 37 | #"lf", # CUBEMAP lf face 38 | #"rt", # CUBEMAP rt face 39 | #"up", # CUBEMAP up face 40 | ] 41 | 42 | def OutputList(path: Path, with_suffix = False): 43 | """ Return list with all the possible output names 44 | """ 45 | for name in OUT_NAME_ENDS: 46 | outPath = Path(path.parent) / (path.stem + name) 47 | if not with_suffix: yield outPath 48 | for ext in OUT_EXT_LIST: 49 | yield Path(outPath).with_suffix(ext) 50 | 51 | # force skybox vtfs to decompile with csgo's vtf2tga 52 | # csgo branch outputs pfm files 53 | FORCE_SKYBOX_DECOMPILE_CSGO = True 54 | 55 | # Add your vtf2tga.exe here. Accepts full (C:/) and relative paths (./). Priority is top to bottom 56 | PATHS_VTF2TGA = [ 57 | r"./shared/bin/vtf2tga/2013/vtf2tga.exe", 58 | r"./shared/bin/vtf2tga/csgo/vtf2tga.exe", # FORCE_SKYBOX_2ND_VTF2TGA 59 | ] 60 | tags = [] 61 | 62 | erroredFileList = [] 63 | totalFiles = 0 64 | MAX_THREADS = min(multiprocessing.cpu_count() + 2, 15) 65 | 66 | def ImportVTFtoTGA(vtfFile, force_2nd = False): 67 | semaphore.acquire() 68 | global totalFiles, erroredFileList 69 | 70 | for index, vtf2tga_exe in enumerate(PATHS_VTF2TGA): 71 | tag = tags[index] 72 | 73 | command = [vtf2tga_exe, "-i", vtfFile] #, "-o", fs.Output(vtfFile.parent) 74 | result = subprocess.run(command, stdout=subprocess.PIPE, creationflags= subprocess.CREATE_NO_WINDOW | subprocess.DETACHED_PROCESS) # 75 | #print (result.stdout.decode("utf-8")) 76 | 77 | # if we are forcing on index 1, continue (don't break) even if index 0 got bCreated 78 | if (force_2nd and (index != 1)): 79 | continue 80 | 81 | # VTF2TGA reported success... 82 | if result.returncode == 0: 83 | 84 | lock.acquire() 85 | bCreated = False 86 | for outPath in OutputList(vtfFile, True): 87 | if not outPath.is_file(): continue 88 | bCreated = True 89 | totalFiles +=1 90 | outImages: list[Path] = [] 91 | 92 | for ttype in OUT_NAME_ENDS: 93 | if not outPath.stem.endswith(ttype): 94 | continue 95 | 96 | if ttype == "": # default 97 | if (outPath.stem == vtfFile.stem): 98 | outImages.append(outPath) 99 | print(f"[{tag}] Sucessfully created:", outPath.local) 100 | break 101 | 102 | elif ttype == 'up': # cubemap 103 | for face in ('up', 'dn', 'lf', 'rt', 'bk', 'ft'): 104 | nextPath = vtfFile.parent / (vtfFile.stem + face + outPath.suffix) 105 | if nextPath.is_file(): 106 | outImages.append(nextPath) 107 | faces = "[ " + ", ".join([str(path.stem[-2:]) for path in outImages ]) + " ]" 108 | print(f"[{tag}] Sucessfully created: {outPath.local} {faces} cubemap faces") 109 | 110 | else: # frame sequence & depth slice 111 | for i in range(1000): 112 | nextPath = vtfFile.parent / (vtfFile.stem + f"{i:03}" + outPath.suffix) 113 | if nextPath.is_file(): 114 | outImages.append(nextPath) 115 | else: break 116 | 117 | lock.release() 118 | 119 | # shitty workaround to vtf2tga not being able to output properly 120 | for path in outImages: 121 | movePath = sh.output(path) 122 | os.makedirs(movePath.parent, exist_ok=True) #fs.MakeDir(movePath) 123 | if sh.MOCK: 124 | path.unlink() 125 | movePath.open('a').close() 126 | else: 127 | shutil.move(path, movePath) 128 | 129 | if not bCreated: 130 | print(f"[{tag}] uhm...?", vtfFile.local) 131 | lock.release() 132 | 133 | break # Output file created. Onto the next VTF. 134 | 135 | #else: # VTF2TGA reported failure 136 | # print(f"[{tag}] Something went wrong!", result.stdout) 137 | 138 | if not ((len(PATHS_VTF2TGA) > 1) and (index < (len(PATHS_VTF2TGA) - 1))): 139 | erroredFileList.append(vtfFile) 140 | 141 | semaphore.release() 142 | 143 | # https://developer.valvesoftware.com/wiki/Vtex_compile_parameters 144 | def txt_import(txtFile): 145 | transl_table = { 146 | "clamps": "clampu", 147 | "clampt": "clampv", 148 | "clampu": "clampw", 149 | "nocompress": "nocompress", 150 | "nolod": "nolod", 151 | "maxwidth": "maxres", 152 | "maxheight": "maxres", 153 | #"": "picmip0res", 154 | #"maxheight_360": "maxresmobile", 155 | #"maxwidth_360": "maxresmobile", 156 | "nomip": "nomip", 157 | "invertgreen": "legacy_source1_inverted_normal", 158 | #"": "brightness", 159 | #"": "brightness_offset", 160 | } 161 | 162 | # TODO: keyvalues 163 | with open(txtFile, 'r+') as fp: 164 | oldLines = fp.readlines() 165 | if "settings" in oldLines[0]: return 166 | fp.seek(0) 167 | fp.truncate() 168 | fp.write("\"settings\"\n{\n") 169 | for line in oldLines: 170 | key, value = re.split(r'\s', line, maxsplit=1) 171 | key, value = key.strip('"'), value.strip().strip('"') 172 | new_key = transl_table.get(key) 173 | if new_key is None: 174 | fp.write(f"\t// \"{key}\"\t\t\"{value}\"\n") # comment it 175 | else: 176 | fp.write(f"\t\"{new_key}\"\t\t\"{value}\"\n") 177 | fp.write("}") 178 | 179 | 180 | def main(): 181 | print("Decompiling Textures!") 182 | 183 | for i, path in enumerate(PATHS_VTF2TGA): 184 | if path is None: 185 | continue 186 | path = Path(path) 187 | if not path.is_absolute(): 188 | path = currentDir / path 189 | if path.is_file(): 190 | print("+ Using:", path) 191 | PATHS_VTF2TGA [i] = path 192 | # Tag this vtf2tga version with a short name 193 | tags.append( [ part for part in path.parts[::-1] if part not in ("vtf2tga.exe", "bin") ] [0]) 194 | else: 195 | print("~ Invalid vtf2tga path:", path) 196 | PATHS_VTF2TGA [i] = None 197 | 198 | if not any(PATHS_VTF2TGA): 199 | print(f"Cannot continue without a valid vtf2tga.exe. Please open {currentDir.name} and verify your paths.") 200 | quit(-1) 201 | 202 | THREADS: list[threading.Thread] = [] 203 | global semaphore; semaphore = multiprocessing.BoundedSemaphore(value=MAX_THREADS) 204 | global lock; lock = multiprocessing.Lock() 205 | 206 | sh.importing = Path("materials") 207 | 208 | vtfFileList = sh.collect(sh.importing, IN_EXT, OUT_EXT_LIST, existing = OVERWRITE, outNameRule = OutputList) 209 | txtFileList = sh.collect(sh.importing, VTEX_PARAMS_EXT, VTEX_PARAMS_EXT, existing = True) 210 | 211 | for vtfFile in vtfFileList: 212 | if IGNORE_WORLD_CUBEMAPS: 213 | s_vtfFile = str(vtfFile.name) 214 | numbers = sum(c.isdigit() for c in s_vtfFile) 215 | dashes = s_vtfFile.count('_') + s_vtfFile.count('-') 216 | if (numbers > 4) and (dashes >= 2) and (s_vtfFile.startswith('c')): 217 | #if fileName.lower().endswith('.hdr.vtf') or \ 218 | #os.path.exists(fileName.lower().replace('.vtf', '.hdr.vtf')): 219 | continue 220 | 221 | force_2nd = False 222 | if(FORCE_SKYBOX_DECOMPILE_CSGO and (len(PATHS_VTF2TGA) > 1) and ('skybox' in str(vtfFile))): 223 | force_2nd = True 224 | 225 | if MULTITHREAD: 226 | semaphore.acquire() 227 | 228 | thread = threading.Thread(target=ImportVTFtoTGA, args=(vtfFile, force_2nd)) 229 | THREADS.append(thread) 230 | thread.start() 231 | semaphore.release() 232 | else: 233 | ImportVTFtoTGA(vtfFile, force_2nd) 234 | 235 | for unfinished_thread in THREADS: 236 | unfinished_thread.join() # wait for the final threads to finish 237 | 238 | for txtFile in txtFileList: 239 | print(f"TODO: Found vtex compile param file {txtFile}") 240 | #txt_import() 241 | 242 | for unfinished_thread in THREADS: 243 | unfinished_thread.join() # wait for the final threads to finish 244 | 245 | if erroredFileList: 246 | print("\tNo vtf2tga could export the following files:") 247 | 248 | for erroredFile in erroredFileList: 249 | print(erroredFile.local) 250 | 251 | print(f"\tTotal: {len(erroredFileList)} / {len(vtfFileList)} | " + "{:.2f}".format((len(erroredFileList)/len(vtfFileList)) * 100) + f" % Error rate\n") 252 | 253 | print("\n+ Looks like we are done.") 254 | 255 | if __name__ == "__main__": 256 | sh.parse_argv() 257 | main() 258 | -------------------------------------------------------------------------------- /utils/maps_import.py: -------------------------------------------------------------------------------- 1 | 2 | from dataclassy import dataclass, factory 3 | import shared.base_utils2 as sh 4 | import vdf 5 | import bsp_tool 6 | import itertools 7 | from pathlib import Path 8 | import shared.datamodel as dmx 9 | from shared.datamodel import ( 10 | uint64, 11 | Vector3 as vector3, 12 | QAngle as qangle, 13 | Color as color, 14 | _ElementArray as element_array, 15 | _IntArray as int_array, 16 | _StrArray as string_array, 17 | ) 18 | 19 | OVERWRITE_MAPS = False 20 | IMPORT_VMF_ENTITIES = True 21 | IMPORT_BSP_ENTITIES = False 22 | #WRITE_TO_PREFAB = True 23 | 24 | maps = Path("maps") 25 | mapsrc = Path("mapsrc") 26 | 27 | def out_vmap_name(in_vmf: Path): 28 | root = mapsrc if in_vmf.local.is_relative_to(mapsrc) else maps 29 | return sh.EXPORT_CONTENT / maps / "source1imported" / "entities" / in_vmf.local.relative_to(root).with_suffix(".vmap") 30 | 31 | def main(): 32 | 33 | if IMPORT_VMF_ENTITIES: 34 | print("Importing vmf entities!") 35 | for vmf_path in itertools.chain( 36 | sh.collect(mapsrc, ".vmf", ".vmap", OVERWRITE_MAPS, out_vmap_name), 37 | sh.collect(maps, ".vmf", ".vmap", OVERWRITE_MAPS, out_vmap_name) 38 | ): 39 | ImportVMFEntitiesToVMAP(vmf_path) 40 | 41 | if IMPORT_BSP_ENTITIES: 42 | print("Importing bsp entities!") 43 | for bsp_path in sh.collect(maps, ".bsp", ".vmap", OVERWRITE_MAPS, out_vmap_name): 44 | ImportBSPEntitiesToVMAP(bsp_path) 45 | 46 | print("Looks like we are done!") 47 | 48 | def ImportVMFEntitiesToVMAP(vmf_path): 49 | 50 | vmap_path = out_vmap_name(vmf_path) 51 | vmap_path.parent.MakeDir() 52 | 53 | sh.status(f'- Reading {vmf_path.local}') 54 | with open(vmf_path) as fp: 55 | vmf: vdf.VDFDict = vdf.load(fp, mapper=vdf.VDFDict, merge_duplicate_keys=False)#KV.CollectionFromFile(vmf_path, case_sensitive=True) 56 | 57 | out_vmap = convert_vmf_entities(vmf) 58 | out_vmap.write(vmap_path, "keyvalues2", 4) 59 | if sh.MOCK: 60 | dmx.remove_ids(vmap_path) 61 | print("+ Generated", vmap_path.local) 62 | return vmap_path 63 | 64 | def ImportBSPEntitiesToVMAP(bsp_path): 65 | """Reads the bsp entity lump and converts it to a content vmap file.""" 66 | vmap_path = out_vmap_name(bsp_path) 67 | vmap_path.parent.MakeDir() 68 | 69 | sh.status(f'- Reading {bsp_path.local}') 70 | bsp: bsp_tool.ValveBsp = bsp_tool.load_bsp(bsp_path.as_posix()) 71 | fake_vmf = vdf.VDFDict() 72 | for entity in bsp.ENTITIES: 73 | fake_vmf[base_vmf.entity] = entity 74 | 75 | out_vmap = convert_vmf_entities(fake_vmf) 76 | out_vmap.write(vmap_path, "keyvalues2", 4) 77 | print("+ Generated from bsp", vmap_path.local) 78 | return vmap_path 79 | 80 | def convert_vmf_entities(vmf: vdf.VDFDict) -> dmx.DataModel: 81 | out_vmap = create_fresh_vmap() 82 | vmap = out_vmap.root 83 | 84 | for key, value in vmf.items(): 85 | # dismiss excess base keys (exlcuding entity) 86 | # e.g. multiple worlds, TODO: merge_multiple_worlds() use latest properties 87 | if len(vmf.get_all_for(key)) > 1 or key in (base_vmf.world, base_vmf.entity): 88 | continue 89 | # some fixups 90 | main_to_root(vmap, key, value) 91 | 92 | for vmfEntityKeyValues in vmf.get_all_for("entity"): 93 | translated_entity = CMapWorld.CMapEntity.FromVMFEntity(vmfEntityKeyValues) 94 | vmap["world"]["children"].append( 95 | translated_entity.get_element(vmap) 96 | ) 97 | 98 | return out_vmap 99 | 100 | from enum import Enum 101 | class base_vmf(str, Enum): 102 | versioninfo = "versioninfo" 103 | visgroups = "visgroups" 104 | viewsettings = "viewsettings" 105 | world = "world" 106 | entity = "entity" 107 | hidden = "hidden" 108 | cameras = "cameras" 109 | cordon = "cordon" 110 | cordons = "cordons" 111 | 112 | def create_fresh_vmap() -> dmx.DataModel: 113 | boilerplate = dmx.load(Path(__file__).parent / "shared/empty.vmap.txt") 114 | boilerplate.prefix_attributes.type = "$prefix_element$" 115 | return boilerplate 116 | 117 | @dataclass 118 | class _CustomElement: 119 | name: str = "" 120 | def get_element(self, dm: dmx.DataModel) -> dmx.Element: 121 | "py object 2 datamodel element" 122 | el = dmx.Element(dm, self.name, self.__class__.__name__) 123 | for k, v in self.__dict__.items(): 124 | if k == "name": 125 | continue 126 | if hasattr(v, "get_element"): 127 | v = v.get_element(dm) 128 | elif isinstance(v, list): 129 | for i, item in enumerate(v): 130 | if hasattr(item, "get_element"): 131 | v[i] = item.get_element(dm) 132 | el[k] = v 133 | return el 134 | 135 | class _BaseNode(_CustomElement): 136 | origin: vector3 = factory(lambda:vector3([0,0,0])) 137 | angles: qangle = factory(lambda:qangle([0,0,0])) 138 | scales: vector3 = factory(lambda:vector3([1,1,1])) 139 | nodeID: int = 0 # Increase with every instance 140 | referenceID: uint64 = uint64(0x0) # idk 141 | children: element_array = factory(element_array) 142 | editorOnly: bool = False 143 | force_hidden: bool = False 144 | transformLocked: bool = False 145 | variableTargetKeys: string_array = factory(string_array) 146 | variableNames: string_array = factory(string_array) 147 | 148 | def Value_to_Value2(self, k, v): 149 | "generic KV1 str value to typed KV2 value" 150 | 151 | if k not in self.__annotations__: 152 | return v 153 | _type = self.__annotations__[k] 154 | 155 | if issubclass(_type, list): 156 | if issubclass(_type, dmx._Array): 157 | return dmx.make_array(v.split(), _type) 158 | else: 159 | return _type(v.split()) 160 | 161 | return _type(v) 162 | 163 | @classmethod 164 | def FromKeyValues(cls, KV: vdf.VDFDict): # keyvalues of who's? 165 | t = cls() 166 | baseDict = {} 167 | editorDict = {} 168 | for k, v in KV.items(): 169 | if k == "id": 170 | t.nodeID = t.Value_to_Value2("nodeID", v) 171 | elif k == "name": 172 | t.name = v 173 | elif k in cls.__annotations__: 174 | #print("caling v_v2") 175 | baseDict[k] = t.Value_to_Value2(k, v) 176 | elif not isinstance(v, dict): 177 | if isinstance(v, list): 178 | v = v[0] # bsp_tool duplicate keys 179 | editorDict[k] = v 180 | #else: 181 | # print("Unknown editor object", k, type(v)) 182 | t.__dict__.update(baseDict) 183 | if isinstance(t, _BaseEnt): 184 | t.entity_properties.__dict__.update(editorDict) 185 | return t 186 | 187 | class _BaseEnt(_BaseNode):#(_T): 188 | class DmePlugList(_CustomElement): 189 | names: string_array = factory(string_array) 190 | dataTypes: int_array = factory(int_array) 191 | plugTypes: int_array = factory(int_array) 192 | descriptions: string_array = factory(string_array) 193 | 194 | class EditGameClassProps(_CustomElement): 195 | # Everything is a string 196 | def __init__(self, **kv: str): 197 | self.__dict__.update(**kv) 198 | 199 | relayPlugData: DmePlugList = factory(DmePlugList) 200 | connectionsData: element_array = factory(element_array) 201 | entity_properties: EditGameClassProps = factory(EditGameClassProps) 202 | 203 | 204 | class CMapWorld(_BaseEnt): 205 | 206 | class CMapGroup(_BaseNode): 207 | pass 208 | 209 | class CMapEntity(_BaseEnt): 210 | hitNormal: vector3 = factory(lambda:vector3([0,0,1])) 211 | isProceduralEntity: bool = False 212 | 213 | @classmethod 214 | def FromVMFEntity(cls, KV: vdf.VDFDict): 215 | classname = KV.get("classname") 216 | editorDict = {} 217 | if KV.get("editor") is not None: 218 | editorDict.update(KV.pop("editor")) 219 | 220 | # Add your translations here 221 | # TODO: not here 222 | 223 | if classname in ("info_player_terrorist", "info_player_counterterrorist"): 224 | KV["classname"] = "info_player_spawn" 225 | 226 | if classname == "prop_static": 227 | # color and alpha are merged into a vec4 228 | if "color" in KV: 229 | KV["rendercolor"] = f'{KV.pop("color")} {KV.pop("renderamt")}' 230 | if "rendercolor" in KV: 231 | KV["rendercolor"] = f'{KV.pop("rendercolor")} {KV.pop("renderamt")}' 232 | 233 | # from ('uniformscale' '1') to ('scales', '1 1 1') 234 | if "uniformscale" in KV: 235 | KV["scales"] = " ".join([KV.pop("uniformscale")]*3) 236 | 237 | if "model" in KV: 238 | KV["model"] = Path(KV["model"]).with_suffix(".vmdl").as_posix() 239 | 240 | if classname == "etc": 241 | ... 242 | 243 | rv = super(cls, cls).FromKeyValues(KV) 244 | rv.entity_properties.__dict__.update(editorDict) 245 | return rv 246 | 247 | nextDecalID: int = 0 248 | fixupEntityNames: bool = True 249 | mapUsageType: str = "standard" 250 | 251 | @classmethod 252 | def FromVMFWorld(cls, worldKV: vdf.VDFDict): 253 | for (i, key), value in worldKV.items(indexed_keys=True): 254 | if key in ("solid", "group", "hidden"): 255 | continue 256 | base = super().FromKeyValues(worldKV) # Base worldspawn entity properties. 257 | world = cls(**base.__dict__) 258 | return world 259 | 260 | RootDict = { 261 | "versioninfo":{ 262 | "prefab": ("isprefab", bool), 263 | }, 264 | "visgroups":{}, 265 | "viewsettings":{ 266 | "bShowGrid": ("showgrid", bool), 267 | "nGridSpacing": ("gridspacing", float), 268 | "bShow3DGrid": ("show3dgrid", bool), 269 | }, 270 | "world":{},"entity":{},"hidden":{},"cameras":{},"cordon":{},"cordons":{}, 271 | } 272 | def main_to_root(vmap, main_key: str, sub): 273 | for t in RootDict[main_key]: 274 | if t in sub: 275 | replacement, _type = RootDict[main_key][t] 276 | vmap[replacement] = _type(sub[t]) 277 | 278 | if __name__ == '__main__': 279 | sh.parse_argv() 280 | main() 281 | -------------------------------------------------------------------------------- /utils/shared/keyvalues1.py: -------------------------------------------------------------------------------- 1 | # keyvalues1.py 2 | 3 | # cppkeyvalues is nice for parsing, but we need an actual python object to work efficiently 4 | 5 | from collections import Counter 6 | from typing import Any, Optional, Union 7 | from pathlib import Path 8 | try: 9 | from cppkeyvalues import KeyValues, CUtlBuffer, CKeyValuesTokenReader 10 | except ImportError: 11 | from shared.cppkeyvalues import KeyValues, CUtlBuffer, CKeyValuesTokenReader 12 | 13 | 14 | class VDFDict(dict): 15 | """ 16 | Slightly modified VDFDict https://github.com/ValvePython/vdf 17 | 18 | Copyright (c) 2015 Rossen Georgiev 19 | """ 20 | def __init__(self, data=None): 21 | """ 22 | This is a dictionary that supports duplicate keys and preserves insert order 23 | 24 | ``data`` can be a ``dict``, or a sequence of key-value tuples. (e.g. ``[('key', 'value'),..]``) 25 | The only supported type for key is str. 26 | 27 | Get/set duplicates is done by tuples ``(index, key)``, where index is the duplicate index 28 | for the specified key. (e.g. ``(0, 'key')``, ``(1, 'key')``...) 29 | 30 | When the ``key`` is ``str``, instead of tuple, set will create a duplicate and get will look up ``(0, key)`` 31 | """ 32 | self.__omap = [] 33 | self.__kcount = Counter() 34 | 35 | if data is not None: 36 | if not isinstance(data, (list, dict)): 37 | raise ValueError("Expected data to be list of pairs or dict, got %s" % type(data)) 38 | self.update(data) 39 | 40 | def __repr__(self): 41 | return f"{self.__class__.__name__}({repr(list(self.iteritems()))})" 42 | 43 | def __len__(self): 44 | return len(self.__omap) 45 | 46 | def _verify_key_tuple(self, key): 47 | if len(key) != 2: 48 | raise ValueError("Expected key tuple length to be 2, got %d" % len(key)) 49 | if not isinstance(key[0], int): 50 | raise TypeError("Key index should be an int") 51 | if not isinstance(key[1], str): 52 | raise TypeError("Key value should be a str") 53 | 54 | def _normalize_key(self, key): 55 | if isinstance(key, str): 56 | key = (0, key) 57 | elif isinstance(key, tuple): 58 | self._verify_key_tuple(key) 59 | else: 60 | raise TypeError("Expected key to be a str or tuple, got %s" % type(key)) 61 | return key 62 | 63 | def __setitem__(self, k, v) -> None: 64 | if k in self: 65 | return super().__setitem__(self._normalize_key(k), v) 66 | self.add(k, v) 67 | 68 | def add(self, key, value): 69 | if isinstance(key, str): 70 | key = (self.__kcount[key], key) 71 | self.__omap.append(key) 72 | elif isinstance(key, tuple): 73 | self._verify_key_tuple(key) 74 | if key not in self: 75 | raise KeyError("%s doesn't exist" % repr(key)) 76 | else: 77 | raise TypeError("Expected either a str or tuple for key, got %s, %s" % (type(key), key)) 78 | if isinstance(value, dict): 79 | value = VDFDict(value) 80 | super().__setitem__(key, value) 81 | self.__kcount[key[1]] += 1 82 | 83 | def __getitem__(self, key): 84 | return super().__getitem__(self._normalize_key(key)) 85 | 86 | def __delitem__(self, key): 87 | key = self._normalize_key(key) 88 | result = super().__delitem__(key) 89 | 90 | start_idx = self.__omap.index(key) 91 | del self.__omap[start_idx] 92 | 93 | dup_idx, skey = key 94 | self.__kcount[skey] -= 1 95 | tail_count = self.__kcount[skey] - dup_idx 96 | 97 | if tail_count > 0: 98 | for idx in range(start_idx, len(self.__omap)): 99 | if self.__omap[idx][1] == skey: 100 | oldkey = self.__omap[idx] 101 | newkey = (dup_idx, skey) 102 | super().__setitem__(newkey, self[oldkey]) 103 | super().__delitem__(oldkey) 104 | self.__omap[idx] = newkey 105 | 106 | dup_idx += 1 107 | tail_count -= 1 108 | if tail_count == 0: 109 | break 110 | 111 | if self.__kcount[skey] == 0: 112 | del self.__kcount[skey] 113 | 114 | return result 115 | 116 | def __iter__(self): 117 | return iter(self.iterkeys()) 118 | 119 | def __contains__(self, key): 120 | return super().__contains__(self._normalize_key(key)) 121 | 122 | def __eq__(self, other): 123 | if isinstance(other, VDFDict): 124 | return list(self.items()) == list(other.items()) 125 | else: 126 | return False 127 | 128 | def __ne__(self, other): 129 | return not self.__eq__(other) 130 | 131 | def clear(self): 132 | super().clear() 133 | self.__kcount.clear() 134 | self.__omap = list() 135 | 136 | def get(self, key, *args): 137 | return super().get(self._normalize_key(key), *args) 138 | 139 | def setdefault(self, key, default=None): 140 | if key not in self: 141 | self.add(key, default) 142 | return self.__getitem__(key) 143 | 144 | def pop(self, key): 145 | key = self._normalize_key(key) 146 | value = self.__getitem__(key) 147 | self.__delitem__(key) 148 | return value 149 | 150 | def popitem(self): 151 | if not self.__omap: 152 | raise KeyError("VDFDict is empty") 153 | key = self.__omap[-1] 154 | return key[1], self.pop(key) 155 | 156 | def update(self, data=None, overwrite=False): 157 | if isinstance(data, dict): 158 | data = data.items() 159 | elif not isinstance(data, list): 160 | raise TypeError("Expected data to be a list or dict, got %s" % type(data)) 161 | 162 | update_func = self.__setitem__ if overwrite else self.add 163 | for key, value in data: 164 | if isinstance(value, list): 165 | update_func(key, VDFDict(value)) 166 | else: 167 | update_func(key, value) 168 | 169 | def iterkeys(self): 170 | return (key[1] for key in self.__omap) 171 | 172 | def keys(self): 173 | return list(self.iterkeys()) 174 | 175 | def itervalues(self): 176 | return (self[key] for key in self.__omap) 177 | 178 | def values(self): 179 | return list(self.itervalues()) 180 | 181 | def iteritems(self, indexed_keys = False): 182 | return ((key if indexed_keys else key[1], self[key]) for key in self.__omap) 183 | 184 | def items(self, indexed_keys = False): 185 | return list(self.iteritems(indexed_keys)) 186 | 187 | def get_all_for(self, key): 188 | """ Returns all values of the given key """ 189 | if not isinstance(key, str): 190 | raise TypeError("Key needs to be a string.") 191 | return [self[(idx, key)] for idx in range(self.__kcount[key])] 192 | 193 | def remove_all_for(self, key): 194 | """ Removes all items with the given key """ 195 | if not isinstance(key, str): 196 | raise TypeError("Key need to be a string.") 197 | 198 | for idx in range(self.__kcount[key]): 199 | super().__delitem__((idx, key)) 200 | 201 | self.__omap = list(filter(lambda x: x[1] != key, self.__omap)) 202 | 203 | del self.__kcount[key] 204 | 205 | def has_duplicates(self): 206 | """ 207 | Returns ``True`` if the dict contains keys with duplicates. 208 | Recurses through any all keys with value that is ``VDFDict``. 209 | """ 210 | for n in getattr(self.__kcount, 'itervalues')(): 211 | if n != 1: 212 | return True 213 | 214 | def dict_recurse(obj): 215 | for v in getattr(obj, 'itervalues')(): 216 | if isinstance(v, VDFDict) and v.has_duplicates(): 217 | return True 218 | elif isinstance(v, dict): 219 | return dict_recurse(v) 220 | return False 221 | 222 | return dict_recurse(self) 223 | def ToString(self, level = 0, quoteKeys=False): 224 | line_indent = '\t' * level 225 | s = "" 226 | s += "\n" + line_indent + '{\n' 227 | for key, value in self.items(): 228 | if quoteKeys: 229 | key = '"'+key+'"' 230 | if isinstance(value, VDFDict): 231 | s += line_indent + f"\t{key}{value.ToString(level+1, quoteKeys)}" 232 | else: 233 | s += line_indent + f'\t{key}\t"{value}"\n' 234 | s += line_indent + "}\n" 235 | return s 236 | 237 | def _NoneOnException(func): 238 | def wrapper(*args, **kwargs): 239 | try: func(*args) 240 | except Exception: return 241 | return wrapper 242 | 243 | class KV(VDFDict): 244 | 245 | @classmethod 246 | def FromFile(cls, file: Union[str, bytes, Path], case_sensitive=False, escape=False, **params): 247 | with open(file, 'r', encoding="utf-8") as f: 248 | return cls.FromBuffer(f.read(), file, case_sensitive, escape, **params) 249 | 250 | @classmethod 251 | def CollectionFromFile(cls, file: Path, case_sensitive=False, escape=False, **params): 252 | with open(file, 'r', encoding="utf-8") as f: 253 | return cls.CollectionFromBuffer(f.read(), file, case_sensitive, escape, **params) 254 | 255 | @classmethod 256 | def FromBuffer(cls, buf: str, resourceName: Union[str, bytes, Path] = None, case_sensitive=False, escape=False, **params): 257 | cppkv = KeyValues(case_sensitive = case_sensitive, escape = escape) 258 | cppkv.LoadFromBuffer(resourceName, buf = CUtlBuffer(buf), **params) 259 | return cls(cppkv.keyName, cppkv.value.ToBuiltin()) 260 | 261 | @classmethod 262 | def CollectionFromBuffer(cls, buf: str, resourceName: Path = None, case_sensitive=False, escape=False, **params): 263 | cppkv = KeyValues( 264 | k="" if resourceName is None else resourceName.name, 265 | case_sensitive = case_sensitive, escape = escape 266 | ) 267 | cppkv.RecursiveLoadFromBuffer(resourceName, CKeyValuesTokenReader(CUtlBuffer(buf)), True) 268 | return cls(cppkv.keyName, cppkv.value.ToBuiltin()) 269 | 270 | def __init__(self, keyName: str, value: dict) -> None: 271 | self.keyName: str = keyName 272 | super().__init__(value) 273 | 274 | def __getitem__(self, key) -> Optional[Any]: 275 | 'If key exists, value of key. Otherwise None' 276 | try: 277 | return super().__getitem__(key) 278 | except KeyError: 279 | return 280 | 281 | def setdef_instance(self, key, obj): 282 | raise NotImplementedError 283 | "setdefault but with type (dict, genericvalue)" 284 | get = self[key] 285 | if get is None: 286 | self.add(key, obj) 287 | return obj 288 | elif not isinstance(get, type(obj)): 289 | self[key] = obj 290 | return obj 291 | return get 292 | 293 | def __repr__(self): 294 | return f"{self.__class__.__name__}({self.keyName!r}, {list(self.iteritems())!r})" 295 | 296 | def __str__(self): 297 | return self.ToString() 298 | 299 | def as_value(self): 300 | return VDFDict({self.keyName:self}) 301 | 302 | def ToString(self, level=0, quoteKeys=False): 303 | line_indent = "\t" * level 304 | return line_indent + f'"{self.keyName}"{super().ToString(level, quoteKeys)}' 305 | 306 | def save(self, path, quoteKeys=False): 307 | with open(path, 'w') as fp: 308 | fp.write(self.ToString(quoteKeys=quoteKeys)) 309 | 310 | def ToKeyValues(self): 311 | raise NotImplementedError 312 | kv = KeyValues(self.keyName) 313 | kv.Sub = [] 314 | for k, v in self.items(): 315 | if isinstance(v, VDFDict): 316 | kv.value.append(v.ToKeyValues()) 317 | else: 318 | kv.value.append(KeyValues(k, v)) 319 | return kv 320 | -------------------------------------------------------------------------------- /utils/shared/material_proxies.py: -------------------------------------------------------------------------------- 1 | # Proxy to Dynamic Expression 2 | # https://developer.valvesoftware.com/wiki/List_Of_Material_Proxies 3 | # https://developer.valvesoftware.com/wiki/Dota_2_Workshop_Tools/Materials/Dynamic_Material_Expressions 4 | 5 | try: 6 | from .keyvalues1 import VDFDict 7 | except ImportError: 8 | from keyvalues1 import VDFDict 9 | from math import cos, sin 10 | 11 | class Proxies(VDFDict): pass 12 | 13 | def add(srcvar1, srcvar2, **_): return f"{srcvar1} + {srcvar2}" 14 | def multiply(srcvar1, srcvar2, **_): return f"{srcvar1} * {srcvar2}" 15 | def subtract(srcvar1, srcvar2, **_): return f"{srcvar1} - {srcvar2}" 16 | def divide(srcvar1, srcvar2, **_): return f"{srcvar1} / {srcvar2}" 17 | 18 | def equals(srcvar1, **_): return f"{srcvar1}" 19 | def abs(srcvar1, **_): return f"abs({srcvar1})" 20 | def frac(srcvar1, **_): return f"frac({srcvar1})" 21 | 22 | _int = int 23 | def int(srcvar1, **_): 24 | return f"(frac({srcvar1}) >= 0.5 ? ceil({srcvar1}) : floor({srcvar1}))" 25 | 26 | def clamp(srcvar1, minval, maxval, **_): 27 | return f"clamp({srcvar1}, {minval}, {maxval})" 28 | 29 | def lessorequal(lessequalvar, greatervar, srcvar1, srcvar2, **_): 30 | return f"({srcvar1} <= {srcvar2}) ? {lessequalvar} : {greatervar}" 31 | 32 | def selectfirstifnonzero(srcvar1, srcvar2, **_): 33 | return f"({srcvar1} != 0) ? {srcvar1} : {srcvar2}" 34 | 35 | def wrapminmax(srcvar1, minval, maxval, **_): 36 | if ( maxval <= minval ): # Bad input, just return the min 37 | return f"{minval}" 38 | else: 39 | expr = ( 40 | f"flResult = ( {srcvar1} - {minval} ) / ( {maxval} - {minval} )", 41 | f"(flResult >= 0) ? flResult = flResult - ({int('flResult')}) : flResult = flResult - ({int('flResult')}) - 1", 42 | f"(flResult * ( {maxval} - {minval} )) + {minval}" 43 | ) 44 | #f"flResult = flResult + minval; 45 | return expr 46 | def remapvalclamp(srcvar1, range_in_min = 0, range_in_max = 1, range_out_min = 0, range_out_max = 1, **_): 47 | 48 | #if ( A == B ) return fsel( val - B , D , C ); // fsel(c,x,y) { ( (c) >= 0 ? (x) : (y) ) } 49 | #float cVal = (val - A) / (B - A); 50 | #cVal = clamp( cVal, 0.0f, 1.0f ); 51 | #return C + (D - C) * cVal; 52 | 53 | A, B, C, D = range_in_min, range_in_max, range_out_min, range_out_max 54 | expr = ( 55 | f"remapvalclamp_temp1 = ({srcvar1} - {B}) >= 0 ? {D} : {C}", 56 | f"remapvalclamp_temp2 = {C} + ({D} - {C}) * {clamp((srcvar1 - A) / (B - A), 0, 1)}", 57 | f"{A} == {B} ? remapvalclamp_temp1 : remapvalclamp_temp2", 58 | ) 59 | return expr 60 | 61 | #if ( A == B ) 62 | # return f"({val} - {B}) >= 0 ? {D} : {C}" 63 | #return f"{C} + ({D} - {C}) * {clamp((srcvar1 - A) / (B - A), 0, 1)}" 64 | 65 | def exponential( minval, maxval, srcvar1, offset = 0, scale = 1, **_): 66 | return f"clamp({scale} * pow(2.71828, {srcvar1} + {offset}), {minval}, {maxval})" 67 | 68 | def sine(sineperiod, sinemin = -1, sinemax = 1, timeoffset = 0, **_): 69 | return f"( {sinemax} - {sinemin} ) * (( sin( 2.0 * 3.14159265358979323846264338327950288 * (time() - {timeoffset}) / {sineperiod} ) * 0.5 ) + 0.5) + {sinemin}" 70 | 71 | def linearramp(rate = 1, initialvalue = 0, **_): 72 | return f"( {rate} * time() + {initialvalue} )" 73 | 74 | def entityrandom(**_): # temporary 75 | return "0" 76 | 77 | def health(scale, **_): 78 | return f"{scale} * $health" 79 | 80 | def currenttime(**_): 81 | return "time()" 82 | 83 | def uniformnoise(minval, maxval, mean, **_): 84 | return f"random({minval}, {maxval})" 85 | 86 | def gaussiannoise(minval, maxval, halfwidth, mean, **_): 87 | return f"random({minval}, {maxval})" 88 | 89 | def texturetransform(centervar, scalevar, rotatevar, translatevar, **_): 90 | return f"float2({translatevar[0]}, {translatevar[1]})" 91 | return f"float2({scalevar[0]}, {scalevar[1]})" 92 | 93 | def texturescroll(texturescrollvar, texturescrollrate, texturescrollangle, **_): 94 | return f"float2({texturescrollrate} * cos({texturescrollangle}, {texturescrollrate} * sin({texturescrollrate}))" 95 | 96 | def get_resultvar(proxy: dict): 97 | for key, val in proxy.items(): 98 | if key == "resultvar": 99 | return val 100 | return None 101 | 102 | def search_res(proxies, result: str): 103 | for proxy in proxies: 104 | if result == get_resultvar(proxies[proxy]): 105 | return proxy 106 | 107 | class DynamicExpression: 108 | def __init__(self): 109 | #self.parent = parent 110 | 111 | # Lists of lines of code 112 | self.constants = {} 113 | self.defined_variables = [] 114 | self.expression_list = [] 115 | 116 | def add_expression(self, expression): # bottom to top 117 | self.expression_list.insert(0, expression) 118 | 119 | def __repr__(self): 120 | if not self.defined_variables and not self.expression_list: 121 | return "" 122 | s: str = "// Auto-Generated by Source1Import\n\n" 123 | for k, v in self.constants.items(): 124 | s += f"{k} = {v};\n" 125 | for expression in self.expression_list: 126 | ";\n".join(expression.splitlines()) # multi-line expressions 127 | s += f"{expression};\n" 128 | return repr(s)#repr(s) 129 | 130 | def __str__(self) -> str: 131 | return str(self.__repr__()) 132 | 133 | def FormDynamicExpression(proxy: str, proxyParams: dict, mainResultVar: str, known, KeyValues, vmtProxies) -> DynamicExpression: 134 | 135 | undefined_vars = [mainResultVar] 136 | 137 | dynEx = DynamicExpression() 138 | 139 | for var in undefined_vars: 140 | #print(var) 141 | 142 | if var in dynEx.defined_variables: 143 | continue 144 | 145 | mainResult = (var == mainResultVar) 146 | #print(var, undefined_vars) 147 | 148 | if not mainResult: # input variable of preceding proxy 149 | proxy = search_res(vmtProxies, var) 150 | if not proxy: 151 | #print("no proxy found for", var) 152 | continue 153 | proxyParams = vmtProxies[proxy] 154 | 155 | for key, value in proxyParams.items(): 156 | if key == "resultvar": continue 157 | if type(value) is str and value.startswith("$"): 158 | if value in known: 159 | proxyParams[key] = known[value] # does nott work 160 | else: 161 | #proxyParams[key] = value 162 | undefined_vars.append(proxyParams[key]) 163 | #search = True 164 | try: 165 | expression = globals()[proxy](**proxyParams) 166 | except TypeError: 167 | continue 168 | except KeyError: 169 | print("Missing func", proxy) 170 | #dynEx.add_expression("$" + proxy) 171 | continue 172 | 173 | if not mainResult: 174 | expression = f"{var} = " + expression 175 | #if var in dynamicEx.undefined_variables: 176 | # dynamicEx.undefined_variables.remove(var) 177 | 178 | dynEx.add_expression(expression) 179 | dynEx.defined_variables.append(var) 180 | 181 | for var in undefined_vars[:]: 182 | if var in dynEx.defined_variables: 183 | undefined_vars.remove(var) 184 | continue 185 | 186 | if var in KeyValues: 187 | dynEx.constants[var] = KeyValues[var] 188 | undefined_vars.remove(var) 189 | 190 | return dynEx 191 | 192 | def ProxiesToDynamicParams(vmtProxies: VDFDict, known, KeyValues) -> tuple[dict, dict[str, DynamicExpression]]: 193 | """Convert proxy chains to a dynamic expressions""" 194 | vmatDynamicParams: dict[str, DynamicExpression] = {} 195 | vmatKeyValues: dict = {} 196 | 197 | for proxy, proxyParams in vmtProxies.items(): 198 | try: 199 | if proxy == "animatedtexture": 200 | # sequential animation, get framerate 201 | if proxyParams["animatedtextureframenumvar"] != "$frame": 202 | continue 203 | vmatKeyValues["F_TEXTURE_ANIMATION"] = 1 204 | vmatKeyValues["g_flAnimationTimePerFrame"] = 1 / float(proxyParams["animatedtextureframerate"]) 205 | continue 206 | elif proxy == "texturescroll": 207 | if proxyParams["texturescrollvar"] != "$basetexturetransform": 208 | continue 209 | u = float(proxyParams["texturescrollrate"]) * cos(_int(proxyParams["texturescrollangle"])) 210 | v = float(proxyParams["texturescrollrate"]) * sin(_int(proxyParams["texturescrollangle"])) 211 | vmatKeyValues["g_vTexCoordScrollSpeed"] = f"[{u:.6f} {v:.6f}]" 212 | continue 213 | except (ValueError, KeyError): 214 | # bad proxyParams 215 | continue 216 | # resultvar needs to be a vmt $key that can be translated 217 | if (resultvar:=get_resultvar(proxyParams)) not in known: 218 | continue 219 | 220 | dpKey = known[resultvar] # g_vColorTint 221 | if dpKey is None: 222 | continue 223 | 224 | # scripted animation sequence 225 | if resultvar == "$frame": 226 | vmatKeyValues["F_TEXTURE_ANIMATION"] = 1 227 | vmatKeyValues["F_TEXTURE_ANIMATION_MODE"] = 2 228 | 229 | dynEx = FormDynamicExpression(proxy, proxyParams, resultvar, known, KeyValues, vmtProxies) 230 | vmatDynamicParams[dpKey] = repr(dynEx).strip("'") # "clamp(random(1), 0.4, 0.6)" 231 | 232 | return vmatKeyValues, vmatDynamicParams 233 | 234 | 235 | M_PI = 3.14159265358979323846264338327950288 236 | if __name__ == "__main__": 237 | import unittest 238 | 239 | class Test_KeyValues(unittest.TestCase): 240 | def test1(self): 241 | known2 = {"$color": "g_vColor", "$alpha": "g_flOpacity"} 242 | KeyValue5 = { 243 | "$addoutput": 0, 244 | "$loeoutput": 0, 245 | "$noisesignal": 0, 246 | "$noisegate": 0.6, 247 | "$zero": 0, 248 | "$sinewaveoutput": 0, 249 | "$one": 1, 250 | "proxies": Proxies({ 251 | "clamp": { 252 | "minval": .1, 253 | "maxval": 1, 254 | "srcvar1": "$addoutput", 255 | "resultvar": "$color", 256 | }, 257 | "add": { 258 | "srcvar1": "$sinewaveoutput", 259 | "srcvar2": "$loeoutput", 260 | "resultvar": "$addoutput", 261 | }, 262 | "lessorequal": { 263 | "lessequalvar": "$zero", 264 | "greatervar": "$one", 265 | "srcvar1": "$noisesignal", 266 | "srcvar2": "$noisegate", 267 | "resultvar": "$loeoutput", 268 | }, 269 | "gaussiannoise": { 270 | "minval": .1, 271 | "maxval": 1, 272 | "halfwidth": .5, 273 | "mean": 1, 274 | 275 | "resultvar": "$noisesignal", 276 | }, 277 | "sine": { 278 | "sinemin": 0, 279 | "sinemax": 6, 280 | "sineperiod": 8, 281 | "resultvar": "$sinewaveoutput", 282 | }, 283 | "subtract": { 284 | "srcvar1": 5, 285 | "srcvar2": 2, 286 | "resultvar": "$alpha", 287 | } 288 | }), 289 | } 290 | expected_result = { 291 | 'g_vColor': r'// Auto-Generated by Source1Import\n\n$zero = 0;\n$one = 1;\n$noisegate = 0.6;\n$noisesignal = random(0.1, 1);\n$loeoutput = ($noisesignal <= $noisegate) ? $zero : $one;\n$sinewaveoutput = ( 6 - 0 ) * (( sin( 2.0 * 3.14159265358979323846264338327950288 * (time() - 0) / 8 ) * 0.5 ) + 0.5) + 0;\n$addoutput = $sinewaveoutput + $loeoutput;\nclamp($addoutput, 0.1, 1);\n', 292 | 'g_flOpacity': r'// Auto-Generated by Source1Import\n\n5 - 2;\n' 293 | } 294 | _, result = ProxiesToDynamicParams(KeyValue5["proxies"], known2, KeyValue5) 295 | self.maxDiff = None 296 | self.assertEqual(result, expected_result) 297 | unittest.main() 298 | -------------------------------------------------------------------------------- /utils/dev/use_hlvr_surfaces.py: -------------------------------------------------------------------------------- 1 | hlvr_surfaces = [ 2 | "default_silent", 3 | "default", 4 | "prop.ammo_shell_pistol", 5 | "base.metal", 6 | "base.sheetmetal", 7 | "world.metal", 8 | "world.metal_combine", 9 | "world.metal_panel", 10 | "world.metal_thin", 11 | "world.metal_vent", 12 | "base.prop.metal", 13 | "prop.metal_valve_handle", 14 | "prop.metal_box", 15 | "prop.metal_vent", 16 | "base.metal_grate", 17 | "world.metal_grate", 18 | "prop.metal_grenade", 19 | "prop.metal_gas_canister", 20 | "prop.metal_barrel", 21 | "prop.metal_barrel_floating", 22 | "prop.metal_popcan", 23 | "prop.metal_paintcan", 24 | "prop.metal_caster", 25 | "base.dirt", 26 | "world.dirt", 27 | "world.mud", 28 | "world.slipperyslime", 29 | "world.grass", 30 | "world.snow", 31 | "world.ice", 32 | "world.sand", 33 | "base.wood", 34 | "world.wood_solid", 35 | "world.wood_debris", 36 | "world.wood_lowdensity", 37 | "world.wood_panel", 38 | "base.prop_wood", 39 | "prop.wood_crate", 40 | "prop.wood_box", 41 | "prop.wood_plank", 42 | "prop.wood_furniture", 43 | "prop.wood_gib", 44 | "prop.wood_panel", 45 | "prop.wood_ladder", 46 | "base.water", 47 | "world.slime", 48 | "world.quicksand", 49 | "world.water", 50 | "world.water_shallow", 51 | "base.glass", 52 | "world.glass", 53 | "prop.glass_tv_tube", 54 | "prop.glass_tv_tube_02", 55 | "prop.glass", 56 | "prop.pottery", 57 | "base.concrete", 58 | "world.concrete", 59 | "world.concrete_debris", 60 | "world.rock", 61 | "world.boulder", 62 | "world.gravel", 63 | "world.asphalt", 64 | "world.brick", 65 | "world.brick_hollow", 66 | "prop.concrete", 67 | "prop.flesh", 68 | "prop.flesh_bloody", 69 | "prop.flesh_alien", 70 | "prop.flesh_armored", 71 | "npc_combine_soldier", 72 | "npc_combine_grunt_gastank", 73 | "npc_combine_soldier_gear", 74 | "npc_zombie_flesh", 75 | "npc_blindzombie_body", 76 | "npc_creature_flesh", 77 | "npc_headcrab_flesh", 78 | "npc_headcrab_black_flesh", 79 | "npc_barnacle_flesh", 80 | "npc_reviver_flesh", 81 | "npc_reviver_node_flesh", 82 | "npc_reviver_zombie_flesh", 83 | "npc_zombie_dead_flesh", 84 | "npc_antlion_flesh", 85 | "npc_antlion_armor", 86 | "npc_manhack", 87 | "npc_combine_hand_shield", 88 | "npc_boomerplant", 89 | "world.plastic", 90 | "prop.plastic", 91 | "prop.plastic_barrel", 92 | "prop.plastic_barrel_buoyant", 93 | "prop.plastic_crate", 94 | "prop.plastic_bottle_small", 95 | "base.rubber", 96 | "prop.rubber_tire", 97 | "prop.rubber_ball", 98 | "base.cardboard", 99 | "world.cardboard", 100 | "prop.cardboard", 101 | "prop.paper", 102 | "prop.paper_cup", 103 | "world.ceiling_tile", 104 | "world.chainlink", 105 | "prop.chain", 106 | "prop.metal_jerrycan", 107 | "prop.metal_bottle_crate", 108 | "prop.metal_mug", 109 | "prop.metal_office_chair", 110 | "prop.metal_coffee_pot", 111 | "prop.metal_bucket", 112 | "prop.metal_trash_can", 113 | "prop.watermelon", 114 | "player", 115 | "world.player_control_clip", 116 | "no_decal", 117 | "world.foliage", 118 | "world.foliage_palm", 119 | "world.carpet", 120 | "world.sofa", 121 | "world.mattress", 122 | "world.plaster", 123 | "roller", 124 | "world.tile_floor", 125 | "world.ladder", 126 | "prop.computer", 127 | "prop.rock", 128 | "prop.porcelain", 129 | "prop.boulder", 130 | "prop.brick", 131 | "prop.concrete_block", 132 | "prop.plastic_box", 133 | "base.weapon", 134 | "weapon", 135 | "prop.metal_gun", 136 | "prop.ammo_shell_shotgun", 137 | "prop.ammo_clip", 138 | "prop.combine_battery", 139 | "prop.health_pen", 140 | "npc_antlion_abdomen", 141 | "canister", 142 | "cardboard", 143 | "ceiling_tile", 144 | "chainlink", 145 | "chain", 146 | "computer", 147 | "concrete", 148 | "glass", 149 | "glassbottle", 150 | "grass", 151 | "grenade", 152 | "dirt", 153 | "flesh", 154 | "bloodyflesh", 155 | "alienflesh", 156 | "armorflesh", 157 | "item", 158 | "ladder", 159 | "solidmetal", 160 | "metal", 161 | "metalpanel", 162 | "metal_barrel", 163 | "floating_metal_barrel", 164 | "Metal_Box", 165 | "metalgrate", 166 | "metalvent", 167 | "mud", 168 | "popcan", 169 | "pottery", 170 | "paintcan", 171 | "plastic_barrel", 172 | "plastic_box", 173 | "plastic", 174 | "rubber", 175 | "quicksand", 176 | "sand", 177 | "slime", 178 | "slipperyslime", 179 | "tile", 180 | "wade", 181 | "water", 182 | "Wood", 183 | "Wood_lowdensity", 184 | "wood_box", 185 | "wood_create", 186 | "wood_plank", 187 | "Wood_Solid", 188 | "Wood_Panel", 189 | "woodladder", 190 | "rock", 191 | "porcelain", 192 | "boulder", 193 | "gravel", 194 | "brick", 195 | "concrete_block", 196 | "watermelon", 197 | "snow", 198 | "ice", 199 | "carpet", 200 | "plaster", 201 | "energy_ball", 202 | "prop.pillow", 203 | "base.cloth", 204 | "npc_xen_hearing_flower_flesh", 205 | "prop.sofa", 206 | "prop.wood_crate_large", 207 | "prop.wood_crate_small", 208 | "prop.wood_gib_small", 209 | "prop.wood_gib_large", 210 | "npc_headcrab_armor", 211 | "prop.cardboard_box_large", 212 | "prop.cardboard_box_medium", 213 | "prop.cardboard_box_small", 214 | "prop.cardboard_box_broken_large", 215 | "prop.cardboard_box_broken_medium", 216 | "prop.cardboard_box_empty_medium", 217 | "prop.cardboard_box_empty_small", 218 | "prop.cardboard_carton_biscuits", 219 | "prop.cardboard_carton_butter", 220 | "prop.cardboard_carton_cereal", 221 | "prop.cardboard_carton_cigarettes", 222 | "prop.cardboard_carton_crackers", 223 | "prop.cardboard_carton_milk", 224 | "prop.metal_cookware_pan_a", 225 | "prop.metal_cookware_pan_b", 226 | "prop.metal_cookware_pot_a", 227 | "prop.metal_cookware_pot_b", 228 | "prop.metal_cookware_pot_lid", 229 | "prop.pottery_flowerpot", 230 | "prop.pottery_gnome", 231 | "prop.pottery_shard", 232 | "prop.xen_resin_stack", 233 | "prop.xen_resin_puck", 234 | "prop.plastic_bottle_large", 235 | "prop.plastic_bottle_bleach", 236 | "prop.plastic_bottle_dishsoap", 237 | "prop.antlions_habitat", 238 | "prop.glass_bottle_shard", 239 | "prop.glass_bottle_large_wine", 240 | "prop.glass_bottle_large_spirits", 241 | "prop.glass_bottle_small", 242 | "prop.glass_jar", 243 | "prop.glass_vase", 244 | "prop.glass_tumbler", 245 | "prop.metal_manhack_gib", 246 | "prop.metal_jerrycan_explosive", 247 | "world.drywall", 248 | "npc_combine_soldier_armor", 249 | "prop.porcelain_dinnerplate", 250 | "prop.ammo_clip_large", 251 | "prop.ammo_clip_rapidfire", 252 | "prop.ammo_clip_shotgun", 253 | "player_hand", 254 | "prop.plastic_crate_large", 255 | "prop.plastic_paint_pail", 256 | "prop.plastic_traffic_cone", 257 | "prop.plastic_traffic_drum", 258 | "prop.metal_barrel_explosive", 259 | "prop.cardboard_vort_magic", 260 | "prop.metal_popcan_empty", 261 | "prop.plastic_bottle_cap", 262 | "prop.metal_ashtray", 263 | "npc_antlion_worker_flesh", 264 | "npc_antlion_worker_abdomen", 265 | "npc_combine_soldier_kevlar", 266 | "prop.book", 267 | "prop.paper_towel", 268 | "prop.cigarette", 269 | "prop.ration_bar", 270 | "prop.luggage_large", 271 | "prop.luggage_small", 272 | "prop.metal_tin", 273 | "prop.metal_tin_large", 274 | "prop.metal_tin_small", 275 | "prop.metal_tin_tiny", 276 | "prop.metal_food_can", 277 | "prop.metal_food_can_empty", 278 | "prop.porcelain_mug", 279 | "prop.porcelain_shard", 280 | "prop.porcelain_teapot", 281 | "prop.concrete_block_small", 282 | "prop.concrete_block_shard", 283 | "prop.brick_shard", 284 | "prop.wood_drawer", 285 | "prop.wood_pic_frame_large", 286 | "prop.glass_pic_frame", 287 | "prop.metal_tool_small", 288 | "prop.metal_tool_large", 289 | "prop.metal_tool", 290 | "prop.metal_tool_hammer", 291 | "prop.plastic_safety_hat", 292 | "prop.soap_bar", 293 | "prop.leather_shoe", 294 | "prop.plastic_keyboard", 295 | "prop.plastic_trashcan", 296 | "prop.plastic_trashcan_lid", 297 | "prop.metal_wheel_hub", 298 | "prop.plastic_bottle_pills_small", 299 | "prop.plastic_bottle_pills_large", 300 | "prop.billiard_ball", 301 | "prop.gpu", 302 | "prop.plastic_tray", 303 | "prop.plastic_small_misc", 304 | "prop.plastic_radio", 305 | "prop.plastic_vhs", 306 | "prop.plastic_bottle_fivegal", 307 | "prop.foliage_stem", 308 | "prop.plastic_telephone", 309 | "prop.wood_radio", 310 | "prop.ceramic_jar", 311 | "prop.flesh_bloody_small", 312 | "prop.metal_small", 313 | "prop.metal", 314 | "prop.keyfob", 315 | "prop.cardboard_matchbox", 316 | "prop.cardboard_matchbook", 317 | "prop.combine_console_tank", 318 | "prop.glass_health_vial", 319 | "prop.metal_padlock", 320 | "xen_foliage_impact", 321 | "world.metal_pipe", 322 | "prop.wood_pallet", 323 | "prop.wood_shovel_handle", 324 | "prop.metal_shovel_blade", 325 | "prop.paper_newspaper", 326 | "world.metal_vehicle_ext", 327 | "prop.rubber_boot", 328 | "prop.rubber_glove", 329 | "prop.respirator", 330 | "prop.metal_geiger_counter", 331 | "prop.metal_microscope", 332 | "prop.plastic_fan", 333 | "prop.plastic_calculator", 334 | "prop.leather_folder", 335 | "prop.plastic_stapler", 336 | "prop.metal_tray", 337 | "prop.wood_small", 338 | "world.stairs", 339 | "stairs.wood", 340 | "stairs.metal", 341 | "world.mud_puddles", 342 | "world.concrete_flooded", 343 | "prop.plastic_marker_tip", 344 | "world.linoleum_wet", 345 | "prop.metal_keg", 346 | "prop.bicycle", 347 | "prop.rubber_plunger", 348 | "prop.plastic_marker", 349 | "prop.metal_door_handle", 350 | "prop.wood_chair", 351 | "prop.wood_chair_light", 352 | "prop.barstool_top", 353 | "prop.metal_vase", 354 | "prop.metal_vase_small", 355 | "prop.wood_crate_ammo", 356 | "prop.wood_pic_frame_small", 357 | "prop.wood_pic_frame_med", 358 | "prop.computer_small", 359 | "prop.plastic_mask_toy", 360 | "prop.multitool", 361 | "prop.porcelain_sink", 362 | "prop.porcelain_chunk", 363 | "prop.metal_aerosol_can", 364 | "prop.plastic_bottle_small_empty", 365 | "prop.plastic_bottle_fivegal_empty", 366 | "prop.mannequin", 367 | "prop.plastic_garbage_bag", 368 | "prop.tabletop_eraser", 369 | "prop.train_door_pipe", 370 | "prop.metal_heavy_misc" 371 | ] 372 | 373 | from pathlib import Path 374 | import re 375 | from difflib import get_close_matches 376 | 377 | root = Path(input("Enter content path:")) 378 | 379 | trans = {} 380 | def write_appropriate(mat_path: Path, lines, surface_line, surface): 381 | s1_surface = surface.strip('"').lower() 382 | s2_surface = s1_surface 383 | local_mat = mat_path.relative_to(root) 384 | 385 | # these don't change 386 | if s1_surface in ('default', 'default_silent', 'no_decal', 'player', 'roller', 'weapon', 'solidmetal', 'flesh'): 387 | return 388 | def get(): 389 | if("props" in local_mat.parts): match_ = get_close_matches('prop.' + s1_surface, hlvr_surfaces, 1, 0.4) 390 | else: match_= get_close_matches('world.' + s1_surface, hlvr_surfaces, 1, 0.6) or get_close_matches(s1_surface, hlvr_surfaces, 1, 0.6) 391 | 392 | return { 393 | 'chainlink': 'world.chainlink', 394 | 'tile': "world.tile_floor", 395 | 'tiles': "world.tile_floor", 396 | 'stone': "world.rock", 397 | 'leaves': "world.grass", 398 | 'rubber': "base.rubber", 399 | 'carpet': "world.carpet", 400 | 'metalpanel': "world.metal_panel", 401 | 'stucco': "world.concrete_debris", 402 | 'cloth': "base.cloth", 403 | "curtain": "base.cloth", 404 | "computer": "prop.computer", 405 | "grate": "world.metal_grate", 406 | }.get(s1_surface, match_[0] if match_ else s1_surface) 407 | 408 | s2_surface = get() 409 | trans[s1_surface] = s2_surface 410 | 411 | with mat_path.open("w") as fp: 412 | fixedlines = [] 413 | for line in lines: 414 | if line == surface_line: 415 | line = line.replace(surface, f'"{s2_surface}"') 416 | fixedlines.append(line) 417 | fp.writelines(fixedlines) 418 | 419 | print("Wrote", surface, s2_surface, local_mat) 420 | 421 | for material in (root / "materials").glob("**/*.vmat"): 422 | with material.open() as fp: 423 | lines = fp.readlines() 424 | for line in lines: 425 | if "PhysicsSurfaceProperties" in line: 426 | if (match_:= re.search('"\w{1,}"', line)) is not None: 427 | write_appropriate(material, lines, line, match_.group(0)) 428 | break 429 | 430 | for kv in trans.items(): 431 | print(kv) 432 | -------------------------------------------------------------------------------- /utils/shared/qc.py: -------------------------------------------------------------------------------- 1 | 2 | from typing import Optional, Sequence, Type, Union, get_origin, get_args 3 | from parsimonious.grammar import Grammar 4 | from parsimonious.nodes import Node, NodeVisitor 5 | 6 | qcgrammar = Grammar( 7 | """ 8 | qcfile = _? ((cmd / token_base / group_base) _*)* 9 | 10 | # to distinguish top level from other stuff 11 | cmd = ~"\\$[_$a-zA-Z][\w$/.]*" 12 | token_base = token "" 13 | group_base = group "" 14 | 15 | group = "{" _* ((_2complex4grammar / token / group) _*)* ("}" / ~"\Z") 16 | 17 | token = (variable / quoted / number) 18 | 19 | variable = ~r"[_$a-zA-Z][\S]*" 20 | quoted = ~r'"[^"]*"' 21 | number = (int? frac) / int 22 | int = "-"? ((digit1to9 digits) / digit) 23 | frac = "." digits 24 | digits = digit+ 25 | digit = ~"[0-9]" 26 | digit1to9 = ~"[1-9]" 27 | 28 | # grab the whole thing, don't tokenize 29 | _2complex4grammar = flexfile 30 | 31 | flexfile = "flexfile" _ quoted _ "{" ~"[^}]*" _ "}" 32 | 33 | # statement = exp_var _ "=" _ expression 34 | # expression = "("? _ (factor (_ operation _ expression){,31} ) _ ")"? 35 | # factor = (exp_var / variable / number / function) 36 | # function = variable _ "(" _ (expression _ ("," _ expression _)* )? ")" 37 | # operation = ("+" / "-" / "*" / "/") 38 | # exp_var = ~"%[_$a-zA-Z][\w$/.]*" 39 | 40 | _ = __* 41 | __ = ~r"\s+" / comment / multiline_comment 42 | comment = ~"//[^\\r\\n]*" 43 | # This is dumber than the usual stuff, eating everything till it finds */ or EOF 44 | multiline_comment = ~"\\/\\*(.*?|\\s)*(\\*\\/|\\Z)" 45 | #multiline_comment = ~"/\\*[^*]*\\*+(?:[^/*][^*]*\\*+)*/" 46 | 47 | """ 48 | ) 49 | 50 | class Group(list): pass 51 | class Token(str): pass 52 | 53 | # These have a rule "Opening bracket must be on the same line" 54 | # but it is not carried out here. 55 | class TokensInlineOrGroup: pass 56 | 57 | class QC: 58 | class include: 59 | filename: str 60 | 61 | class includemodel: 62 | filename: str 63 | 64 | class modelname: 65 | filename: str 66 | 67 | class pushd: 68 | path: str 69 | 70 | class popd(): pass 71 | 72 | class staticprop(): pass 73 | 74 | class surfaceprop: 75 | name: str 76 | 77 | class origin: 78 | x: float 79 | y: float 80 | z: float 81 | 82 | class contents: 83 | name: str 84 | 85 | class illumposition: 86 | x: float 87 | y: float 88 | z: float 89 | 90 | class attachment: 91 | name: str 92 | parent_bone: str 93 | x: float 94 | y: float 95 | z: float 96 | options: TokensInlineOrGroup 97 | 98 | class cdmaterials: 99 | folder: str 100 | 101 | class texturegroup: 102 | name: str 103 | options: Group[Group[Token]] 104 | 105 | class renamematerial: 106 | current: str 107 | new: str 108 | 109 | class cbox: 110 | minx: float 111 | miny: float 112 | minz: float 113 | maxx: float 114 | maxy: float 115 | maxz: float 116 | 117 | class bbox: 118 | minx: float 119 | miny: float 120 | minz: float 121 | maxx: float 122 | maxy: float 123 | maxz: float 124 | 125 | class definebone: 126 | name: str 127 | parent: str 128 | posx: float 129 | posy: float 130 | posz: float 131 | rotx: float 132 | roty: float 133 | rotz: float 134 | rotx_fixup: float 135 | roty_fixup: float 136 | rotz_fixup: float 137 | 138 | class hierarchy: 139 | child_name: str 140 | parent_name: str 141 | 142 | class bodygroup: 143 | name: str 144 | options: Group[Token] 145 | 146 | class body: 147 | name: str 148 | mesh_filename: str 149 | # TODO: confirm these 150 | reverse: Optional[bool] = False 151 | scale: Optional[float] = 1 152 | 153 | class lod: 154 | threshold: int 155 | options: Group[Token] 156 | 157 | class model: 158 | name: str 159 | mesh_filename: str 160 | 161 | class animation: 162 | name: str 163 | 164 | class sequence: 165 | name: str 166 | options: TokensInlineOrGroup 167 | 168 | class declaresequence: 169 | name: str 170 | 171 | class weightlist: 172 | name: str 173 | options: TokensInlineOrGroup 174 | 175 | class defaultweightlist: 176 | options: TokensInlineOrGroup 177 | 178 | class collisionmodel: 179 | mesh_filename: str 180 | #options: _options 181 | 182 | class collisionjoints: 183 | mesh_filename: str 184 | #options: _options 185 | 186 | class keyvalues: 187 | def handle_options(self, options_node: Node): 188 | trav = QCBuilder.traverse_options(options_node) 189 | def nested(trav): 190 | d = {} 191 | for key, val in zip(trav, trav): 192 | if key.expr_name != "token": 193 | raise OptionParseError("Expected token as key, got group") 194 | if val.expr_name == "group": 195 | d[key.text.strip('"')] = nested(QCBuilder.traverse_options(val.children[2])) 196 | continue 197 | d[key.text.strip('"')] = val.text.strip('"') 198 | return d 199 | 200 | self.__dict__.update(nested(trav)) 201 | 202 | class QCParseError(Exception): pass 203 | class OptionParseError(Exception): pass 204 | 205 | from collections import deque 206 | 207 | class QCBuilder(NodeVisitor): 208 | grammar = qcgrammar 209 | 210 | def __init__(self): 211 | super().__init__() 212 | self.qc = list() 213 | self.command_to_build: QC.bodygroup | None = None 214 | self.annotations_to_build = deque() 215 | self.bInGroup: bool = False 216 | 217 | def push_command(self, command_cls: Type): 218 | # Didn't finish the previous command 219 | if self.command_to_build and len(self.annotations_to_build): 220 | # verify that the command is complete 221 | for member, type in self.annotations_to_build: 222 | if getattr(self.command_to_build, member, None) is None: 223 | if member == "options": 224 | self.command_to_build.options = Group() 225 | else: 226 | raise QCParseError(f"Missing member {member} in {self.command_to_build}") 227 | self.qc.append(self.command_to_build) 228 | 229 | self.command_to_build = command_cls() 230 | 231 | if not command_cls.__annotations__ and not hasattr(command_cls, "handle_options"): 232 | self.qc.append(self.command_to_build) 233 | self.command_to_build = None 234 | return 235 | 236 | self.annotations_to_build = deque(self.command_to_build.__annotations__.items()) 237 | 238 | 239 | def push_argument(self, arg: str): 240 | member, type = self.annotations_to_build[0] 241 | 242 | bInlineOptions = False 243 | if member == "options": 244 | if type is not TokensInlineOrGroup: 245 | return 246 | type = str 247 | bInlineOptions = True 248 | 249 | bInlineOptional = get_origin(type) is Union 250 | bCommandBuiltYet = hasattr(self.command_to_build, member) and not bInlineOptional 251 | 252 | if bInlineOptions or not bCommandBuiltYet: 253 | if bInlineOptional: 254 | type = get_args(type)[0] 255 | if type is bool: 256 | arg = True 257 | if type == str: 258 | arg = arg.strip('"') 259 | if type in (int, float): 260 | # fix for 7.006ff000, passes ff000 as token 261 | if not all(c in "0123456789.-" for c in arg): 262 | return # raise TokenError 263 | if bInlineOptions: 264 | if not bCommandBuiltYet: 265 | self.command_to_build.options = Group([type(arg)]) 266 | else: 267 | self.command_to_build.options.append(type(arg)) 268 | return 269 | 270 | # Fill member 271 | setattr(self.command_to_build, member, type(arg)) 272 | self.annotations_to_build.popleft() 273 | 274 | # didn't run out yet 275 | if len(self.annotations_to_build): 276 | return 277 | 278 | if hasattr(self.command_to_build, "handle_options"): 279 | return 280 | # ran out of members to fill 281 | self.qc.append(self.command_to_build) 282 | self.command_to_build = None 283 | 284 | @staticmethod 285 | def traverse_options(node: Node): 286 | for child in node: 287 | if child.expr_name in ("token", "group"): 288 | yield child 289 | break 290 | yield from QCBuilder.traverse_options(child) 291 | 292 | @staticmethod 293 | def nested(node) -> Group[Token | Group[Token]]: 294 | rv = Group() 295 | trav = QCBuilder.traverse_options(node) 296 | for option in trav: 297 | # add group as a nested list of tokens 298 | if option.expr_name == "group": 299 | rv.append(QCBuilder.nested(option.children[2])) 300 | continue 301 | rv.append(option.text.strip('"')) 302 | return rv 303 | 304 | def push_argument_group(self, base_group_node: Node): 305 | 306 | if self.command_to_build is None: 307 | return "?" 308 | 309 | if hasattr(self.command_to_build, "handle_options"): 310 | self.command_to_build.handle_options(base_group_node.children[0].children[2]) 311 | 312 | # just a list of tokens/groups { "a" "b" "c" { "d" "e" } } 313 | elif self.command_to_build.__annotations__.get("options") in (Group[Token], TokensInlineOrGroup): 314 | 315 | #print(base_group_node.children[0].children[2]) 316 | 317 | ls = QCBuilder.nested(base_group_node.children[0].children[2]) 318 | 319 | if getattr(self.command_to_build, "options", None) is not None: 320 | self.command_to_build.options.extend(ls) 321 | else: 322 | self.command_to_build.options = ls 323 | 324 | # a list of groups { { "a1" "b1" } { "a2" "b2" } } 325 | elif self.command_to_build.__annotations__.get("options") == Group[Group[Token]]: 326 | trav = QCBuilder.traverse_options(base_group_node.children[0].children[2]) 327 | base_group = Group() 328 | for group in trav: 329 | if group.expr_name != "group": 330 | raise OptionParseError(f"Expected group, got {group.expr_name}") 331 | subgr: Group[Token] = Group() 332 | a = QCBuilder.traverse_options(group.children[2]) 333 | for token in a: 334 | if token.expr_name != "token": 335 | raise OptionParseError(f"Expected token, got {token.expr_name}") 336 | subgr.append(token.text.strip('"')) 337 | base_group.append(subgr) 338 | 339 | self.command_to_build.options = base_group 340 | 341 | # options is the last member 342 | self.qc.append(self.command_to_build) 343 | self.command_to_build = None 344 | if len(self.annotations_to_build): 345 | self.annotations_to_build.popleft() 346 | 347 | def visit_qcfile(self, node: Node, visited_children: Sequence[Node]): 348 | return self.qc 349 | 350 | def visit_cmd(self, node, visited_children): 351 | token_name = node.text.lower() 352 | 353 | if (cls:=getattr(QC, token_name[1:], None)) is not None: 354 | self.push_command(cls) 355 | else: 356 | self.qc.append(f"{token_name}:unimplemented") 357 | return node 358 | 359 | def visit_token_base(self, node, _): 360 | if self.command_to_build is None: 361 | return 362 | if not hasattr(self.command_to_build, "__annotations__"): 363 | return 364 | self.push_argument(node.text) 365 | 366 | def visit_group_base(self, node, visited_children): 367 | self.push_argument_group(node) 368 | return node 369 | #def visit_comment(self, node, visited_children): 370 | # print("comment:", node.text[2:].strip()) 371 | # 372 | #def visit_flexfile(self, node, visited_children): 373 | # print("flexfile contents", node.text) 374 | 375 | def generic_visit(self, *args): 376 | return args[0] 377 | 378 | if __name__ == "__main__": 379 | testqc = \ 380 | """ 381 | $modelname "props\myfirstmodel .mdl" 382 | $body mybody "myfirstmodel-ref.smd" 1 0.236 383 | $body myhead "myfirstmodel-refhead.smd" 384 | 385 | $bodygroup sights { 386 | studio mybody 387 | studio myhead 388 | blank 389 | } 390 | 391 | $attachment "nozzle" "" 0 4.8 0 392 | 393 | $attachment "anim_attachment_RH" "ValveBiped.Anim_Attachment_RH" -0.00 -0.00 0.00 rotate -90.00 -90.00 0.00 394 | 395 | 396 | $staticprop 397 | $surfaceprop combine_metal 398 | $CDmaterials "models\props" 399 | 400 | $TextureGroup "skinfamilies" { 401 | { "helicopter_news_adj" "helicopter_news2" } // TODO: fix this character making the other line a comment->\ 402 | { "..\hybridPhysx\helicopter_news_adj" "..\hybridPhysx\helicopter_news2" } //helicopter_news2 from models/hybridPhysx 403 | { "..\hybridPhysx\helicopter_army" "..\hybridPhysx\helicopter_army2" } //Could also add second $cdmaterials line and just use "helicopter_army2" 404 | } 405 | 406 | 407 | $sequencE idle "myfirstmodel-ref.smd" { activity "ACT_IDLE" -1 fadein 0.2 408 | 409 | { event AE_MUZZLEFLASH 0 "357 MUZZLE" } 410 | { event 6001 0 "0" } 411 | snap 412 | } 413 | 414 | $collisionmodel "myfirstmodel-phys.smd" { 415 | $concave 416 | } 417 | 418 | $keyvalues 419 | { 420 | "prop_data" 421 | { 422 | "base" "Metal.LargeHealth" 423 | "allowstatic" "1" 424 | "dmg.bullets" "0" 425 | "dmg.fire" "0" 426 | "dmg.club" ".35" 427 | // "dmg.explosive" "1" 428 | "multiplayer_break" "both" 429 | "BlockLOS" "1" 430 | } 431 | 432 | } 433 | 434 | $collisionjoints "joints1" 435 | 436 | $collisiontext 437 | { 438 | "break" 439 | { 440 | "model" "props_unique\SubwayCarExterior01_SideDoor01_Damaged_01" 441 | "health" "10" 442 | // "fademindist" "10000" 443 | // "fademaxdist" "10000" 444 | } 445 | 446 | // it doesn't close 447 | 448 | /* neither does this comment 449 | 450 | $collisionjoints "joints2"//lastcomment """ 451 | import unittest 452 | class TestQC(unittest.TestCase): 453 | def test_parses_without_fail(self): 454 | qc = QCBuilder() 455 | qc.parse(testqc) 456 | def test_commands(self): 457 | self.maxDiff = None 458 | qc = QCBuilder() 459 | commands = qc.parse(testqc) 460 | expected_commands = [ 461 | ("modelname", {'filename': 'props\\myfirstmodel .mdl'}), 462 | ("body", {'name': 'mybody', 'mesh_filename': 'myfirstmodel-ref.smd', 'reverse': True, 'scale': 0.236}), 463 | ("body", {'mesh_filename': 'myfirstmodel-refhead.smd', 'name': 'myhead'}), 464 | ("bodygroup", {'name': 'sights', 'options': ['studio', 'mybody', 'studio', 'myhead', 'blank']}), 465 | ("attachment", {'name': 'nozzle', 'parent_bone': '', 'x': 0.0, 'y': 4.8, 'z': 0.0, 'options': []}), 466 | ("attachment", {'name': 'anim_attachment_RH', 'parent_bone': 'ValveBiped.Anim_Attachment_RH', 'x': -0.0, 'y': -0.0, 'z': 0.0, 'options': ['rotate', '-90.00', '-90.00', '0.00']}), 467 | ("staticprop", {}), 468 | ("surfaceprop", {'name': 'combine_metal'}), 469 | ("cdmaterials", {'folder': 'models\\props'}), 470 | ("texturegroup", {'name': 'skinfamilies', 'options': [['helicopter_news_adj', 'helicopter_news2'], ['..\\hybridPhysx\\helicopter_army', '..\\hybridPhysx\\helicopter_army2']]}), 471 | ("sequence", {'name': 'idle', 'options': ['myfirstmodel-ref.smd','activity','ACT_IDLE','-1','fadein','0.2',['event', 'AE_MUZZLEFLASH', '0', '357 MUZZLE'],['event', '6001', '0', '0'], 'snap']}), 472 | ("collisionmodel", {'mesh_filename': 'myfirstmodel-phys.smd'}), 473 | ("keyvalues", {'prop_data': {'base': 'Metal.LargeHealth', 'allowstatic': '1', 'dmg.bullets': '0', 'dmg.fire': '0', 'dmg.club': '.35', 'multiplayer_break': 'both', 'BlockLOS': '1'}}), 474 | ("collisionjoints", {'mesh_filename': 'joints1'}), 475 | ("$collisiontext:unimplemented", None), 476 | ] 477 | names = [cmd.__class__.__name__ if not isinstance(cmd, str) else cmd for cmd in commands] 478 | expected_names = [cmd[0] for cmd in expected_commands] 479 | self.assertCountEqual(names, expected_names, "Where First=Parsed, Second=Expected") 480 | 481 | for (expected_name, expected_params), cmd in zip(expected_commands, commands): 482 | name = cmd if isinstance(cmd, str) else cmd.__class__.__name__ 483 | options = None if isinstance(cmd, str) else cmd.__dict__ 484 | print(f'("{name}", {options})') 485 | self.assertEqual(name, expected_name) 486 | self.assertEqual(options, expected_params, f"At command: {name}\np: {options}\ne: {expected_params}") 487 | 488 | unittest.main() 489 | -------------------------------------------------------------------------------- /utils/shared/base_utils2.py: -------------------------------------------------------------------------------- 1 | 2 | from contextlib import suppress 3 | from pathlib import Path 4 | from enum import Enum 5 | import fnmatch 6 | import subprocess 7 | from types import GeneratorType 8 | from typing import Any, Callable, Iterable, Optional 9 | try: 10 | from keyvalues1 import KV 11 | except ImportError: 12 | from shared.keyvalues1 import KV 13 | 14 | import argparse 15 | arg_parser = argparse.ArgumentParser(usage = "-i -e ") 16 | arg_parser.add_argument("-i", "--src1gameinfodir", "-src1gameinfodir", help="An absolute path to S1 mod gameinfo.txt.") 17 | arg_parser.add_argument("-e", "-o", "--game", "-game", help="Name or full path to the S2 mod/addon to import into (ie. left4dead2_source2 or C:/../ep2).") 18 | arg_parser.add_argument("-b", "--branch", type=str, help="The engine branch belonging to this mod/addon (ie. hlvr or steamvr).") 19 | arg_parser.add_argument("--filter", help="Apply a substring filter to the import filelist") 20 | 21 | args_known, args_unknown = arg_parser.parse_known_args() 22 | 23 | '-src1gameinfodir "D:/Games/steamapps/common/Half-Life Alyx/game/csgo" -game hlvr_addons/csgo' 24 | 25 | class KVUtilFile(KV): 26 | @classmethod 27 | def RemapTable(cls): 28 | cls.path = EXPORT_CONTENT / "source1import_name_remap_table.txt" 29 | keyName = "name_remap_table" 30 | 31 | def remap(self, extType: str, s1Name: str, s2Remap: str): 32 | # Remap. Don't remap and WARN if already remapped. 33 | if not isinstance(self.get(extType), dict): 34 | self[extType] = {} 35 | 36 | exist = self[extType].setdefault(s1Name, s2Remap) 37 | if exist != s2Remap: 38 | WARN(f"Remap entry for '{s1Name}' -> '{s2Remap}' conflicts with existing value of '{exist}' (ignoring)") 39 | 40 | cls.remap = remap 41 | rv = cls(keyName, dict()) 42 | if cls.path.is_file(): 43 | rv.update(cls.FromFile(cls.path, case_sensitive=True)) 44 | return rv 45 | 46 | def save(self): 47 | return super().save(self.path, quoteKeys=True) 48 | 49 | 50 | from enum import Enum, unique, auto 51 | from functools import total_ordering 52 | 53 | @total_ordering 54 | class eS2Game(Enum): 55 | "known moddable source2 games" 56 | steamvr = "steamvr" 57 | hlvr = "hlvr" 58 | sbox = "sbox" 59 | adj = "adj" 60 | dota2 = "dota2" 61 | cs2 = "cs2" 62 | def __lt__(self, other): 63 | if self.__class__ is other.__class__: 64 | return self._positions[self] < self._positions[other] 65 | return NotImplemented 66 | eS2Game._positions = {x: i for i, x in enumerate(eS2Game)} 67 | 68 | @unique 69 | class eEngineUtils(Enum): 70 | def _generate_next_value_(name: str, *_) -> str: 71 | return name+".exe" 72 | def full_path(self) -> Path: 73 | return BIN / "win64" / self.value 74 | def avaliable(self): 75 | if BIN is None: 76 | return False 77 | return self.full_path().is_file() 78 | 79 | def __call__(self, args: list[str] = []) -> Optional[subprocess.CompletedProcess]: 80 | if self.avaliable(): 81 | #os.system(f'"{self.full_path}" {" ".join(args)}') 82 | return subprocess.run( 83 | args, 84 | executable=self.full_path(), 85 | #shell=True, 86 | stdout=subprocess.PIPE, 87 | creationflags= subprocess.CREATE_NO_WINDOW | subprocess.DETACHED_PROCESS, 88 | ) 89 | 90 | dmxconvert = auto() 91 | resourcecompiler = auto() 92 | resourcecopy = auto() 93 | 94 | class eEngineFolder(Enum): 95 | "Source 2 main folders relative to root" 96 | ROOT = Path() 97 | CONTENTROOT = Path("content") 98 | GAMEROOT = Path("game") 99 | SRC = Path("src") 100 | BIN = GAMEROOT / "bin" 101 | CORE_GAME = GAMEROOT / "core" 102 | @classmethod 103 | def update_root(__class__, s2_root = None): 104 | "Update ROOT, as well as paths deriving from it (GAMEROOT, CONTENTROOT, SRC...)" 105 | for folder in __class__: 106 | if s2_root is None: 107 | globals()[folder.name] = None 108 | continue 109 | globals()[folder.name] = s2_root / folder.value 110 | 111 | eEngineFolder.update_root(None) # Add ROOT, CONTENTROOT, CORE_GAME... to globals() as None 112 | IMPORT_CONTENT: Path = None 113 | IMPORT_GAME: Path = None 114 | EXPORT_CONTENT: Path = None 115 | EXPORT_GAME: Path = None 116 | #IMPORT_LEAFIEST_GAME,IMPORT_LEAFIEST_CONTENT,EXPORT_LEAFIEST_GAME,EXPORT_LEAFIEST_CONTENT 117 | gameinfo: KV = None 118 | gameinfo2: KV = None 119 | 120 | IMPORT_MOD = "" 121 | DOTA2: bool = False 122 | ADJ: bool = False 123 | STEAMVR: bool = False 124 | HLVR: bool = False 125 | SBOX: bool = False 126 | CS2: bool = False 127 | 128 | destmod: eS2Game = None 129 | 130 | def update_destmod(new_dest: eS2Game): 131 | global destmod 132 | destmod = new_dest 133 | for game in eS2Game.__members__.keys(): 134 | globals()[game.upper()] = False 135 | globals()[new_dest.value.upper()] = True 136 | 137 | update_destmod(eS2Game(args_known.branch if args_known.branch else "hlvr")) 138 | import_context: dict = None 139 | RemapTable: KVUtilFile = None 140 | 141 | _mod: Callable = None 142 | _recurse: Callable = None 143 | _dest: Callable = None 144 | output: Callable = None 145 | 146 | 147 | from functools import wraps 148 | def add_property(cls): 149 | def decorator(func): 150 | @wraps(func) 151 | def wrapper(*args, **kwargs): 152 | return func(*args, **kwargs) 153 | setattr(cls, func.__name__, property(fget=wrapper)) 154 | return func 155 | return decorator 156 | 157 | def add_method(cls): 158 | def decorator(func): 159 | @wraps(func) 160 | def wrapper(*args, **kwargs): 161 | return func(*args, **kwargs) 162 | setattr(cls, func.__name__, wrapper) 163 | return func 164 | return decorator 165 | 166 | 167 | def in_source2_environment(): 168 | return ROOT is not None 169 | 170 | def argv_error(*args, **kwargs): 171 | arg_parser.print_usage() 172 | #print("ERROR:", *args, **kwargs) 173 | raise SystemExit(*args) 174 | 175 | def parse_in_path(): 176 | global IMPORT_CONTENT, IMPORT_GAME, EXPORT_CONTENT, EXPORT_GAME 177 | global gameinfo, gameinfo2, IMPORT_MOD 178 | 179 | in_path = Path(args_known.src1gameinfodir) 180 | if not in_path.exists(): 181 | argv_error(f"src1 game path does not exist \"{in_path}\"") 182 | if in_path.is_file() and in_path.name == 'gameinfo.txt': 183 | in_path = in_path.parent 184 | gi_txt = in_path / 'gameinfo.txt' 185 | if not gi_txt.is_file(): 186 | argv_error(f"gameinfo.txt not found inside src1 mod `{in_path.name}`") 187 | try: 188 | gameinfo = KV.FromFile(gi_txt) 189 | except Exception: 190 | print("Warning: Error reading gameinfo.txt") 191 | IMPORT_GAME = in_path 192 | IMPORT_MOD = in_path.name 193 | if IMPORT_GAME.parent.name == 'game': # Source 2 dir 194 | eEngineFolder.update_root(IMPORT_GAME.parents[1]) 195 | IMPORT_CONTENT = CONTENTROOT / IMPORT_GAME.name 196 | 197 | def parse_out_path(source2_mod: Path): 198 | "Must call after parse_in_path" 199 | global IMPORT_CONTENT, IMPORT_GAME, EXPORT_CONTENT, EXPORT_GAME 200 | global destmod, gameinfo, gameinfo2 201 | 202 | if source2_mod.is_absolute(): 203 | if source2_mod.is_file(): 204 | argv_error("Cannot specify file as export game") 205 | for possible_rel in (GAMEROOT, CONTENTROOT):#, ROOT): 206 | if possible_rel is not None and source2_mod.is_relative_to(possible_rel): 207 | source2_mod = source2_mod.relative_to(possible_rel) 208 | if source2_mod.is_absolute(): # Relativity loop above didnt work 209 | for p_index, p in enumerate(source2_mod.parts[-3:-1]): 210 | if p in ('content', 'game'): # has game/content at -2 or -3 211 | p_index+=len(source2_mod.parts)-3 212 | # Importing from a source 2 app into different source 2 app. 213 | # The latter becomes root (script's working environment). 214 | eEngineFolder.update_root(Path(*source2_mod.parts[:p_index])) 215 | source2_mod = Path(*source2_mod.parts[p_index+1:]) 216 | break 217 | if p not in ('content', 'game'): # Export game has no game-content structure (sbox?) 218 | EXPORT_GAME = EXPORT_CONTENT = source2_mod 219 | elif not in_source2_environment(): 220 | argv_error(f"Cannot figure out where \"{source2_mod}\" is located. Please correct or use absolute path.") 221 | 222 | if not source2_mod.is_absolute() and len(source2_mod.parts) <=3: 223 | EXPORT_GAME = GAMEROOT / source2_mod 224 | EXPORT_CONTENT = CONTENTROOT / source2_mod 225 | 226 | elif EXPORT_GAME is EXPORT_CONTENT is None: 227 | argv_error(f"Invalid export game \"{source2_mod}\"") 228 | 229 | 230 | # if not forcing a branch, try to guess it from the path 231 | if not args_known.branch: 232 | pp = { 233 | "steamtours": eS2Game.steamvr, 234 | "hlvr": eS2Game.hlvr, 235 | "sbox": eS2Game.sbox, 236 | "s&box": eS2Game.sbox, 237 | "steampal": eS2Game.adj, 238 | "dota": eS2Game.dota2, 239 | "csgo": eS2Game.cs2, 240 | } 241 | for p in EXPORT_GAME.parts: 242 | for k, v in pp.items(): 243 | if k in p: 244 | update_destmod(v) 245 | break 246 | # Optionals 247 | 248 | # Done Parsing. Fill variables 249 | global import_context, RemapTable 250 | global _mod, _recurse, _dest, output 251 | 252 | # default import context 253 | import_context = { 254 | 'mod': None, 255 | 'recurse': True, 256 | 'importfunc': 'integ', 257 | 'src': IMPORT_GAME, 258 | 'dest': EXPORT_CONTENT, 259 | 'ignoresource2namefixup': False, 260 | 'getSkinningFromLod0': False, 261 | } 262 | RemapTable = KVUtilFile.RemapTable() 263 | 264 | _mod = lambda: import_context['mod'] 265 | _recurse = lambda: import_context['recurse'] 266 | _dest = lambda: import_context['dest'] 267 | 268 | @add_property(Path) 269 | def local(self): 270 | try: return self.relative_to(import_context['src']) 271 | except ValueError: 272 | return self.relative_to(import_context['dest']) 273 | 274 | @add_property(Path) 275 | def legacy_local(self): 276 | return self.relative_to(import_context['src'] / importing) 277 | 278 | def output(input, out_ext=None, dest=None) -> Path: 279 | if dest is None: 280 | dest = _dest() 281 | try: out = dest / input.local 282 | except Exception: out = dest / input 283 | #out = source2namefixup(out) 284 | if out_ext is not None: 285 | return out.with_suffix(out_ext) 286 | return out 287 | 288 | def parse_unknowns(submodule_globals: dict[str, Any]): 289 | def try_assign(): 290 | for arg in args_unknown: 291 | arg = arg.lstrip('-').upper() 292 | if not arg.startswith(var): 293 | continue 294 | value = arg[len(var):].lstrip('= ') 295 | if value.isnumeric(): 296 | value = int(value) 297 | elif value.lower() in ('true', 'false'): 298 | value = value.lower() == 'true' 299 | submodule_globals[var] = value 300 | break 301 | 302 | 303 | for var in submodule_globals: 304 | if not var.isupper(): 305 | continue 306 | try_assign() 307 | 308 | 309 | def parse_argv(submodule_globals: None | dict[str, Any] = None): 310 | if not args_known.src1gameinfodir: 311 | argv_error(f"Missing required argument: -i | --src1gameinfodir") 312 | parse_in_path() 313 | if not args_known.game: 314 | argv_error(f"Missing required argument: -e | --game") 315 | parse_out_path(Path(args_known.game)) 316 | if submodule_globals: 317 | parse_unknowns(submodule_globals) 318 | 319 | importing = Path() 320 | filter_=args_known.filter 321 | 322 | if __name__ == 'shared.base_utils2': 323 | #print(__name__, 'parse on import?') 324 | pass 325 | 326 | elif __name__ == '__main__': 327 | parse_argv() 328 | print(f"{ROOT=}\n{IMPORT_CONTENT=}\n{IMPORT_GAME=}\n{EXPORT_CONTENT=}\n{EXPORT_GAME=}") 329 | print(f"{gameinfo['game']=}") 330 | print(f"{destmod=}") 331 | print(f"{filter_=}") 332 | print(BIN, eEngineUtils.dmxconvert.value, eEngineUtils.dmxconvert.avaliable()) 333 | print(args_known.__dict__) 334 | 335 | import unittest 336 | class Test_ParsedPaths(unittest.TestCase): 337 | def test_s2_export(self): 338 | if EXPORT_CONTENT != EXPORT_GAME: 339 | self.assertEqual(EXPORT_CONTENT.parts.count('content'), 1) 340 | self.assertEqual(EXPORT_GAME.parts.count('game'), 1) 341 | 342 | def test_export_exist(self): 343 | self.assertTrue(EXPORT_CONTENT.is_dir()) 344 | self.assertTrue(EXPORT_GAME.is_dir()) 345 | 346 | import sys;sys.argv = sys.argv[:1] 347 | 348 | unittest.main() 349 | raise SystemExit(0) 350 | 351 | @add_method(Path) 352 | def as_posix(self) -> str: 353 | """pathlib's as_posix replaces '/' with '/' on linux 🤦""" 354 | return str(self).replace('\\', '/').lower() 355 | 356 | @add_method(Path) 357 | def without_spaces(self, repl = '_') -> Path: 358 | return self.parent / self.name.replace(' ', repl) 359 | 360 | @add_method(Path) 361 | def lowercase(self) -> Path: 362 | return Path(*(str(part).lower() for part in self.parts)) 363 | 364 | @add_method(Path) 365 | def MakeDir(self): 366 | "parents=True, exist_ok=True" 367 | self.mkdir(parents=True, exist_ok=True) 368 | 369 | def src(local_path: Path) -> Path: 370 | return import_context['src'] / local_path 371 | 372 | 373 | def collect(root, inExt, outExt, existing:bool = False, outNameRule = None, searchPath = None, match = None, skiplist = None): 374 | if not isinstance(outExt, (set, tuple, list)): 375 | outExt = ((outExt),) # support multiple output extensions 376 | 377 | if searchPath is None: 378 | searchPath = src(root) 379 | if skiplist is None: skiplist = _get_blacklist(root) 380 | 381 | if searchPath.is_file(): 382 | if searchPath.suffix == inExt: 383 | yield searchPath 384 | else: print(f"~ File suffix is not a {inExt}") 385 | 386 | elif searchPath.is_dir(): 387 | skipCountExists, skipCountBlacklist = 0, 0 388 | print(f'\n- %sSearching for%s %s files...' % ( 389 | "Shallow "*(not _recurse()), 390 | " unimported"*(not existing), 391 | f"[ {match} ]" if match else inExt, 392 | )) 393 | if match is None: 394 | match = ('**/'*_recurse()) + '*' + inExt 395 | 396 | for filePath in globsort(searchPath.glob(match)): 397 | skip_reason = '' 398 | if filter_ is not None: 399 | if not fnmatch.fnmatch(filePath, filter_): 400 | continue 401 | if outNameRule: 402 | possibleNameList = outNameRule(filePath) 403 | else: possibleNameList = filePath 404 | if not isinstance(possibleNameList, (list, GeneratorType)): possibleNameList = [possibleNameList] # support multiple output names 405 | 406 | for filePath2 in possibleNameList: # try a number of possible outputs. default is list() which will give one output 407 | if skip_reason: break 408 | for outExt_ in outExt: 409 | if skip_reason: break 410 | if not existing and output(filePath2, outExt_, import_context['dest']).exists(): 411 | skipCountExists += 1 412 | skip_reason = 'already-exist' 413 | 414 | for skip_match in skiplist: 415 | if skip_reason: break 416 | if (skip_match.replace("\\", "/") in filePath2.local.as_posix()) or filePath2.match(skip_match): 417 | skipCountBlacklist += 1 418 | skip_reason = 'blacklist' 419 | 420 | if skip_reason: 421 | skip(skip_reason, filePath2) 422 | continue #del files_with_ext[files_with_ext.index(filePath)] 423 | yield filePath 424 | 425 | if skipCountExists or skipCountBlacklist: 426 | print(' '*4 + f"Skipped: " + f"{skipCountExists} already imported | "*(not existing) +\ 427 | f"{skipCountBlacklist} found in blacklist" 428 | ) 429 | else: 430 | print("ERROR while searching: Does not exist:", searchPath) 431 | 432 | def source2namefixup(path: Path): 433 | return path.parent / path.name.lower().replace(' ', '_') 434 | 435 | #def overwrite_allowed(path, bAllowed=import_context['overwrite']): 436 | # return path.exists() and bAllowed 437 | 438 | def skip(skip_reason: str, path: Path): 439 | status(f"- skipping [{skip_reason}]: {path.local.as_posix()}") 440 | 441 | MOCK = False 442 | def globsort(iterable_files: Iterable[Path]) -> list[Path]: 443 | if MOCK: 444 | return sorted(iterable_files) 445 | return iterable_files 446 | 447 | DEBUG = False 448 | def msg(*args, **kwargs): 449 | if DEBUG: 450 | print("@ DBG:", *args, **kwargs) 451 | 452 | def warn(*args, **kwargs): print("WARNING:", *args, **kwargs) 453 | def WARN(*args, **kwargs): # TODO: source1importwarnings_lastrun.txt 454 | print("*** WARNING:", *args, **kwargs) 455 | 456 | def status(text): 457 | print(text, end='\x1b[1K\r') 458 | 459 | from os import stat 460 | from zlib import crc32 461 | def get_crc(fpath: Path): 462 | crc = 0 463 | with open(fpath, 'rb', 65536) as ins: 464 | for _ in range(int((stat(fpath).st_size / 65536)) + 1): 465 | crc = crc32(ins.read(65536), crc) 466 | return '%08X' % (crc & 0xFFFFFFFF) 467 | 468 | import json 469 | def _get_blacklist(root): 470 | json_path = Path(__file__).parent / "import_blacklist.json" 471 | try: 472 | with open(json_path, "r") as fp: 473 | return json.load(fp).get(str(root), set()) 474 | except Exception as ex: 475 | print(type(ex), "COULDNT GET BLACKLIST", json_path) 476 | return set() 477 | 478 | def GetJson(jsonPath: Path, bCreate: bool = False) -> dict: 479 | if not jsonPath.is_file(): 480 | if bCreate: 481 | jsonPath.parent.MakeDir() 482 | open(jsonPath, 'a').close() 483 | return {} 484 | with open(jsonPath) as fp: 485 | try: 486 | return json.load(fp) 487 | except json.decoder.JSONDecodeError: 488 | return {} 489 | 490 | def UpdateJson(jsonPath: Path, update: dict) -> dict: 491 | with open(jsonPath, 'w+') as fp: 492 | try: stored = json.load(fp) 493 | except json.decoder.JSONDecodeError: 494 | stored = {} 495 | stored.update(update) 496 | 497 | json.dump(update, fp, sort_keys=True, indent=4) 498 | return #stored -------------------------------------------------------------------------------- /utils/scripts_import.py: -------------------------------------------------------------------------------- 1 | import shared.base_utils2 as sh 2 | from shutil import copyfile 3 | from pathlib import Path 4 | from shared.keyvalues1 import KV, VDFDict 5 | from shared.keyvalues3 import KV3File 6 | import itertools 7 | 8 | OVERWRITE_ASSETS = False 9 | 10 | SOUNDSCAPES = True 11 | GAMESOUNDS = True 12 | SURFACES = True 13 | MISCELLANEOUS = True 14 | 15 | scripts = Path('scripts') 16 | SOUNDSCAPES_MANIFEST = scripts / "soundscapes_manifest.txt" 17 | SURFACEPROPERTIES_MANIFEST = scripts / "surfaceproperties_manifest.txt" 18 | 19 | soundscapes = Path('soundscapes') 20 | sounds = Path('sounds') 21 | surfaces = Path('surfaces') 22 | 23 | def main(): 24 | 25 | print("Importing Scripts!") 26 | 27 | if SOUNDSCAPES: 28 | print("- Soundscapes!") # soundscapes vsc, txt, and manifest... 29 | 30 | if sh.SBOX: 31 | sh.import_context['dest'] = sh.EXPORT_CONTENT 32 | for soundscape_collection_path in itertools.chain( 33 | sh.globsort((sh.src(scripts)).glob('**/soundscapes_*.vsc')), 34 | sh.globsort((sh.src(scripts)).glob('**/soundscapes_*.txt')) 35 | ): 36 | if soundscape_collection_path.name == SOUNDSCAPES_MANIFEST.name: 37 | continue 38 | SoundscapeImporter.ImportSoundscapesToVdata(soundscape_collection_path) 39 | else: 40 | sh.import_context['dest'] = sh.EXPORT_GAME 41 | for soundscapes in itertools.chain( 42 | sh.collect("scripts", ".vsc", ".txt", OVERWRITE_ASSETS, match="soundscapes_*.vsc"), 43 | sh.collect("scripts", ".txt", ".txt", OVERWRITE_ASSETS, match="soundscapes_*.txt") 44 | ): 45 | if soundscapes.name == SOUNDSCAPES_MANIFEST.name: 46 | SoundscapeImporter.ImportSoundscapeManifest(soundscapes) 47 | continue 48 | SoundscapeImporter.ImportSoundscapes(soundscapes) 49 | 50 | sh.import_context['dest'] = sh.EXPORT_CONTENT 51 | 52 | if GAMESOUNDS: 53 | print("- Game Sounds!") # game sounds: scripts -> soundevents 54 | 55 | for file in sh.globsort(sh.src(scripts).glob('**/game_sounds*.txt')): 56 | if file.name != 'game_sounds_manifest.txt': 57 | ImportGameSounds(file) 58 | 59 | if (boss:=sh.src(scripts)/'level_sounds_general.txt').is_file(): 60 | ImportGameSounds(boss) 61 | 62 | if SURFACES: 63 | print("- Surfaces!") # surfaces: scripts -> surfaceproperties.vsurf 64 | 65 | manifest_handle = VsurfManifestHandler() 66 | for surfprop_txt in sh.globsort(sh.src(scripts).glob('**/surfaceproperties*.txt')): 67 | if surfprop_txt.name == SURFACEPROPERTIES_MANIFEST.name: 68 | manifest_handle.read_manifest(surfprop_txt) 69 | continue 70 | 71 | folderOrFile = ImportSurfaceProperties(surfprop_txt) 72 | if not sh.SBOX: 73 | manifest_handle.retrieve_surfaces(folderOrFile) # file only 74 | 75 | manifest_handle.after_all_converted() 76 | 77 | if MISCELLANEOUS: 78 | print("- Other scripts!") 79 | 80 | propdata = sh.src(scripts / "propdata.txt") 81 | if propdata.is_file(): 82 | if sh.ADJ or sh.DOTA2: 83 | sh.import_context['dest'] = sh.EXPORT_CONTENT 84 | sh.MakeDir(sh.output(scripts)) 85 | if not (sh.output(propdata, ".vdata").is_file() and not OVERWRITE_ASSETS): 86 | kv = KV.FromFile(propdata, case_sensitive=True) 87 | kv3 = KV3File(generic_data_type = "prop_data") 88 | for name, data in kv.items(): 89 | if name.lower() == "breakablemodels": 90 | continue 91 | kv3[name] = dict() 92 | for key, value in data.items(): 93 | if key.lower() == "base": 94 | key = "_base" 95 | kv3[name][key] = value 96 | sh.output(propdata, ".vdata").write_text(kv3.ToString()) 97 | print("+ Saved scripts/propdata.vdata") 98 | else: 99 | sh.import_context['dest'] = sh.EXPORT_GAME 100 | sh.MakeDir(sh.output(scripts)) 101 | if not (sh.output(propdata).is_file() and not OVERWRITE_ASSETS): 102 | copyfile(propdata, sh.output(propdata)) 103 | print("+ Copied scripts/propdata.txt") 104 | 105 | 106 | 107 | print("Looks like we are done!") 108 | 109 | def fix_wave_resource(old_value): 110 | soundchars = '*?!#><^@~+)(}$' + '`' # public\soundchars.h 111 | old_value = old_value.strip(soundchars) 112 | 113 | return f"sounds/{Path(old_value).with_suffix('.vsnd').as_posix()}" 114 | 115 | class SoundscapeImporter: 116 | fixups = {'wave': fix_wave_resource} 117 | @staticmethod 118 | def recursively_fixup(kv: VDFDict): 119 | for key, value in kv.iteritems(indexed_keys=True): 120 | if isinstance(value, VDFDict): 121 | SoundscapeImporter.recursively_fixup(value) 122 | elif (k:=key[1]) in SoundscapeImporter.fixups: 123 | kv[key] = SoundscapeImporter.fixups[k](value) 124 | 125 | @staticmethod 126 | def FixedUp(file: Path): 127 | soundscape_collection = KV.CollectionFromFile(file) 128 | SoundscapeImporter.recursively_fixup(soundscape_collection) # change wav to vsnd 129 | return soundscape_collection 130 | 131 | @staticmethod 132 | def ImportSoundscapes(file: Path): 133 | new_soundscapes = '' 134 | for name, properties in SoundscapeImporter.FixedUp(file).items(): 135 | if isinstance(properties, VDFDict): 136 | new_soundscapes += f"{name}{properties.ToString()}" 137 | else: 138 | new_soundscapes += f'{name}\t"{properties}"\n' 139 | continue 140 | 141 | newsc_path = sh.output(file, '.txt') 142 | newsc_path.parent.MakeDir() 143 | newsc_path.write_text(new_soundscapes) 144 | print("+ Saved", newsc_path.local) 145 | return newsc_path 146 | #soundscapes_manifest.add("file", f'scripts/{newsc_path.name}') 147 | 148 | @staticmethod 149 | def ImportSoundscapesToVdata(file: Path): 150 | "scripts/soundscapes_*.vsc -> (n)[soundscapes/*/a.b.sndscape]" 151 | sndscape_folder = sh.EXPORT_CONTENT / soundscapes / file.stem.removeprefix('soundscapes').lstrip("_") 152 | sndscape_folder.MakeDir() 153 | 154 | for name, properties in SoundscapeImporter.FixedUp(file).items(): 155 | sndscape_file = sndscape_folder / f'{name}.sndscape' 156 | sndscape_data = dict(properties) 157 | if not OVERWRITE_ASSETS and sndscape_file.exists(): 158 | sh.status(f"Skipping {sndscape_file.local} [already-exist]") 159 | continue 160 | ... 161 | # https://developer.valvesoftware.com/wiki/Soundscape#Rules 162 | # soundscape format is not yet clear 163 | # source uses .wavs + volume + pitch 164 | # .sound assets are similar: .wavs + properties 165 | # so might need to convert play* properties to .sound assets 166 | """ 167 | data = 168 | { 169 | dsp = 5 170 | fadetime = 1.0 171 | position = 5 172 | playrandom = 173 | { 174 | sound = "sounds/soundscapes/name.sound" 175 | } 176 | playlooping = 177 | { 178 | sound = "sounds/soundscapes/name.sound" 179 | origin = [0, 0, 0] 180 | } 181 | playsoundscape = 182 | { 183 | soundscape = "soundscapes/name.sndscape" 184 | position = 6 185 | } 186 | } 187 | """ 188 | sndscape_file.write_text(KV3File(data=sndscape_data).ToString()) 189 | print("+ Saved", sndscape_file.local) 190 | return sndscape_folder 191 | 192 | @staticmethod 193 | def ImportSoundscapeManifest(asset_path: Path): 194 | "Integ, but with '.vsc' fixup for csgo" 195 | 196 | out_manifest = sh.output(asset_path) 197 | out_manifest.parent.MakeDir() 198 | 199 | with open(asset_path) as old, open(out_manifest, 'w') as out: 200 | contents = old.read().replace('.vsc', '.txt').replace('soundscaples_manifest', 'soundscapes_manifest') 201 | out.write(contents) 202 | 203 | print("+ Saved manifest file", out_manifest.local) 204 | return out_manifest 205 | 206 | """ 207 | channel ['CHAN_VOICE'] 208 | volume [1, 0.3, '0.4, 0.7'] 209 | soundlevel ['SNDLVL_NORM', 0] 210 | pitch ['PITCH_NORM', 150] 211 | wave ['common/null.wav'] 212 | rndwave [VDFDict([('wave', '~player/footsteps/slosh1.wav'), ('wave', '~player/footsteps/slosh2.wav'), ('wave', '~player/footsteps/slosh3.wav'), ('wave', '~player/footsteps/slosh4.wav')])] 213 | """ 214 | def ImportGameSounds(asset_path: Path): 215 | """ 216 | VALVE: scripts/game_sounds*.txt -> soundevents/game_sounds*.vsndevts 217 | SBOX: scripts/game_sounds*.txt -> (n)[sounds/*/a.b.sound] 218 | """ 219 | vsndevts_file = sh.EXPORT_CONTENT / "soundevents" / asset_path.local.relative_to(scripts).with_suffix('.vsndevts') 220 | out_sound_folder = sh.EXPORT_CONTENT / sounds / asset_path.stem.removeprefix('game_sounds_') 221 | if not sh.SBOX: 222 | if not OVERWRITE_ASSETS and vsndevts_file.exists(): 223 | return vsndevts_file 224 | vsndevts_file.parent.MakeDir() 225 | else: 226 | out_sound_folder.MakeDir() 227 | 228 | SNDLVL = { 229 | 'SNDLVL_NONE': 0, 230 | 'SNDLVL_25dB': 25, 231 | 'SNDLVL_30dB': 30, 232 | 'SNDLVL_35dB': 35, 233 | 'SNDLVL_40dB': 40, 234 | 'SNDLVL_45dB': 45, 235 | 'SNDLVL_50dB': 50, 236 | 'SNDLVL_55dB': 55, 237 | 'SNDLVL_IDLE': 60, 238 | 'SNDLVL_TALKING': 60, 239 | 'SNDLVL_60dB': 60, 240 | 'SNDLVL_65dB': 65, 241 | 'SNDLVL_STATIC': 66, 242 | 'SNDLVL_70dB': 70, 243 | 'SNDLVL_NORM': 75, 244 | 'SNDLVL_75dB': 75, 245 | 'SNDLVL_80dB': 80, 246 | 'SNDLVL_85dB': 85, 247 | 'SNDLVL_90dB': 90, 248 | 'SNDLVL_95dB': 95, 249 | 'SNDLVL_100dB': 100, 250 | 'SNDLVL_105dB': 105, 251 | 'SNDLVL_120dB': 120, 252 | 'SNDLVL_130dB': 130, 253 | 'SNDLVL_GUNFIRE': 140, 254 | 'SNDLVL_140dB': 140, 255 | 'SNDLVL_150dB': 150, 256 | } 257 | 258 | PITCH = { 259 | 'PITCH_NORM': 100, 260 | 'PITCH_LOW': 95, 261 | 'PITCH_HIGH': 120, 262 | } 263 | 264 | def _handle_range(k, v) -> "median, deviation": 265 | if not (type(v) is str and ',' in v): 266 | return 267 | try: 268 | mm = tuple(v.split(',', 1)) 269 | min, max = float(mm[0]), float(mm[1]) 270 | except Exception: 271 | return 272 | else: 273 | out_v = min+max / 2 274 | range = out_v - min 275 | if k == 'pitch': # Normalize pitch 276 | range=range/100;out_v=out_v/100 277 | return out_v, range 278 | 279 | kv = KV.CollectionFromFile(asset_path) 280 | kv3 = KV3File() 281 | 282 | for gamesound, gs_data in kv.items(): # "weapon.fire", {} 283 | 284 | if not sh.SBOX and gamesound[0].isdigit(): 285 | gamesound = '_' + gamesound 286 | 287 | # Valve 288 | out_kv = dict(type='src1_3d') # why 3d? 289 | 290 | # SBOX 291 | sound_file = out_sound_folder / f'{gamesound}.sound' 292 | sound_data = dict( 293 | ui = False, 294 | volume = 1.0, 295 | volumerandom = 0.0, 296 | pitch = 1.0, 297 | pitchrandom = 0.0, 298 | distancemax = 2000.0, 299 | sounds = [], 300 | selectionmode = "0", 301 | ) 302 | 303 | if sh.SBOX and not OVERWRITE_ASSETS and sound_file.exists(): 304 | sh.status(f"Skipping {sound_file.local} [already-exist]") 305 | continue 306 | 307 | for (i, k), v in gs_data.items(indexed_keys=True): 308 | out_k, out_v = k, v 309 | ## Turns out you can have multiple 'wave' in counter strike global offensive! 310 | # instead of using rndwave {} !! 311 | if k == 'wave': 312 | fixed_wav = fix_wave_resource(v) 313 | #if out_v == "sounds/common/null.vsnd" and not sh.SBOX: 314 | # continue 315 | if not sh.SBOX: 316 | out_kv.setdefault('vsnd_files', []).append(fixed_wav) 317 | continue 318 | sound_data['sounds'].append(fix_wave_resource(v)) 319 | 320 | elif k == 'rndwave': 321 | out_k, out_v = 'vsnd_files', [] 322 | for rndwave_k, rndwave_v in v.items(indexed_keys=True): 323 | if rndwave_k[1] != 'wave': 324 | continue 325 | res = fix_wave_resource(rndwave_v) 326 | if res != 'sounds/common/null.vsnd': 327 | out_v.append(res) 328 | sound_data['sounds'].append(res) 329 | 330 | if not len(out_v) and not sh.SBOX: continue 331 | 332 | elif k in ('volume', 'pitch', 'soundlevel'): 333 | if range:=_handle_range(k, v): 334 | out_kv.update({k:range[0], k+"_rand_min":-range[1], k+"_rand_min":range[1],}) 335 | sound_data[k] = range[0] 336 | sound_data[k+'random'] = range[1] 337 | continue 338 | if k == 'volume': 339 | if isinstance(v, str) and 'VOL_NORM' in v: 340 | if sh.SBOX: 341 | continue 342 | v = 1.0 # aka just continue? (default) 343 | else: 344 | try: 345 | v = float(v) 346 | except ValueError: 347 | v = 1.0 348 | out_v = sound_data[k] = v 349 | elif k == 'pitch': 350 | if type(v) is str: 351 | v = PITCH.get(v, 100) 352 | # Normalize pitch 353 | out_v = sound_data[k] = v / 100 354 | elif k == 'soundlevel': 355 | if sh.SBOX: 356 | ... 357 | elif type(v) is not str: 358 | ... 359 | else: 360 | if (out_v:=SNDLVL.get(v)) is None: 361 | out_v = 75 362 | if v.startswith('SNDLVL_'): 363 | try: 364 | out_v = int(v[7:-2]) 365 | except Exception: 366 | print(v[7:]) 367 | else: print(v) 368 | if not sh.SBOX: 369 | if k == 'delay_msec': out_k, out_v = 'delay', v/1000 370 | elif k == 'ignore_occlusion': out_k, out_v = 'occlusion_scale', (1 if not v else 0)#'sa_enable_occlusion' 371 | elif k == 'operator_stacks': # this only exists in globul offensif 372 | ... 373 | continue 374 | elif k in ('soundentry_version', 'alert', 'hrtf_follow','gamedata',): # skiplist 375 | continue 376 | else: 377 | continue 378 | 379 | out_kv[out_k] = out_v 380 | 381 | if sh.SBOX: 382 | sound_file.write_text(KV3File(data=sound_data).ToString()) 383 | print("+ Saved", sound_file.local) 384 | else: 385 | if out_kv == dict(type='src1_3d'): # empty 386 | out_kv = None 387 | else: 388 | wav_list = out_kv.get('vsnd_files') 389 | if wav_list is not None and len(wav_list) == 1: 390 | if wav_list[0] == 'sounds/common/null.vsnd': 391 | out_kv['vsnd_files'] = None 392 | else: 393 | out_kv['vsnd_files'] = wav_list[0] 394 | kv3[gamesound] = out_kv 395 | 396 | if sh.SBOX: 397 | return out_sound_folder 398 | else: 399 | vsndevts_file.write_text(kv3.ToString()) 400 | 401 | print("+ Saved", vsndevts_file.local) 402 | return vsndevts_file 403 | 404 | 405 | vsurf_base_params = { 406 | 'physics': ('density','elasticity','friction','dampening','thickness',), 407 | 'audiosounds':('bulletimpact','scraperough','scrapesmooth','impacthard','impactsoft','rolling','break','strain',), 408 | 'audioparams': ('audioreflectivity','audiohardnessfactor','audioroughnessfactor','scrapeRoughThreshold','impactHardThreshold',), 409 | } 410 | 411 | class CaseInsensitiveKey(str): 412 | def __hash__(self): return hash(self.lower()) 413 | def __eq__(self, other: str): return self.lower() == other.lower() 414 | class CaseInsensitiveDict(dict): 415 | def __setitem__(self, key, value): super().__setitem__(CaseInsensitiveKey(key), value) 416 | def __getitem__(self, key): return super().__getitem__(CaseInsensitiveKey(key)) 417 | 418 | def ImportSurfaceProperties(asset_path: Path): 419 | """ 420 | VALVE: scripts/surfaceproperties*.txt -> surfaceproperties/surfaceproperties*.vsurf 421 | SBOX: scripts/surfaceproperties*.txt -> (n)[surfaces/*/name.surface] 422 | """ 423 | 424 | vsurf_file: Path = sh.EXPORT_CONTENT / "surfaceproperties" / asset_path.local.relative_to(scripts).with_suffix('.vsurf') 425 | surface_folder = sh.EXPORT_CONTENT / surfaces / asset_path.stem.removeprefix('surfaceproperties').lstrip("_") 426 | 427 | if not sh.SBOX: 428 | if vsurf_file.is_file() and not OVERWRITE_ASSETS: 429 | return sh.skip('already-exist', vsurf_file) 430 | vsurf_file.parent.MakeDir() 431 | else: 432 | surface_folder.MakeDir() 433 | 434 | surface_collection = KV.CollectionFromFile(asset_path) 435 | vsurf = KV3File(SurfacePropertiesList = []) 436 | 437 | for surface, properties in {**surface_collection}.items(): 438 | new_surface = dict(surfacePropertyName = surface) 439 | unsupported_params = {} 440 | surface_file = surface_folder / f'{surface}.surface' 441 | if not OVERWRITE_ASSETS and surface_file.exists(): 442 | sh.status(f"Skipping {surface_file.local} [already-exist]") 443 | continue 444 | # Sbox 445 | surface_data = CaseInsensitiveDict({ 446 | CaseInsensitiveKey("Friction"): 0.5, 447 | CaseInsensitiveKey("Elasticity"): 0.5, 448 | CaseInsensitiveKey("Density"): 0.5, 449 | CaseInsensitiveKey("Thickness"): 0.5, 450 | CaseInsensitiveKey("Dampening"): 0.0, 451 | CaseInsensitiveKey("BounceThreshold"): 0.0, 452 | CaseInsensitiveKey("ImpactEffects"): CaseInsensitiveDict({ 453 | CaseInsensitiveKey("Bullet"): [], 454 | CaseInsensitiveKey("BulletDecal"): [], 455 | CaseInsensitiveKey("Regular"): [], 456 | }), 457 | CaseInsensitiveKey("Sounds"): CaseInsensitiveDict({ 458 | CaseInsensitiveKey("ImpactSoft"): "", 459 | CaseInsensitiveKey("ImpactHard"): "", 460 | CaseInsensitiveKey("RoughScrape"): "", 461 | CaseInsensitiveKey("FootLeft"): "", 462 | CaseInsensitiveKey("FootRight"): "", 463 | CaseInsensitiveKey("FootLaunch"): "", 464 | CaseInsensitiveKey("FootLand"): "", 465 | }), 466 | CaseInsensitiveKey("basesurface"): "surfaces/default.surface", 467 | CaseInsensitiveKey("description"): "", 468 | }) 469 | for key, value in properties.items(): 470 | # Valve 471 | context = next((ctx for ctx, group in vsurf_base_params.items() if key in group), None) 472 | if context is not None: 473 | new_surface.setdefault(context, {})[key] = value 474 | elif key in ('base'): 475 | new_surface[key] = value 476 | else: 477 | unsupported_params[key] = value 478 | 479 | # SBOX 480 | key = {'stepleft':'footleft','stepright':'footright','base':'basesurface'}.get(key, key) 481 | if key in surface_data: # needs to be a counterpart 482 | if key == "basesurface": 483 | value = f"{surface_folder.relative_to(sh.EXPORT_CONTENT).as_posix()}/{value}.surface" 484 | surface_data[key] = value 485 | elif key in surface_data["Sounds"]: 486 | surface_data["Sounds"][key] = value 487 | 488 | if sh.SBOX: 489 | surface_file.write_text(KV3File(data=surface_data).ToString()) 490 | print("+ Saved", surface_file.local) 491 | else: 492 | # Add default base 493 | if 'base' not in new_surface: 494 | if surface not in ('default', 'player'): 495 | new_surface['base'] = 'default' 496 | 497 | # Add unsupported parameters last 498 | if unsupported_params: 499 | new_surface['legacy_import'] = unsupported_params 500 | 501 | vsurf['SurfacePropertiesList'].append(new_surface) 502 | 503 | if sh.SBOX: 504 | return surface_folder 505 | else: 506 | vsurf_file.write_text(vsurf.ToString()) 507 | print("+ Saved", vsurf_file.local) 508 | 509 | return vsurf_file, vsurf['SurfacePropertiesList'] 510 | 511 | class VsurfManifestHandler: 512 | """ 513 | * source only reads files listed in manifest 514 | * source2 only reads a single `surfaceproperties.vsurf` file. 515 | - 516 | --> so collect these split surfaces to main file as per rules of manifest 517 | """ 518 | def __init__(self): 519 | self.manifest_files = [] 520 | self.all_surfaces: dict[Path, list] = {} 521 | 522 | def read_manifest(self, manifest_file: Path): 523 | self.manifest_files.extend(KV.FromFile(manifest_file).get_all_for('file')) 524 | 525 | def retrieve_surfaces(self, rv: tuple[Path, list]): 526 | if rv is not None: 527 | self.all_surfaces[rv[0]] = rv[1] 528 | 529 | def after_all_converted(self): 530 | # Only include surfaces from files that are on manifest. 531 | # Last file has override priority 532 | if not (self.manifest_files and self.all_surfaces): 533 | return 534 | vsurf_path = next(iter(self.all_surfaces)).with_stem('surfaceproperties') 535 | vsurf = KV3File(SurfacePropertiesList = []) 536 | for file in self.manifest_files[::-1]: 537 | file = vsurf_path.parents[1] / 'surfaceproperties' / Path(file).with_suffix('.vsurf').name 538 | for surfaceproperty in self.all_surfaces.get(file, ()): 539 | if not surfaceproperty: 540 | break 541 | # ignore if this surface is already defined 542 | if any(surfaceproperty2['surfacePropertyName'].lower() == surfaceproperty['surfacePropertyName'].lower() 543 | for surfaceproperty2 in vsurf['SurfacePropertiesList'] 544 | ): 545 | continue 546 | vsurf['SurfacePropertiesList'].append(surfaceproperty) 547 | 548 | vsurf_path.write_text(vsurf.ToString()) 549 | print("+ Saved", vsurf_path.local) 550 | 551 | if __name__ == '__main__': 552 | sh.parse_argv(globals()) 553 | main() 554 | -------------------------------------------------------------------------------- /utils/shared/cppkeyvalues.py: -------------------------------------------------------------------------------- 1 | # cppkeyvalues.py 2 | # A keyvalues.cpp python rewrite 3 | 4 | import collections 5 | 6 | #from ctypes import * 7 | NULL = 0 8 | 9 | MAX_ERROR_STACK = 64 10 | INVALID_KEY_SYMBOL = -1 11 | class CKeyValuesErrorStack(): 12 | errorStack = [] 13 | FileName = "NULL" 14 | errorIndex = 0 15 | maxErrorIndex = 0 16 | EncounteredErrors = False 17 | 18 | stackLevel = 0 19 | 20 | def SetFilename(self, filename): 21 | self.FileName = filename 22 | self.maxErrorIndex = 0 23 | 24 | def Push(self, symName): 25 | if self.errorIndex < MAX_ERROR_STACK: 26 | self.errorStack.insert(self.errorIndex, symName) 27 | self.errorIndex += 1 28 | self.maxErrorIndex = max(self.maxErrorIndex, self.errorIndex-1) 29 | return self.errorIndex-1 30 | def Pop(self): 31 | self.errorIndex -=1 32 | assert self.errorIndex >= 0 33 | def Reset(self, symName: int): 34 | assert self.stackLevel >= 0 and self.stackLevel < self.errorIndex 35 | if self.stackLevel < MAX_ERROR_STACK: 36 | self.errorStack[self.stackLevel] = symName 37 | # Hit an error, report it and the parsing stack for context 38 | def ReportError(self, Error): 39 | pv = "KeyValues Error: %s in file %s\n" % (Error, self.FileName) 40 | for i in range(self.maxErrorIndex): 41 | if ( i < MAX_ERROR_STACK and self.errorStack[i] != INVALID_KEY_SYMBOL ): 42 | if ( i < self.errorIndex ): 43 | pv += "%s, " 44 | else: 45 | pv += "(*%s*), " 46 | pv += "\n" 47 | print(pv, end='') 48 | self.EncounteredErrors = True 49 | g_KeyValuesErrorStack = CKeyValuesErrorStack() 50 | 51 | class CKeyErrorContext: 52 | stackLevel = 0 53 | def __init__(self, symName: int): 54 | self.stackLevel = g_KeyValuesErrorStack.Push(symName) 55 | def Reset(self, symName: int): 56 | g_KeyValuesErrorStack.Reset(self.stackLevel, symName) 57 | def GetStackLevel(self): 58 | return self.stackLevel 59 | class Conv: 60 | def GetDelimiter(self): 61 | return '"' 62 | def GetDelimiterLength(self): 63 | return 1 64 | 65 | #from io import BufferedReader # ???? 66 | class CUtlBuffer(collections.UserString): 67 | 68 | def IsValid(self) -> bool: 69 | return self.data != "" 70 | 71 | def GetDelimitedString(self, conv: Conv, nMaxChars: int) -> str: 72 | 73 | string = CUtlBuffer(self.data) 74 | 75 | if not string.IsValid(): 76 | return string # null, empty string 77 | if nMaxChars == 0: 78 | nMaxChars = 2147483647 79 | 80 | return string.split('"')[1][:nMaxChars] 81 | 82 | def lcut(self, n): self.data = self.data[n:] 83 | def rcut(self, n): self.data = self.data[:n] 84 | 85 | def PeekGet(self, i) -> str: # from start get i chars 86 | 87 | return self.data[:i] 88 | peek = [] 89 | for j in range(i): 90 | try: char = self.data[j] 91 | except IndexError: char = 0 92 | peek.append(char) 93 | return peek 94 | 95 | def EatWhiteSpace(self): 96 | self.data = self.data.lstrip() 97 | 98 | def EatCPPComment(self) -> bool: 99 | #peek = self.PeekGet(2) 100 | #print(f"our peek {peek}") 101 | #if (not peek or (peek[0] != '/') or (peek[1] != '/')): 102 | # return False 103 | if (not self.data or (self.data[0] != '/') or (self.data[1] != '/')): 104 | return False 105 | 106 | self.lcut(2)#.data = self.data[2:] 107 | 108 | for i, char in enumerate(self.data, 1): 109 | if char != '\n': 110 | continue 111 | self.data = self.data[i:] 112 | return True 113 | 114 | KEYVALUES_TOKEN_SIZE = 1024 * 32 115 | 116 | # const char *KeyValues::ReadToken( CUtlBuffer &buf, bool &wasQuoted, bool &wasConditional ) 117 | class Token(collections.UserString): 118 | def __init__(self, data = "") -> None: 119 | super().__init__(data) 120 | self.wasQuoted = False 121 | self.wasConditional = False 122 | 123 | class NullObj: 124 | def __bool__(self): 125 | return False 126 | def __eq__(self, other): 127 | if other == 0: 128 | return True 129 | 130 | class NullToken(NullObj): # str 131 | def __init__(self, wasQuoted = False, wasConditional = False): 132 | self.wasQuoted = wasQuoted 133 | self.wasConditional = wasConditional 134 | 135 | class CKeyValuesTokenReader: 136 | def __init__(self, buf: CUtlBuffer) -> None: 137 | self.m_Buffer: CUtlBuffer = buf 138 | self.m_nTokensRead: int = 0 139 | 140 | self.lastToken = Token() 141 | 142 | def ReadToken(self): 143 | nullToken = NullToken(self.lastToken.wasQuoted, self.lastToken.wasConditional) # 144 | token = Token() 145 | self.lastToken = token 146 | 147 | if not self.m_Buffer: 148 | return nullToken 149 | 150 | while ( True ): 151 | self.m_Buffer.EatWhiteSpace() 152 | if not self.m_Buffer.IsValid(): return nullToken 153 | if not self.m_Buffer.EatCPPComment(): 154 | break 155 | 156 | c = self.m_Buffer[0] 157 | 158 | if c == 0: 159 | return nullToken 160 | 161 | # read quoted strings specially 162 | if c == '\"': 163 | token.wasQuoted = True 164 | token.data = self.m_Buffer.GetDelimitedString(Conv(), KEYVALUES_TOKEN_SIZE) 165 | 166 | self.m_nTokensRead += 1 167 | self.m_Buffer.lcut(len(token)+2) # buffer workaround 168 | return token 169 | 170 | if c == '{' or c == '}' or c == '=': 171 | token.data = c 172 | self.m_nTokensRead += 1 173 | self.m_Buffer.lcut(1) # buffer workaround 174 | return token 175 | 176 | # read in the token until we hit a whitespace or a control character 177 | bReportedError = False 178 | bConditionalStart = False 179 | nCount = 0 180 | 181 | #while (True): #( c = (const char*)buf.PeekGet( sizeof(char), 0 ) ) 182 | for c in self.m_Buffer.data: 183 | # break if any control character appears in non quoted tokens 184 | if c == '"' or c == '{' or c == '}' or c == '=': 185 | break 186 | if c == '[': 187 | bConditionalStart = True 188 | if c == ']' and bConditionalStart: 189 | bConditionalStart = False 190 | self.wasConditional = True 191 | # break on whitespace 192 | if c.isspace() and not bConditionalStart: 193 | break 194 | if nCount < (KEYVALUES_TOKEN_SIZE-1): 195 | token.data += c 196 | nCount+=1 197 | elif(not bReportedError): 198 | bReportedError = True 199 | g_KeyValuesErrorStack.ReportError(" ReadToken overflow") 200 | 201 | if not token.data: 202 | token.data = 0 203 | self.m_nTokensRead += 1 204 | 205 | self.m_Buffer.lcut(nCount) # buffer workaround 206 | return token 207 | 208 | def SeekBackOneToken(self): # and return it 209 | return self.lastToken 210 | 211 | from enum import IntEnum, Enum 212 | from typing import Generator, Optional, Sequence, Union, Iterable, TypedDict 213 | try: 214 | from cstr import strtol, strtod 215 | except ImportError: 216 | from shared.cstr import strtol, strtod 217 | class KeyValues: pass # Prototype LUL ( for typing to work inside own class functions) 218 | 219 | from functools import partial, wraps 220 | 221 | def _dec_subkeyvalue(func, isSub = True): 222 | @wraps(func) 223 | def ret_fun(self, *args, **kwargs): 224 | if self.IsSub() == isSub: 225 | return func(self, *args, **kwargs) 226 | return None 227 | return ret_fun 228 | 229 | class KeyValues: pass # Prototype LUL ( for typing to work inside own class functions) 230 | class KVCollection: pass 231 | class GenericValue: pass 232 | 233 | #KVCollection = NewType("KVCollection", collections.UserList) 234 | 235 | class KVValue:#(KVCollection): 236 | KVCollect = partial(_dec_subkeyvalue, isSub=True) 237 | KVSingle = partial(_dec_subkeyvalue, isSub=False) 238 | 239 | def __new__(cls, val) -> Union[KVCollection, GenericValue]: 240 | if cls is KVValue: 241 | # Iterator BUG here as every KV is considered Iterable simply becaue it has a __iter__ 242 | # KV("", KV("", 2).value) -> KV("", []) instead of KV("", 2) 243 | # KVValue(KVValue()) forces into KVCollection instead of preserving data type 244 | if isinstance(val, Sequence) and not isinstance(val, str): 245 | cls = KVCollection 246 | else: 247 | cls = GenericValue 248 | self = object.__new__(cls) 249 | self.data = val 250 | return self 251 | 252 | def __init__(self, val: Union[int, float, str, Iterable[int], Iterable[KeyValues]]) -> None: 253 | self.data = val 254 | #self.fancyGetInt = self.data.__int__ 255 | #self.fancyGetFlat = self.data.__float__ 256 | 257 | def __iter__(self): 258 | if self.IsSub(): 259 | return super().__iter__() 260 | return iter(()) # is this the behavior of cpp? i think so 261 | 262 | def append(self, item: KeyValues) -> None: 263 | print('Appending item', item) 264 | if not isinstance(item, KeyValues): 265 | raise ValueError("Can only add KeyValues") 266 | return super().append(item) 267 | 268 | def IsSub(self): 269 | return isinstance(self.data, list) 270 | 271 | @KVSingle 272 | def GetInt(self) -> int: return int(self.data) # atoi, int cast# 273 | @KVSingle 274 | def GetFloat(self) -> int: return float(self.data) # atof, float cast 275 | 276 | 277 | #@KVCollect 278 | def GetValues(self) -> Generator[KeyValues, None, None]: 279 | yield from self 280 | 281 | def __repr__(self): 282 | return repr(self.data) 283 | 284 | def ToString(self, level = 0): 285 | line_indent = '\t' * level 286 | s = "" 287 | if self.IsSub(): 288 | s += "\n" + line_indent + '{\n' 289 | for item in self: 290 | s += (item.ToString(level+1)) 291 | s += line_indent + "}\n" 292 | else: 293 | s = f'\t"{self.data}"\n' 294 | return s 295 | 296 | def ToBuiltin(self): 297 | if self.IsSub(): 298 | return [*(kv.ToTuple() for kv in self.data)] 299 | return self.data 300 | 301 | class KVCollection(collections.UserList, KVValue): ... 302 | class ColorValue(collections.UserList, KVValue): ... # unsigned char [4] 303 | class GenericValue(KVValue): ... # int, float, char*, wchar*, ptr, 304 | 305 | class KVType(IntEnum): 306 | TYPE_NONE = 0 # hasChild 307 | TYPE_STRING = 1 308 | TYPE_INT = 2 309 | TYPE_FLOAT = 3 310 | TYPE_PTR = 4 311 | TYPE_WSTRING = 5 312 | TYPE_COLOR = 6 313 | TYPE_UINT64 = 7 314 | 315 | class KeyValues: 316 | "Key that holds a Value `('k', 'v')`. Value can be a list holding other KeyValues `('k', [...])`" 317 | 318 | 319 | def __init__(self, k: Optional[str] = None, v: Union[int, float, str, KVCollection] = None, case_sensitive = False, escape = False): 320 | 321 | self.DataType: KVType = KVType.TYPE_NONE 322 | self.KeyNameCaseSensitive2: bool = case_sensitive 323 | self.HasEscapeSequences: bool = escape # Always acts as False 324 | 325 | self.keyName = str(k) 326 | if not self.KeyNameCaseSensitive2 and k is not None: 327 | self.keyName= self.keyName.lower() 328 | 329 | self.value = KVValue(v) 330 | 331 | # listlike pointery variables 332 | self.Peer: KeyValues = None 333 | #self.Sub: KeyValues = None 334 | self.Chain: KeyValues = None 335 | 336 | self._sValue: str = None 337 | self._wsValue: str = None 338 | self._iValue: int = None 339 | self._flValue: float = None 340 | self._Color: list = None 341 | 342 | #def __str__(self) -> str: 343 | # return f"(\"{self.keyName}\": {self.value})" 344 | 345 | @property 346 | def Sub(self): 347 | return self.value.GetValues() 348 | @Sub.setter 349 | def Sub(self, subvalues: Iterable[KeyValues]): 350 | self.value = KVValue(subvalues) 351 | @Sub.deleter 352 | def Sub(self): 353 | del self.value 354 | self.value = GenericValue(None) 355 | @property ##### m_sValue 356 | def m_sValue(self): 357 | return self._sValue 358 | @m_sValue.setter 359 | def m_sValue(self, val: str): 360 | self.value = KVValue(val) 361 | self._sValue = val 362 | @m_sValue.deleter 363 | def m_sValue(self): 364 | self._sValue = None 365 | 366 | @property ##### m_iValue 367 | def m_iValue(self): 368 | return self._iValue 369 | @m_iValue.setter 370 | def m_iValue(self, val: float): 371 | self.value = KVValue(val) 372 | self._iValue = val 373 | @m_iValue.deleter 374 | def m_iValue(self): 375 | self._iValue = None 376 | 377 | @property ##### m_flValue 378 | def m_flValue(self): 379 | return self._flValue 380 | @m_flValue.setter 381 | def m_flValue(self, val: float): 382 | self.value = KVValue(val) 383 | self._flValue = val 384 | @m_flValue.deleter 385 | def m_flValue(self): 386 | self._flValue = None 387 | 388 | def FindKey(self, keyName, bCreate): 389 | if keyName is None: 390 | return self 391 | 392 | lastItem = None 393 | dat = None 394 | for dat in self.value: 395 | lastItem = dat 396 | if dat.keyName == keyName: 397 | break 398 | 399 | if not dat: 400 | if bCreate: 401 | ... 402 | else: 403 | return None 404 | return lastItem 405 | 406 | 407 | def Clear(self): 408 | del self.Sub 409 | self.Sub = None 410 | self.DataType = KVType.TYPE_NONE 411 | 412 | def SetName(self, name: str): 413 | if not self.KeyNameCaseSensitive2: 414 | name = name.lower() 415 | self.keyName = name 416 | 417 | def LoadFromFile(self, resourceName, **params): 418 | with open(resourceName, 'r') as f: 419 | buf = CUtlBuffer(f.read()) 420 | self.LoadFromBuffer(resourceName, buf, **params) 421 | 422 | def RecursiveLoadFromFile(self, resourceName, **params): 423 | with open(resourceName, 'r') as f: 424 | self.RecursiveLoadFromBuffer(resourceName, CKeyValuesTokenReader(CUtlBuffer(f.read())), True) 425 | 426 | def LoadFromBuffer(self, resourceName, buf: CUtlBuffer, **params) -> bool: 427 | previousKey: KeyValues = None 428 | currentKey: KeyValues = self 429 | includedKeys: "list[KeyValues]" = [] 430 | baseKeys: "list[KeyValues]" = [] 431 | #wasQuoted: bool 432 | #wasConditional: bool 433 | tokenReader = CKeyValuesTokenReader(buf) # (self, buf) 434 | #print(tokenReader, tokenReader.__dict__) 435 | 436 | g_KeyValuesErrorStack.SetFilename( resourceName ) 437 | while True: # do while 438 | # the first thing must be a key 439 | s = tokenReader.ReadToken() 440 | if not buf.IsValid() or s == 0: 441 | break 442 | 443 | if not s.wasQuoted and not s: 444 | # non quoted empty strings stop parsing 445 | # quoted empty strings are allowed to support unnnamed KV sections 446 | break 447 | 448 | if s == '#include' or s == '#base': # special include macro (not a key name) 449 | macro = str(s) 450 | s = tokenReader.ReadToken() 451 | # Name of subfile to load is now in s 452 | 453 | if not s: 454 | g_KeyValuesErrorStack.ReportError(f"{macro} is NULL.") 455 | else: 456 | ... 457 | #ParseIncludedKeys(resourceName, s, "baseKeys if #base else includedKeys") #TODO 458 | 459 | continue 460 | 461 | # I think this just means: did you invoke this function as a classmethod -- which cannot happen here 462 | if not currentKey: 463 | currentKey = KeyValues(s, case_sensitive=self.KeyNameCaseSensitive2, escape=self.HasEscapeSequences) 464 | #currentKey.HasEscapeSequences = self.HasEscapeSequences # same format has parent use 465 | 466 | if previousKey: 467 | previousKey.SetNextKey(currentKey) 468 | else: 469 | currentKey.SetName(str(s)) 470 | 471 | s = tokenReader.ReadToken() 472 | 473 | if s.wasConditional: 474 | s = tokenReader.ReadToken() 475 | 476 | if s and s[0] == '{' and not s.wasQuoted: 477 | # header is valid so load the file 478 | currentKey.RecursiveLoadFromBuffer(resourceName, tokenReader) 479 | else: 480 | g_KeyValuesErrorStack.ReportError("LoadFromBuffer: missing {") 481 | 482 | if False: 483 | if previousKey: 484 | previousKey.SetNextKey(None) 485 | 486 | if not buf.IsValid(): 487 | break 488 | 489 | def RecursiveLoadFromBuffer(self, resourceName, tokenReader: CKeyValuesTokenReader, loadingCollectionFile = False): 490 | self.Sub = [] # change value type to collection so you can append other KVs - aka sub-keyvalues 491 | while True: 492 | bAccepted = True 493 | # get the key name 494 | name = tokenReader.ReadToken() 495 | if name == 0: # EOF stop reading 496 | if not loadingCollectionFile: 497 | g_KeyValuesErrorStack.ReportError("RecursiveLoadFromBuffer: got EOF instead of keyname") 498 | break 499 | if name == "": # empty token, maybe "" or EOF BUG this doesnt make sense for empty keys? 500 | g_KeyValuesErrorStack.ReportError("RecursiveLoadFromBuffer: got empty keyname") 501 | break 502 | if name[0] == '}' and not name.wasQuoted: # top level closed, stop reading 503 | break 504 | 505 | dat = KeyValues(name, case_sensitive=self.KeyNameCaseSensitive2) 506 | self.value.append(dat) 507 | del name 508 | value = tokenReader.ReadToken() 509 | 510 | vne = (value != "") # value not empty -> True 511 | 512 | foundConditional = value.wasConditional 513 | if value.wasConditional and value: 514 | bAccepted = self.EvaluateConditional(peek, pfnEvaluateSymbolProc) 515 | value = tokenReader.ReadToken() 516 | if value == 0: 517 | g_KeyValuesErrorStack.ReportError("RecursiveLoadFromBuffer: got NULL key") 518 | del self.value[-1] 519 | break 520 | 521 | 522 | # support the '=' as an assignment, makes multiple-keys-on-one-line easier to read in a keyvalues file 523 | if vne and value[0] == '=' and not value.wasQuoted: #value[0] == '=' value is sometimes empty giving IndexError 524 | # just skip over it 525 | value = tokenReader.ReadToken() 526 | foundConditional = value.wasConditional 527 | if value.wasConditional and value: 528 | bAccepted = self.EvaluateConditional(peek, pfnEvaluateSymbolProc) 529 | value = tokenReader.ReadToken() 530 | if foundConditional and True: 531 | # if there is a conditional key see if we already have the key defined and blow it away, last one in the list wins 532 | ... 533 | if value == 0: 534 | g_KeyValuesErrorStack.ReportError("RecursiveLoadFromBuffer: got NULL key" ) 535 | del self.value[-1] 536 | break 537 | if vne and value[0] == '}' and not value.wasQuoted: 538 | g_KeyValuesErrorStack.ReportError("RecursiveLoadFromBuffer: got } in key") 539 | del self.value[-1] 540 | break 541 | if vne and value[0] == '{' and not value.wasQuoted: 542 | # sub value list 543 | dat.RecursiveLoadFromBuffer(resourceName, tokenReader) 544 | else: 545 | if value.wasConditional: 546 | g_KeyValuesErrorStack.ReportError("RecursiveLoadFromBuffer: got conditional between key and value" ) 547 | break 548 | if dat.m_sValue: 549 | del dat.m_sValue # dont need 550 | dat.m_sValue = None 551 | 552 | length = len(value) 553 | pSEnd = length 554 | 555 | lval = strtol(str(value)) 556 | pIEnd = lval.endpos 557 | lval = lval.value 558 | 559 | fval = strtod(str(value)) 560 | pFEnd = fval.endpos 561 | fval = fval.value 562 | 563 | overflow: bool = (lval == 2147483647 or lval == -2147483646) 564 | if not vne:#value == "": 565 | dat.DataType = KVType.TYPE_STRING 566 | elif 18 == length and value[0] == '0' and value[1] == 'x': 567 | dat.m_sValue = str(int(str(value), 16)) # 16? 568 | dat.DataType = KVType.TYPE_UINT64 569 | elif (pFEnd > pIEnd) and (pFEnd == pSEnd):#len(str(fval).rstrip('0').rstrip('.')) > len(str(lval)): # TODO support this '1.511111111fafsadasd' 570 | dat.m_flValue = fval 571 | dat.DataType = KVType.TYPE_FLOAT 572 | elif (pIEnd == pSEnd) and not overflow: # len(str(lval)) == length 573 | dat.m_iValue = lval 574 | dat.DataType = KVType.TYPE_INT 575 | else: 576 | dat.DataType = KVType.TYPE_STRING 577 | 578 | if dat.DataType == KVType.TYPE_STRING: 579 | dat.m_sValue = str(value) 580 | 581 | # Look ahead one token for a conditional tag 582 | #peek = tokenReader.ReadToken() 583 | #if peek.wasConditional: 584 | # bAccepted = self.EvaluateConditional(peek, pfnEvaluateSymbolProc) 585 | #else: 586 | # tokenReader.SeekBackOneToken() 587 | 588 | #if bAccepted: 589 | # ... 590 | # #self.value.append(dat) 591 | #else: 592 | # # remove key from list 593 | # del self.value[dat] 594 | # del dat 595 | 596 | def EvaluateConditional(self, **args): 597 | return True 598 | def __repr__(self): 599 | return f"{self.__class__.__name__}({self.keyName!r}, {self.value.__class__.__name__}({self.value!r}))" 600 | 601 | def ToString(self, level=0): 602 | line_indent = "\t" * level 603 | 604 | return line_indent + f'"{self.keyName}"{self.value.ToString(level)}' 605 | 606 | def ToTuple(self) -> tuple: 607 | return self.keyName, self.value.ToBuiltin() 608 | 609 | if __name__ == "__main__": 610 | 611 | from pathlib import Path 612 | 613 | def updateTestOutpt(file: Path): 614 | with open(file, "r") as fp: 615 | kv2 = KeyValues() 616 | kv2.LoadFromFile(file) 617 | newfile = file.parents[1] / "ndata" / file.name 618 | with newfile.open("w") as newfp: 619 | newfp.write(kv2.ToString()) 620 | 621 | for file in Path(r".\test\keyvalues\data").glob("*"): 622 | pass 623 | #KeyValues().LoadFromFile(file) 624 | #updateTestOutpt(file) 625 | import unittest 626 | from time import time, sleep 627 | def measure(func): 628 | @wraps(func) 629 | def _time_it(*args, **kwargs): 630 | start = int(round(time() * 1000)) 631 | try: return func(*args, **kwargs) 632 | finally: 633 | end_ = int(round(time() * 1000)) - start 634 | print(f"Total execution time: {end_ if end_ > 0 else 0} ms") 635 | return _time_it 636 | 637 | #def test_speed(): 638 | # for _ in range(100): KeyValues().LoadFromFile("test/materials/test_proxy.vmt") 639 | #measure(test_speed)();sleep(1) 640 | 641 | class Test_KeyValues(unittest.TestCase): 642 | def test_1(self): 643 | text = "//asdasd\nvalue {\"key\" \"key\" \"\"value }" 644 | text_expected = '"value"\n{\n\t"key"\t"key"\n}\n' 645 | kv = KeyValues() 646 | kv.LoadFromBuffer("NULL_test", CUtlBuffer(text)) 647 | self.assertEqual(kv.ToString(), text_expected) 648 | 649 | for i, file in enumerate(Path(r".\test\keyvalues\data").glob("*")): 650 | def test_filen(self): 651 | with (file.parents[1] / "ndata" / file.name).open() as e: 652 | expect = e.read() 653 | kv = KeyValues() 654 | kv.LoadFromFile(file.as_posix()) 655 | 656 | self.assertEqual(kv.ToString(), expect) 657 | setattr(Test_KeyValues, f"{test_filen.__name__}{i}", test_filen) 658 | 659 | unittest.main() 660 | --------------------------------------------------------------------------------