├── .gitignore ├── LICENSE ├── README.md ├── project_generators ├── cmake │ └── cmake.py ├── compile_commands │ └── compile_commands.py ├── makefile │ └── makefile.py ├── ninja │ └── ninja.py ├── qmake │ └── qmake.py ├── shared │ ├── cmd_line_gen.py │ └── msvc_tools.py ├── visual_studio │ └── visual_studio.py └── vsc │ └── vsc.py ├── qpc.py ├── qpc_args.py ├── qpc_base.py ├── qpc_c_parser.py ├── qpc_generator_handler.py ├── qpc_hash.py ├── qpc_logging.py ├── qpc_parser.py ├── qpc_project.py ├── qpc_reader.py └── qpc_vpc_converter.py /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | .vscode 3 | hashes 4 | .idea 5 | *.bak 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Mozilla Public License Version 2.0 2 | ================================== 3 | 4 | 1. Definitions 5 | -------------- 6 | 7 | 1.1. "Contributor" 8 | means each individual or legal entity that creates, contributes to 9 | the creation of, or owns Covered Software. 10 | 11 | 1.2. "Contributor Version" 12 | means the combination of the Contributions of others (if any) used 13 | by a Contributor and that particular Contributor's Contribution. 14 | 15 | 1.3. "Contribution" 16 | means Covered Software of a particular Contributor. 17 | 18 | 1.4. "Covered Software" 19 | means Source Code Form to which the initial Contributor has attached 20 | the notice in Exhibit A, the Executable Form of such Source Code 21 | Form, and Modifications of such Source Code Form, in each case 22 | including portions thereof. 23 | 24 | 1.5. 
"Incompatible With Secondary Licenses" 25 | means 26 | 27 | (a) that the initial Contributor has attached the notice described 28 | in Exhibit B to the Covered Software; or 29 | 30 | (b) that the Covered Software was made available under the terms of 31 | version 1.1 or earlier of the License, but not also under the 32 | terms of a Secondary License. 33 | 34 | 1.6. "Executable Form" 35 | means any form of the work other than Source Code Form. 36 | 37 | 1.7. "Larger Work" 38 | means a work that combines Covered Software with other material, in 39 | a separate file or files, that is not Covered Software. 40 | 41 | 1.8. "License" 42 | means this document. 43 | 44 | 1.9. "Licensable" 45 | means having the right to grant, to the maximum extent possible, 46 | whether at the time of the initial grant or subsequently, any and 47 | all of the rights conveyed by this License. 48 | 49 | 1.10. "Modifications" 50 | means any of the following: 51 | 52 | (a) any file in Source Code Form that results from an addition to, 53 | deletion from, or modification of the contents of Covered 54 | Software; or 55 | 56 | (b) any new file in Source Code Form that contains any Covered 57 | Software. 58 | 59 | 1.11. "Patent Claims" of a Contributor 60 | means any patent claim(s), including without limitation, method, 61 | process, and apparatus claims, in any patent Licensable by such 62 | Contributor that would be infringed, but for the grant of the 63 | License, by the making, using, selling, offering for sale, having 64 | made, import, or transfer of either its Contributions or its 65 | Contributor Version. 66 | 67 | 1.12. "Secondary License" 68 | means either the GNU General Public License, Version 2.0, the GNU 69 | Lesser General Public License, Version 2.1, the GNU Affero General 70 | Public License, Version 3.0, or any later versions of those 71 | licenses. 72 | 73 | 1.13. "Source Code Form" 74 | means the form of the work preferred for making modifications. 75 | 76 | 1.14. 
"You" (or "Your") 77 | means an individual or a legal entity exercising rights under this 78 | License. For legal entities, "You" includes any entity that 79 | controls, is controlled by, or is under common control with You. For 80 | purposes of this definition, "control" means (a) the power, direct 81 | or indirect, to cause the direction or management of such entity, 82 | whether by contract or otherwise, or (b) ownership of more than 83 | fifty percent (50%) of the outstanding shares or beneficial 84 | ownership of such entity. 85 | 86 | 2. License Grants and Conditions 87 | -------------------------------- 88 | 89 | 2.1. Grants 90 | 91 | Each Contributor hereby grants You a world-wide, royalty-free, 92 | non-exclusive license: 93 | 94 | (a) under intellectual property rights (other than patent or trademark) 95 | Licensable by such Contributor to use, reproduce, make available, 96 | modify, display, perform, distribute, and otherwise exploit its 97 | Contributions, either on an unmodified basis, with Modifications, or 98 | as part of a Larger Work; and 99 | 100 | (b) under Patent Claims of such Contributor to make, use, sell, offer 101 | for sale, have made, import, and otherwise transfer either its 102 | Contributions or its Contributor Version. 103 | 104 | 2.2. Effective Date 105 | 106 | The licenses granted in Section 2.1 with respect to any Contribution 107 | become effective for each Contribution on the date the Contributor first 108 | distributes such Contribution. 109 | 110 | 2.3. Limitations on Grant Scope 111 | 112 | The licenses granted in this Section 2 are the only rights granted under 113 | this License. No additional rights or licenses will be implied from the 114 | distribution or licensing of Covered Software under this License. 
115 | Notwithstanding Section 2.1(b) above, no patent license is granted by a 116 | Contributor: 117 | 118 | (a) for any code that a Contributor has removed from Covered Software; 119 | or 120 | 121 | (b) for infringements caused by: (i) Your and any other third party's 122 | modifications of Covered Software, or (ii) the combination of its 123 | Contributions with other software (except as part of its Contributor 124 | Version); or 125 | 126 | (c) under Patent Claims infringed by Covered Software in the absence of 127 | its Contributions. 128 | 129 | This License does not grant any rights in the trademarks, service marks, 130 | or logos of any Contributor (except as may be necessary to comply with 131 | the notice requirements in Section 3.4). 132 | 133 | 2.4. Subsequent Licenses 134 | 135 | No Contributor makes additional grants as a result of Your choice to 136 | distribute the Covered Software under a subsequent version of this 137 | License (see Section 10.2) or under the terms of a Secondary License (if 138 | permitted under the terms of Section 3.3). 139 | 140 | 2.5. Representation 141 | 142 | Each Contributor represents that the Contributor believes its 143 | Contributions are its original creation(s) or it has sufficient rights 144 | to grant the rights to its Contributions conveyed by this License. 145 | 146 | 2.6. Fair Use 147 | 148 | This License is not intended to limit any rights You have under 149 | applicable copyright doctrines of fair use, fair dealing, or other 150 | equivalents. 151 | 152 | 2.7. Conditions 153 | 154 | Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted 155 | in Section 2.1. 156 | 157 | 3. Responsibilities 158 | ------------------- 159 | 160 | 3.1. Distribution of Source Form 161 | 162 | All distribution of Covered Software in Source Code Form, including any 163 | Modifications that You create or to which You contribute, must be under 164 | the terms of this License. 
You must inform recipients that the Source 165 | Code Form of the Covered Software is governed by the terms of this 166 | License, and how they can obtain a copy of this License. You may not 167 | attempt to alter or restrict the recipients' rights in the Source Code 168 | Form. 169 | 170 | 3.2. Distribution of Executable Form 171 | 172 | If You distribute Covered Software in Executable Form then: 173 | 174 | (a) such Covered Software must also be made available in Source Code 175 | Form, as described in Section 3.1, and You must inform recipients of 176 | the Executable Form how they can obtain a copy of such Source Code 177 | Form by reasonable means in a timely manner, at a charge no more 178 | than the cost of distribution to the recipient; and 179 | 180 | (b) You may distribute such Executable Form under the terms of this 181 | License, or sublicense it under different terms, provided that the 182 | license for the Executable Form does not attempt to limit or alter 183 | the recipients' rights in the Source Code Form under this License. 184 | 185 | 3.3. Distribution of a Larger Work 186 | 187 | You may create and distribute a Larger Work under terms of Your choice, 188 | provided that You also comply with the requirements of this License for 189 | the Covered Software. If the Larger Work is a combination of Covered 190 | Software with a work governed by one or more Secondary Licenses, and the 191 | Covered Software is not Incompatible With Secondary Licenses, this 192 | License permits You to additionally distribute such Covered Software 193 | under the terms of such Secondary License(s), so that the recipient of 194 | the Larger Work may, at their option, further distribute the Covered 195 | Software under the terms of either this License or such Secondary 196 | License(s). 197 | 198 | 3.4. 
Notices 199 | 200 | You may not remove or alter the substance of any license notices 201 | (including copyright notices, patent notices, disclaimers of warranty, 202 | or limitations of liability) contained within the Source Code Form of 203 | the Covered Software, except that You may alter any license notices to 204 | the extent required to remedy known factual inaccuracies. 205 | 206 | 3.5. Application of Additional Terms 207 | 208 | You may choose to offer, and to charge a fee for, warranty, support, 209 | indemnity or liability obligations to one or more recipients of Covered 210 | Software. However, You may do so only on Your own behalf, and not on 211 | behalf of any Contributor. You must make it absolutely clear that any 212 | such warranty, support, indemnity, or liability obligation is offered by 213 | You alone, and You hereby agree to indemnify every Contributor for any 214 | liability incurred by such Contributor as a result of warranty, support, 215 | indemnity or liability terms You offer. You may include additional 216 | disclaimers of warranty and limitations of liability specific to any 217 | jurisdiction. 218 | 219 | 4. Inability to Comply Due to Statute or Regulation 220 | --------------------------------------------------- 221 | 222 | If it is impossible for You to comply with any of the terms of this 223 | License with respect to some or all of the Covered Software due to 224 | statute, judicial order, or regulation then You must: (a) comply with 225 | the terms of this License to the maximum extent possible; and (b) 226 | describe the limitations and the code they affect. Such description must 227 | be placed in a text file included with all distributions of the Covered 228 | Software under this License. Except to the extent prohibited by statute 229 | or regulation, such description must be sufficiently detailed for a 230 | recipient of ordinary skill to be able to understand it. 231 | 232 | 5. 
Termination 233 | -------------- 234 | 235 | 5.1. The rights granted under this License will terminate automatically 236 | if You fail to comply with any of its terms. However, if You become 237 | compliant, then the rights granted under this License from a particular 238 | Contributor are reinstated (a) provisionally, unless and until such 239 | Contributor explicitly and finally terminates Your grants, and (b) on an 240 | ongoing basis, if such Contributor fails to notify You of the 241 | non-compliance by some reasonable means prior to 60 days after You have 242 | come back into compliance. Moreover, Your grants from a particular 243 | Contributor are reinstated on an ongoing basis if such Contributor 244 | notifies You of the non-compliance by some reasonable means, this is the 245 | first time You have received notice of non-compliance with this License 246 | from such Contributor, and You become compliant prior to 30 days after 247 | Your receipt of the notice. 248 | 249 | 5.2. If You initiate litigation against any entity by asserting a patent 250 | infringement claim (excluding declaratory judgment actions, 251 | counter-claims, and cross-claims) alleging that a Contributor Version 252 | directly or indirectly infringes any patent, then the rights granted to 253 | You by any and all Contributors for the Covered Software under Section 254 | 2.1 of this License shall terminate. 255 | 256 | 5.3. In the event of termination under Sections 5.1 or 5.2 above, all 257 | end user license agreements (excluding distributors and resellers) which 258 | have been validly granted by You or Your distributors under this License 259 | prior to termination shall survive termination. 260 | 261 | ************************************************************************ 262 | * * 263 | * 6. 
Disclaimer of Warranty * 264 | * ------------------------- * 265 | * * 266 | * Covered Software is provided under this License on an "as is" * 267 | * basis, without warranty of any kind, either expressed, implied, or * 268 | * statutory, including, without limitation, warranties that the * 269 | * Covered Software is free of defects, merchantable, fit for a * 270 | * particular purpose or non-infringing. The entire risk as to the * 271 | * quality and performance of the Covered Software is with You. * 272 | * Should any Covered Software prove defective in any respect, You * 273 | * (not any Contributor) assume the cost of any necessary servicing, * 274 | * repair, or correction. This disclaimer of warranty constitutes an * 275 | * essential part of this License. No use of any Covered Software is * 276 | * authorized under this License except under this disclaimer. * 277 | * * 278 | ************************************************************************ 279 | 280 | ************************************************************************ 281 | * * 282 | * 7. Limitation of Liability * 283 | * -------------------------- * 284 | * * 285 | * Under no circumstances and under no legal theory, whether tort * 286 | * (including negligence), contract, or otherwise, shall any * 287 | * Contributor, or anyone who distributes Covered Software as * 288 | * permitted above, be liable to You for any direct, indirect, * 289 | * special, incidental, or consequential damages of any character * 290 | * including, without limitation, damages for lost profits, loss of * 291 | * goodwill, work stoppage, computer failure or malfunction, or any * 292 | * and all other commercial damages or losses, even if such party * 293 | * shall have been informed of the possibility of such damages. 
This * 294 | * limitation of liability shall not apply to liability for death or * 295 | * personal injury resulting from such party's negligence to the * 296 | * extent applicable law prohibits such limitation. Some * 297 | * jurisdictions do not allow the exclusion or limitation of * 298 | * incidental or consequential damages, so this exclusion and * 299 | * limitation may not apply to You. * 300 | * * 301 | ************************************************************************ 302 | 303 | 8. Litigation 304 | ------------- 305 | 306 | Any litigation relating to this License may be brought only in the 307 | courts of a jurisdiction where the defendant maintains its principal 308 | place of business and such litigation shall be governed by laws of that 309 | jurisdiction, without reference to its conflict-of-law provisions. 310 | Nothing in this Section shall prevent a party's ability to bring 311 | cross-claims or counter-claims. 312 | 313 | 9. Miscellaneous 314 | ---------------- 315 | 316 | This License represents the complete agreement concerning the subject 317 | matter hereof. If any provision of this License is held to be 318 | unenforceable, such provision shall be reformed only to the extent 319 | necessary to make it enforceable. Any law or regulation which provides 320 | that the language of a contract shall be construed against the drafter 321 | shall not be used to construe this License against a Contributor. 322 | 323 | 10. Versions of the License 324 | --------------------------- 325 | 326 | 10.1. New Versions 327 | 328 | Mozilla Foundation is the license steward. Except as provided in Section 329 | 10.3, no one other than the license steward has the right to modify or 330 | publish new versions of this License. Each version will be given a 331 | distinguishing version number. 332 | 333 | 10.2. 
Effect of New Versions 334 | 335 | You may distribute the Covered Software under the terms of the version 336 | of the License under which You originally received the Covered Software, 337 | or under the terms of any subsequent version published by the license 338 | steward. 339 | 340 | 10.3. Modified Versions 341 | 342 | If you create software not governed by this License, and you want to 343 | create a new license for such software, you may create and use a 344 | modified version of this License if you rename the license and remove 345 | any references to the name of the license steward (except to note that 346 | such modified license differs from this License). 347 | 348 | 10.4. Distributing Source Code Form that is Incompatible With Secondary 349 | Licenses 350 | 351 | If You choose to distribute Source Code Form that is Incompatible With 352 | Secondary Licenses under the terms of this version of the License, the 353 | notice described in Exhibit B of this License must be attached. 354 | 355 | Exhibit A - Source Code Form License Notice 356 | ------------------------------------------- 357 | 358 | This Source Code Form is subject to the terms of the Mozilla Public 359 | License, v. 2.0. If a copy of the MPL was not distributed with this 360 | file, You can obtain one at http://mozilla.org/MPL/2.0/. 361 | 362 | If it is not possible or desirable to put the notice in a particular 363 | file, then You may include the notice in a location (such as a LICENSE 364 | file in a relevant directory) where a recipient would be likely to look 365 | for such a notice. 366 | 367 | You may add additional accurate notices of copyright ownership. 368 | 369 | Exhibit B - "Incompatible With Secondary Licenses" Notice 370 | --------------------------------------------------------- 371 | 372 | This Source Code Form is "Incompatible With Secondary Licenses", as 373 | defined by the Mozilla Public License, v. 2.0. 
374 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # REQUIRES PYTHON 3.7+ AND LXML INSTALLED 2 | For Windows, install lxml with `py -m pip install lxml` 3 | For Linux, install lxml with `python3 -m pip install lxml` 4 | 5 | # Quiver Project Creator 6 | 7 | Generates projects for different build systems, scroll down for supported build systems. 8 | 9 | This is inspired by Valve's VPC (Valve Project Creator). 10 | 11 | It aims to retain the good elements of VPC (simplicity, ease of use, extensibility and configurability, "direct" access to compiler options and the like) 12 | 13 | ...but improve the areas where VPC failed (multi-platform support, speed, syntax, modernization) 14 | 15 | We also want this to be better documented - eventually. Right now, you can look at some example scripts [here](https://github.com/Demez/demez_asw_base/tree/master/_qpc_scripts) 16 | 17 | ## Command Line Values: 18 | - all caps on a word like `NAME` is a single string 19 | - a value with `[]` around it like `[NAMES]` is a list, for multiple values 20 | 21 | ## Command Line usage: 22 | 23 | ``` 24 | -d --rootdir DIR Change the current working directory of the script 25 | 26 | -b --basefile FILE Set the path to the base script to use 27 | 28 | -f --force Force Regenerate All Projects 29 | 30 | -fm --force_master Force Regenerate Master File 31 | 32 | -v --verbose Enable verbose console output 33 | 34 | -w --hidewarnings Suppress all warnings 35 | 36 | -cf --checkfiles Check if all files added exists 37 | 38 | -t --time Display the time taken to parse projects, generate projects and master files 39 | 40 | -s --skipprojects Skip Generating projects, useful for working on master files in generators 41 | 42 | -mf --masterfile NAME Create a master file to build all projects with (ex. vstudio solution) 43 | 44 | -m --macros [names] Set global macros. 
(ex: -m HL2 is equal to macro HL2 "1", -m "VIDEOPROVIDER=MPV" is equal to macro VIDEOPROVIDER MPV) 45 | 46 | -c --configs [] Set configs, if no configs are set in a basefile, qpc will use "Default" 47 | ``` 48 | 49 | ### Adding and removing projects: 50 | 51 | ``` 52 | -a --add [projects/groups] Add groups or projects 53 | 54 | -r --remove [projects/groups] Don't use these projects or groups 55 | ``` 56 | 57 | ### Generating for other platforms and architectures 58 | 59 | This option allows you to generate projects for multiple different platforms and architectures at a time 60 | 61 | It Defaults to the current platform and arch you are on 62 | ``` 63 | -p --platforms [] 64 | 65 | -ar --archs [] 66 | ``` 67 | Current Platforms: 68 | - windows 69 | - linux 70 | - macos 71 | 72 | Current Architectures: 73 | - i386 74 | - amd64 75 | - arm 76 | - arm64 77 | 78 | ### Project Generators: 79 | 80 | Choosing project generators to use is done with adding any valid generator name into the input list here 81 | 82 | ``` 83 | -g --generators [generators] Project types to generate 84 | ``` 85 | 86 | Current Project Generators: 87 | 88 | ``` 89 | visual_studio Create Visual Studio Projects 90 | 91 | makefile Create Makefiles 92 | 93 | compile_commands Create files in the compile_commands.json format, stored in compile_commands folder 94 | 95 | ninja Create ninja build scripts in build_ninja in the directory qpc is called from (or from --rootdir) 96 | 97 | cmake Create CMakeLists.txt files where the project script is located, also works with multiple in the same folder 98 | 99 | vsc Create A basic vscode file with paths setup for the compile_commands generator 100 | ``` 101 | 102 | You can make your own project generator by looking at [this page on the wiki](https://github.com/quiverteam/QuiverProjectCreator/wiki/Creating-your-own-generator) 103 | -------------------------------------------------------------------------------- /project_generators/cmake/cmake.py: 
# project_generators/cmake/cmake.py
# QPC generator backend that writes CMakeLists.txt files for parsed projects.

import os

from qpc_args import args
from qpc_base import BaseProjectGenerator, Platform, Arch, is_arch_64bit
from qpc_project import ConfigType, Language, ProjectContainer, ProjectPass, Configuration, ProjectDefinition
from qpc_parser import BaseInfo, BaseInfoPlatform
from qpc_logging import warning, error, verbose, print_color, Color
from ..shared import cmd_line_gen, msvc_tools
from typing import List


class CMakeGenerator(BaseProjectGenerator):
    """Generates per-directory CMakeLists.txt files and a master CMakeLists.txt.

    One CMakeLists.txt may accumulate several QPC projects located in the same
    folder; ``cmake_dirs`` tracks which directories have been started this run.
    """

    def __init__(self):
        super().__init__("CMake Generator")
        # this generator supports every platform QPC targets
        self._add_platforms(Platform.WINDOWS, Platform.LINUX, Platform.MACOS)
        self._set_generate_master_file(True)
        # projects can test for this generator via the CMAKE macro
        self._set_macro("CMAKE")

        # translates compiler-specific switches (MSVC vs GCC/Clang style)
        self.cmd_gen = cmd_line_gen.CommandLineGen()
        # working directories in which a CMakeLists.txt was already begun this run
        self.cmake_dirs = []

    def does_project_exist(self, project_out_dir: str) -> bool:
        # Always False, so projects are regenerated on every run.
        # TODO: check if the cmake file exists and contains an add_target line for this project
        # ...except i don't know what the config_type is at this stage, so we can't really do that
        # unless i hash it, but that's dumb, instead,
        # generators should optionally be able to add stuff to a project's hash file
        return False

    # what was i thinking with this function again?
    def get_master_file_path(self, master_file_path: str) -> str:
        """Return the master file path unchanged (identity hook for the base class)."""
        return master_file_path

    def set_platform_and_archs(self, platform: Platform, arch: Arch) -> str:
        """Return CMake code that sets QPC_PLATFORM / QPC_ARCH unless already defined."""
        string = "if" + ifndef("QPC_PLATFORM", f"\tset(QPC_PLATFORM \"{platform.name}\")")
        string += "\n\nif" + ifndef(
            "QPC_ARCH",
            f"\tset(QPC_ARCH \"{arch.name}\")"
            # + f"\n\tset(CMAKE_GENERATOR_PLATFORM \"{'x86' if arch == Arch.I386 else 'x64'}\")"
        )
        return string

    # write a master CMakeLists.txt file
    def create_master_file(self, settings: BaseInfo, master_file_path: str) -> str:
        """Write a top-level CMakeLists.txt that add_subdirectory()s every project folder.

        Returns an empty string (the base class expects a path-like return; the
        file is always written to "CMakeLists.txt" in the current directory).
        """
        main_cmakelists = "cmake_minimum_required(VERSION 3.5)\n\n"
        main_cmakelists += f"project({os.path.basename(master_file_path)})\n\n"

        print_color(Color.CYAN, "Creating Master CMakeLists.txt")

        # this chooses 64 bit architectures over 32 bit for a default arch
        # NOTE(review): by operator precedence this is
        #   (not architecture) or (is_arch_64bit(arch) and architecture and not is_arch_64bit(architecture))
        # and architecture starts as args.archs[0], so it is never falsy; the net
        # effect is "first 64-bit arch wins over a 32-bit default" — confirm intent.
        architecture: Arch = args.archs[0]
        for arch in args.archs:
            if not architecture or is_arch_64bit(arch) and architecture and not is_arch_64bit(architecture):
                architecture = arch

        main_cmakelists += self.set_platform_and_archs(settings.info_list[0].platform, architecture)
        main_cmakelists += "\n\n"

        # one add_subdirectory() per unique project folder
        subdirs = set()
        for project in settings.projects:
            path = os.path.split(project.path)[0]
            if path not in subdirs:
                main_cmakelists += f"add_subdirectory({path})\n"
                subdirs.add(path)

        with open("CMakeLists.txt", "w", encoding="utf8") as cmakelist_io:
            cmakelist_io.write(main_cmakelists)

        return ""

    def create_project(self, project: ProjectContainer) -> None:
        """Write (or extend) the CMakeLists.txt in the current directory for this project.

        Each project pass (config/platform/arch combination) becomes one branch
        of a CMake if()/elseif() chain keyed on CMAKE_BUILD_TYPE, QPC_PLATFORM
        and QPC_ARCH.
        """
        project_passes: List[ProjectPass] = self._get_passes(project)
        if not project_passes:
            return

        print_color(Color.CYAN, "CMake Generator Running on " + project.file_name)

        main_cmakelists = ""
        if os.getcwd() not in self.cmake_dirs:
            # first project in this directory this run: start a fresh file header
            self.cmake_dirs.append(os.getcwd())
            main_cmakelists += self.gen_declaration(project_passes)
        else:
            # a sibling project already wrote here: append to the existing content
            if os.path.isfile("CMakeLists.txt"):
                # check for cmake_minimum_required( in file?
                with open("CMakeLists.txt", "r", encoding="utf8") as cmakelist_io:
                    main_cmakelists += cmakelist_io.read()

        for i, proj in enumerate(project_passes):
            # every pass after the first becomes an "elseif" branch
            main_cmakelists += "else" if i != 0 else ""
            # NOTE - can't use custom build types with CMAKE_BUILD_TYPE, really sucks
            main_cmakelists += "if( " + \
                strequal("CMAKE_BUILD_TYPE", proj.cfg_name) + " AND " + \
                strequal("QPC_PLATFORM", proj.platform.name) + " AND " + \
                strequal("QPC_ARCH", proj.arch.name) + " )\n"

            main_cmakelists += self.handle_pass(proj)

        # single endif() closes the whole if/elseif chain
        main_cmakelists += "endif()\n\n"

        # issue - could be multiple projects in this same folder
        with open("CMakeLists.txt", "w", encoding="utf8") as cmakelist_io:
            cmakelist_io.write(main_cmakelists)

    def gen_declaration(self, project_passes: List[ProjectPass]) -> str:
        """Return the file header: cmake_minimum_required plus platform/arch guards."""
        # i don't actually know what the minimum here would be
        declaration = "cmake_minimum_required(VERSION 3.5)\n\n"

        declaration += self.set_platform_and_archs(project_passes[0].platform, project_passes[0].arch)

        declaration += f"\n\n"

        return declaration

    def handle_pass(self, proj: ProjectPass) -> str:
        """Return the CMake statements for one project pass (one config/platform/arch)."""
        cmakelists = ""
        # target name: upper-cased project file name, not the output name
        # proj_name = proj.config.general.out_name.upper()
        proj_name = proj.container.file_name.upper()

        self.cmd_gen.set_mode(proj.cfg.general.compiler)

        if proj.cfg.general.config_type == ConfigType.APPLICATION:
            target = "executable"
        else:
            target = "library"

        # source and header lists go in <NAME>_SRC_FILES / <NAME>_INC_FILES variables
        cmakelists += "\n" + gen_list_option("set", f"{proj_name}_SRC_FILES", *abspathlist(list(proj.source_files)))
        cmakelists += gen_list_option("set", f"{proj_name}_INC_FILES", *abspathlist(proj.get_headers()))

        if proj.cfg.general.config_type == ConfigType.STATIC_LIB:
            target_type = " STATIC"
        elif proj.cfg.general.config_type == ConfigType.DYNAMIC_LIB:
            target_type = " SHARED"
        else:
            target_type = ""

        cmakelists += gen_option(f"\tadd_{target}", f"{proj_name}{target_type}",
                                 f"${{{proj_name}_SRC_FILES}}",
                                 f"${{{proj_name}_INC_FILES}}")

        lang = "CXX" if proj.cfg.general.language == Language.CPP else "C"

        target_props = {
            "PREFIX": f"\"{proj.cfg.general.out_name_prefix}\"",
            "OUTPUT_NAME": f"\"{proj.cfg.general.out_name}\"",
            f"{lang}_COMPILER": f"\"{proj.cfg.general.compiler}\"",
            # strips the language prefix from the standard enum name,
            # presumably e.g. "CPP17" -> "17" — TODO confirm enum naming
            f"{lang}_STANDARD": proj.cfg.general.standard.name[len(proj.cfg.general.language.name):],
            # f"{lang}_STANDARD_REQUIRED": "YES"
        }

        # static libs land in the archive dir; executables/shared libs in the runtime dir
        if proj.cfg.general.config_type == ConfigType.STATIC_LIB:
            cmake_output_dir = "ARCHIVE_OUTPUT_DIRECTORY"
            # cmake_output_dir = "LIBRARY_OUTPUT_DIRECTORY"
        else:
            cmake_output_dir = "RUNTIME_OUTPUT_DIRECTORY"

        output_dir = proj.cfg.general.out_dir
        if proj.cfg.link.output_file:
            # an explicit output file overrides the configured output directory
            output_dir = os.path.split(proj.cfg.link.output_file)[0]

        target_props[cmake_output_dir] = q_abspath(output_dir)

        if proj.cfg.link.import_lib:
            target_props["ARCHIVE_OUTPUT_DIRECTORY"] = q_abspath(os.path.split(proj.cfg.link.import_lib)[0])

        elif proj.cfg.general.config_type != ConfigType.STATIC_LIB:
            target_props["ARCHIVE_OUTPUT_DIRECTORY"] = q_abspath(proj.cfg.general.build_dir)

        target_props_strs = [f"{k} {v}" for k, v in target_props.items()]
        cmakelists += "\n" + gen_list_option("set_target_properties", f"{proj_name} PROPERTIES", *target_props_strs)

        # only depend on targets that are actually part of this QPC run
        all_scripts = []
        for proj_def in proj.container.base_info.projects:
            all_scripts.append(proj_def.path)

        dependencies = []
        for path in proj.container.dependencies:
            if path not in all_scripts:
                continue
            # dependency target names follow the same upper-cased-basename convention
            dependencies.append(os.path.splitext(os.path.basename(path))[0].upper())

        if dependencies:
            cmakelists += gen_list_option("add_dependencies", proj_name, *dependencies)

        if proj.cfg.compile.inc_dirs:
            inc_dirs = abspathlist(proj.cfg.compile.inc_dirs)
            # on Windows, optionally append the MSVC toolchain's default include dirs
            if proj.cfg.compile.default_inc_dirs and proj.platform == Platform.WINDOWS:
                inc_dirs.extend(abspathlist(msvc_tools.get_inc_dirs("")))
            cmakelists += gen_target_option("include_directories", f"{proj_name} PRIVATE", *inc_dirs)

        if proj.cfg.link.lib_dirs:
            lib_dirs = abspathlist(proj.cfg.link.lib_dirs)
            if proj.cfg.link.default_lib_dirs and proj.platform == Platform.WINDOWS:
                lib_dirs.extend(abspathlist(msvc_tools.get_lib_dirs("")))
            cmakelists += gen_target_option("link_directories", f"{proj_name} PRIVATE", *lib_dirs)

        if proj.cfg.link.libs:
            libs = []
            for lib in proj.cfg.link.libs:
                if os.path.split(lib)[0]:
                    # path-qualified lib: ensure it has the platform lib extension,
                    # then pass it as a quoted absolute path
                    if not os.path.splitext(lib)[1]:
                        lib += proj.macros["EXT_LIB"]
                    libs.append(q_abspath(lib))
                else:
                    # bare name: let the linker search for it
                    # libs.append(f"\"{lib}\"")
                    libs.append(lib)

            cmakelists += gen_target_option("link_libraries", proj_name, *libs)

        if proj.cfg.compile.defines:
            cmakelists += gen_add_definitions(f"{proj_name} PRIVATE", proj.cfg.compile.defines)

        if proj.cfg.compile.options:
            cmakelists += gen_target_option("compile_options", f"{proj_name} PRIVATE", *proj.cfg.compile.options)

        link_options = []
        if proj.cfg.link.ignore_libs:
            # cmd_gen renders "ignore these default libs" in compiler-specific form
            link_options.extend(self.cmd_gen.ignore_libs(proj.cfg.link.ignore_libs))

        if proj.cfg.link.options:
            link_options.extend(proj.cfg.link.options)

        if link_options:
            cmakelists += gen_target_option("link_options", f"{proj_name} PRIVATE", *link_options)

        return cmakelists


def abspath(path: str) -> str:
    """Return the absolute form of *path* with forward slashes (CMake-friendly)."""
    return os.path.abspath(path).replace("\\", "/")
229 | 230 | 231 | def q_abspath(path: str) -> str: 232 | return f"\"{abspath(path)}\"" 233 | 234 | 235 | def abspathlist(paths: list) -> list: 236 | return [q_abspath(path) for path in paths] 237 | 238 | 239 | def ifndef(var: str, text: str) -> str: 240 | return f"(NOT DEFINED {var})\n{text}\nendif()" 241 | 242 | 243 | def ifdef(var: str, text: str) -> str: 244 | return f"(DEFINED {var})\n{text}\nendif()" 245 | 246 | 247 | def strequal(str1: str, str2: str) -> str: 248 | return f"({str1} STREQUAL {str2})" 249 | 250 | 251 | def gen_add_definitions(proj_name: str, defines: List[str]) -> str: 252 | return gen_list_option_custom("target_compile_definitions", proj_name, "\n\t\t-D", *defines) 253 | 254 | 255 | def gen_list_option_custom(target: str, name: str, join_char: str, *args) -> str: 256 | return f"\t{target}(\n\t\t{name}{join_char}" + join_char.join(args) + "\n\t)\n\n" 257 | 258 | 259 | def gen_target_option(target: str, *args) -> str: 260 | return f"\ttarget_{target}(\n\t\t" + "\n\t\t".join(args) + "\n\t)\n\n" 261 | 262 | 263 | def gen_list_option(target: str, *args) -> str: 264 | return f"\t{target}(\n\t\t" + "\n\t\t".join(args) + "\n\t)\n\n" 265 | 266 | 267 | def gen_option(target: str, *args) -> str: 268 | return f"{target}( " + " ".join(args) + " )\n" 269 | 270 | 271 | 272 | 273 | -------------------------------------------------------------------------------- /project_generators/compile_commands/compile_commands.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | 4 | from qpc_base import BaseProjectGenerator, Platform, create_directory 5 | from qpc_project import ConfigType, Language, ProjectContainer, ProjectPass, Configuration 6 | from qpc_logging import warning, error, verbose, print_color, Color 7 | from ..shared import cmd_line_gen 8 | from typing import List 9 | 10 | 11 | class CompileCommandsGenerator(BaseProjectGenerator): 12 | def __init__(self): 13 | 
super().__init__("compile_commands.json") 14 | self._add_platforms(Platform.WINDOWS, Platform.LINUX, Platform.MACOS) 15 | 16 | self.cmd_gen = cmd_line_gen.CommandLineGen() 17 | self.commands_list = {} 18 | self.all_files = {} 19 | 20 | def post_args_init(self): 21 | pass 22 | 23 | def does_project_exist(self, project_out_dir: str) -> bool: 24 | return False 25 | 26 | def projects_finished(self): 27 | if not self.commands_list: 28 | return 29 | print("------------------------------------------------------------------------") 30 | create_directory("compile_commands") 31 | for label, commands_list in self.commands_list.items(): 32 | print_color(Color.CYAN, "Writing: " + f"compile_commands/{label}.json") 33 | compile_commands = json.dumps(commands_list, indent=4) 34 | with open(f"compile_commands/{label}.json", "w") as file_io: 35 | file_io.write(compile_commands) 36 | 37 | def create_project(self, project: ProjectContainer) -> None: 38 | project_passes: List[ProjectPass] = self._get_passes(project) 39 | if not project_passes: 40 | return 41 | 42 | print_color(Color.CYAN, "Adding to Compile Commands: " + project.file_name) 43 | 44 | for proj_pass in project_passes: 45 | self.cmd_gen.set_mode(proj_pass.cfg.general.compiler) 46 | label = f"{proj_pass.cfg_name.lower()}_{proj_pass.platform.name.lower()}_{proj_pass.arch.name.lower()}" 47 | if label not in self.all_files: 48 | self.all_files[label] = set() 49 | if label not in self.commands_list: 50 | self.commands_list[label] = [] 51 | 52 | for file in proj_pass.source_files: 53 | if file not in self.all_files[label]: 54 | self.all_files[label].add(file) 55 | self.commands_list[label].append(self.handle_file(file, proj_pass)) 56 | 57 | def handle_file(self, file: str, project: ProjectPass) -> dict: 58 | file_dict = { 59 | "directory": os.getcwd().replace("\\", "/"), 60 | "command": cmd_line_gen.get_compiler(project.cfg.general.compiler, project.cfg.general.language) + " ", 61 | "file": file 62 | } 63 | 64 | 
    def __init__(self):
        """Register makefile generation for Linux/macOS on the common architectures."""
        super().__init__("Makefile")
        self._add_platforms(Platform.LINUX, Platform.MACOS)
        self._add_architectures(Arch.I386, Arch.AMD64, Arch.ARM, Arch.ARM64)
        # a master makefile is emitted that builds every project in dependency order
        self._set_generate_master_file(True)
gen_defines(project, compiler, project.base_info.get_base_info(self._platforms[0]).configs) 42 | 43 | for p in project_passes: 44 | makefile += gen_project_config_definitions(p) 45 | 46 | with open(project.file_name + MAKEFILE_EXT, "w", encoding="utf-8") as f: 47 | f.write(makefile) 48 | 49 | def does_project_exist(self, project_out_dir: str) -> bool: 50 | return os.path.isfile(os.path.splitext(project_out_dir)[0] + MAKEFILE_EXT) 51 | 52 | def get_master_file_path(self, master_file_path: str) -> str: 53 | return master_file_path + MAKEFILE_EXT 54 | 55 | def create_master_file(self, info: BaseInfo, master_file_path: str) -> None: 56 | print_color(Color.CYAN, "Creating Master File: " + master_file_path) 57 | 58 | out_dir_dict = {} 59 | dependency_dict = {} 60 | wanted_projects = info.get_projects(Platform.LINUX, Platform.MACOS) 61 | for project_def in wanted_projects: 62 | if not project_def.path: 63 | continue # wtf 64 | out_dir = qpc_hash.get_out_dir(info.project_hashes[project_def.path]) 65 | if out_dir: 66 | out_dir_dict[project_def.path] = os.path.relpath(out_dir) 67 | dependency_dict[project_def.path] = info.project_dependencies[project_def.path] 68 | 69 | # this chooses 64 bit architectures over 32 bit for a default arch 70 | architecture = None 71 | for arch in args.archs: 72 | if arch in self._architectures and not architecture or \ 73 | is_arch_64bit(arch) and architecture and not is_arch_64bit(architecture): 74 | architecture = arch 75 | 76 | master_file = f"""#!/usr/bin/make -f 77 | 78 | SETTINGS = ARCH={architecture.name.lower()} CONFIG={info.get_configs()[0]} 79 | 80 | all: 81 | """ 82 | # sort dict by most dependencies to least dependencies, 100% a flawed way of doing this 83 | make_paths, make_files = self.order_dependencies(out_dir_dict, dependency_dict) 84 | 85 | for index, path in enumerate(make_paths): 86 | master_file += f"\tmake -C {path} -f {make_files[index]} $(SETTINGS)\n" 87 | 88 | with open(master_file_path, "w") as master_file_w: 89 | 
master_file_w.write(master_file + "\n") 90 | 91 | def does_master_file_exist(self, master_file_path: str) -> bool: 92 | return True 93 | 94 | def order_dependencies(self, out_dir_dict: dict, dependency_dict: dict) -> tuple: 95 | sorted_scripts = self.topological_sort(list(out_dir_dict.keys()), dependency_dict) 96 | # completely avoids removing duplicate paths or file names, like if it was in a dict, no duplicate keys 97 | make_paths, make_files = [], [] 98 | for script_path in sorted_scripts: 99 | make_paths.append(out_dir_dict[script_path]) 100 | make_files.append(os.path.splitext(os.path.basename(script_path))[0] + MAKEFILE_EXT) 101 | return (make_paths, make_files) 102 | 103 | # https://www.geeksforgeeks.org/python-program-for-topological-sorting/ 104 | def topological_sort(self, script_list: list, dependency_dict: dict): 105 | # Mark all the vertices as not visited 106 | visited = {} 107 | [visited.update({script_path: False}) for script_path in script_list] 108 | stack = [] 109 | 110 | # Call the recursive helper function to store Topological 111 | # Sort starting from all projects one by one 112 | for i in dependency_dict: 113 | if not visited[i]: 114 | self.topological_sort_util(dependency_dict, i, visited, stack) 115 | return stack 116 | 117 | def topological_sort_util(self, dependency_dict: dict, v, visited, stack): 118 | # Mark the current node as visited. 
def make_ifndef(item: str, body: str) -> str:
    """Wrap *body* in a GNU make conditional that only runs when the
    variable named *item* is not already defined (e.g. not passed on the
    command line).

    BUG FIX: ``ifndef`` takes a variable NAME, not a ``$(...)`` expansion.
    The old ``ifndef $({item})`` expanded the variable first, so it tested
    the variable *named by item's value* (and was an empty/invalid
    conditional when the variable was unset) — the guard never worked.
    """
    return f"ifndef {item}\n{body}\nendif"
cfg.link.output_file else "$(OUTNAME)" 175 | 176 | # ADD IMPORT LIBRARY OPTION, MAKES A STATIC LIBRARY 177 | if cfg.link.import_lib and cfg.general.config_type != ConfigType.STATIC_LIB: 178 | import_library = f"{os.path.splitext(cfg.link.import_lib)[0]}.a" 179 | else: 180 | import_library = "" 181 | 182 | if cfg.general.config_type == ConfigType.APPLICATION: 183 | makefile += f"{target_name}: __PREBUILD $(OBJECTS) __PRELINK {import_library}\n" 184 | makefile += f"\t@echo '$(GREEN)Compiling executable {target_name}$(NC)'\n" 185 | makefile += '\t' + '\n\t'.join(gen_compile_exe(cfg).split('\n')) 186 | 187 | elif cfg.general.config_type == ConfigType.DYNAMIC_LIB: 188 | makefile += f"$(addsuffix .so,{target_name}): __PREBUILD $(OBJECTS) __PRELINK {import_library}\n" 189 | makefile += f"\t@echo '$(CYAN)Compiling dynamic library {target_name}.so$(NC)'\n" 190 | makefile += '\t' + '\n\t'.join(gen_compile_dyn(cfg).split('\n')) 191 | 192 | elif cfg.general.config_type == ConfigType.STATIC_LIB: 193 | makefile += f"$(addsuffix .a,{target_name}): __PREBUILD $(OBJECTS) __PRELINK\n" 194 | makefile += f"\t@echo '$(CYAN)Compiling static library {target_name}.a$(NC)'\n" 195 | makefile += '\t' + '\n\t'.join(gen_compile_stat().split('\n')) 196 | 197 | # must be after setting the target above 198 | if import_library: 199 | makefile += f"\n\n{import_library}: __PREBUILD $(OBJECTS) __PRELINK\n" 200 | makefile += f"\t@echo '$(CYAN)Creating import library {import_library}$(NC)'\n" 201 | makefile += '\t' + '\n\t'.join(gen_compile_stat().split('\n')) 202 | 203 | makefile += "\n\t" + "\n\t".join(cfg.post_build) 204 | 205 | return makefile 206 | 207 | 208 | def gen_dependency_tree(objects, conf: Configuration) -> str: 209 | makefile = "\n#DEPENDENCY TREE:\n\n" 210 | pic = "-fPIC" 211 | 212 | for obj, path in objects.items(): 213 | makefile += f"\n{obj}: {path}\n" 214 | makefile += f"\t@echo '$(CYAN)Building Object {path}$(NC)'\n" 215 | makefile += f"\t@$(COMPILER) -c {pic} {gen_cflags(conf)} 
def gen_script_targets(conf: Configuration) -> str:
    """Emit the __PREBUILD and __PRELINK phony targets that run the
    project's pre-build / pre-link script lines."""
    prebuild_recipe = "\t" + "\n\t".join(conf.pre_build) + "\n\n"
    prelink_recipe = "\t" + "\n\t".join(conf.pre_link) + "\n\n"
    return ("\n\n__PREBUILD:\n" + prebuild_recipe +
            "\n\n__PRELINK:\n" + prelink_recipe)
def get_default_platform(project: ProjectContainer) -> str:
    """Pick the default architecture name for the makefile.

    64-bit architectures are preferred over 32-bit; otherwise the first
    architecture the project supports is used.
    """
    archs = project.get_archs()
    for preferred, label in ((Arch.AMD64, "amd64"), (Arch.ARM64, "arm64")):
        if preferred in archs:
            return label
    return archs[0].name.lower()
319 | 320 | 321 | RED =\033[0;31m 322 | CYAN =\033[0;36m 323 | GREEN =\033[0;32m 324 | NC =\033[0m 325 | 326 | ############################ 327 | ### BEGIN BUILD TARGETS ### 328 | ########################### 329 | """ 330 | -------------------------------------------------------------------------------- /project_generators/ninja/ninja.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | 4 | from qpc_base import BaseProjectGenerator, Platform, create_directory 5 | from qpc_project import ConfigType, Language, ProjectContainer, ProjectPass, Configuration, General, SourceFileCompile 6 | from qpc_parser import BaseInfo 7 | from qpc_logging import warning, error, verbose, print_color, Color, verbose_color 8 | from ..shared.cmd_line_gen import get_compiler, Mode 9 | from ..shared import cmd_line_gen 10 | from ..shared import msvc_tools 11 | from typing import List 12 | 13 | 14 | class NinjaGenerator(BaseProjectGenerator): 15 | def __init__(self): 16 | super().__init__("build.ninja") 17 | self._add_platforms(Platform.WINDOWS, Platform.LINUX, Platform.MACOS) 18 | 19 | self.cmd_gen = cmd_line_gen.CommandLineGen() 20 | self.commands_list = {} 21 | self.all_files = {} 22 | self.output_files = {} 23 | self.dependencies = {} 24 | 25 | def post_args_init(self): 26 | pass 27 | 28 | def does_project_exist(self, project_out_dir: str) -> bool: 29 | return False 30 | 31 | def projects_finished(self): 32 | if not self.commands_list: 33 | return 34 | print("------------------------------------------------------------------------") 35 | create_directory("build_ninja") 36 | for label, commands_list in self.commands_list.items(): 37 | print_color(Color.CYAN, "Writing: " + f"build_ninja/{label}.ninja") 38 | script = self.gen_rules() 39 | 40 | for item, deps in self.dependencies.items(): 41 | if item in self.output_files[label]: 42 | out_file, command = self.output_files[label][item] 43 | if command in commands_list: 44 | 
new_command = command.split("\n") 45 | dep_list = self.get_dependencies(label, deps) 46 | new_command[0] += " || " + " ".join(dep_list) 47 | commands_list[commands_list.index(command)] = "\n".join(new_command) 48 | 49 | script += '\n\n'.join(commands_list) 50 | with open(f"build_ninja/{label}.ninja", "w") as file_io: 51 | file_io.write(script) 52 | 53 | def get_dependencies(self, label: str, dep_list: list) -> list: 54 | output_list = [] 55 | for dep in dep_list: 56 | if dep in self.output_files[label]: 57 | output_list.append(self.output_files[label][dep][0]) 58 | return output_list 59 | 60 | def create_project(self, project: ProjectContainer) -> None: 61 | project_passes: List[ProjectPass] = self._get_passes(project) 62 | if not project_passes: 63 | return 64 | 65 | proj_name = project.file_name.replace('.', '_').replace(':', '$') 66 | 67 | print_color(Color.CYAN, "Adding to Ninja: " + project.file_name) 68 | 69 | if project.dependencies: 70 | self.dependencies[project.project_path] = project.dependencies.copy() 71 | 72 | for proj_pass in project_passes: 73 | conf = proj_pass.cfg 74 | self.cmd_gen.set_mode(proj_pass.cfg.general.compiler) 75 | label = f"{proj_pass.cfg_name.lower()}_{proj_pass.platform.name.lower()}_{proj_pass.arch.name.lower()}" 76 | 77 | if label not in self.all_files: 78 | self.all_files[label] = set() 79 | if label not in self.commands_list: 80 | self.commands_list[label] = [] 81 | if label not in self.output_files: 82 | self.output_files[label] = {} 83 | 84 | compiler = get_compiler(proj_pass.cfg.general.compiler, 85 | proj_pass.cfg.general.language) 86 | 87 | self.commands_list[label].append(self.gen_header(conf, project, compiler, proj_name)) 88 | 89 | for file, file_compile in proj_pass.source_files.items(): 90 | self.commands_list[label].append(self.handle_file(file, file_compile.compiler, proj_pass, proj_name)) 91 | 92 | output_file = self.handle_target(proj_pass, proj_name, proj_pass.source_files) 93 | 
self.commands_list[label].append(output_file) 94 | self.output_files[label][project.project_path] = (self.get_output_file(proj_pass), output_file) 95 | 96 | @staticmethod 97 | def gen_rules_gcc_clang(compiler: str): 98 | return f""" 99 | 100 | rule cc_{compiler} 101 | command = $compiler -c -o $out $in $cflags 102 | 103 | rule exe_{compiler} 104 | command = $compiler -o $out $in $cflags 105 | 106 | rule ar_{compiler} 107 | command = ar rcs $out $in 108 | 109 | rule so_{compiler} 110 | command = $compiler -o $out $in -fPIC -shared $cflags 111 | """ 112 | 113 | def gen_rules(self) -> str: 114 | rules = f""" 115 | rule cc_msvc 116 | command = $compiler /nologo /Fd"$out.pdb" /Fo"$out" /c $in $cflags 117 | 118 | rule exe_msvc 119 | command = link.exe /OUT:$out @$out.rsp $cflags 120 | rspfile = $out.rsp 121 | rspfile_content = $in 122 | 123 | rule ar_msvc 124 | command = lib.exe /OUT:$out @$out.rsp $cflags 125 | rspfile = $out.rsp 126 | rspfile_content = $in 127 | 128 | rule so_msvc 129 | command = link.exe /DLL /OUT:$out @$out.rsp $cflags 130 | rspfile = $out.rsp 131 | rspfile_content = $in 132 | """ 133 | # command = link.exe /DLL /OUT:$out $in $cflags 134 | 135 | rules += self.gen_rules_gcc_clang("gcc") 136 | rules += self.gen_rules_gcc_clang("clang") 137 | 138 | return f"\n# rules" + rules + "\n\n" \ 139 | "rule mkdir\n command = mkdir \"$out\"\n\n" 140 | 141 | def gen_header(self, conf: Configuration, project: ProjectContainer, compiler: str, proj_name: str): 142 | outname = conf.general.out_name if conf.general.out_name else project.file_name 143 | # {proj_name}_build_dir = {abs_path(conf.general.build_dir)} 144 | return f"""#!/usr/bin/env ninja -f 145 | # variables 146 | {proj_name}_src_dir = {os.getcwd()} 147 | out_file = {outname} 148 | {proj_name}_compiler = {compiler} 149 | {proj_name}_build_dir = {os.path.abspath(conf.general.build_dir)} 150 | 151 | build ${proj_name}_build_dir: mkdir ${proj_name}_build_dir 152 | """ 153 | 154 | def get_file_build_path(self, 
proj_name: str, general: General, file: str): 155 | return os.path.abspath(self.cmd_gen.get_file_build_path(general, file)).replace(':', '$:') 156 | # return f"${proj_name}_src_dir/{self.cmd_gen.get_file_build_path(general, file)}" 157 | 158 | @staticmethod 159 | def get_target_type_ext(project: ProjectPass) -> tuple: 160 | target_type, ext = { 161 | ConfigType.APPLICATION: ('exe', project.macros["EXT_APP"]), 162 | ConfigType.DYNAMIC_LIB: ('so', project.macros["EXT_DLL"]), 163 | ConfigType.STATIC_LIB: ('ar', project.macros["EXT_LIB"]) 164 | }[project.cfg.general.config_type] 165 | return (target_type, ext) 166 | 167 | def get_output_file(self, project: ProjectPass): 168 | target_name = project.cfg.link.output_file if project.cfg.link.output_file else project.cfg.general.out_name 169 | return f"{abs_path(target_name)}{self.get_target_type_ext(project)[1]}" 170 | 171 | # TODO: handle dependencies 172 | def handle_target(self, project: ProjectPass, proj_name: str, source_files) -> str: 173 | obj_files = " ".join([self.get_file_build_path(proj_name, project.cfg.general, a) for a in source_files]) 174 | 175 | target_name = self.get_output_file(project) 176 | target_type, ext = self.get_target_type_ext(project) 177 | 178 | build = f"build {target_name}: {target_type}_{self.cmd_gen.mode.name.lower()} {obj_files}" 179 | 180 | link_flags = add_escapes(self.cmd_gen.link_flags(project.cfg, libs=False)) 181 | link_flags += " " + " ".join(self.cmd_gen.lib_dirs([""])) 182 | 183 | # slightly hacky, oh well 184 | libs = [f"${proj_name}_src_dir/{lib}" if "/" in lib else lib for lib in project.cfg.link.libs] 185 | libs = " ".join(self.cmd_gen.libs(libs)) 186 | 187 | return f"{build}\n cflags = {link_flags} {libs}\n" 188 | 189 | # Build definition for file 190 | def handle_file(self, file: str, file_compile: SourceFileCompile, proj: ProjectPass, proj_name: str) -> str: 191 | # print(os.getcwd(), project) 192 | build_path = self.get_file_build_path(proj_name, proj.cfg.general, file) 
def add_escapes(string: str) -> str:
    """Escape '$' and ':' for ninja syntax.

    '$' is doubled first so the '$' introduced by the ':' escape is not
    itself re-escaped.
    """
    escaped = string.replace("$", "$$")
    return escaped.replace(":", "$:")
Platform.LINUX, Platform.MACOS) 42 | self._set_generate_master_file(False) 43 | self._set_macro("GEN_QT") 44 | 45 | self.cmd_gen = cmd_line_gen.CommandLineGen() 46 | 47 | def does_project_exist(self, project_out_dir: str) -> bool: 48 | split_ext_path = os.path.splitext(project_out_dir)[0] 49 | if os.path.isfile(split_ext_path + ".pro"): 50 | verbose(f"File Exists: {split_ext_path}.pro") 51 | return True 52 | return False 53 | 54 | def create_project(self, project: ProjectContainer) -> None: 55 | # TODO: for some reason, qt creator doesn't want to make a new directory for out_dir/DESTDIR 56 | 57 | project_passes: List[ProjectPass] = self._get_passes(project) 58 | if not project_passes: 59 | return 60 | 61 | print_color(Color.CYAN, "QT Project Generator Running on " + project.file_name) 62 | 63 | proj_file = "" 64 | 65 | for i, proj in enumerate(project_passes): 66 | condition = gen_qpc_cond(proj) 67 | proj_file += f"{condition} {{\n\tmessage(type: {condition})\n\t\n" 68 | proj_file += self.handle_pass(proj) 69 | proj_file += "}\n\n" 70 | 71 | with open(project.file_name + ".pro", "w", encoding="utf8") as proj_io: 72 | proj_io.write(proj_file) 73 | 74 | def handle_pass(self, proj: ProjectPass) -> str: 75 | self.cmd_gen.set_mode(proj.cfg.general.compiler) 76 | 77 | qproj = "" 78 | 79 | qt_libs = [] 80 | for lib in proj.cfg.link.libs: 81 | if get_qt_lib(lib): 82 | qt_libs.append(get_qt_lib(lib)) 83 | 84 | qproj += gen_list("QT", *qt_libs) 85 | 86 | # also has staticlib on the wiki? 
87 | if proj.cfg.general.config_type == ConfigType.APPLICATION: 88 | qproj += "\tTEMPLATE = app\n\n" 89 | elif proj.cfg.general.config_type == ConfigType.STATIC_LIB: 90 | qproj += "\tTEMPLATE = staticlib\n\n" 91 | else: 92 | qproj += "\tTEMPLATE = lib\n\n" 93 | 94 | qproj += "\t# Qt Creator doesn't want to make a new directory if this doesn't exist, fun\n" 95 | qproj += f"\t# DESTDIR = {proj.cfg.general.out_dir}\n\n" 96 | 97 | qproj += gen_list("SOURCES", *pathlist(list(proj.source_files))) 98 | qproj += gen_list("HEADERS", *pathlist(list(proj.get_headers()))) 99 | 100 | qproj += gen_list("INCLUDEPATH", *pathlist(proj.cfg.compile.inc_dirs)) 101 | qproj += gen_list("DEFINES", *proj.cfg.compile.defines) 102 | 103 | # little hack to remove UNICODE from this unless the user wants unicode 104 | if "_UNICODE" not in proj.cfg.compile.defines or "UNICODE" not in proj.cfg.compile.defines: 105 | qproj += gen_rm_list("DEFINES", "_UNICODE", "UNICODE") 106 | 107 | libs = [] 108 | libs.extend(self.cmd_gen.lib_dirs(proj.cfg.link.lib_dirs)) 109 | for lib in proj.cfg.link.libs: 110 | if get_qt_lib(lib) == "": 111 | libs.append(lib) 112 | 113 | qproj += gen_list("LIBS", *libs) 114 | 115 | if proj.cfg.general.language == Language.CPP: 116 | qproj += gen_list("QMAKE_CXXFLAGS", *proj.cfg.compile.options) 117 | else: 118 | qproj += gen_list("QMAKE_CFLAGS", *proj.cfg.compile.options) 119 | 120 | config = [ 121 | get_c_ver(proj.cfg.general.standard), 122 | ] 123 | 124 | qproj += gen_list("CONFIG", *config) 125 | 126 | # another hack, cool 127 | # if proj.cfg.general.standard != Standard.CPP11: 128 | # qproj += gen_rm_list("CONFIG", "c++11") 129 | 130 | # TODO: 131 | # - linker options 132 | # - import library 133 | # - ignore libs 134 | # - output path 135 | # - output name 136 | # - compiler (can i even do this without the pro.user file?) 
# Default system include search paths on Linux.
# BUG FIX: each entry needs its own trailing comma — without it Python
# silently concatenates adjacent string literals, so the old list held a
# single bogus path "/usr/local/include/usr/include".
LINUX_DEFAULT_INC_DIRS = [
    "/usr/local/include",
    "/usr/include",
]
self._compiler = mode 33 | if mode.startswith("msvc"): 34 | self.mode = Mode.MSVC 35 | self.switch = "/" 36 | self._char_inc_dir = "/I" 37 | self._char_define = "/D" 38 | 39 | elif mode.startswith("gcc") or mode.startswith("g++") or mode.startswith("clang"): 40 | self.switch = "-" 41 | self._char_inc_dir = "-I" 42 | self._char_define = "-D" 43 | self.mode = Mode.GCC if mode.startswith("gcc") or mode.startswith("g++") else Mode.CLANG 44 | 45 | def get_file_build_path(self, general: General, file: str) -> str: 46 | path = f"{general.build_dir}/{os.path.splitext(os.path.basename(file))[0]}" 47 | return f"{path}.obj" if self.mode == Mode.MSVC else f"{path}.o" 48 | 49 | def file_compile_flags(self, cfg: Configuration, file: SourceFileCompile) -> str: 50 | return self.compile_flags(cfg.compile, cfg.general, file) 51 | 52 | def compile_flags(self, c: Compile, general: General = None, file: SourceFileCompile = None) -> str: 53 | cmd = [] 54 | cmd.extend(self.convert_defines(c.defines)) 55 | if file: 56 | cmd.extend(self.convert_defines(file.defines)) 57 | 58 | cmd.extend(self.convert_includes(c.inc_dirs)) 59 | 60 | if general is not None: 61 | if c.default_inc_dirs: 62 | if self.mode == Mode.MSVC: 63 | cmd.extend(self.convert_includes(msvc_tools.get_inc_dirs(general.compiler))) 64 | # elif os.name.startswith("linux"): 65 | # cmd.extend(self.convert_includes(LINUX_DEFAULT_INC_DIRS)) 66 | # else: 67 | # warning("unknown default include paths") 68 | 69 | # temporarily disabled 70 | # pch = self.get_pch_all(c.pch if not file.pch else file.pch, 71 | # file.pch_file, file.pch_out, 72 | # c.pch_file, c.pch_out) 73 | # if pch: 74 | # cmd.extend(pch) 75 | 76 | cmd.extend(c.options) 77 | if file: 78 | cmd.extend(file.options) 79 | 80 | return " ".join(cmd) 81 | 82 | def link_flags(self, cfg: Configuration, libs: bool = True) -> str: 83 | cmd = [] 84 | 85 | if cfg.link.debug_file: 86 | cmd.append(self.debug_file(cfg.link.debug_file)) 87 | 88 | 
cmd.extend(self.lib_dirs(cfg.link.lib_dirs)) 89 | 90 | if cfg.link.default_lib_dirs: 91 | if self.mode == Mode.MSVC: 92 | cmd.extend(self.lib_dirs(msvc_tools.get_lib_dirs(""))) 93 | 94 | if libs and cfg.link.libs: 95 | cmd.extend(self.libs(cfg.link.libs)) 96 | 97 | if cfg.link.ignore_libs: 98 | cmd.extend(self.ignore_libs(cfg.link.ignore_libs)) 99 | 100 | if cfg.link.import_lib: 101 | cmd.append(self.import_lib(cfg.link.import_lib)) 102 | 103 | cmd.extend(cfg.link.options) 104 | 105 | return " ".join(cmd) 106 | 107 | # def convert_compile_group(self, compile_group: Compile): 108 | # pass 109 | 110 | def convert_includes(self, include_paths: list) -> list: 111 | converted_paths = [] 112 | [converted_paths.append(f"{self._char_inc_dir}\"{os.path.abspath(path)}\"") for path in include_paths] 113 | return converted_paths 114 | 115 | @staticmethod 116 | def convert_char(char: str, items: list) -> list: 117 | converted_paths = [] 118 | [converted_paths.append(f"{char}{item}") for item in items] 119 | return converted_paths 120 | 121 | @staticmethod 122 | def convert_char_abs(char: str, items: list) -> list: 123 | converted_paths = [] 124 | [converted_paths.append(f"{char}\"{os.path.abspath(item)}\"") for item in items] 125 | return converted_paths 126 | 127 | @staticmethod 128 | def convert_char_basename(char: str, items: list) -> list: 129 | converted_paths = [] 130 | [converted_paths.append(f"{char}{os.path.basename(item)}") for item in items] 131 | return converted_paths 132 | 133 | def convert_defines(self, defines: list) -> list: 134 | return self.convert_char(self._char_define, defines) 135 | 136 | def lib_dirs(self, dirs: list) -> list: 137 | return self.convert_char_abs("/LIBPATH:" if self.mode == Mode.MSVC else "-L ", dirs) 138 | 139 | def libs(self, libs: list) -> list: 140 | return self.convert_char("" if self.mode == Mode.MSVC else "-l ", libs) 141 | 142 | def ignore_libs(self, libs: list) -> list: 143 | if not libs: 144 | return [] 145 | 146 | if self.mode 
def output_file(self, path: str) -> str:
    """Return the linker output-file flag for *path*.

    Only MSVC has a dedicated flag here (/OUT:"..."); empty paths and
    non-MSVC modes yield "".
    """
    if path and self.mode == Mode.MSVC:
        return f'/OUT:"{path}"'
    return ""
return "" 216 | 217 | def get_pch_out(self, path: str) -> str: 218 | if not path: 219 | return "" 220 | 221 | if self.mode == Mode.MSVC: 222 | return f"/Fp\"{os.path.abspath(os.path.splitext(path)[0])}.pch\"" 223 | 224 | return "" 225 | 226 | 227 | # meh 228 | def get_compiler(compiler: str, language: Enum) -> str: 229 | if compiler.startswith("msvc"): 230 | return "cl.exe" 231 | elif compiler.startswith("gcc_"): 232 | if language == Language.CPP: 233 | return "g++-" + str(compiler[4:]) 234 | else: # assuming language == Language.C: 235 | return "gcc-" + str(compiler[4:]) 236 | elif compiler.startswith("clang_") and compiler != "clang_cl": 237 | return "clang-" + str(compiler[6:]) 238 | else: 239 | return compiler 240 | 241 | -------------------------------------------------------------------------------- /project_generators/shared/msvc_tools.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | 5 | def get_inc_dirs(version: str) -> list: 6 | return MSVC_DEFAULT_INC_DIRS.copy() 7 | 8 | 9 | def get_lib_dirs(version: str) -> list: 10 | return MSVC_DEFAULT_LIB_DIRS.copy() 11 | 12 | 13 | # stupid and ugly, and these are for the latest msvc version 14 | # should attempt to set something up that can grab the correct directories for these files based on your msvc version 15 | # and where they are on your computer 16 | MSVC_DEFAULT_LIB_DIRS = [ 17 | "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Tools\\MSVC\\14.25.28610\\lib\\x86", 18 | "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Tools\\MSVC\\14.25.28610\\atlmfc\\lib\\x86", 19 | "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Auxiliary\\VS\\lib\\x86", 20 | "C:\\Program Files (x86)\\Windows Kits\\10\\lib\\10.0.18362.0\\ucrt\\x86", 21 | "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Auxiliary\\VS\\UnitTest\\lib", 22 | "C:\\Program Files (x86)\\Windows 
Kits\\10\\lib\\10.0.18362.0\\um\\x86", 23 | "C:\\Program Files (x86)\\Windows Kits\\NETFXSDK\\4.8\\lib\\um\\x86" 24 | ] 25 | 26 | MSVC_DEFAULT_INC_DIRS = [ 27 | # base includes 28 | # $(VC_IncludePath) 29 | # $(WindowsSDK_IncludePath) 30 | "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Tools\\MSVC\\14.25.28610\\include", 31 | "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Tools\\MSVC\\14.25.28610\\atlmfc\\include", 32 | "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Auxiliary\\VS\\include", 33 | "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.18362.0\\ucrt", 34 | "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Auxiliary\\VS\\UnitTest\\include", 35 | "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.18362.0\\um", 36 | "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.18362.0\\shared", 37 | "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.18362.0\\winrt", 38 | "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.18362.0\\cppwinrt", 39 | "C:\\Program Files (x86)\\Windows Kits\\NETFXSDK\\4.8\\Include\\um", 40 | 41 | # other mfc and atl crap 42 | # $(VC_SourcePath) 43 | "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Tools\\MSVC\\14.25.28610\\atlmfc\\src\\mfc", 44 | "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Tools\\MSVC\\14.25.28610\\atlmfc\\src\\mfcm", 45 | "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Tools\\MSVC\\14.25.28610\\atlmfc\\src\\atl", 46 | "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Tools\\MSVC\\14.25.28610\\crt\\src", 47 | "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Auxiliary\\VS\\src", 48 | "C:\\Program Files (x86)\\Windows Kits\\10\\Source\\10.0.18362.0\\ucrt", 49 | ] 50 | 51 | 52 | -------------------------------------------------------------------------------- 
def projects_finished(self):
    """Write all collected per-config entries out to .vscode/c_cpp_properties.json.

    Does nothing when no project registered any configuration.
    """
    if not self.commands_list:
        return

    properties = {
        "configurations": list(self.commands_list.values()),
        "version": 4,
    }

    print("------------------------------------------------------------------------")
    create_directory(".vscode")
    print_color(Color.CYAN, "Writing: " + ".vscode/c_cpp_properties.json")
    with open(".vscode/c_cpp_properties.json", "w") as out_file:
        out_file.write(json.dumps(properties, indent=4))
def get_platform_list() -> list:
    """Return the unique, valid Platform enums requested via --platforms.

    Bug fix: the loop previously hit an unconditional ``break`` after the
    first iteration, so at most one platform was ever collected even when
    several were requested on the command line.
    """
    platforms = []
    for platform in args.platforms:
        # keep only real Platform members, without duplicates, preserving order
        if platform in Platform and platform not in platforms:
            platforms.append(platform)
    return platforms
def check_platforms(platform_list: set, generator_platforms: list) -> set:
    """Return the platforms common to both collections (empty set when disjoint)."""
    # set.intersection accepts any iterable, so no explicit set() conversion is needed
    return platform_list.intersection(generator_platforms)
def should_call_create_master_file(file_path: str, info, generator: BaseProjectGenerator, hashes: dict) -> bool:
    """Decide whether the master file needs to be (re)generated.

    True when forced via --force_master, when the file is missing on disk,
    or when its stored hash no longer matches; False otherwise (including
    when no file path was produced at all).
    """
    if args.force_master:
        return True
    if not file_path:
        return False
    # regenerate when the file is gone or its recorded hash check fails
    return (not os.path.isfile(file_path)
            or not qpc_hash.check_master_file_hash(file_path, info, generator, hashes))
147 | for generator in generators_rebuild: 148 | if generator_needs_rebuild(project_filename, generator, rebuild_info): 149 | generator.create_project(project) 150 | 151 | if project_dir and project_dir != args.root_dir: 152 | os.chdir(args.root_dir) 153 | 154 | info.add_project_dependencies(project_script, project.dependencies) 155 | qpc_hash.write_project_hash(project_script, project, valid_generators) 156 | 157 | else: 158 | info.add_project_dependencies(project_script, qpc_hash.get_project_dependencies(project_script)) 159 | 160 | info.project_hashes[project_script] = qpc_hash.get_hash_file_path(project_script) 161 | 162 | if args.time: 163 | print("\nFinished Parsing Projects" 164 | "\n\tTime: " + str(round(perf_counter() - start_time, 4)) + 165 | "\n\tParse Count: " + str(parser.counter)) 166 | 167 | [generator.projects_finished() for generator in generator_list] 168 | 169 | if args.master_file: 170 | print(PRINT_LINE) 171 | for generator in generator_list: 172 | if not generator.generates_master_file(): 173 | continue 174 | 175 | file_path = generator.get_master_file_path(args.master_file) 176 | generator_platforms = set() 177 | [generator_platforms.add(platform) for platform in generator.get_supported_platforms()] 178 | project_hashes = info.get_hashes(*generator_platforms) 179 | 180 | if should_call_create_master_file(file_path, info, generator, project_hashes): 181 | generator.create_master_file(info, file_path) 182 | qpc_hash.write_master_file_hash(file_path, info, generator.get_supported_platforms(), generator.path) 183 | 184 | 185 | if __name__ == "__main__": 186 | # TODO: maybe print more info here if verbose? 
187 | print(PRINT_LINE + "\n" 188 | " Quiver Project Creator\n " + ' '.join(sys.argv[1:]) + 189 | "\n" + PRINT_LINE) 190 | 191 | # doing this so we only allow valid generator options 192 | GENERATOR_HANDLER = GeneratorHandler() 193 | parse_args(GENERATOR_HANDLER.get_generator_args()) 194 | GENERATOR_HANDLER.post_args_init() 195 | qpc_hash.post_args_init() 196 | main() 197 | 198 | print(f"{PRINT_LINE}\nFinished - {qpc_logging.WARNING_COUNT} Warnings\n{PRINT_LINE}") 199 | -------------------------------------------------------------------------------- /qpc_args.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | import argparse 4 | import shutil 5 | from enum import Enum, auto, EnumMeta 6 | import glob 7 | from qpc_base import Platform, Arch, get_default_platform, get_default_archs 8 | 9 | 10 | args = argparse.Namespace() 11 | DEFAULT_BASEFILE = "_qpc_scripts/_default.qpc_base" 12 | 13 | 14 | # this is here so i can check arguments globally across files 15 | def parse_args(generators: list) -> None: 16 | platforms = [platform.name.lower() for platform in Platform] 17 | archs = [arch.name.lower() for arch in Arch] 18 | 19 | cmd_parser = argparse.ArgumentParser() 20 | 21 | # maybe change to path? 
meh 22 | cmd_parser.add_argument("--root_dir", "-d", default=os.getcwd(), help="Set the root directory of the script") 23 | cmd_parser.add_argument("--base_file", "-b", help="Optional file with project, group, and config definitions") 24 | 25 | cmd_parser.add_argument("--time", "-t", action="store_true", help="Print the time taken to parse") 26 | cmd_parser.add_argument("--verbose", "-v", action="store_true", help="Enable verbose console output") 27 | cmd_parser.add_argument("--force", "-f", action="store_true", help="Force recreate all projects") 28 | cmd_parser.add_argument("--force_master", "-fm", action="store_true", help="Force recreate master file") 29 | cmd_parser.add_argument("--hide_warnings", "-w", action="store_true", help="Suppress all warnings") 30 | cmd_parser.add_argument("--check_files", "-cf", action="store_true", help="Check if any added file exists") 31 | cmd_parser.add_argument("--skip_projects", "-sp", action="store_true", help="Don't generate projects") 32 | cmd_parser.add_argument("--legacy_macros", "-lm", action="store_true", help="Legacy Macros (only start with $)") 33 | cmd_parser.add_argument("--system_folders", "-sf", action="store_true", 34 | help="Use filesystem folders instead of custom folders for IDE's like visual studio") 35 | 36 | cmd_parser.add_argument("--configs", "-c", nargs="+", default=(), help="Select configs, added to configs set in base files") 37 | cmd_parser.add_argument("--platforms", "-p", nargs="+", default=(get_default_platform(),), choices=platforms, 38 | help="Select platforms to generate for instead of the default") 39 | cmd_parser.add_argument("--archs", "-ar", nargs="+", default=get_default_archs(), choices=archs, 40 | help="Select architectures to generate for instead of the default") 41 | cmd_parser.add_argument("--generators", "-g", nargs="+", default=generators, choices=generators, help="Project types to generate") 42 | cmd_parser.add_argument("--add", "-a", nargs="+", default=(), help="Add projects or 
groups to generate") 43 | cmd_parser.add_argument("--remove", "-r", default=(), nargs="+", help="Remove projects or groups from generating") 44 | cmd_parser.add_argument("--macros", "-m", nargs="+", default=(), help="Macros to define and set to '1' in projects") 45 | 46 | # TODO: rework parts of qpc to allow adding or removing projects after parsing a project, then add these 47 | # cmd_parser.add_argument("-at", "--add_tree", nargs="+", help="Add a project and all projects that depend on it") 48 | # cmd_parser.add_argument("-ad", "--add_depend", nargs="+", help="Add a project and all projects that it depends on") 49 | # cmd_parser.add_argument("-rt", "--remove_tree", nargs="+", help="Remove a project and all projects that depend on it") 50 | # cmd_parser.add_argument("-rd", "--remove_depend", nargs="+", help="Remove a project and all projects that it depends on") 51 | 52 | cmd_parser.add_argument("--masterfile", "-mf", dest="master_file", 53 | help='Create a master file for building all projects with') 54 | 55 | global args 56 | args.__dict__.update(cmd_parser.parse_args().__dict__) 57 | 58 | args.root_dir = os.path.normpath(args.root_dir) if os.path.isabs(args.root_dir) else \ 59 | os.path.normpath(os.getcwd() + os.sep + args.root_dir) 60 | 61 | # args.base_file = os.path.normpath(args.base_file) if os.path.isabs(args.base_file) else \ 62 | # os.path.normpath(args.root_dir + os.sep + args.base_file) 63 | 64 | # args.out_dir = os.path.normpath(args.out_dir) if os.path.isabs(args.out_dir) else \ 65 | # os.path.normpath(args.root_dir + os.sep + args.out_dir) 66 | 67 | args.platforms = _convert_to_enum(args.platforms, Platform) 68 | args.archs = _convert_to_enum(args.archs, Arch) 69 | 70 | 71 | # could just make a dictionary, where keys are enums and values are your mom? 
72 | def _get_enum_from_name(enum_name: str, enum_list: EnumMeta) -> Enum: 73 | for enum in enum_list: 74 | if enum.name.lower() == enum_name: 75 | return enum 76 | 77 | 78 | # convert stuff in args to the enum value 79 | def _convert_to_enum(arg_list: list, enum_list: EnumMeta) -> list: 80 | if type(arg_list) == tuple: 81 | return arg_list 82 | for index, value in enumerate(arg_list): 83 | arg_list[index] = _get_enum_from_name(value, enum_list) 84 | return arg_list 85 | 86 | 87 | def get_arg_macros() -> dict: 88 | from qpc_logging import warning # avoids circular imports 89 | arg_macros = {} 90 | for macro in args.macros: 91 | name = macro 92 | value = "1" 93 | if "=" in macro: 94 | name, value = macro.split("=", 1) 95 | if not value: 96 | warning(f"Macro \"{macro}\" has trailing equals sign, setting to 1") 97 | value = "1" 98 | arg_macros[name] = value 99 | return arg_macros 100 | -------------------------------------------------------------------------------- /qpc_base.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | from platform import machine 4 | from enum import Enum, auto, EnumMeta 5 | from time import perf_counter 6 | 7 | global args 8 | 9 | 10 | QPC_DIR = os.path.dirname(os.path.realpath(__file__)).replace("\\", "/") + "/" 11 | QPC_GENERATOR_DIR = QPC_DIR + "project_generators" 12 | 13 | 14 | def timer_diff(start_time: float) -> str: 15 | return str(round(perf_counter() - start_time, 4)) 16 | 17 | 18 | # header files like c/c++ would really be nice right about now 19 | # this is to avoid circular imports, but still be able to use arguments here 20 | def post_args_init(): 21 | global args 22 | from qpc_args import args 23 | 24 | 25 | class Arch(Enum): 26 | AMD64 = auto(), 27 | I386 = auto(), 28 | ARM = auto(), 29 | ARM64 = auto(), 30 | # POWER10 = auto(), 31 | # POWER9 = auto(), 32 | 33 | 34 | class Platform(Enum): 35 | WINDOWS = auto(), 36 | LINUX = auto(), 37 | MACOS = auto(), 38 | 39 | 
40 | # architectures the platform is on 41 | PLATFORM_ARCHS = { 42 | Platform.WINDOWS: {Arch.I386, Arch.AMD64, Arch.ARM, Arch.ARM64}, 43 | Platform.LINUX: {Arch.I386, Arch.AMD64, Arch.ARM, Arch.ARM64}, 44 | Platform.MACOS: {Arch.AMD64}, 45 | } 46 | 47 | 48 | class BaseProjectGenerator: 49 | def __init__(self, name: str): 50 | self.name = name 51 | self.filename = None 52 | self.path = None 53 | self.id = None 54 | self._platforms = [] 55 | self._architectures = [] 56 | self._uses_folders = False 57 | self._uses_master_file = False 58 | self._macro = "" 59 | 60 | self._start_time = None 61 | self._current_build = None 62 | 63 | # use this for anything that needs to be set after arguments are parsed/initialized 64 | def post_args_init(self): 65 | pass 66 | 67 | # finished parsing all projects, override function 68 | def projects_finished(self): 69 | pass 70 | 71 | def _print_creating(self, output_name: str): 72 | if args.time: 73 | self._start_time = perf_counter() 74 | else: 75 | print("Creating: " + output_name) 76 | self._current_build = output_name 77 | 78 | def _print_finished(self): 79 | if args.time and self._current_build: 80 | print(timer_diff(self._start_time) + " - Created: " + self._current_build) 81 | self._current_build = None 82 | 83 | # ProjectContainer from qpc_project.py, and returns List[ProjectPass], also from qpc_project.py 84 | def _get_passes(self, project) -> list: 85 | return project.get_passes(self.id) 86 | 87 | def _add_platform(self, platform: Platform) -> None: 88 | if platform not in Platform: 89 | raise Exception(f"Generator \"{self.name}\" tried adding an invalid platform: {platform.name}") 90 | elif platform not in self._platforms: 91 | self._platforms.append(platform) 92 | 93 | def _add_architecture(self, architecture: Arch) -> None: 94 | if architecture not in Arch: 95 | raise Exception(f"Generator \"{self.name}\" tried adding an invalid platform: {architecture.name}") 96 | elif architecture not in self._architectures: 97 | 
self._architectures.append(architecture) 98 | 99 | def _add_architectures(self, *architectures) -> None: 100 | [self._add_architecture(arch) for arch in architectures] 101 | 102 | def _add_platforms(self, *platforms) -> None: 103 | [self._add_platform(platform) for platform in platforms] 104 | 105 | def _set_project_folders(self, uses_project_folders: bool) -> None: 106 | self._uses_folders = uses_project_folders if type(uses_project_folders) == bool else self._uses_folders 107 | 108 | def _set_generate_master_file(self, use_master_file: bool) -> None: 109 | self._uses_master_file = use_master_file if type(use_master_file) == bool else self._uses_master_file 110 | 111 | def _set_macro(self, macro: str) -> None: 112 | self._macro = macro 113 | 114 | def get_macro(self) -> str: 115 | # return {"$" + self._macro: "1"} if self._macro else {} 116 | return self._macro 117 | 118 | def uses_folders(self) -> bool: 119 | return self._uses_folders 120 | 121 | def generates_master_file(self) -> bool: 122 | return self._uses_master_file 123 | 124 | def get_supported_platforms(self) -> list: 125 | return self._platforms 126 | 127 | def create_project(self, project_list) -> None: 128 | pass 129 | 130 | def does_project_exist(self, project_out_dir: str) -> bool: 131 | return True 132 | 133 | @staticmethod 134 | def _get_base_path(project_out_dir: str) -> str: 135 | return os.path.split(project_out_dir)[0] + "/" 136 | 137 | def get_master_file_path(self, master_file_path: str) -> str: 138 | print(f'Warning: Generator "{self.name}" doesn\'t override get_master_file_path but has _set_generate_master_file set to True') 139 | return "" 140 | 141 | def create_master_file(self, settings, master_file_path: str) -> str: 142 | # return file name or abspath or whatever 143 | pass 144 | 145 | def does_master_file_exist(self, master_file_path: str) -> bool: 146 | return True 147 | 148 | 149 | def get_default_platform() -> Platform: 150 | if sys.platform == "win32": 151 | return 
def get_default_archs() -> tuple:
    """Return the architectures to target by default on the current machine.

    Bug fixes: 64-bit ARM machines report "aarch64" (Linux) or "arm64"
    (macOS) from platform.machine(), neither of which the old chain handled
    correctly — Linux aarch64 matched nothing and returned None, and macOS
    arm64 lost Arch.ARM64. The old code also compared against the typo
    "arm7l" (the real 32-bit value is "armv7l") and gave that 32-bit chip
    the 64-bit arch list. Unknown machines still fall through and return
    None, as before.
    """
    arch_name = machine()

    if arch_name in {"AMD64", "x86_64"}:
        return (Arch.I386, Arch.AMD64)

    # very rare
    if arch_name in {"i386", "i486", "i586", "i686", "x86"}:
        return (Arch.I386,)

    # 64-bit ARM can also run 32-bit ARM binaries
    if arch_name in {"arm64", "aarch64"}:
        return (Arch.ARM, Arch.ARM64)

    # armv7l and friends: 32-bit only
    if arch_name.startswith("arm"):
        return (Arch.ARM,)
def get_all_dict_values(d: dict):
    """Depth-first collect every non-dict leaf value from a nested dict.

    Values are returned in insertion order, with nested dicts flattened
    in place of their key.
    """
    leaves = []
    for value in d.values():
        if isinstance(value, dict):
            leaves += get_all_dict_values(value)  # recurse into nested dicts
        else:
            leaves.append(value)
    return leaves
wouldn't speed anything up tbh 36 | def get_includes(file_path: str, include_dirs: list, headers: list) -> list: 37 | abs_path = os.path.abspath(file_path) # some files might have the same relative path, but different abs paths 38 | if abs_path not in INCLUDE_DICT: 39 | INCLUDE_DICT[abs_path] = _get_includes(abs_path, include_dirs) 40 | 41 | return INCLUDE_DICT[abs_path] 42 | 43 | # this takes slightly longer, but then it won't use absolute directories 44 | rel_path = os.getcwd().split(args.root_dir) 45 | rel_path = rel_path[1] if len(rel_path) == 2 else "" 46 | 47 | cwd = "{0}".format("../" * rel_path.count("/")) 48 | if cwd not in INCLUDE_DICT_DIR: 49 | INCLUDE_DICT_DIR[cwd] = {} 50 | 51 | if abs_path not in INCLUDE_DICT: 52 | INCLUDE_DICT[abs_path] = _get_includes(abs_path, include_dirs) 53 | 54 | if abs_path not in INCLUDE_DICT_DIR[cwd]: 55 | INCLUDE_DICT_DIR[cwd][abs_path] = [os.path.relpath(include) for include in INCLUDE_DICT[abs_path]] 56 | 57 | try: 58 | return INCLUDE_DICT_DIR[cwd][abs_path] 59 | except KeyError as F: 60 | print(F) 61 | 62 | 63 | def _get_includes(file_path: str, include_dirs: list) -> list: 64 | includes = [] 65 | include_dirs = [] if include_dirs is None else include_dirs 66 | 67 | if os.path.isfile(file_path): 68 | with open(file_path, 'rb') as f: 69 | lines = f.read().splitlines() 70 | else: 71 | return [] 72 | 73 | include_dirs_abs = [] 74 | for include_dir in include_dirs: 75 | include_dir_abs = os.path.abspath(include_dir) 76 | if include_dir_abs in EXCLUDE_DIRS: 77 | continue 78 | elif include_dir_abs in INCLUDE_LIST_DIR: 79 | include_dirs_abs.append(include_dir_abs) 80 | elif os.path.isdir(include_dir_abs): 81 | include_dirs_abs.append(include_dir_abs) 82 | INCLUDE_LIST_DIR[include_dir_abs] = set(os.listdir(include_dir_abs)) 83 | else: 84 | EXCLUDE_DIRS.add(include_dir_abs) 85 | 86 | def add_header(_header: str, abs_path: str) -> None: 87 | includes.append(abs_path) 88 | # HEADER_DICT[_header] = abs_path 89 | 
HEADER_PATHS.add(abs_path) 90 | 91 | for line in lines: 92 | line = line.strip() 93 | found_header = include_pattern.match(line) 94 | if found_header: 95 | found_header = found_header.group(1).decode() 96 | 97 | if found_header in EXCLUDE_LIST: 98 | continue 99 | 100 | found_header_path, found_header_name = os.path.split(found_header) 101 | for include_dir in include_dirs_abs: 102 | path_extended = include_dir + "/" + found_header_path 103 | if found_header_name in INCLUDE_LIST_DIR[include_dir] or \ 104 | path_extended in INCLUDE_LIST_DIR and found_header_name in INCLUDE_LIST_DIR[path_extended]: 105 | add_header(found_header, include_dir + "/" + found_header) 106 | break 107 | else: 108 | header_paths = [include_dir + "/" + found_header for include_dir in include_dirs_abs] 109 | header_paths.insert(0, os.path.abspath(found_header)) 110 | 111 | # first check if its in INVALID_PATHS or in HEADER_PATHS, much faster 112 | for header_path_abs in header_paths: 113 | if header_path_abs in INVALID_PATHS: 114 | break 115 | elif header_path_abs in HEADER_PATHS: 116 | add_header(found_header, header_path_abs) 117 | break 118 | else: 119 | # then check the disk if none have been found, last resort slow method 120 | for header_path_abs in header_paths: 121 | if os.path.isfile(header_path_abs): 122 | header_dir = os.path.split(header_path_abs)[0] 123 | INCLUDE_LIST_DIR[header_dir] = set(os.listdir(header_dir)) 124 | add_header(found_header, header_path_abs) 125 | break 126 | # adding it to this so we don't waste time checking the disk 127 | # for if the file exists, since we know it doesn't 128 | INVALID_PATHS.add(header_path_abs) 129 | # else: 130 | # if not args.hide_warnings: 131 | # print("File doesn't exist: " + found_header) 132 | return includes 133 | -------------------------------------------------------------------------------- /qpc_generator_handler.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | # 
# https://stackoverflow.com/questions/5881873/python-find-all-classes-which-inherit-from-this-one
def inheritors(klass):
    """Return the set of every class that (directly or transitively) derives from *klass*.

    Walks the live subclass tree via ``__subclasses__``; the result is a set,
    so discovery order does not matter.
    """
    found = set()
    pending = [klass]
    while pending:
        current = pending.pop()
        # only descend into classes we have not seen yet, so diamond
        # inheritance does not cause re-visits
        fresh = [child for child in current.__subclasses__() if child not in found]
        found.update(fresh)
        pending.extend(fresh)
    return found
enumerate(self.project_generator_modules.values()): 57 | if project_generator_type in generator_module.__dict__.values(): 58 | project_generator.path = generator_module.__file__.replace("\\", "/") 59 | project_generator.filename = os.path.basename(project_generator.path)[:-3] 60 | project_generator.id = index 61 | break 62 | self.project_generators_all.append(project_generator) 63 | 64 | def post_args_init(self): 65 | post_args_init() 66 | for generator in self.project_generators_all: 67 | if generator.filename in args.generators: 68 | self.project_generators.append(generator) 69 | [generator.post_args_init() for generator in self.project_generators] 70 | 71 | def get_generator_names(self) -> list: 72 | return [project_generator.output_type for project_generator in self.project_generators] 73 | 74 | def get_generator_args(self): 75 | return [project_generator.filename for project_generator in self.project_generators_all] 76 | 77 | def get_generators(self, generator_names: list) -> list: 78 | return [self.get_generator(name) for name in generator_names] 79 | 80 | def get_generator(self, generator_name: str) -> BaseProjectGenerator: 81 | for project_generator in self.project_generators: 82 | if project_generator.output_type == generator_name: 83 | return project_generator 84 | 85 | def get_generator_supported_platforms(self, generator_name: str) -> list: 86 | generator = self.get_generator(generator_name) 87 | if generator: 88 | return generator.get_supported_platforms() 89 | 90 | def does_project_exist(self, project_path: str, generator_name: str) -> bool: 91 | generator = self.get_generator(generator_name) 92 | if generator: 93 | return generator.does_project_exist(project_path) 94 | 95 | -------------------------------------------------------------------------------- /qpc_hash.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import qpc_reader 3 | from qpc_args import args 4 | from qpc_base import 
# Source: https://bitbucket.org/prologic/tools/src/tip/md5sum
def make_hash(filename: str) -> str:
    """Return the MD5 hex digest of *filename*, or "" when it is not a file.

    Reads in chunks so large files are never loaded into memory at once.
    """
    if not os.path.isfile(filename):
        return ""
    digest = hashlib.md5()
    chunk_size = 128 * digest.block_size
    with open(filename, "rb") as stream:
        while True:
            chunk = stream.read(chunk_size)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
# to be called after check_hash is called, so we know what we need to rebuild exactly
def get_rebuild_info(project_path: str, rebuild_generators: list) -> dict:
    """Return the cached rebuild-info dict for *project_path*.

    Populates the CHECKED_HASHES cache via check_hash if needed, then merges
    in any explicitly requested generators. When nothing specific is
    requested and nothing was detected as stale, every configured generator
    is scheduled.

    :param project_path: project script path, used as the cache key
    :param rebuild_generators: generator objects the caller wants rebuilt
    :return: the mutable cache entry {"result", "generators", "rebuild_all", ...}
    """
    if project_path not in CHECKED_HASHES:
        # False suppresses the "Valid: ..." print; we only want the cache filled
        check_hash(project_path, False)

    entry = CHECKED_HASHES[project_path]

    if rebuild_generators:
        for gen in rebuild_generators:
            if gen.filename not in entry["generators"]:
                entry["generators"].append(gen.filename)

    elif not entry["generators"]:
        # BUGFIX: copy the list instead of aliasing the module-level
        # GENERATOR_FILE_NAMES; a later append to this entry (first branch
        # above) would otherwise mutate the global list shared by every project
        entry["generators"] = list(GENERATOR_FILE_NAMES)

    return entry
def _project_check_file_hash(project_dir: str, hash_list: list, project_path: str) -> bool:
    """Compare each stored file hash for a project against the file on disk.

    Deliberately checks every entry instead of returning at the first
    mismatch, so that all out-of-date generators are recorded in the
    CHECKED_HASHES cache entry for this project.

    :param project_dir: directory the stored relative paths are resolved against
    :param hash_list: QPC blocks whose key is the stored md5 and whose first value is the file path
    :param project_path: key into the module-level CHECKED_HASHES cache
    :return: True only when every stored hash still matches the file on disk
    """
    result = True
    for hash_block in hash_list:
        # absolute paths (or an empty project dir) are used as-is
        if os.path.isabs(hash_block.values[0]) or not project_dir:
            project_file_path = posix_path(os.path.normpath(hash_block.values[0]))
        else:
            project_file_path = posix_path(os.path.normpath(project_dir + "/" + hash_block.values[0]))

        if hash_block.key != make_hash(project_file_path):
            # a modified generator script only forces that generator's output
            # to be rebuilt; any other modified file forces a full rebuild
            if not CHECKED_HASHES[project_path]["rebuild_all"] and hash_block.values[0] in QPC_GENERATOR_HASHES:
                generator_name = os.path.splitext(os.path.basename(hash_block.values[0]))[0]
                if generator_name in args.generators:
                    CHECKED_HASHES[project_path]["generators"].append(generator_name)
            else:
                CHECKED_HASHES[project_path]["rebuild_all"] = True
            verbose("File Modified: " + hash_block.values[0])
            result = False
    return result
def get_out_dir(project_hash_file_path):
    """Read the stored "working_dir" value out of a project hash file.

    :param project_hash_file_path: path to the .qpc_hash file
    :return: the normalized posix working directory, or "" when the hash
             file is missing, unreadable, or has no "commands" block
             (previously returned None for a missing file)
    """
    if not os.path.isfile(project_hash_file_path):
        return ""

    hash_file = qpc_reader.read_file(project_hash_file_path)
    if not hash_file:
        return ""

    commands_block = hash_file.get_item("commands")
    if commands_block is None:
        # malformed hash file; this used to be a leftover debug print ("hold up")
        verbose("Hash file has no \"commands\" block: " + project_hash_file_path)
        return ""

    return posix_path(os.path.normpath(commands_block.get_item_values("working_dir")[0]))
def _check_files(project_dir, hash_file_list, file_list, project_def_list: dict = None) -> bool:
    """Validate the master file's stored per-project entries against the current state.

    A mismatch in project count, project set, folder placement, or dependency
    hash means the master file must be regenerated.

    :param project_dir: directory stored relative hash paths are resolved against
    :param hash_file_list: "files" blocks from the master hash file
    :param file_list: current mapping of project script -> hash file path
    :param project_def_list: current project -> folder-list mapping; only
                             passed when the generator uses folders
    :return: True when nothing relevant changed since the master file was written
    """
    # a project was added or removed outright
    if len(hash_file_list) != len(file_list):
        return False
    for file_block in hash_file_list:
        hash_path = file_block.get_item_values("hash_path")[0]
        hash_folder = file_block.get_item_values("folder")
        hash_folder = hash_folder[0] if hash_folder else ""
        dependency_hash = file_block.get_item_values("dependency_hash")
        dependency_hash = dependency_hash[0] if dependency_hash else ""

        if os.path.isabs(hash_path) or not project_dir:
            hash_path = posix_path(os.path.normpath(hash_path))
        else:
            hash_path = posix_path(os.path.normpath(project_dir + "/" + hash_path))

        if hash_path not in file_list.values():
            verbose("New project added: " + file_block.key)
            return False

        elif hash_folder and project_def_list:
            # compare the stored solution-folder path with the current one
            for project_def in project_def_list:
                if file_block.key == project_def.path:
                    folder = "/".join(project_def_list[project_def])
                    if hash_folder != folder:
                        # uh, what if this generator doesn't use folders
                        verbose(f"Project Folder Path Changed on \"{file_block.key}\":\n"
                                f"\"{hash_folder}\" -> \"{folder}\"")
                        return False
                    break

        # Now check dependencies
        project_dep_list = get_project_dependencies(file_block.key)
        if not project_dep_list:
            if dependency_hash:  # and not script_path.values[0] == "":
                # all dependencies were removed from it, and we think it has some still, rebuild
                verbose("Outdated dependency list: " + file_block.key)
                return False
            continue
        # NOTE(review): "and project_dep_list" is redundant here - this branch
        # is only reached when project_dep_list is truthy
        elif not dependency_hash and project_dep_list:
            # project has dependencies now, and we think it doesn't, rebuild
            return False

        # order-insensitive comparison: the stored hash was made from a sorted list
        project_dep_list.sort()
        if dependency_hash != hash_from_string(' '.join(project_dep_list)):
            verbose(f"Dependencies Changed: \"{file_block.key}\"")
            return False

    return True
def get_hash_file_ext(project_path) -> str:
    """Pick the hash-file suffix for a script path.

    ".qpc" scripts already end with a qpc marker, so they only get "_hash"
    appended; anything else gets the full ".qpc_hash" suffix.
    """
    extension = os.path.splitext(project_path)[1]
    return "_hash" if extension == ".qpc" else ".qpc_hash"
def write_project_hash(project_path: str, project: qpc_project.ProjectContainer, generators: list) -> None:
    """Write the hash file for a single project script.

    The file records the command-line state, the md5 of every qpc source
    file and used generator, the md5 of each script the project read, the
    hashes of its file globs, and its dependency list - everything
    check_hash later compares against.

    :param project_path: project script path (also determines the hash file name)
    :param project: the parsed project container
    :param generators: generator objects that produced output for this project
    """
    base_block = QPCBlockRoot(project_path)

    _write_hash_commands(base_block, project.out_dir)

    # qpc's own source files: a changed tool means everything is stale
    hashes = base_block.add_item("hashes", [])
    [hashes.add_item(hash_value, script_path) for script_path, hash_value in QPC_BASE_HASHES.items()]

    # only record the generators actually used for this project
    for generator in generators:
        if generator.path in QPC_GENERATOR_HASHES:
            hashes.add_item(QPC_GENERATOR_HASHES[generator.path], generator.path)

    hash_list = project.get_hashes()
    if hash_list:
        [hashes.add_item(hash_value, script_path) for script_path, hash_value in hash_list.items()]

    # store a hash of each glob's expansion so added/removed files are detected
    glob_files_block = base_block.add_item("glob_files", [])
    for path in project.get_glob_files():
        found_files = glob.glob(os.path.split(project_path)[0] + "/" + path)
        for index, _path in enumerate(found_files):
            found_files[index] = posix_path(_path)
        found_files.sort()
        glob_files_block.add_item(hash_from_string(' '.join(found_files)), path)

    if project.dependencies:
        dependencies_block = base_block.add_item("dependencies", [])
        [dependencies_block.add_item(script_path, None) for script_path in project.dependencies]

    with open(get_hash_file_path(project_path), mode="w", encoding="utf-8") as hash_file:
        # hash_file.write(base_block.to_string(0, True, True))
        hash_file.write(base_block.to_string(True, True))
def write_master_file_hash(project_path: str, base_info, platforms: list, generator_path: str, out_dir: str = ""):
    """Write the hash file for a master (solution-level) output file.

    Records the command-line state, the qpc/generator source hashes, and one
    "files" entry per included project (its hash-file path, solution folder,
    and a hash of its dependency list) for check_master_file_hash to verify.

    :param project_path: master file path (determines the hash file name)
    :param base_info: parsed BaseInfo with per-platform project lists
    :param platforms: platforms whose projects are included
    :param generator_path: source path of the generator that wrote the master file
    :param out_dir: output directory recorded in the commands block
    """
    base_block = QPCBlockRoot(project_path)

    _write_hash_commands(base_block, out_dir, True)

    hashes = base_block.add_item("hashes", [])
    [hashes.add_item(hash_value, script_path) for script_path, hash_value in QPC_BASE_HASHES.items()]

    if generator_path in QPC_GENERATOR_HASHES:
        hashes.add_item(QPC_GENERATOR_HASHES[generator_path], generator_path)

    # collapse the platforms down to their unique BaseInfoPlatform objects
    info_list = set()
    [info_list.add(base_info.get_base_info(platform)) for platform in platforms]
    if None in info_list:
        info_list.remove(None)
    files = base_block.add_item("files", [])

    for info_platform in info_list:
        for project_def in info_platform.projects:
            # skip projects that never produced a hash (e.g. never parsed)
            if project_def.path not in base_info.project_hashes:
                continue

            folder = "/".join(info_platform.project_folders[project_def.name])

            script = files.add_item(project_def.path, [])

            if project_def.path in base_info.project_hashes:
                hash_path = base_info.project_hashes[project_def.path]
                script.add_item("hash_path", hash_path)

            # if project_def.folder_list:
            script.add_item("folder", folder)

            if project_def.path in base_info.project_dependencies:
                # hash of the sorted dependency list; "" when there are none
                dependency_list = list(base_info.project_dependencies[project_def.path])
                dependency_list.sort()
                value = hash_from_string(" ".join(dependency_list)) if dependency_list else ""
                script.add_item("dependency_hash", value)

    with open(get_hash_file_path(project_path), mode="w", encoding="utf-8") as hash_file:
        hash_file.write(base_block.to_string(True, True))
def warning(*text):
    """Print a [WARNING] message and append a newline to its last fragment.

    Thin wrapper over warning_no_line; respects --hide-warnings and bumps
    the warning counter there.
    """
    if not text:
        # guard: the original indexed text[-1] and raised IndexError on warning()
        text = ("",)
    warning_no_line(*text[:-1], text[-1] + "\n")
def error(*text):
    """Print an [ERROR] message and abort the whole program with exit code 1."""
    _print_severity(Severity.ERROR, "\n ", *text, "\n")
    # sys.exit instead of quit(): quit() is injected by the site module and is
    # not guaranteed to exist (e.g. under python -S); sys is already imported
    sys.exit(1)
# unused, idk if this will ever be useful either
def replace_exact_macros(split_string, macros):
    """Replace tokens that exactly equal a macro name with the macro's value.

    Mutates *split_string* in place and also returns it. Macros are applied
    in dict order, so a later macro sees substitutions made by earlier ones.
    """
    for name, value in macros.items():
        split_string[:] = [value if token == name else token for token in split_string]
    return split_string
    def add_project(self, project_name: str, project_path: str, include_dir: str = "") -> None:
        """Register a project script for this platform.

        NOTE: the shared ProjectDefinition is created (and this platform added
        to it) before the path is validated, so a project with a missing or
        empty script still ends up registered - just without a path. When the
        macro expansion yields an empty path the definition is also never
        appended to this platform's _projects_all list.

        :param project_name: name used for the shared project definition
        :param project_path: script path; macros are expanded before use
        :param include_dir: prefix prepended to the stored (non-real) path
        """
        # TODO: check if script path is already used
        project_def = self.shared.add_project(project_name)
        project_def.platforms.add(self.platform)

        if include_dir and not include_dir.endswith("/"):
            include_dir += "/"

        project_path = replace_macros(project_path, self.macros)

        if not project_path:
            return

        if os.path.isfile(project_path):
            # path_real is the on-disk path; path keeps the include prefix
            project_def.path_real = project_path
            project_def.path = include_dir + project_path
        else:
            warning("Script does not exist: " + project_path)

        self._projects_all.append(project_def)
self.shared.groups and not self.is_project_added(project_path): 131 | warning("Project, Group, or File does not exist: " + project_path) 132 | 133 | for config in args.configs: 134 | if config not in self.configs: 135 | self.configs.append(config) 136 | 137 | if not self.configs: 138 | self.configs.extend(["Debug", "Release"]) 139 | 140 | def add_macro(self, project_block: QPCBlock): 141 | value = replace_macros(project_block.values[1], self.macros) 142 | verbose_color(Color.DGREEN, f"Set Macro: {project_block.values[0]} = \"{value}\"") 143 | self.macros[project_block.values[0]] = value 144 | 145 | def is_project_script_added(self, project_path: str) -> bool: 146 | return bool(self.get_project_by_script(project_path)) 147 | 148 | def is_project_added(self, project_name: str) -> bool: 149 | return bool(self.get_project_by_script(project_name)) 150 | 151 | def get_project_by_script(self, project_path: str) -> ProjectDefinition: 152 | for project in self._projects_all: 153 | if project_path == project.path: 154 | return project 155 | 156 | def get_project_by_name(self, project_name: str) -> ProjectDefinition: 157 | for project in self._projects_all: 158 | if project.name == project_name: 159 | return project 160 | 161 | def get_project(self, project_name: str) -> ProjectDefinition: 162 | for project in self._projects_all: 163 | if project_name in {project.name, project.path, project.path_real}: 164 | return project 165 | 166 | def get_dependency_path(self, key: str): 167 | project = self.get_project(key) 168 | if project: 169 | return project.path 170 | return key 171 | 172 | def _use_project(self, project: ProjectDefinition, unwanted_projects: dict, folders: tuple = None): 173 | if self.platform in project.platforms and project.name not in unwanted_projects: 174 | for added_project in self.projects: 175 | if added_project.name == project.name: 176 | break 177 | else: 178 | self.projects.append(project) 179 | self.project_folders[project.name] = folders if folders 
    # get all the _passes the user wants (this is probably the worst part in this whole project)
    def setup_wanted_projects(self, add_list: list, remove_list: list, unwanted_projects: dict) -> None:
        """Rebuild self.projects / self.project_folders from the add and remove lists.

        Removals are processed first into *unwanted_projects* (used as an
        ordered set), then additions are filtered through it via _use_project.
        Each add/remove item may be a group name, a project name known to the
        shared info, or a script path.

        :param add_list: items the user asked to generate
        :param remove_list: items the user asked to exclude
        :param unwanted_projects: dict used as a set of excluded project names;
                                  mutated in place
        :raises Exception: when add_list is empty - nothing to generate
        """
        self.projects = []
        self.project_folders = {}

        for removed_item in remove_list:
            if removed_item in self.shared.groups:
                # removing a group removes every project in it
                for project in self.shared.groups[removed_item].projects:
                    if project not in unwanted_projects:
                        unwanted_projects[project] = None

            elif removed_item in self.shared.projects_all:
                if self.shared.projects_all[removed_item] in self._projects_all:
                    unwanted_projects[removed_item] = None
            else:
                # last resort: treat the item as a script path
                for project in self._projects_all:
                    if removed_item == project.path:
                        unwanted_projects[project.name] = None
                        break
                else:
                    warning("Project, Group, or Script does not exist: " + removed_item)

        # TODO: clean up this mess
        if add_list:
            for added_item in add_list:
                if added_item in self.shared.groups:
                    # groups carry per-project folder paths for the solution tree
                    for project, folders in self.shared.groups[added_item].projects.items():
                        self._use_project(self.get_project(project), unwanted_projects, folders)

                elif added_item in self.shared.projects_all:
                    if self.shared.projects_all[added_item] in self._projects_all:
                        self._use_project(self.shared.projects_all[added_item], unwanted_projects)
                else:
                    # match by stored path or real on-disk path
                    for project in self._projects_all:
                        if added_item in {project.path, project.path_real}:
                            self._use_project(project, unwanted_projects)
                            break
                    else:
                        warning("Project, Group, or Script does not exist: " + added_item)
        else:
            raise Exception("No projects were added to generate for")
class BaseInfo:
    """Platform-independent project/group info shared by every BaseInfoPlatform."""

    def __init__(self):
        self.projects_all = {}
        self.projects = {}  # maybe remove?
        self.groups = {}
        self.active_group = None
        self.system_folders = args.system_folders
        # maybe add something for archs?
        self.info_list = [BaseInfoPlatform(self, platform) for platform in args.platforms]

        self.project_hashes = {}
        self.project_dependencies = {}

    def finish_parsing(self):
        """Finalize parsing: apply command-line args, then resolve groups and projects."""
        # plain loop instead of a throwaway side-effect list comprehension
        for info_plat in self.info_list:
            info_plat.init_args()
        self._prepare_groups()
        self._prepare_projects()

    def _prepare_groups(self):
        # only the group objects are needed; the old .items() loop ignored the keys
        for group in self.groups.values():
            group.finished()

    def _prepare_projects(self) -> dict:
        """Resolve args.add/args.remove (with glob support) into self.projects."""
        self.projects = {}  # dict keeps order, set doesn't as of 3.8, both faster than lists

        unwanted_projects = {}
        remove_list = []
        add_list = []

        def add_item(item_list: list, _item: str):
            # expand glob patterns into matching paths, otherwise keep the item as-is
            if check_file_path_glob(_item):
                item_list.extend(glob.glob(_item))
            else:
                item_list.append(_item)

        for item in args.add:
            add_item(add_list, item)
        for item in args.remove:
            add_item(remove_list, item)
        # remove wins over add when an item appears in both lists
        for item in remove_list:
            if item in add_list:
                add_list.remove(item)

        for base_info in self.info_list:
            # get folders from this
            base_info.setup_wanted_projects(add_list, remove_list, unwanted_projects.copy())
            for project in base_info.projects:
                if project not in self.projects:

                    if not project.path:
                        warning(f"Project without a script path: {project.name}")
                        continue

                    self.projects[project] = base_info.project_folders[project.name]
        return self.projects

    def _add_group_project(self, name: str, proj_dict: dict, proj_type: type):
        """Return the existing entry for name in proj_dict, creating one of proj_type if needed."""
        if name in proj_dict:
            proj_obj = proj_dict[name]
        else:
            proj_obj = proj_type(self, name)
            proj_dict[proj_obj.name] = proj_obj
        return proj_obj

    def add_group(self, group: str) -> ProjectGroup:
        return self._add_group_project(group, self.groups, ProjectGroup)

    def add_project(self, project_name: str) -> ProjectDefinition:
        return self._add_group_project(project_name, self.projects_all, ProjectDefinition)

    def get_base_info(self, platform: Platform) -> BaseInfoPlatform:
        """Return the per-platform info object for platform, or None."""
        if platform in Platform:
            for base_info in self.info_list:
                if base_info.platform == platform:
                    return base_info
        return None

    def get_configs(self) -> list:
        """All configs across every platform, deduplicated, order preserved."""
        configs = []
        for info in self.info_list:
            for cfg in info.configs:
                if cfg not in configs:
                    configs.append(cfg)
        return configs

    def get_projects(self, *platforms) -> tuple:
        """All wanted projects for the given platforms, deduplicated, order preserved."""
        project_list = {}  # dict keeps order, set doesn't as of 3.8, both faster than lists
        for base_info in self.info_list:
            if base_info.platform not in platforms:
                continue
            for project in base_info.projects:
                if project not in project_list:
                    project_list[project] = None
        return tuple(project_list.keys())

    def get_folders(self) -> list:
        # stub — folder layout is resolved elsewhere
        return []

    def get_project_folder(self, project: ProjectDefinition) -> tuple:
        pass

    def get_project_folder_by_name(self, project_name: str) -> tuple:
        pass

    def add_project_dependencies(self, project_script: str, dependencies: list):
        """Record a script's dependency list, both here and on every per-platform info."""
        self.project_dependencies[project_script] = dependencies  # might remove
        for base_info in self.info_list:
            # if base_info.platform in args.platforms:  # what is this needed for again?
            base_info.project_dependencies[project_script] = dependencies

    def get_project_dependencies(self, *platforms) -> dict:
        """Merged dependency dict across the given platforms."""
        all_dependencies = {}
        for base_info in self.info_list:
            if base_info.platform in platforms:
                all_dependencies.update(base_info.project_dependencies)
        return all_dependencies

    def get_hashes(self, *platforms) -> dict:
        """Known script hashes for every wanted project on the given platforms."""
        all_hashes = {}
        for base_info in self.info_list:
            if base_info.platform in platforms:
                for project in base_info.projects:
                    if project.path in self.project_hashes:
                        all_hashes[project.path] = self.project_hashes[project.path]
        return all_hashes
class Parser:
    """Reads base scripts and project scripts and builds project containers."""

    def __init__(self):
        self.counter = 0
        self.read_files = {}

    # TODO: bug discovered with this,
    #  if i include the groups before the base_info, it won't add any base_info
    def parse_base_info(self, base_file_path: str) -> BaseInfo:
        """Parse the base file (if any) for every platform and return the collected info."""
        info = BaseInfo()

        if base_file_path:
            # NOTE(review): logs args.base_file while reading base_file_path — confirm they always match
            verbose("\nReading: " + args.base_file)

            base_file = self.read_file(base_file_path)
            if not base_file:
                warning("Base File does not exist: " + base_file_path)
            else:
                verbose("\nParsing: " + args.base_file)

                # plain loop instead of a throwaway side-effect list comprehension
                for info_plat in info.info_list:
                    self._parse_base_info_recurse(info_plat, base_file)

        info.finish_parsing()
        return info

    def _parse_base_info_recurse(self, info: BaseInfoPlatform, base_file: QPCBlockRoot, include_dir: str = "") -> None:
        """Walk one base-file block tree, handling macros, configs, projects, groups and includes."""
        for project_block in base_file:

            if not project_block.solve_condition(info.macros):
                continue

            elif project_block.key == "macro":
                info.add_macro(project_block)

            elif project_block.key == "getenv":
                macro_name = replace_macros(project_block.values[0], info.macros)
                var_value = self.get_env_var(project_block, macro_name, info.macros)

                if var_value is None:
                    continue

                verbose_color(Color.DGREEN, f"Set Macro from envvar: {macro_name} = \"{var_value}\"")
                info.macros[project_block.values[0]] = var_value

            elif project_block.key == "system_folders":
                info.shared.system_folders = self.use_system_folders(project_block, info.shared.system_folders, info.macros)

            elif project_block.key == "configs":
                configs = project_block.get_item_list_cond(info.macros)
                for config in configs:
                    if config not in info.configs:
                        info.configs.append(config)

            elif not project_block.values:
                continue

            elif project_block.key == "project":
                self._base_project_define(project_block, info, include_dir)

            elif project_block.key == "group":
                self._base_group_define(project_block, info)

            elif project_block.key == "include":
                # "Ah shit, here we go again."
                file_path = os.path.normpath(replace_macros(project_block.values[0], info.macros))
                new_include_dir = include_dir

                if len(project_block.values) >= 2:
                    # optional second value: a directory to cd into while parsing the include
                    new_include_dir += "/" + project_block.values[1] if include_dir else project_block.values[1]
                    new_include_dir = replace_macros(new_include_dir, info.macros)
                    current_dir = os.getcwd()
                    if os.path.isdir(new_include_dir):
                        os.chdir(new_include_dir)

                verbose("Reading: " + file_path)

                try:
                    include_file = read_file(file_path)

                    verbose("Parsing... ")

                    self._parse_base_info_recurse(info, include_file, new_include_dir)
                except FileNotFoundError:
                    # BUGFIX: the warning previously printed no path at all
                    project_block.warning("File Does Not Exist: " + file_path)

                if len(project_block.values) >= 2:
                    os.chdir(current_dir)

            else:
                project_block.warning(f"Unknown Key: \"{project_block.key}\"")

    def _base_group_define(self, group_block: QPCBlock, info: BaseInfoPlatform):
        """Define a project group and register it in any listed parent groups."""
        if not group_block.values:
            group_block.warning("No Group Name Defined, skipping")
            return

        group = group_block.values[0]
        project_group = info.shared.add_group(group)
        self._parse_project_group_items(project_group, info, group_block, [])

        # any extra values are parent groups that should contain this one
        for contain_group_name in group_block.values[1:]:
            contain_group = info.shared.add_group(contain_group_name)
            contain_group.contains_group(project_group, [])

    @staticmethod
    def _base_project_define(block: QPCBlock, info: BaseInfoPlatform, include_dir: str = ""):
        """Define a project: first value is the name, optional second is the script path."""
        script = block.values[1] if len(block.values) >= 2 else ""
        info.add_project(block.values[0], script, include_dir)

    @staticmethod
    def _check_plat_condition(condition: str) -> bool:
        """True if the condition string mentions any known platform name."""
        cond = condition.lower()
        # explicit bool result instead of an implicit None fall-through
        return "windows" in cond or "linux" in cond or "macos" in cond or "posix" in cond

    def _parse_project_group_items(self, project_group: ProjectGroup, info: BaseInfoPlatform,
                                   project_block: QPCBlock, folder_list: list) -> None:
        """Recursively add folders, contained groups, and projects to a group."""
        for item in project_block.get_items_cond(info.macros):
            if item.key == "folder":
                folder_list.append(item.values[0])
                self._parse_project_group_items(project_group, info, item, folder_list)
                folder_list.remove(item.values[0])

            elif item.key == "contains":
                for group_name in item.values:
                    if group_name in info.shared.groups:
                        contain_group = info.shared.groups[group_name]
                    else:
                        contain_group = info.shared.add_group(group_name)
                    project_group.contains_group(contain_group, folder_list)

            else:
                info.add_project_to_group(item.key, project_group, folder_list)
    def parse_project(self, project_def: ProjectDefinition, project_script: str, info: BaseInfo, generator_list: list) -> ProjectContainer:
        """Parse one project script into a ProjectContainer, once per config/platform/arch pass.

        Returns None when the script file does not exist.
        """
        if args.time:
            start_time = perf_counter()
        elif not args.verbose:
            print("Parsing: " + project_script)

        project_filename = os.path.split(project_script)[1]
        project_block = self.read_file(project_filename)

        if project_block is None:
            warning("Script does not exist: " + project_script)
            return

        project_name = os.path.splitext(project_filename)[0]
        project_container = ProjectContainer(project_name, project_script, info, project_def, generator_list)

        # the same script tree is re-walked once per (config, platform, arch) pass
        for project_pass in project_container._passes:
            verbose(f"\n ---- Parsing Project - "
                    f"Config: \"{project_pass.cfg_name}\" "
                    f"Platform: \"{project_pass.platform.name}\" "
                    f"Arch: \"{project_pass.arch.name}\" ---- \n")

            verbose("Parsing: " + project_script)
            project_pass.hash_list[project_filename] = qpc_hash.make_hash(project_filename)
            self._parse_project(project_block, project_pass, project_script)
            self.counter += 1

            if project_pass.cfg.general.config_type is None:
                error("No config_type Specified in Script!",
                      "Pick one of these and add it to the \"general\" group:",
                      " ".join([f"\"{enum.name.lower()}\"" for enum in ConfigType]))

        verbose("Parsed: " + project_container.get_display_name())

        if args.time:
            print(str(round(perf_counter() - start_time, 4)) + " - Parsed: " + project_script)

        return project_container

    def _parse_project(self, project_file: QPCBlockRoot, project: ProjectPass, file_path: str, indent: str = "") -> None:
        """Walk one project-script block tree for a single pass, recursing into includes.

        indent doubles as log indentation and as an "is this the root script" flag.
        """
        file_dir, file_name = os.path.split(file_path)

        project_dir_abs = project.macros["ROOT_DIR_ABS"] + "/" + project.macros["PROJECT_DIR"]

        # the path to the project qpc script is relative to the root dir,
        # but every script included from the project script is relative to the project script directory,
        # and since we want $SCRIPT_DIR to always be relative to the project directory,
        # we just set it to the project dir if it's the root script (project qpc script)
        if indent == "":  # is the root script
            script_dir_abs = os.path.normpath(project_dir_abs)
            script_dir = ""
        else:
            script_dir_abs = os.path.normpath(project_dir_abs + "/" + file_dir)
            script_dir = file_dir

        def set_script_macros():
            # (re)set the per-script macros; called again after returning from an include
            project.add_macro(indent, "SCRIPT_NAME", file_name)
            project.add_macro(indent, "SCRIPT_DIR", script_dir)
            project.add_macro(indent, "SCRIPT_DIR_ABS", script_dir_abs)

        set_script_macros()

        for project_block in project_file:

            if not project_block.solve_condition(project.macros):
                continue

            if project_block.key == "macro":
                project.add_macro(indent, *project.replace_macros_list(*project_block.values))

            elif project_block.key == "getenv":
                macro_name = replace_macros(project_block.values[0], project.macros)
                var_value = self.get_env_var(project_block, macro_name, project.macros)

                if var_value is None:
                    continue

                project.add_macro(indent, macro_name, var_value)

            elif project_block.key == "system_folders":
                project.system_folders = self.use_system_folders(project_block, project.system_folders, project.macros)

            elif project_block.key == "config":
                self._parse_config(project_block, project)

            elif project_block.key == "files":
                self._parse_files(project_block, project, [])

            elif project_block.key == "requires":
                # "-" removes dependencies, anything else adds them
                for block in project_block.get_items_cond(project.macros):
                    if block.key == "-":
                        project.remove_dependencies(*block.values)
                    else:
                        project.add_dependencies(block.key, *block.values)

            elif project_block.key == "build_event":
                self._parse_build_event(project_block, project)

            elif project_block.key == "include":
                # Ah shit, here we go again.
                include_path = project.replace_macros(project_block.values[0])
                include_file = self._include_file(include_path, project, indent + "    ")
                if include_file:
                    try:
                        self._parse_project(include_file, project, include_path, indent + "    ")
                        # reset the script macros back to the values for this script
                        set_script_macros()
                    except RecursionError:
                        raise RecursionError("Recursive Includes found:\n" + project_block.get_formatted_info())
                    verbose(indent + "    " + "Finished Parsing")
                else:
                    project_block.warning(f"File does not exist: {include_path}")

            else:
                project_block.warning(f"Unknown Key: \"{project_block.key}\"")

    def _include_file(self, include_path: str, project: ProjectPass, indent: str) -> QPCBlockRoot:
        """Hash and read an included script; returns None when it does not exist."""
        project.hash_list[include_path] = qpc_hash.make_hash(include_path)
        include_file = self.read_file(include_path)

        if not include_file:
            return None

        verbose(indent + "Parsing: " + include_path)

        return include_file

    @staticmethod
    def _parse_build_event(project_block: QPCBlock, project: ProjectPass):
        """Parse a "build_event" block into project.build_events (last definition wins)."""
        if not project_block.values and not args.hide_warnings:
            project_block.warning("build_event has no name")

        # can only define it here
        elif project_block.get_items_cond(project.macros):
            # check to see if it's already defined
            if project_block.values[0] in project.build_events:
                if not args.hide_warnings:
                    project_block.warning("build_event already defined, redefining")

            build_event = BuildEvent(*replace_macros_list(project.macros, *project_block.values))

            command_list = replace_macros_list(project.macros, *project_block.get_item_list_cond(project.macros))
            build_event.commands.append(command_list)

            project.build_events[project_block.values[0]] = build_event

    def _parse_files(self, files_block: QPCBlock, project: ProjectPass, folder_list: list) -> None:
        """Parse a "files" block: folders recurse, "-" removes files, anything else adds them."""
        if not files_block.solve_condition(project.macros):
            return

        for block in files_block.get_items_cond(project.macros):
            if block.key == "folder":
                # system_folders mode ignores script folders and uses the filesystem layout
                if not project.system_folders:
                    folder_list.append(block.values[0])
                self._parse_files(block, project, folder_list)
                if not project.system_folders:
                    folder_list.remove(block.values[0])
            elif block.key == "-":
                project.remove_file(folder_list, block)
            else:
                project.add_file(folder_list, block)

            # only blocks with sub-items carry per-file compiler options to parse
            if not block.items:
                continue

            for file_path in block.get_list():
                if check_file_path_glob(file_path):
                    [self._source_file(block, project, found_file) for found_file in glob.glob(file_path)]
                else:
                    self._source_file(block, project, file_path)

    @staticmethod
    def _source_file(files_block: QPCBlock, project: ProjectPass, file_path: str):
        """Apply per-file compiler options from a file block to one source file."""
        source_file = project.get_source_file(file_path)
        if not source_file:
            return

        for config_block in files_block.get_items_cond(project.macros):
            if config_block.key == "config":
                for group_block in config_block.items:
                    # not checking the condition yet so this warning can go off
                    if group_block.key != "compile":
                        group_block.warning("Invalid Group, can only use compile")
                        continue

                    if group_block.solve_condition(project.macros):
                        for option_block in group_block.get_items_cond(project.macros):
                            source_file.compiler.parse_option(project.macros, option_block)
            else:
                # new, cleaner way, just assume it's compile
                source_file.compiler.parse_option(project.macros, config_block)
source_file.compiler.parse_option(project.macros, config_block) 671 | 672 | def read_file(self, script_path: str) -> QPCBlockRoot: 673 | if script_path in self.read_files: 674 | return self.read_files[script_path] 675 | else: 676 | try: 677 | script = read_file(script_path) 678 | self.read_files[script_path] = script 679 | return script 680 | except FileNotFoundError: 681 | pass 682 | 683 | # i hate python for this kind of stuff, would be cleaner if it was c++ 684 | @staticmethod 685 | def get_env_var(block: QPCBlock, macro_name: str, macros: Dict[str, str]) -> str: 686 | if not block.values: 687 | block.warning(f"Nothing set to get in getenv!") 688 | return None 689 | 690 | # optional 3rd argument to have the macro name be different from the env var name 691 | var_name = replace_macros(block.values[1], macros) if len(block.values) == 2 else macro_name 692 | 693 | if var_name in os.environ: 694 | # should i use norm_path in qpc_base here? 695 | return os.environ[var_name] 696 | else: 697 | return var_name 698 | 699 | @staticmethod 700 | def use_system_folders(block: QPCBlock, old_value: bool, macros: Dict[str, str]) -> bool: 701 | use_sys_folders = replace_macros(block.get_value(), macros) 702 | if not use_sys_folders: 703 | block.warning("Nothing set for system_folders!") 704 | return old_value 705 | 706 | # could of used convert_bool_option(), but meh 707 | if use_sys_folders.casefold() in {"true", "1"}: 708 | return True 709 | elif use_sys_folders.casefold() in {"false", "0"}: 710 | return False 711 | 712 | # certified bruh moment 713 | warning(block.get_file_info(), 714 | f"Invalid bool option for system_folders: \"{use_sys_folders}\"", 715 | "Valid options are \"true\", \"false\", \"1\", \"0\"") 716 | 717 | return old_value 718 | 719 | # awful 720 | @staticmethod 721 | def _parse_config(config: QPCBlock, project: ProjectPass) -> None: 722 | if config.solve_condition(project.macros): 723 | for group in config.get_items_cond(project.macros): 724 | for 
option_block in group.get_items_cond(project.macros): 725 | project.cfg.parse_config_option(group, option_block) 726 | -------------------------------------------------------------------------------- /qpc_reader.py: -------------------------------------------------------------------------------- 1 | # Reads QPC files and returns a list of QPCBlocks 2 | 3 | # python 4 is chad 4 | from __future__ import annotations 5 | 6 | import os 7 | from typing import List 8 | from re import compile 9 | from qpc_logging import warning, error, warning_no_line, verbose, verbose_color, print_color, Color 10 | 11 | 12 | def posix_path(string: str) -> str: 13 | return string.replace("\\", "/") 14 | 15 | 16 | COND_OPERATORS = compile('(\\(|\\)|\\|\\||\\&\\&|>=|<=|==|!=|>|<)') 17 | 18 | 19 | class QPCBlock: 20 | def __init__(self, parent: QPCBlock, key: str, values: List[str] = None, condition: str = "", line_num: int = 0): 21 | self.parent: QPCBlock = parent 22 | self.items: List[QPCBlock] = [] 23 | self.key: str = key 24 | self.values: List[str] = self._values_check(values) 25 | self.condition: str = condition 26 | self.line_num: int = line_num 27 | 28 | def __iter__(self): 29 | return self.items.__iter__() 30 | 31 | def __getitem__(self, index): 32 | return self.items[index] 33 | 34 | def extend(self, item): 35 | self.items.extend(item) 36 | 37 | def append(self, item): 38 | self.items.append(item) 39 | 40 | def remove(self, item): 41 | self.items.remove(item) 42 | 43 | def index(self, item): 44 | self.items.index(item) 45 | 46 | def to_string(self, quote_keys=False, quote_values=False, break_multi_value=False, break_on_key=False, depth=0): 47 | indent = "{0}".format(depth * '\t') 48 | index = self.parent.items.index(self) 49 | 50 | if quote_keys: 51 | string = "{0}\"{1}\"".format(indent, self.key) 52 | else: 53 | string = indent + self.key 54 | 55 | if break_on_key: 56 | key_indent = 0 57 | else: 58 | key_indent = len(self.key) - 1 59 | 60 | if self.values: 61 | for value_index, 
value in enumerate(self.values): 62 | if quote_values: 63 | # we are adding quotes to this anyway, so just escape all existing quotes 64 | formatted_value = value.replace("'", "\\'").replace('"', '\\"') 65 | else: 66 | formatted_value = value.replace("'", "\\'") 67 | if formatted_value: 68 | if len(value) > 1: 69 | # if we already have quotes at the ends of the the value, do not escape those quotes 70 | formatted_value = formatted_value[0] + \ 71 | formatted_value[1:-1].replace('"', '\\"') + \ 72 | formatted_value[-1] 73 | else: 74 | # someone could do something weird with this and just have it be a single quote, right? 75 | # that single quote would need to be escaped 76 | formatted_value.replace('"', '\\"') 77 | 78 | if quote_values: 79 | string += " \"{0}\"".format(formatted_value) 80 | else: 81 | string += " {0}".format(formatted_value) 82 | # untested 83 | if break_multi_value and value_index < len(self.values): 84 | string += " \\\n{0}{1}".format(indent, " " * key_indent) 85 | 86 | if self.condition: 87 | string += " [" + add_spacing_to_condition(self.condition) + "]" 88 | 89 | if self.items: 90 | if 0 < index < len(self.parent.items): 91 | if not self.parent.items[index - 1].items: 92 | string = "\n" + string 93 | 94 | string += "\n" + indent + "{\n" 95 | for item in self.items: 96 | string += item.to_string(quote_keys, quote_values, break_multi_value, break_on_key, depth + 1) + "\n" 97 | string += indent + "}" 98 | 99 | if index < len(self.parent.items) - 1: 100 | string += "\n" 101 | 102 | return string 103 | 104 | def get_value(self, index: int = 0) -> str: 105 | return self.values[index] if len(self.values) > index else "" 106 | 107 | def get_list(self) -> tuple: 108 | return (self.key, *self.values) # need parenthesis for python versions older than 3.8 109 | 110 | def solve_condition(self, macros: dict): 111 | return solve_condition(self, self.condition, macros) 112 | 113 | def invalid_option(self, value: str, *valid_option_list): 114 | 
warning(self.get_file_info(), f"Invalid Option: {value}", "Valid Options:", *valid_option_list) 115 | 116 | def error(self, message): 117 | error(self.get_file_info(), message) 118 | 119 | def warning(self, message): 120 | warning(self.get_file_info(), message) 121 | 122 | def get_file_info(self) -> str: 123 | return f"File \"{self.get_file_path()}\" : Line {str(self.line_num)} : Key \"{self.key}\"" 124 | 125 | def print_info(self): 126 | print(self.get_file_info() + " this should not be called anymore") 127 | 128 | @staticmethod 129 | def _values_check(values) -> List[str]: 130 | if type(values) == str: 131 | return [values] 132 | elif values is None: 133 | return [] 134 | else: 135 | return values 136 | 137 | def move_item(self, item: QPCBlock): 138 | self.items.append(item) 139 | if item.parent: 140 | item.parent.remove(item) 141 | item.parent = self 142 | 143 | def add_item(self, key: str, values: List[str] = None, condition: str = "", line_num: int = 0) -> QPCBlock: 144 | values = self._values_check(values) 145 | sub_qpc = QPCBlock(self, key, values, condition, line_num=line_num) 146 | self.items.append(sub_qpc) 147 | return sub_qpc 148 | 149 | def add_item_index(self, index: int, key: str, values: List[str] = None, condition: str = "", line_num: int = 0) -> QPCBlock: 150 | values = self._values_check(values) 151 | sub_qpc = QPCBlock(self, key, values, condition, line_num=line_num) 152 | self.items.insert(index, sub_qpc) 153 | return sub_qpc 154 | 155 | def get_item(self, item_key) -> QPCBlock: 156 | for item in self.items: 157 | if item.key == item_key: 158 | return item 159 | return None 160 | 161 | def get_item_values(self, item_key) -> List[str]: 162 | for item in self.items: 163 | if item.key == item_key: 164 | return item.values 165 | return [] 166 | 167 | def get_items(self, item_key) -> List[QPCBlock]: 168 | items: List[QPCBlock] = [] 169 | for item in self.items: 170 | if item.key == item_key: 171 | items.append(item) 172 | return items 173 | 174 | 
def get_items_cond(self, macros: dict) -> List[QPCBlock]: 175 | items: List[QPCBlock] = [] 176 | for item in self.items: 177 | if solve_condition(self, item.condition, macros): 178 | items.append(item) 179 | return items 180 | 181 | def get_item_keys_cond(self, macros: dict) -> List[str]: 182 | items: List[str] = [] 183 | for item in self.items: 184 | if solve_condition(self, item.condition, macros): 185 | items.append(item.key) 186 | return items 187 | 188 | def get_item_values_cond(self, macros: dict, key: str = "") -> List[str]: 189 | items: List[str] = [] 190 | for item in self.items: 191 | if solve_condition(self, item.condition, macros): 192 | if not key or key == item.key: 193 | items.extend(item.values) 194 | return items 195 | 196 | def get_item_list_cond(self, macros: dict) -> List[QPCBlock]: 197 | items = [] 198 | for item in self.items: 199 | if solve_condition(self, item.condition, macros): 200 | items.extend([item.key, *item.values]) 201 | return items 202 | 203 | def get_keys_in_items(self): 204 | return [value.key for value in self.items] 205 | 206 | def get_item_index(self, qpc_item: QPCBlock): 207 | try: 208 | return self.items.index(qpc_item) 209 | except IndexError: 210 | return None 211 | 212 | def get_root(self) -> QPCBlockRoot: 213 | return self.parent.get_root() 214 | 215 | def get_file_path(self) -> str: 216 | return self.get_root().file_path 217 | 218 | def get_file_name(self) -> str: 219 | return os.path.basename(self.get_root().file_path) 220 | 221 | 222 | # tbh, this "base" class is pretty stupid and probably useless 223 | # this should be like a "root" class 224 | class QPCBlockRoot(QPCBlock): 225 | def __init__(self, file_path: str = ""): 226 | super().__init__(self, "", []) 227 | self.file_path = file_path 228 | 229 | def to_string(self, quote_keys=False, quote_values=False, break_multi_value=False, break_on_key=False, depth=0): 230 | final_string = "" 231 | for item in self.items: 232 | final_string += item.to_string(quote_keys, 
quote_values, break_multi_value, break_on_key, 0) + "\n" 233 | return final_string 234 | 235 | def get_root(self) -> QPCBlockRoot: 236 | return self 237 | 238 | 239 | def replace_macros_condition(split_string: List[str], macros): 240 | for index, item_token in enumerate(split_string): 241 | flip_value = str(item_token).startswith("!") 242 | has_tokens = item_token[1 if flip_value else 0] == "$" and item_token.endswith("$") 243 | if has_tokens: 244 | item = item_token[2 if flip_value else 1:-1] 245 | else: 246 | item = item_token[1 if flip_value else 0:] 247 | 248 | if item in macros: 249 | if flip_value: 250 | try: 251 | split_string[index] = str(int(not int(macros[item]))) 252 | except ValueError: 253 | split_string[index] = str(int(not macros[item])) 254 | else: 255 | split_string[index] = macros[item] 256 | 257 | elif flip_value: 258 | split_string[index] = "1" 259 | 260 | elif has_tokens: 261 | split_string[index] = "0" 262 | 263 | return split_string 264 | 265 | 266 | def _print_solved_condition(split_string: list, result: int): 267 | pass 268 | # verbose_color(Color.BLUE, f"Solved Condition: \"[{' '.join(split_string)}]\" -> \"{result}\"") 269 | 270 | 271 | def solve_condition(qpcblock: QPCBlock, condition: str, macros: dict) -> int: 272 | if not condition: 273 | return True 274 | 275 | solved_cond = condition 276 | # solve any sub conditionals first 277 | while "(" in solved_cond: 278 | sub_cond_line = (solved_cond.split('(')[1]).split(')')[0] 279 | sub_cond_value = solve_condition(qpcblock, sub_cond_line, macros) 280 | solved_cond = solved_cond.split('(', 1)[0] + str(sub_cond_value * 1) + solved_cond.split(')', 1)[1] 281 | 282 | split_string = COND_OPERATORS.split(solved_cond) 283 | 284 | solved_cond = replace_macros_condition(split_string.copy(), macros) 285 | 286 | if len(solved_cond) == 1: 287 | try: 288 | solved_cond[0] = int(solved_cond[0]) 289 | except ValueError: 290 | _print_solved_condition(split_string, 1) 291 | return 1 292 | 293 | while 
len(solved_cond) > 1: 294 | try: 295 | solved_cond = _solve_single_condition(solved_cond) 296 | except Exception as F: 297 | qpcblock.error(f'Error Solving Condition: {str(F)}\n' 298 | f'\tCondition: [{condition}] -> [{" ".join(solved_cond)}]\n') 299 | return 0 300 | 301 | _print_solved_condition(split_string, solved_cond[0]) 302 | return solved_cond[0] 303 | 304 | 305 | def _solve_single_condition(cond): 306 | index = 1 307 | result = 0 308 | # highest precedence order 309 | if "<" in cond: 310 | index = cond.index("<") 311 | if int(cond[index - 1]) < int(cond[index + 1]): 312 | result = 1 313 | 314 | elif "<=" in cond: 315 | index = cond.index("<=") 316 | if int(cond[index - 1]) <= int(cond[index + 1]): 317 | result = 1 318 | 319 | elif ">=" in cond: 320 | index = cond.index(">=") 321 | if int(cond[index - 1]) >= int(cond[index + 1]): 322 | result = 1 323 | 324 | elif ">" in cond: 325 | index = cond.index(">") 326 | if int(cond[index - 1]) > int(cond[index + 1]): 327 | result = 1 328 | 329 | # next in order of precedence, check equality 330 | # you can compare stings with these 2 331 | elif "==" in cond: 332 | index = cond.index("==") 333 | if str(cond[index - 1]) == str(cond[index + 1]): 334 | result = 1 335 | 336 | elif "!=" in cond: 337 | index = cond.index("!=") 338 | if str(cond[index - 1]) != str(cond[index + 1]): 339 | result = 1 340 | 341 | # and then, check for any &&'s 342 | elif "&&" in cond: 343 | index = cond.index("&&") 344 | if int(cond[index - 1]) > 0 and int(cond[index + 1]) > 0: 345 | result = 1 346 | 347 | # and finally, check for any ||'s 348 | elif "||" in cond: 349 | index = cond.index("||") 350 | if int(cond[index - 1]) > 0 or int(cond[index + 1]) > 0: 351 | result = 1 352 | 353 | cond[index] = result 354 | del cond[index + 1] 355 | del cond[index - 1] 356 | 357 | return cond 358 | 359 | 360 | def add_spacing_to_condition(cond): 361 | cond = cond.strip(" ") 362 | 363 | if ">=" not in cond: 364 | cond = cond.replace(">", " > ") 365 | if "<=" 
not in cond: 366 | cond = cond.replace("<", " < ") 367 | 368 | for operator in ("<=", ">=", "==", "||", "&&"): 369 | cond = cond.replace(operator, ' ' + operator + ' ') 370 | 371 | return cond 372 | 373 | 374 | def read_file(path: str, keep_quotes: bool = False, allow_escapes: bool = True, multiline_quotes: bool = False) -> QPCBlockRoot: 375 | path = posix_path(path) 376 | lexer = QPCLexer(path, keep_quotes, allow_escapes, multiline_quotes) 377 | qpc_file = QPCBlockRoot(path) 378 | path = posix_path(os.getcwd() + "/" + path) 379 | parse_recursive(lexer, qpc_file, path) 380 | return qpc_file 381 | 382 | 383 | def parse_recursive(lexer, block, path): 384 | while lexer.char_num < lexer.file_len - 1: 385 | key, line_num = lexer.next_key() 386 | 387 | if not key: 388 | if lexer.next_symbol() == "}": 389 | return 390 | elif lexer.char_num >= lexer.file_len: 391 | if type(block) == QPCBlock: 392 | block.warning("brackets do not close") 393 | return 394 | # print("WARNING: script is probably incorrect somewhere, no key specified, or a reader error") 395 | # block.print_info() 396 | 397 | # line_num = lexer.line_num 398 | values = lexer.next_value_list() 399 | condition = lexer.next_condition() 400 | 401 | sub_block = block.add_item(key, values, condition, line_num) 402 | 403 | next_symbol = lexer.next_symbol() 404 | if next_symbol == "{": 405 | parse_recursive(lexer, sub_block, path) 406 | elif next_symbol == "}": 407 | return 408 | 409 | 410 | class QPCLexer: 411 | def __init__(self, path: str, keep_quotes: bool = False, allow_escapes: bool = True, multiline_quotes: bool = False): 412 | self.char_num = 0 413 | self.line_num = 1 414 | self.line_char = 0 415 | self.path = path 416 | self.keep_quotes = keep_quotes 417 | self.allow_escapes = allow_escapes 418 | self.multiline_quotes = multiline_quotes 419 | 420 | try: 421 | with open(path, mode="r", encoding="utf-8") as file: 422 | self.file = file.read() 423 | except UnicodeDecodeError: 424 | with open(path, mode="r", 
encoding="ansi") as file: 425 | self.file = file.read() 426 | 427 | self.file_len = len(self.file) - 1 428 | self.split_file = self.file.splitlines() 429 | 430 | self.chars_escape = {'\'', '"', '\\'} 431 | self.chars_comment = {'/', '*'} 432 | self.chars_item = {'{', '}'} 433 | self.chars_cond = {'[', ']'} 434 | self.chars_space = {' ', '\t'} 435 | self.chars_quote = {'"', '\''} 436 | 437 | def formatted_info(self) -> str: 438 | return f"File \"{self.path}\" : Line {str(self.line_num)} : Char {self.char_num}" 439 | 440 | def get_current_line(self) -> str: 441 | if -1 < self.line_num <= self.file_len: 442 | return self.split_file[self.line_num - 1] 443 | return "" 444 | 445 | @staticmethod 446 | def _make_arrow(index: int, length: int) -> str: 447 | arrow = "{0}^{1}".format(" " * (index - 1), "~" * length if length else "") 448 | return arrow 449 | 450 | def warning_range(self, index: int, length: int, *text): 451 | file_error = self._make_arrow(index, length) 452 | warning_no_line(self.formatted_info(), *text) 453 | print(self.get_current_line().replace("\t", " ")) 454 | print_color(Color.GREEN, file_error) 455 | 456 | def next_line(self): 457 | self.line_num += 1 458 | self.line_char = 0 459 | 460 | def next_char(self, amount: int = 1): 461 | self.char_num += amount 462 | self.line_char += amount 463 | 464 | def next_value_list(self): 465 | start = self.line_char 466 | values = [] 467 | current_value = '' 468 | while self.char_num < self.file_len: 469 | char = self.file[self.char_num] 470 | 471 | if char in self.chars_item: 472 | break 473 | 474 | if char in self.chars_space: 475 | if current_value: 476 | if current_value != '\\': 477 | values.append(current_value) 478 | current_value = '' 479 | self.next_char() 480 | start = self.line_char 481 | continue 482 | 483 | if char in {'"', '\''}: 484 | if current_value and current_value != "\\": 485 | self.warning_range(start, self.line_char - start, 486 | "Opening a quote inside a string, using quote only") 487 | 
values.append(self.read_quote(char)) 488 | current_value = "" 489 | start = self.line_char 490 | continue 491 | 492 | # skip escape 493 | if char == '\\' and self.peek_char() in self.chars_escape: 494 | self.next_char(2) 495 | current_value += self.file[self.char_num] 496 | # char = self.file[self.char_num] 497 | 498 | elif char == '\n': 499 | if not current_value.endswith("\\"): 500 | if current_value and not current_value.startswith('[') and not current_value.endswith(']'): 501 | values.append(current_value) 502 | break 503 | else: 504 | self.next_line() 505 | start = 0 506 | 507 | elif char == '/' and self.peek_char() in self.chars_comment: 508 | self.skip_comment() 509 | continue 510 | 511 | else: 512 | if self.file[self.char_num] in self.chars_cond: 513 | break 514 | if current_value == '\\': 515 | current_value = '' 516 | current_value += self.file[self.char_num] 517 | 518 | self.next_char() 519 | 520 | return values 521 | 522 | def peek_char(self): 523 | if self.char_num + 1 >= self.file_len: 524 | return None 525 | return self.file[self.char_num + 1] 526 | 527 | # used to be NextString, but i only used it for keys 528 | def next_key(self): 529 | string = "" 530 | line_num = 0 531 | skip_list = {' ', '\t', '\n'} 532 | 533 | while self.char_num < self.file_len: 534 | char = self.file[self.char_num] 535 | 536 | if char in self.chars_item: 537 | line_num = self.line_num 538 | break 539 | 540 | elif char in self.chars_space: 541 | if string: 542 | line_num = self.line_num 543 | break 544 | 545 | elif char in self.chars_quote: 546 | string = self.read_quote(char) 547 | line_num = self.line_num 548 | break 549 | 550 | # skip escape 551 | elif char == '\\' and self.peek_char() in self.chars_escape: 552 | self.next_char(2) 553 | string += self.file[self.char_num] 554 | # char = self.file[self.char_num] 555 | 556 | elif char in skip_list: 557 | if string: 558 | line_num = self.line_num 559 | break 560 | if char == '\n': 561 | self.next_line() 562 | 563 | elif char == 
'/' and self.peek_char() in self.chars_comment: 564 | self.skip_comment() 565 | continue 566 | 567 | else: 568 | string += self.file[self.char_num] 569 | 570 | self.next_char() 571 | 572 | return string, line_num 573 | 574 | def next_symbol(self): 575 | while self.char_num <= self.file_len: 576 | char = self.file[self.char_num] 577 | 578 | if char in self.chars_item: 579 | self.next_char() 580 | return char 581 | 582 | # skip escape 583 | elif char == '\\' and self.peek_char() in self.chars_escape: 584 | self.next_char(2) 585 | 586 | elif char == '/' and self.peek_char() in self.chars_comment: 587 | self.skip_comment() 588 | continue 589 | 590 | elif char == '\n': 591 | self.next_line() 592 | 593 | elif char not in self.chars_space: 594 | break 595 | 596 | self.next_char() 597 | 598 | return None 599 | 600 | def next_condition(self): 601 | condition = '' 602 | while self.char_num < self.file_len: 603 | char = self.file[self.char_num] 604 | 605 | if char in self.chars_item: 606 | break 607 | 608 | elif char == '[': 609 | self.next_char() 610 | continue 611 | 612 | elif char == ']': 613 | self.next_char() 614 | break 615 | 616 | elif char in self.chars_space: 617 | self.next_char() 618 | continue 619 | 620 | elif char == '\n': 621 | self.next_line() 622 | self.next_char() 623 | break 624 | 625 | elif char == '/' and self.peek_char() in self.chars_comment: 626 | self.skip_comment() 627 | continue 628 | 629 | else: 630 | condition += self.file[self.char_num] 631 | 632 | self.next_char() 633 | 634 | return condition 635 | 636 | def skip_comment(self): 637 | self.next_char() 638 | char = self.file[self.char_num] 639 | if char == '/': 640 | # keep going until \n 641 | while self.char_num < self.file_len: 642 | self.next_char() 643 | if self.file[self.char_num] == "\n": 644 | break 645 | 646 | elif char == '*': 647 | while self.char_num < self.file_len: 648 | char = self.file[self.char_num] 649 | 650 | if char == '*' and self.peek_char() == '/': 651 | self.next_char(2) 652 
| break 653 | 654 | if char == "\n": 655 | self.next_line() 656 | 657 | self.next_char() 658 | 659 | def read_quote(self, quote_char): 660 | start = self.line_char 661 | 662 | if self.keep_quotes: 663 | quote = quote_char 664 | else: 665 | quote = '' 666 | 667 | while self.char_num < self.file_len: 668 | self.next_char() 669 | char = self.file[self.char_num] 670 | 671 | if char == '\\' and self.peek_char() in self.chars_escape and self.allow_escapes: 672 | quote += self.peek_char() 673 | self.next_char() 674 | elif char == quote_char: 675 | if self.keep_quotes: 676 | quote += char 677 | break 678 | elif char == "\n" and not self.multiline_quotes: 679 | self.warning_range(start, self.line_char - start, "Quote does not end on line") 680 | break 681 | else: 682 | quote += char 683 | 684 | self.next_char() 685 | return quote 686 | --------------------------------------------------------------------------------