├── tests ├── __init__.py └── test_verifydump.py ├── verifydump ├── __init__.py ├── dat.py ├── console.py ├── convert.py └── verify.py ├── .gitignore ├── exeentry.py ├── .pre-commit-config.yaml ├── verifydump.spec ├── LICENSE.md ├── pyproject.toml ├── README.md └── poetry.lock /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /verifydump/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.1.0" 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | .vscode 3 | build/ 4 | dist/ 5 | -------------------------------------------------------------------------------- /tests/test_verifydump.py: -------------------------------------------------------------------------------- 1 | from verifydump import __version__ 2 | 3 | 4 | def test_version(): 5 | assert __version__ == "0.1.0" 6 | -------------------------------------------------------------------------------- /exeentry.py: -------------------------------------------------------------------------------- 1 | # This file just exists because PyInstaller needs a script to run as its entry point. 2 | 3 | import verifydump.console 4 | 5 | verifydump.console.verifydump_main() 6 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/psf/black 3 | rev: 22.3.0 4 | hooks: 5 | - id: black 6 | args: [ '--check' , '--line-length', '1000000' ] 7 | -------------------------------------------------------------------------------- /verifydump.spec: -------------------------------------------------------------------------------- 1 | # -*- mode: python ; coding: utf-8 -*- 2 | 3 | 4 | block_cipher = None 5 | 6 | 7 | a = Analysis( 8 | ['exeentry.py'], 9 | pathex=[], 10 | binaries=[], 11 | datas=[], 12 | hiddenimports=[], 13 | hookspath=[], 14 | hooksconfig={}, 15 | runtime_hooks=[], 16 | excludes=[], 17 | win_no_prefer_redirects=False, 18 | win_private_assemblies=False, 19 | cipher=block_cipher, 20 | noarchive=False, 21 | ) 22 | pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher) 23 | 24 | exe = EXE( 25 | pyz, 26 | a.scripts, 27 | a.binaries, 28 | a.zipfiles, 29 | a.datas, 30 | [], 31 | name='verifydump', 32 | debug=False, 33 | bootloader_ignore_signals=False, 34 | strip=False, 35 | upx=True, 36 | upx_exclude=[], 37 | runtime_tmpdir=None, 38 | console=True, 39 | disable_windowed_traceback=False, 40 | argv_emulation=False, 41 | target_arch=None, 42 | codesign_identity=None, 43 | entitlements_file=None, 44 | ) 45 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | Copyright © 2022 j68k 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "verifydump" 3 | version = "1.0.0" 4 | description = "A tool for verifying compressed (.chd/.rvz) disc dumps against a Datfile" 5 | license = "MIT" 6 | authors = ["j68k"] 7 | readme = "README.md" 8 | homepage = "https://github.com/j68k/verifydump" 9 | repository = "https://github.com/j68k/verifydump.git" 10 | classifiers = [ 11 | "Environment :: Console", 12 | "Topic :: Games/Entertainment", 13 | "Topic :: System :: Archiving :: Compression", 14 | "Topic :: Utilities", 15 | ] 16 | 17 | [tool.poetry.dependencies] 18 | python = "^3.10,<3.11" # The upper-bound here is just because that is required for PyInstaller. 19 | 20 | [tool.poetry.dev-dependencies] 21 | pytest = "^7.1.1" 22 | black = "^22.3.0" 23 | pre-commit = "^2.17.0" 24 | pyinstaller = "^5.1" 25 | 26 | [build-system] 27 | requires = ["poetry-core>=1.0.0"] 28 | build-backend = "poetry.core.masonry.api" 29 | 30 | [tool.poetry.scripts] 31 | verifydump = 'verifydump.console:verifydump_main' 32 | # The following commands are useful for testing, but aren't production-ready for users: 33 | #convertdump = 'verifydump.console:convertdump_main' 34 | #convertgditocue = 'verifydump.console:convertgditocue_main' 35 | #testgditocueconversion = 'verifydump.console:testgditocueconversion_main' 36 | 37 | [tool.black] 38 | line-length = 1000000 39 | -------------------------------------------------------------------------------- /verifydump/dat.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pathlib 3 | import shutil 4 | from xml.etree import ElementTree 5 | import zipfile 6 | 7 | 8 | class Dat: 9 | def __init__(self, system: str): 10 | self.system = system 11 | self.games = [] 12 | self.roms_by_sha1hex = {} 13 | 14 | 15 | class Game: 16 | def __init__(self, name: str, dat: Dat): 17 | self.name = name 18 | self.roms = [] 19 | self.dat = dat 20 | 21 | 22 | class ROM: 23 | def __init__(self, name: str, size: int, sha1hex: str, game: Game): 24 | self.name = name 25 | self.size = size 26 | self.sha1hex = sha1hex 27 | self.game = game 28 | 29 | 30 | class DatParsingException(Exception): 31 | pass 32 | 33 | 34 | class DatParser: 35 | def __init__(self): 36 | self.tag_path = [] 37 | self.dat = None 38 | self.game = None 39 | 40 | def start(self, tag, attribs): 41 | self.tag_path.append(tag) 42 | 43 | if self.tag_path == ["datafile", "game"]: 44 | if self.game: 45 | raise DatParsingException("Found a within another ") 46 | if not self.dat: 47 | raise DatParsingException("Found a before the
was parsed") 48 | 49 | self.game = Game(name=self._get_required_attrib(attribs, "name"), dat=self.dat) 50 | 51 | elif self.tag_path == ["datafile", "game", "rom"]: 52 | if not self.game: 53 | raise DatParsingException("Found a that was not within a ") 54 | 55 | try: 56 | size_attrib = self._get_required_attrib(attribs, "size") 57 | size = int(size_attrib) 58 | except ValueError: 59 | raise DatParsingException(f" has size attribute that is not an integer: {size_attrib}") 60 | 61 | rom = ROM( 62 | name=self._get_required_attrib(attribs, "name"), 63 | size=size, 64 | sha1hex=self._get_required_attrib(attribs, "sha1"), 65 | game=self.game, 66 | ) 67 | 68 | self.game.roms.append(rom) 69 | roms_with_sha1 = self.dat.roms_by_sha1hex.setdefault(rom.sha1hex, []) 70 | roms_with_sha1.append(rom) 71 | 72 | def _get_required_attrib(self, attribs, name) -> str: 73 | value = attribs.get(name) 74 | if not value: 75 | raise DatParsingException(f"Found a <{self.tag_path[-1]}> without a {name} attribute") 76 | return value 77 | 78 | def end(self, tag): 79 | if self.tag_path == ["datafile", "game"]: 80 | self.dat.games.append(self.game) 81 | self.game = None 82 | 83 | self.tag_path.pop() 84 | 85 | def data(self, data): 86 | if self.tag_path == ["datafile", "header", "name"]: 87 | self.dat = Dat(system=data) 88 | 89 | def close(self) -> Dat: 90 | return self.dat 91 | 92 | 93 | class FileLikeParserFeeder: 94 | def __init__(self, parser): 95 | self.parser = parser 96 | 97 | def write(self, b): 98 | self.parser.feed(b) 99 | 100 | 101 | def load_dat(dat_path: pathlib.Path) -> Dat: 102 | def parse_dat_file(dat_file) -> Dat: 103 | xml_parser = ElementTree.XMLParser(target=DatParser()) 104 | shutil.copyfileobj(dat_file, FileLikeParserFeeder(xml_parser)) 105 | dat = xml_parser.close() 106 | logging.info(f"Datfile loaded successfully with {len(dat.games)} games") 107 | return dat 108 | 109 | if dat_path.suffix.lower() == ".zip": 110 | with zipfile.ZipFile(dat_path) as zip: 111 | for zip_member_info in zip.infolist(): 112 | if not zip_member_info.filename.lower().endswith(".dat"): 113 | continue 114 | logging.debug(f'Loading Datfile "{zip_member_info.filename}" from "{dat_path}"') 115 | with zip.open(zip_member_info) as dat_file: 116 | return parse_dat_file(dat_file) 117 | raise DatParsingException("No .dat file found within provided .zip") 118 | else: 119 | logging.debug(f'Loading Datfile "{dat_path}"') 120 | with open(dat_path, "rb") as dat_file: 121 | return parse_dat_file(dat_file) 122 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # verifydump 2 | 3 | verifydump is a command-line tool for verifying that compressed disc images in .chd or .rvz format correctly match those in the [Datfiles produced by the Redump disc preservation project](http://redump.org/downloads/). 4 | 5 | The reason it's useful is that the Redump Datfiles describe the uncompressed disc image files (usually in .bin/.cue or .iso format) but it's generally more pleasant for users to store their disc images in .chd/.rvz format because those formats offer excellent compression while still being directly usable in many emulators without the need to decompress the whole file first. 6 | 7 | verifydump works by converting the compressed .chd/.rvz file into the original format used by Redump, tweaking the converted dump to match the Redump conventions, and then verifying that it matches the Datfile. 8 | 9 | ## Required Tools 10 | 11 | To convert the disc images between formats verifydump uses third-party tools. 12 | 13 | To verify .chd files you need both of: 14 | - chdman, which is made by and [distributed with MAME](https://www.mamedev.org/release.html) 15 | - [binmerge](https://github.com/putnam/binmerge/releases) 16 | 17 | To verify .rvz files you need: 18 | - DolphinTool, which is made by and [distributed with Dolphin](https://dolphin-emu.org/download/) 19 | 20 | The tools must be available in your system's PATH so that verifydump can find them. 21 | 22 | ## Installation 23 | 24 | You can download a .exe of verifydump for Windows on the [releases](https://github.com/j68k/verifydump/releases) page. The program is written in Python so on Linux or macOS you can install it using [pipx](https://pypa.github.io/pipx/) with: 25 | ```Shell 26 | pipx install verifydump 27 | ``` 28 | or using any other method you like for installing Python packages. 29 | 30 | ## Usage 31 | 32 | To verify your dumps you need to supply verifydump with the Datfile and the compressed files that it should verify. The Datfile (which can be zipped) should be specified first, followed by one or more compressed dump files or folders that contain the dump files: 33 | ```Shell 34 | verifydump "Example - SystemName - Datfile (3902) (2022-01-01 01-02-03).zip" "C:\Games\SystemName" 35 | ``` 36 | 37 | If everything is successful, after a little while you'll see output like this: 38 | ``` 39 | Datfile loaded successfully with 3902 games 40 | Dump verified correct and complete: "Some Game (Disc 1)" 41 | Dump verified correct and complete: "Some Game (Disc 2)" 42 | Dump verified correct and complete: "Other Game" 43 | Dump verified correct and complete: "Best Game" 44 | Successfully verified 4 dumps 45 | ``` 46 | 47 | If any dump can't be successfully verified then you'll see output about what failed after all the other dumps have been checked. The program stops checking a dump after it finds an error, so the error reported might just be the first problem in a file. verifydump never modifies your files, so fixing problems like wrong filenames is up to you. 48 | 49 | There are a few options you can use, which can see documentation about by running `verifydump --help`. The `--verbose` option can be helpful if you get an unexpected result, because it makes the program show much more detailed output about exactly what it is doing. Another important option is `--extra-cue-source`, which is described in the following section. 50 | 51 | ## The problem with .cue files 52 | 53 | As mentioned above, verifydump works by converting your compressed disc images into the original format used by Redump. For CD images, that will be .bin/.cue format. There can be a problem, however, which is that the original Redump .cue files sometimes contain extra metadata about the disc that isn't representable in the .chd format. That means that when verifydump converts the .chd, the converted .cue is missing that metadata. It therefore doesn't match the .cue file described in the Datfile and can't be verified. If that happens, you'll see output like this: 54 | ``` 55 | "Some Game (Disc 1)" .bin files verified and complete, but .cue does not match Datfile 56 | You can either supply the original .cue file yourself using the '--extra-cue-source' option so that we can check that the generated .cue file's essential structure is correct, or ignore .cue file errors with the '--allow-cue-file-mismatches' option 57 | ``` 58 | 59 | As mentioned in that output, the solution to this problem is that you can provide the original .cue file. verifydump can then check that the provided .cue does match the Datfile, and then it can check the converted .cue against the provided one, while ignoring any metadata that isn't supported in the .chd format. That way, it can verify that the essential parts of the converted .cue are correct. 60 | 61 | The good news is that Redump makes the .cue files for all systems easily available on [their downloads page](http://redump.org/downloads/) in the Cuesheets column. So if you do encounter this issue, you just need to download the Cuesheets .zip for the system you're verifying, and tell verifydump where to find that file using the `--extra-cue-source` option. You'll then see output like this: 62 | ``` 63 | Dump .bin files verified correct and complete, and .cue essential structure matches: "Some Game (Disc 1)" 64 | ``` 65 | which is a slightly long-winded way of saying everything is great. 66 | 67 | ## Avoiding SSD wear from temporary files 68 | 69 | Because verifydump uses external tools to do its conversions, it necessarily creates temporary files for the converted files, and then promptly deletes them after verification. That's a bit unfortunate, because the lifetime of an SSD is limited by the amount of data that's written to it, so it's somewhat wasteful to write big files and then delete them again immediately. 70 | 71 | **It's probably not worth worrying about this if you're just going to verify your game collection occasionally**, but if you'll be verifying it very frequently, or if you have a huge collection then it might be worth using a [RAM drive](https://en.wikipedia.org/wiki/RAM_drive) to store the temporary files, so that they don't need to be written to your SSD. 72 | 73 | On Windows I've had success using [OSFMount](https://www.osforensics.com/tools/mount-disk-images.html) to mount a new RAM drive with the drive letter T:, and then in the PowerShell terminal that I run verifydump in, setting the TEMP environment variable with `$Env:TEMP="T:\"`. The RAM drive needs to be large enough to fit double the uncompressed size of the largest dump that you will verify, so ~1.5GB is good for CD images, or ~20GB for DVD images. 74 | 75 | ## Bugs/questions 76 | 77 | Please report any bugs or ask any questions by opening an [issue on the project's GitHub](https://github.com/j68k/verifydump/issues). Please assign an appropriate label to your issue to keep things organized. 78 | 79 | ## Contributing/future work 80 | 81 | Pull requests for bug fixes are very welcome, of course. If you're thinking of doing substantial work on a new feature then please open a new issue to discuss it first so we can make sure that we're on the same page about the proposed feature/design. 82 | 83 | There may be some [open issues for proposed new features](https://github.com/j68k/verifydump/labels/enhancement) already, and please feel free to star those issues to indicate which ones should be prioritized. 84 | 85 | One feature that probably won't be added is support for other compressed image formats, unless they have clear advantages over the ones that are supported now. I'd prefer to nudge users towards whatever the best format is for a given system rather supporting every format just because it's possible. 86 | 87 | Thank you very much for reading everything, and I hope you like the tool! 88 | -------------------------------------------------------------------------------- /verifydump/console.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | import pathlib 4 | import shutil 5 | import sys 6 | import tempfile 7 | import zipfile 8 | 9 | from .convert import ConversionException, convert_chd_to_normalized_redump_dump_folder, convert_gdi_to_cue 10 | from .verify import VerificationException, verify_dumps 11 | from .dat import DatParsingException, load_dat 12 | 13 | 14 | def arg_parser_with_common_args() -> argparse.ArgumentParser: 15 | arg_parser = argparse.ArgumentParser() 16 | arg_parser.add_argument("--verbose", action=argparse.BooleanOptionalAction, default=False, help="Show more detailed output about what the program is doing") 17 | arg_parser.add_argument("--show-command-output", action=argparse.BooleanOptionalAction, default=False, help="Show the full output from external commands that are run") 18 | arg_parser.add_argument("--extra-cue-source", metavar="FILE_OR_FOLDER_OR_ZIP", help=f"A source of .cue files that will be used to verify dumps in cases where {pathlib.Path(sys.argv[0]).stem} can't generate the exact original .cue file itself. These are needed when the original .cue file contains metadata that isn't storable in the .chd format, for example. The value you provide here can be a single .cue file if you're just verifying one dump, or it can be a folder or .zip containing many .cue files (such as the one of the Cuesheets .zip files available on the Redump download page).") 19 | return arg_parser 20 | 21 | 22 | def handle_common_args(args): 23 | logging.basicConfig(format="%(message)s", level=logging.DEBUG if args.verbose else logging.INFO) 24 | 25 | 26 | def verifydump_main(): 27 | try: 28 | arg_parser = arg_parser_with_common_args() 29 | arg_parser.add_argument("--allow-cue-file-mismatches", action=argparse.BooleanOptionalAction, default=False, help=f"If the .cue file that {pathlib.Path(sys.argv[0]).stem} generates doesn't match the original dump or extra provided .cue file then it is usually reported as an error. If this option is used then the mismatch is still reported, but isn't treated as an error.") 30 | arg_parser.add_argument("--report-unverified", action=argparse.BooleanOptionalAction, default=False, help="Reports games that are present in the Datfile but were not successfully verified.") 31 | arg_parser.add_argument("dat_file", metavar="dat_file_or_zip", help="The Datfile that your dumps will be verified against. It can be zipped.") 32 | arg_parser.add_argument("dump_file_or_folder", nargs="+", help="The dump files to verify. Specify any number of .chd files, .rvz files, or folders containing those.") 33 | args = arg_parser.parse_args() 34 | 35 | handle_common_args(args) 36 | 37 | try: 38 | dat = load_dat(pathlib.Path(args.dat_file)) 39 | except DatParsingException as e: 40 | print(f"Error when parsing Datfile: {e}", file=sys.stderr) 41 | sys.exit(1) 42 | except Exception as e: 43 | print(f"Error reading Datfile: {e}") 44 | sys.exit(1) 45 | 46 | (verified_games, errors) = verify_dumps(dat, [pathlib.Path(i) for i in args.dump_file_or_folder], show_command_output=args.show_command_output, allow_cue_mismatches=args.allow_cue_file_mismatches, extra_cue_source=pathlib.Path(args.extra_cue_source) if args.extra_cue_source else None) 47 | 48 | if len(verified_games) > 1: 49 | print(f"Successfully verified {len(verified_games)} dumps") 50 | 51 | if args.report_unverified: 52 | if len(verified_games) < len(dat.games): 53 | unverified_games = list(set(dat.games) - set(verified_games)) 54 | unverified_games.sort(key=lambda x: x.name) 55 | print(f"{len(unverified_games)} game(s) present in the Datfile but not successfully verified:") 56 | for unverified_game in unverified_games: 57 | print(f'"{unverified_game.name}"') 58 | 59 | if len(errors) > 0: 60 | print(f"{len(errors)} dumps had errors:" if len(errors) > 1 else "1 dump had an error:", file=sys.stderr) 61 | 62 | for error in errors: 63 | if isinstance(error, ConversionException): 64 | print(f'Failed to process "{error.converted_file_path}" to verify it: {error}', file=sys.stderr) 65 | if error.tool_output: 66 | print(error.tool_output, end="", file=sys.stderr) 67 | elif isinstance(error, VerificationException): 68 | print(error, file=sys.stderr) 69 | else: 70 | raise error # wut? 71 | 72 | sys.exit(1 if len(errors) > 0 else 0) 73 | 74 | except KeyboardInterrupt: 75 | # This handler just stops Python from outputting a potentially-confusing exception message when interrupted with Ctrl-C. 76 | sys.exit(1) 77 | 78 | 79 | def convertdump_main(): 80 | arg_parser = arg_parser_with_common_args() 81 | arg_parser.add_argument("--output-folder", default=".") 82 | arg_parser.add_argument("--system", default=None, help="The name of the system the dumps are for. Some systems require special handling to correctly convert dumps (such as Dreamcast and other systems that use GD-ROM media). Use the full system name that is in the Redump Datfile's header field, or use the short name for the system that appears in Redump web site URLs.") 83 | arg_parser.add_argument("dump_file", nargs="+") 84 | args = arg_parser.parse_args() 85 | 86 | handle_common_args(args) 87 | 88 | for dump_file_name in args.dump_file: 89 | convert_chd_to_normalized_redump_dump_folder( 90 | pathlib.Path(dump_file_name), 91 | pathlib.Path(args.output_folder), 92 | system=args.system, 93 | show_command_output=args.show_command_output, 94 | ) 95 | 96 | 97 | def convertgditocue_main(): 98 | arg_parser = argparse.ArgumentParser() 99 | arg_parser.add_argument("gdi_file") 100 | arg_parser.add_argument("cue_file") 101 | args = arg_parser.parse_args() 102 | 103 | logging.basicConfig(format="%(message)s", level=logging.DEBUG) 104 | cue_file_path = pathlib.Path(args.cue_file) 105 | convert_gdi_to_cue(gdi_file_path=pathlib.Path(args.gdi_file), cue_file_path=cue_file_path) 106 | 107 | 108 | def testgditocueconversion_main(): 109 | arg_parser = argparse.ArgumentParser() 110 | arg_parser.add_argument("gdi_zip") 111 | arg_parser.add_argument("cue_zip") 112 | args = arg_parser.parse_args() 113 | 114 | logging.basicConfig(format="%(message)s", level=logging.DEBUG) 115 | 116 | with zipfile.ZipFile(args.gdi_zip) as gdi_zip, zipfile.ZipFile(args.cue_zip) as cue_zip, tempfile.TemporaryDirectory() as temp_folder_name: 117 | temp_folder_path = pathlib.Path(temp_folder_name) 118 | 119 | for gdi_zip_member_info in gdi_zip.infolist(): 120 | temp_gdi_path = pathlib.Path(temp_folder_path, gdi_zip_member_info.filename) 121 | with open(temp_gdi_path, "wb") as temp_gdi_file, gdi_zip.open(gdi_zip_member_info) as gdi_zip_member_file: 122 | shutil.copyfileobj(gdi_zip_member_file, temp_gdi_file) 123 | 124 | cue_filename = gdi_zip_member_info.filename.replace(".gdi", ".cue") 125 | converted_cue_path = pathlib.Path(temp_folder_path, cue_filename) 126 | 127 | convert_gdi_to_cue(gdi_file_path=temp_gdi_path, cue_file_path=converted_cue_path) 128 | 129 | with cue_zip.open(cue_filename) as cue_zip_member_file: 130 | if cue_zip_member_file.read() == converted_cue_path.read_bytes(): 131 | logging.info(f"Converted file matches: {cue_filename}") 132 | else: 133 | logging.error(f"Converted file does not match: {cue_filename}") 134 | -------------------------------------------------------------------------------- /verifydump/convert.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import pathlib 4 | import re 5 | import subprocess 6 | import tempfile 7 | 8 | 9 | class ConversionException(Exception): 10 | def __init__(self, message: str, converted_file_path: pathlib.Path, tool_output: str = None): 11 | super().__init__(message) 12 | self.converted_file_path = converted_file_path 13 | self.tool_output = tool_output 14 | 15 | 16 | def convert_chd_to_normalized_redump_dump_folder(chd_path: pathlib.Path, redump_dump_folder: pathlib.Path, system: str, show_command_output: bool): 17 | """ 18 | Convert a dump file to Redump format in the specified folder. 19 | """ 20 | 21 | cue_file_path = pathlib.Path(redump_dump_folder, chd_path.stem + ".cue") 22 | 23 | if system and system.lower() in ("Sega - Dreamcast".lower(), "dc", "Arcade - Sega - Chihiro".lower(), "chihiro", "Arcade - Sega - Naomi".lower(), "naomi", "Arcade - Sega - Naomi 2".lower(), "naomi2", "Arcade - Namco - Sega - Nintendo - Triforce".lower(), "trf") and not "(Unl)".lower() in chd_path.stem.lower(): 24 | # These systems use GD-ROM media, which needs special handling. chdman does support GD-ROM dumps, but it only supports converting them to and from .gdi format and not to and from .cue format (it will attempt the conversion to or from .cue format, but the results will not be correct, at least as of chdman 0.240). The Redump Datfiles use .cue format, but we can still use chdman to get the correct .bin files by telling it to convert to .gdi format, because the .bin files are the same for .gdi and .cue format dumps. We then manually convert the .gdi into a .cue to verify it against the Datfile. 25 | # The Dreamcast discs with "(Unl)" in the name are unlicensed games that were distributed on CD rather than GD-ROM, and we ignore those here so that they are handled using the normal .bin/.cue processing. 26 | convert_chd_to_bin_gdi(chd_path, cue_file_path.parent, show_command_output) 27 | normalize_redump_bin_gdi_dump(cue_file_path) 28 | else: 29 | convert_chd_to_bincue(chd_path, cue_file_path, show_command_output) 30 | normalize_redump_bincue_dump(cue_file_path) 31 | 32 | 33 | def convert_chd_to_bincue(chd_file_path: pathlib.Path, output_cue_file_path: pathlib.Path, show_command_output: bool): 34 | # Use another temporary directory for the chdman output files to keep those separate from the binmerge output files: 35 | with tempfile.TemporaryDirectory() as chdman_output_folder_path_name: 36 | chdman_cue_file_path = pathlib.Path(chdman_output_folder_path_name, output_cue_file_path.name) 37 | 38 | logging.debug(f'Converting "{chd_file_path.name}" to .bin/.cue format') 39 | chdman_result = subprocess.run(["chdman", "extractcd", "--input", str(chd_file_path), "--output", str(chdman_cue_file_path)], stdout=None if show_command_output else subprocess.DEVNULL) 40 | if chdman_result.returncode != 0: 41 | # chdman provides useful progress output on stderr so we don't want to capture stderr when running it. That means we can't provide actual error output to the exception, but I can't find a way around that. 42 | raise ConversionException("Failed to convert .chd using chdman", chd_file_path, None) 43 | 44 | logging.debug(f'Splitting "{output_cue_file_path.name}" to use separate tracks if necessary') 45 | binmerge_result = subprocess.run(["binmerge", "--split", "-o", str(output_cue_file_path.parent), str(chdman_cue_file_path), chdman_cue_file_path.stem], capture_output=True, text=True) 46 | if show_command_output: 47 | print(binmerge_result.stdout, end="") 48 | if binmerge_result.returncode != 0: 49 | raise ConversionException("Failed to split .bin into separate tracks using binmerge", chd_file_path, binmerge_result.stdout) 50 | 51 | 52 | def normalize_redump_bincue_dump(cue_file_path: pathlib.Path): 53 | dump_path = cue_file_path.parent 54 | dump_name = cue_file_path.stem 55 | 56 | has_multiple_tracks = len(list(dump_path.glob(f"{dump_name} (Track *).bin"))) > 1 57 | if not has_multiple_tracks: 58 | original_bin_name = f"{dump_name} (Track 1).bin" 59 | single_track_bin_name = f"{dump_name}.bin" 60 | 61 | logging.debug(f'Renaming "{original_bin_name}" to "{single_track_bin_name}" because there is only one .bin file in the dump') 62 | 63 | os.rename( 64 | pathlib.Path(dump_path, original_bin_name), 65 | pathlib.Path(dump_path, single_track_bin_name), 66 | ) 67 | 68 | cue_file_path.write_text(cue_file_path.read_text().replace(f'FILE "{original_bin_name}"', f'FILE "{single_track_bin_name}"'), newline="\r\n") 69 | 70 | is_cue_iso_compatible = not has_multiple_tracks and re.match(r'^\s*FILE\s+"' + re.escape(f"{dump_name}.bin") + r'"\s*BINARY\s+TRACK 01 MODE1/2048\s+INDEX 01 00:00:00\s*$', cue_file_path.read_text()) 71 | if is_cue_iso_compatible: 72 | logging.debug(f'"{cue_file_path.name}" is .iso compatible so converting dump to .iso and discarding .cue') 73 | 74 | single_track_bin_path = pathlib.Path(dump_path, single_track_bin_name) 75 | iso_file_path = pathlib.Path(dump_path, f"{dump_name}.iso") 76 | 77 | single_track_bin_path.rename(iso_file_path) 78 | cue_file_path.unlink() 79 | 80 | 81 | def convert_chd_to_bin_gdi(chd_file_path: pathlib.Path, output_folder_path: pathlib.Path, show_command_output: bool): 82 | logging.debug(f'Converting "{chd_file_path.name}" to .bin/.gdi format') 83 | chdman_gdi_file_path = pathlib.Path(output_folder_path, chd_file_path.with_suffix(".gdi").name) 84 | chdman_result = subprocess.run(["chdman", "extractcd", "--input", str(chd_file_path), "--output", str(chdman_gdi_file_path)], stdout=None if show_command_output else subprocess.DEVNULL) 85 | if chdman_result.returncode != 0: 86 | # chdman provides useful progress output on stderr so we don't want to capture stderr when running it. That means we can't provide actual error output to the exception, but I can't find a way around that. 87 | raise ConversionException("Failed to convert .chd to .bin/.gdi using chdman", chd_file_path, None) 88 | 89 | 90 | def normalize_redump_bin_gdi_dump(cue_file_path: pathlib.Path): 91 | game_name = cue_file_path.stem 92 | 93 | bin_and_raw_file_paths = list(cue_file_path.parent.glob(f"{game_name}*.bin")) + list(cue_file_path.parent.glob(f"{game_name}*.raw")) 94 | redump_bin_filename_format = get_redump_bin_filename_format(game_name, len(bin_and_raw_file_paths)) 95 | 96 | track_number_parser = re.compile(f"^{re.escape(game_name)}(?P[0-9]+)\\.(?:bin|raw)$") 97 | 98 | for original_bin_or_raw_file_path in bin_and_raw_file_paths: 99 | track_number_parser_result = track_number_parser.match(original_bin_or_raw_file_path.name) 100 | if not track_number_parser_result: 101 | raise ConversionException(".bin/.raw file doesn't match expected filename pattern", original_bin_or_raw_file_path, None) 102 | track_number = int(track_number_parser_result.group("track_number")) 103 | redump_bin_filename = redump_bin_filename_format.format(track_number=track_number) 104 | original_bin_or_raw_file_path.rename(original_bin_or_raw_file_path.with_name(redump_bin_filename)) 105 | 106 | # The Datfile includes .cue files rather than .gdi files so convert our .gdi into a .cue: 107 | gdi_file_path = cue_file_path.with_suffix(".gdi") 108 | convert_gdi_to_cue(gdi_file_path=gdi_file_path, cue_file_path=cue_file_path) 109 | gdi_file_path.unlink() 110 | 111 | 112 | def get_redump_bin_filename_format(game_name: str, number_of_tracks: int) -> str: 113 | track_number_digits_needed = 2 if number_of_tracks >= 10 else 1 114 | return game_name + " (Track {track_number:0" + str(track_number_digits_needed) + "d}).bin" 115 | 116 | 117 | def convert_gdi_to_cue(gdi_file_path: pathlib.Path, cue_file_path: pathlib.Path): 118 | logging.debug(f'Converting "{gdi_file_path.name}" to .cue format') 119 | 120 | gdi_track_lines = gdi_file_path.read_text().splitlines()[1:] # The first line in the file is just the total number of tracks. 121 | 122 | redump_bin_filename_format = get_redump_bin_filename_format(gdi_file_path.stem, len(gdi_track_lines)) 123 | 124 | gdi_line_pattern = re.compile(r"^\s*(?P[0-9]+)\s+(?P[0-9]+)\s+(?P[0-9]+)\s+(?P[0-9]+)\s+(?P\".*?\")\s+(?P[0-9]+)$") 125 | 126 | with open(cue_file_path, "wt", encoding="utf-8", newline="\r\n") as cue_file: 127 | for gdi_track_line in gdi_track_lines: 128 | gdi_track_match = gdi_line_pattern.match(gdi_track_line) 129 | 130 | if gdi_track_match is None: 131 | raise ConversionException(f"Line in .gdi file didn't match expected format: {gdi_track_line}", gdi_file_path) 132 | 133 | track_number = int(gdi_track_match.group("track_number")) 134 | lba = int(gdi_track_match.group("lba")) 135 | gdi_track_mode = int(gdi_track_match.group("gdi_track_mode")) 136 | sector_size = int(gdi_track_match.group("sector_size")) 137 | 138 | if track_number == 1: 139 | if lba != 0: 140 | raise ConversionException(f"Unexpected LBA of first track: {lba}", gdi_file_path) 141 | cue_file.write("REM SINGLE-DENSITY AREA\n") 142 | 143 | if track_number == 3: 144 | if lba != 45000: 145 | raise ConversionException(f"Unexpected LBA of track 3: {lba}", gdi_file_path) 146 | cue_file.write("REM HIGH-DENSITY AREA\n") 147 | 148 | if gdi_track_mode == 0: 149 | cue_track_mode = "AUDIO" 150 | elif gdi_track_mode == 4: 151 | # This isn't a perfect, because a track with .gdi mode 4 and one of these sector sizes could also be a .cue MODE 2 track, but I don't see a way to determine that from the .gdi file: 152 | if sector_size == 2048 or sector_size == 2352: 153 | cue_track_mode = f"MODE1/{sector_size:04d}" 154 | else: 155 | cue_track_mode = f"MODE2/{sector_size:04d}" 156 | else: 157 | raise ConversionException(f"Unexpected .gdi track mode: {gdi_track_mode}", gdi_file_path) 158 | 159 | cue_file.write(f'FILE "{redump_bin_filename_format.format(track_number=track_number)}" BINARY\n') 160 | cue_file.write(f" TRACK {track_number:02d} {cue_track_mode}\n") 161 | # The .gdi format apparently doesn't store information about the track pre-gaps, but it does seem that the pattern used on GD-ROM discs is predictable so we can just recreate them with some simple logic: 162 | if cue_track_mode == "AUDIO": 163 | cue_file.write(" INDEX 00 00:00:00\n") 164 | cue_file.write(" INDEX 01 00:02:00\n") 165 | else: 166 | if track_number == 1 or track_number == 3: 167 | # It's the first track of the single-density or high-density area. 168 | cue_file.write(" INDEX 01 00:00:00\n") 169 | elif track_number == len(gdi_track_lines): 170 | # It's the last track on the disc. 171 | cue_file.write(" INDEX 00 00:00:00\n") 172 | cue_file.write(" INDEX 01 00:03:00\n") 173 | else: 174 | # I think this is correct, but haven't verified it with an actual example (and I'm not even certain if there are allowed to be multiple data tracks in an area on GD-ROM discs). 175 | cue_file.write(" INDEX 00 00:00:00\n") 176 | cue_file.write(" INDEX 01 00:02:00\n") 177 | 178 | 179 | def get_sha1hex_for_rvz(rvz_path, show_command_output: bool) -> str: 180 | with tempfile.TemporaryDirectory() as dolphin_tool_user_folder_name: 181 | dolphintool_result = subprocess.run( 182 | ["DolphinTool", "verify", "-u", dolphin_tool_user_folder_name, "-i", str(rvz_path), "--algorithm=sha1"], 183 | capture_output=True, 184 | text=True, 185 | ) 186 | 187 | if show_command_output: 188 | print(dolphintool_result.stderr, end="") 189 | 190 | if dolphintool_result.returncode != 0: 191 | raise ConversionException("Failed to find SHA-1 using DolphinTool", rvz_path, dolphintool_result.stderr) 192 | 193 | return dolphintool_result.stdout.strip() 194 | -------------------------------------------------------------------------------- /verifydump/verify.py: -------------------------------------------------------------------------------- 1 | import enum 2 | import hashlib 3 | import logging 4 | import os 5 | import pathlib 6 | import shutil 7 | import sys 8 | import tempfile 9 | import typing 10 | import zipfile 11 | 12 | from .convert import ConversionException, convert_chd_to_normalized_redump_dump_folder, get_sha1hex_for_rvz 13 | from .dat import Dat, Game 14 | 15 | 16 | class VerificationException(Exception): 17 | pass 18 | 19 | 20 | @enum.unique 21 | class CueVerificationResult(enum.Enum): 22 | NO_CUE_NEEDED = enum.auto() 23 | GENERATED_CUE_VERIFIED_EXACTLY = enum.auto() 24 | GENERATED_CUE_MATCHES_ESSENTIALS_FROM_EXTRA_CUE = enum.auto() 25 | GENERATED_CUE_MISMATCH_WITH_NO_EXTRA_CUE_PROVIDED = enum.auto() 26 | GENERATED_CUE_DOES_NOT_MATCH_ESSENTIALS_FROM_EXTRA_CUE = enum.auto() 27 | 28 | 29 | class VerificationResult: 30 | def __init__(self, game: Game, cue_verification_result: CueVerificationResult): 31 | self.game = game 32 | self.cue_verification_result = cue_verification_result 33 | 34 | 35 | def verify_chd(chd_path: pathlib.Path, dat: Dat, show_command_output: bool, allow_cue_mismatches: bool, extra_cue_source: pathlib.Path) -> Game: 36 | logging.debug(f'Verifying dump file "{chd_path}"') 37 | with tempfile.TemporaryDirectory() as redump_dump_folder_name: 38 | redump_dump_folder = pathlib.Path(redump_dump_folder_name) 39 | convert_chd_to_normalized_redump_dump_folder(chd_path, redump_dump_folder, system=dat.system, show_command_output=show_command_output) 40 | verification_result = verify_redump_dump_folder(redump_dump_folder, dat=dat, extra_cue_source=extra_cue_source) 41 | 42 | if verification_result.cue_verification_result in (CueVerificationResult.NO_CUE_NEEDED, CueVerificationResult.GENERATED_CUE_VERIFIED_EXACTLY): 43 | logging.info(f'Dump verified correct and complete: "{verification_result.game.name}"') 44 | elif verification_result.cue_verification_result == CueVerificationResult.GENERATED_CUE_MATCHES_ESSENTIALS_FROM_EXTRA_CUE: 45 | logging.info(f'Dump .bin files verified correct and complete, and .cue essential structure matches: "{verification_result.game.name}"') 46 | elif verification_result.cue_verification_result == CueVerificationResult.GENERATED_CUE_MISMATCH_WITH_NO_EXTRA_CUE_PROVIDED: 47 | message = f'"{verification_result.game.name}" .bin files verified and complete, but .cue does not match Datfile' 48 | 49 | if allow_cue_mismatches: 50 | logging.warn(message) 51 | else: 52 | message += "\nYou can either supply the original .cue file yourself using the '--extra-cue-source' option so that we can check that the generated .cue file's essential structure is correct, or ignore .cue file errors with the '--allow-cue-file-mismatches' option" 53 | raise VerificationException(message) 54 | elif verification_result.cue_verification_result == CueVerificationResult.GENERATED_CUE_DOES_NOT_MATCH_ESSENTIALS_FROM_EXTRA_CUE: 55 | message = f'"{verification_result.game.name}" .bin files verified and complete, but .cue does not match Datfile or essential structure from extra .cue source' 56 | 57 | if allow_cue_mismatches: 58 | logging.warn(message) 59 | else: 60 | message += f"\nYou can choose to ignore .cue file errors with the '--allow-cue-file-mismatches' option" 61 | raise VerificationException(message) 62 | else: 63 | raise Exception(f"Unhandled CueVerificationResult value: {verification_result.cue_verification_result}") 64 | 65 | return verification_result.game 66 | 67 | 68 | class FileLikeHashUpdater: 69 | def __init__(self, hash): 70 | self.hash = hash 71 | 72 | def write(self, b): 73 | self.hash.update(b) 74 | 75 | 76 | # These are simply all the commands that are used in chdman's .cue file writing code: 77 | CHDMAN_SUPPORTED_CUE_COMMANDS = frozenset(("FILE", "TRACK", "PREGAP", "INDEX", "POSTGAP")) 78 | 79 | 80 | def strip_insignificant_whitespace_and_chdman_unsupported_commands_from_cue(cue_text: str) -> str: 81 | stripped_cue_lines = (line.strip() for line in cue_text.splitlines()) 82 | supported_cue_lines = (line for line in stripped_cue_lines if line.split(" ", 1)[0].upper() in CHDMAN_SUPPORTED_CUE_COMMANDS) 83 | return "\n".join(supported_cue_lines) 84 | 85 | 86 | def verify_redump_dump_folder(dump_folder: pathlib.Path, dat: Dat, extra_cue_source: pathlib.Path) -> VerificationResult: 87 | verified_roms = [] 88 | 89 | cue_verified = False 90 | 91 | for dump_file_path in dump_folder.iterdir(): 92 | if not dump_file_path.is_file(): 93 | raise VerificationException(f"Unexpected non-file in dump folder: {dump_file_path.name}") 94 | 95 | dump_file_is_cue = dump_file_path.suffix.lower() == ".cue" 96 | 97 | with open(dump_file_path, "rb") as dump_file: 98 | hash = hashlib.sha1() 99 | shutil.copyfileobj(dump_file, FileLikeHashUpdater(hash)) 100 | dump_file_sha1hex = hash.hexdigest() 101 | 102 | roms_with_matching_sha1 = dat.roms_by_sha1hex.get(dump_file_sha1hex) 103 | 104 | if not roms_with_matching_sha1: 105 | if dump_file_is_cue: 106 | cue_verified = False 107 | continue 108 | raise VerificationException(f'SHA-1 of dump file "{dump_file_path.name}" doesn\'t match any file in the Dat') 109 | 110 | rom_with_matching_sha1_and_name = next((rom for rom in roms_with_matching_sha1 if rom.name == dump_file_path.name), None) 111 | 112 | if not rom_with_matching_sha1_and_name: 113 | list_of_rom_names_that_match_sha1 = " or ".join([f'"{rom.name}"' for rom in roms_with_matching_sha1]) 114 | raise VerificationException(f'Dump file "{dump_file_path.name}" found in Dat, but it should be named {list_of_rom_names_that_match_sha1}') 115 | 116 | if rom_with_matching_sha1_and_name.size != dump_file_path.stat().st_size: 117 | print(f"{rom_with_matching_sha1_and_name.size} {dump_file_path.stat().st_size}") 118 | raise VerificationException(f'Dump file "{dump_file_path.name}" found in Dat, but it has the wrong size') 119 | 120 | rom = rom_with_matching_sha1_and_name 121 | 122 | if dump_file_is_cue: 123 | cue_verified = True 124 | 125 | logging.debug(f'Dump file "{rom.name}" found in Dat and verified') 126 | 127 | if len(verified_roms) > 0: 128 | previously_verified_roms_game = verified_roms[0].game 129 | if rom.game != previously_verified_roms_game: 130 | raise VerificationException(f'Dump file "{rom.name}" is from game "{rom.game.name}", but at least one other file in this dump is from "{previously_verified_roms_game.name}"') 131 | 132 | verified_roms.append(rom) 133 | 134 | if len(verified_roms) == 0: 135 | raise VerificationException("No game files found in dump folder") 136 | 137 | game = verified_roms[0].game 138 | 139 | for game_rom in game.roms: 140 | if game_rom not in verified_roms: 141 | if not game_rom.name.lower().endswith(".cue"): 142 | raise VerificationException(f'Game file "{game_rom.name}" is missing in dump') 143 | 144 | for verified_rom in verified_roms: 145 | if verified_rom not in game.roms: 146 | # This shouldn't be possible because of the logic above where we check that all files are from the same game, but it feels like it's worth keeping this as a sanity check. 147 | raise VerificationException(f'Dump has extra file "{verified_rom.name}" that isn\'t associated with the game "{game.name}" in the Dat') 148 | 149 | if cue_verified: 150 | return VerificationResult(game=game, cue_verification_result=CueVerificationResult.GENERATED_CUE_VERIFIED_EXACTLY) 151 | 152 | game_cue_rom = next((game_rom for game_rom in game.roms if game_rom.name.lower().endswith(".cue")), None) 153 | if game_cue_rom is None: 154 | return VerificationResult(game=game, cue_verification_result=CueVerificationResult.NO_CUE_NEEDED) 155 | 156 | if not extra_cue_source: 157 | return VerificationResult(game=game, cue_verification_result=CueVerificationResult.GENERATED_CUE_MISMATCH_WITH_NO_EXTRA_CUE_PROVIDED) 158 | 159 | if extra_cue_source.is_dir(): 160 | extra_cue_file_path = pathlib.Path(extra_cue_source, game_cue_rom.name) 161 | if not extra_cue_file_path.exists(): 162 | # This is subtley different from the file-existence check we do below that raises an exception. Here it's reasonable for the user to provide a folder of extra .cue files that doesn't include a .cue for this particular game: 163 | logging.debug(f'"{game_cue_rom.name}" doesn\'t match Datfile, and no matching file was found in the extra .cue folder to compare it with') 164 | return VerificationResult(game=game, cue_verification_result=CueVerificationResult.GENERATED_CUE_MISMATCH_WITH_NO_EXTRA_CUE_PROVIDED) 165 | else: 166 | extra_cue_file_path = extra_cue_source 167 | 168 | if not extra_cue_file_path.exists(): 169 | raise VerificationException(f'Extra .cue file source "{extra_cue_file_path}" doesn\'t exist') 170 | 171 | if extra_cue_file_path.suffix.lower() == ".zip": 172 | with zipfile.ZipFile(extra_cue_file_path) as zip: 173 | try: 174 | zip_member_info = zip.getinfo(game_cue_rom.name) 175 | except KeyError: 176 | logging.debug(f'"{game_cue_rom.name}" doesn\'t match Datfile, and no matching file was found in the extra .cue zip to compare it with') 177 | return VerificationResult(game=game, cue_verification_result=CueVerificationResult.GENERATED_CUE_MISMATCH_WITH_NO_EXTRA_CUE_PROVIDED) 178 | 179 | with zip.open(zip_member_info) as zip_member: 180 | extra_cue_bytes = zip_member.read() 181 | else: 182 | extra_cue_bytes = extra_cue_file_path.read_bytes() 183 | 184 | extra_cue_sha1hex = hashlib.sha1(extra_cue_bytes).hexdigest() 185 | 186 | if extra_cue_sha1hex != game_cue_rom.sha1hex: 187 | raise VerificationException(f'Provided extra .cue file "{game_cue_rom.name}" doesn\'t match Datfile') 188 | 189 | with open(pathlib.Path(dump_folder, game_cue_rom.name), "rb") as dump_cue_file: 190 | dump_cue_bytes = dump_cue_file.read() 191 | 192 | EXPECTED_CUE_ENCODING = "UTF-8" 193 | try: 194 | dump_cue_text = dump_cue_bytes.decode(EXPECTED_CUE_ENCODING) 195 | except UnicodeError: 196 | raise VerificationException(f'Failed to decode generated .cue file "{game_cue_rom.name}" as {EXPECTED_CUE_ENCODING}') 197 | try: 198 | extra_cue_text = extra_cue_bytes.decode(EXPECTED_CUE_ENCODING) 199 | except UnicodeError: 200 | raise VerificationException(f'Failed to decode provided .cue file "{game_cue_rom.name}" as {EXPECTED_CUE_ENCODING}') 201 | 202 | if strip_insignificant_whitespace_and_chdman_unsupported_commands_from_cue(dump_cue_text) == strip_insignificant_whitespace_and_chdman_unsupported_commands_from_cue(extra_cue_text): 203 | logging.debug(f'Dump file "{game_cue_rom.name}" matches essential parts of provided extra .cue file, and extra .cue file matches the Datfile') 204 | return VerificationResult(game=game, cue_verification_result=CueVerificationResult.GENERATED_CUE_MATCHES_ESSENTIALS_FROM_EXTRA_CUE) 205 | 206 | logging.debug(f'Dump file "{game_cue_rom.name}" does not match essential parts of provided extra .cue file, but extra .cue file does match the Datfile') 207 | return VerificationResult(game=game, cue_verification_result=CueVerificationResult.GENERATED_CUE_DOES_NOT_MATCH_ESSENTIALS_FROM_EXTRA_CUE) 208 | 209 | 210 | def verify_rvz(rvz_path: pathlib.Path, dat: Dat, show_command_output: bool) -> Game: 211 | logging.debug(f'Verifying dump file "{rvz_path}"') 212 | 213 | sha1hex = get_sha1hex_for_rvz(rvz_path, show_command_output=show_command_output) 214 | 215 | roms_with_matching_sha1 = dat.roms_by_sha1hex.get(sha1hex) 216 | 217 | if not roms_with_matching_sha1: 218 | raise VerificationException(f'SHA-1 of uncompressed version of "{rvz_path}" doesn\'t match any file in the Dat') 219 | 220 | expected_rom_name = rvz_path.with_suffix(".iso").name 221 | 222 | rom_with_matching_sha1_and_name = next((rom for rom in roms_with_matching_sha1 if rom.name == expected_rom_name), None) 223 | 224 | if not rom_with_matching_sha1_and_name: 225 | list_of_rom_names_that_match_sha1 = " or ".join([f'"{rom.name.replace(".iso", ".rvz")}"' for rom in roms_with_matching_sha1]) 226 | raise VerificationException(f'Dump file "{rvz_path.name}" found in Dat, but it should be named {list_of_rom_names_that_match_sha1}') 227 | 228 | logging.info(f'Dump verified correct and complete: "{rom_with_matching_sha1_and_name.game.name}"') 229 | return rom_with_matching_sha1_and_name.game 230 | 231 | 232 | def verify_dumps(dat: Dat, dump_file_or_folder_paths: typing.List[pathlib.Path], show_command_output: bool, allow_cue_mismatches: bool, extra_cue_source: pathlib.Path) -> tuple[list, list]: 233 | verified_games = [] 234 | errors = [] 235 | 236 | def verify_dump_if_format_is_supported(dump_path: pathlib.Path, error_if_unsupported: bool): 237 | suffix_lower = dump_path.suffix.lower() 238 | try: 239 | if suffix_lower == ".chd": 240 | verified_games.append(verify_chd(dump_path, dat=dat, show_command_output=show_command_output, allow_cue_mismatches=allow_cue_mismatches, extra_cue_source=extra_cue_source)) 241 | elif suffix_lower == ".rvz": 242 | verified_games.append(verify_rvz(dump_path, dat=dat, show_command_output=show_command_output)) 243 | elif error_if_unsupported: 244 | raise VerificationException(f'{pathlib.Path(sys.argv[0]).stem} doesn\'t know how to handle "{suffix_lower}" dumps') 245 | except VerificationException as e: 246 | errors.append(e) 247 | except ConversionException as e: 248 | errors.append(e) 249 | 250 | for dump_file_or_folder_path in dump_file_or_folder_paths: 251 | if dump_file_or_folder_path.is_dir(): 252 | for (dir_path, _, filenames) in os.walk(dump_file_or_folder_path, followlinks=True): 253 | for filename in filenames: 254 | full_path = pathlib.Path(dump_file_or_folder_path, dir_path, filename) 255 | verify_dump_if_format_is_supported(full_path, error_if_unsupported=False) 256 | 257 | else: 258 | verify_dump_if_format_is_supported(dump_file_or_folder_path, error_if_unsupported=True) 259 | 260 | return (verified_games, errors) 261 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "altgraph" 3 | version = "0.17.2" 4 | description = "Python graph (network) package" 5 | category = "dev" 6 | optional = false 7 | python-versions = "*" 8 | 9 | [[package]] 10 | name = "atomicwrites" 11 | version = "1.4.0" 12 | description = "Atomic file writes." 13 | category = "dev" 14 | optional = false 15 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 16 | 17 | [[package]] 18 | name = "attrs" 19 | version = "21.4.0" 20 | description = "Classes Without Boilerplate" 21 | category = "dev" 22 | optional = false 23 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 24 | 25 | [package.extras] 26 | dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] 27 | docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] 28 | tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] 29 | tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] 30 | 31 | [[package]] 32 | name = "black" 33 | version = "22.3.0" 34 | description = "The uncompromising code formatter." 35 | category = "dev" 36 | optional = false 37 | python-versions = ">=3.6.2" 38 | 39 | [package.dependencies] 40 | click = ">=8.0.0" 41 | mypy-extensions = ">=0.4.3" 42 | pathspec = ">=0.9.0" 43 | platformdirs = ">=2" 44 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 45 | 46 | [package.extras] 47 | colorama = ["colorama (>=0.4.3)"] 48 | d = ["aiohttp (>=3.7.4)"] 49 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] 50 | uvloop = ["uvloop (>=0.15.2)"] 51 | 52 | [[package]] 53 | name = "cfgv" 54 | version = "3.3.1" 55 | description = "Validate configuration and produce human readable error messages." 56 | category = "dev" 57 | optional = false 58 | python-versions = ">=3.6.1" 59 | 60 | [[package]] 61 | name = "click" 62 | version = "8.1.2" 63 | description = "Composable command line interface toolkit" 64 | category = "dev" 65 | optional = false 66 | python-versions = ">=3.7" 67 | 68 | [package.dependencies] 69 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 70 | 71 | [[package]] 72 | name = "colorama" 73 | version = "0.4.4" 74 | description = "Cross-platform colored terminal text." 75 | category = "dev" 76 | optional = false 77 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 78 | 79 | [[package]] 80 | name = "distlib" 81 | version = "0.3.4" 82 | description = "Distribution utilities" 83 | category = "dev" 84 | optional = false 85 | python-versions = "*" 86 | 87 | [[package]] 88 | name = "filelock" 89 | version = "3.6.0" 90 | description = "A platform independent file lock." 91 | category = "dev" 92 | optional = false 93 | python-versions = ">=3.7" 94 | 95 | [package.extras] 96 | docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] 97 | testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] 98 | 99 | [[package]] 100 | name = "future" 101 | version = "0.18.2" 102 | description = "Clean single-source support for Python 3 and 2" 103 | category = "dev" 104 | optional = false 105 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 106 | 107 | [[package]] 108 | name = "identify" 109 | version = "2.4.12" 110 | description = "File identification library for Python" 111 | category = "dev" 112 | optional = false 113 | python-versions = ">=3.7" 114 | 115 | [package.extras] 116 | license = ["ukkonen"] 117 | 118 | [[package]] 119 | name = "iniconfig" 120 | version = "1.1.1" 121 | description = "iniconfig: brain-dead simple config-ini parsing" 122 | category = "dev" 123 | optional = false 124 | python-versions = "*" 125 | 126 | [[package]] 127 | name = "macholib" 128 | version = "1.16" 129 | description = "Mach-O header analysis and editing" 130 | category = "dev" 131 | optional = false 132 | python-versions = "*" 133 | 134 | [package.dependencies] 135 | altgraph = ">=0.15" 136 | 137 | [[package]] 138 | name = "mypy-extensions" 139 | version = "0.4.3" 140 | description = "Experimental type system extensions for programs checked with the mypy typechecker." 141 | category = "dev" 142 | optional = false 143 | python-versions = "*" 144 | 145 | [[package]] 146 | name = "nodeenv" 147 | version = "1.6.0" 148 | description = "Node.js virtual environment builder" 149 | category = "dev" 150 | optional = false 151 | python-versions = "*" 152 | 153 | [[package]] 154 | name = "packaging" 155 | version = "21.3" 156 | description = "Core utilities for Python packages" 157 | category = "dev" 158 | optional = false 159 | python-versions = ">=3.6" 160 | 161 | [package.dependencies] 162 | pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" 163 | 164 | [[package]] 165 | name = "pathspec" 166 | version = "0.9.0" 167 | description = "Utility library for gitignore style pattern matching of file paths." 168 | category = "dev" 169 | optional = false 170 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 171 | 172 | [[package]] 173 | name = "pefile" 174 | version = "2021.9.3" 175 | description = "Python PE parsing module" 176 | category = "dev" 177 | optional = false 178 | python-versions = ">=3.6.0" 179 | 180 | [package.dependencies] 181 | future = "*" 182 | 183 | [[package]] 184 | name = "platformdirs" 185 | version = "2.5.1" 186 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 187 | category = "dev" 188 | optional = false 189 | python-versions = ">=3.7" 190 | 191 | [package.extras] 192 | docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] 193 | test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] 194 | 195 | [[package]] 196 | name = "pluggy" 197 | version = "0.13.1" 198 | description = "plugin and hook calling mechanisms for python" 199 | category = "dev" 200 | optional = false 201 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 202 | 203 | [package.extras] 204 | dev = ["pre-commit", "tox"] 205 | 206 | [[package]] 207 | name = "pre-commit" 208 | version = "2.17.0" 209 | description = "A framework for managing and maintaining multi-language pre-commit hooks." 210 | category = "dev" 211 | optional = false 212 | python-versions = ">=3.6.1" 213 | 214 | [package.dependencies] 215 | cfgv = ">=2.0.0" 216 | identify = ">=1.0.0" 217 | nodeenv = ">=0.11.1" 218 | pyyaml = ">=5.1" 219 | toml = "*" 220 | virtualenv = ">=20.0.8" 221 | 222 | [[package]] 223 | name = "py" 224 | version = "1.11.0" 225 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 226 | category = "dev" 227 | optional = false 228 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 229 | 230 | [[package]] 231 | name = "pyinstaller" 232 | version = "5.1" 233 | description = "PyInstaller bundles a Python application and all its dependencies into a single package." 234 | category = "dev" 235 | optional = false 236 | python-versions = "<3.11,>=3.7" 237 | 238 | [package.dependencies] 239 | altgraph = "*" 240 | macholib = {version = ">=1.8", markers = "sys_platform == \"darwin\""} 241 | pefile = {version = ">=2017.8.1", markers = "sys_platform == \"win32\""} 242 | pyinstaller-hooks-contrib = ">=2021.4" 243 | pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} 244 | 245 | [package.extras] 246 | encryption = ["tinyaes (>=1.0.0)"] 247 | hook_testing = ["pytest (>=2.7.3)", "execnet (>=1.5.0)", "psutil"] 248 | 249 | [[package]] 250 | name = "pyinstaller-hooks-contrib" 251 | version = "2022.5" 252 | description = "Community maintained hooks for PyInstaller" 253 | category = "dev" 254 | optional = false 255 | python-versions = ">=3.7" 256 | 257 | [[package]] 258 | name = "pyparsing" 259 | version = "3.0.7" 260 | description = "Python parsing module" 261 | category = "dev" 262 | optional = false 263 | python-versions = ">=3.6" 264 | 265 | [package.extras] 266 | diagrams = ["jinja2", "railroad-diagrams"] 267 | 268 | [[package]] 269 | name = "pytest" 270 | version = "7.1.1" 271 | description = "pytest: simple powerful testing with Python" 272 | category = "dev" 273 | optional = false 274 | python-versions = ">=3.7" 275 | 276 | [package.dependencies] 277 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} 278 | attrs = ">=19.2.0" 279 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 280 | iniconfig = "*" 281 | packaging = "*" 282 | pluggy = ">=0.12,<2.0" 283 | py = ">=1.8.2" 284 | tomli = ">=1.0.0" 285 | 286 | [package.extras] 287 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] 288 | 289 | [[package]] 290 | name = "pywin32-ctypes" 291 | version = "0.2.0" 292 | description = "" 293 | category = "dev" 294 | optional = false 295 | python-versions = "*" 296 | 297 | [[package]] 298 | name = "pyyaml" 299 | version = "6.0" 300 | description = "YAML parser and emitter for Python" 301 | category = "dev" 302 | optional = false 303 | python-versions = ">=3.6" 304 | 305 | [[package]] 306 | name = "six" 307 | version = "1.16.0" 308 | description = "Python 2 and 3 compatibility utilities" 309 | category = "dev" 310 | optional = false 311 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 312 | 313 | [[package]] 314 | name = "toml" 315 | version = "0.10.2" 316 | description = "Python Library for Tom's Obvious, Minimal Language" 317 | category = "dev" 318 | optional = false 319 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 320 | 321 | [[package]] 322 | name = "tomli" 323 | version = "2.0.1" 324 | description = "A lil' TOML parser" 325 | category = "dev" 326 | optional = false 327 | python-versions = ">=3.7" 328 | 329 | [[package]] 330 | name = "virtualenv" 331 | version = "20.14.0" 332 | description = "Virtual Python Environment builder" 333 | category = "dev" 334 | optional = false 335 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 336 | 337 | [package.dependencies] 338 | distlib = ">=0.3.1,<1" 339 | filelock = ">=3.2,<4" 340 | platformdirs = ">=2,<3" 341 | six = ">=1.9.0,<2" 342 | 343 | [package.extras] 344 | docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] 345 | testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] 346 | 347 | [metadata] 348 | lock-version = "1.1" 349 | python-versions = "^3.10,<3.11" # The upper-bound here is just because that is required for PyInstaller. 350 | content-hash = "a88158fad115ec8ea4a7371e08427e828df6e77ba430846e1964510b11b89bcf" 351 | 352 | [metadata.files] 353 | altgraph = [ 354 | {file = "altgraph-0.17.2-py2.py3-none-any.whl", hash = "sha256:743628f2ac6a7c26f5d9223c91ed8ecbba535f506f4b6f558885a8a56a105857"}, 355 | {file = "altgraph-0.17.2.tar.gz", hash = "sha256:ebf2269361b47d97b3b88e696439f6e4cbc607c17c51feb1754f90fb79839158"}, 356 | ] 357 | atomicwrites = [ 358 | {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, 359 | {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, 360 | ] 361 | attrs = [ 362 | {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, 363 | {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, 364 | ] 365 | black = [ 366 | {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"}, 367 | {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"}, 368 | {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"}, 369 | {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"}, 370 | {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"}, 371 | {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"}, 372 | {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"}, 373 | {file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"}, 374 | {file = "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"}, 375 | {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"}, 376 | {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"}, 377 | {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"}, 378 | {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"}, 379 | {file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"}, 380 | {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"}, 381 | {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"}, 382 | {file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"}, 383 | {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"}, 384 | {file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"}, 385 | {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"}, 386 | {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"}, 387 | {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"}, 388 | {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"}, 389 | ] 390 | cfgv = [ 391 | {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, 392 | {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, 393 | ] 394 | click = [ 395 | {file = "click-8.1.2-py3-none-any.whl", hash = "sha256:24e1a4a9ec5bf6299411369b208c1df2188d9eb8d916302fe6bf03faed227f1e"}, 396 | {file = "click-8.1.2.tar.gz", hash = "sha256:479707fe14d9ec9a0757618b7a100a0ae4c4e236fac5b7f80ca68028141a1a72"}, 397 | ] 398 | colorama = [ 399 | {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, 400 | {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, 401 | ] 402 | distlib = [ 403 | {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, 404 | {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, 405 | ] 406 | filelock = [ 407 | {file = "filelock-3.6.0-py3-none-any.whl", hash = "sha256:f8314284bfffbdcfa0ff3d7992b023d4c628ced6feb957351d4c48d059f56bc0"}, 408 | {file = "filelock-3.6.0.tar.gz", hash = "sha256:9cd540a9352e432c7246a48fe4e8712b10acb1df2ad1f30e8c070b82ae1fed85"}, 409 | ] 410 | future = [ 411 | {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, 412 | ] 413 | identify = [ 414 | {file = "identify-2.4.12-py2.py3-none-any.whl", hash = "sha256:5f06b14366bd1facb88b00540a1de05b69b310cbc2654db3c7e07fa3a4339323"}, 415 | {file = "identify-2.4.12.tar.gz", hash = "sha256:3f3244a559290e7d3deb9e9adc7b33594c1bc85a9dd82e0f1be519bf12a1ec17"}, 416 | ] 417 | iniconfig = [ 418 | {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, 419 | {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, 420 | ] 421 | macholib = [ 422 | {file = "macholib-1.16-py2.py3-none-any.whl", hash = "sha256:5a0742b587e6e57bfade1ab90651d4877185bf66fd4a176a488116de36878229"}, 423 | {file = "macholib-1.16.tar.gz", hash = "sha256:001bf281279b986a66d7821790d734e61150d52f40c080899df8fefae056e9f7"}, 424 | ] 425 | mypy-extensions = [ 426 | {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, 427 | {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, 428 | ] 429 | nodeenv = [ 430 | {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"}, 431 | {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"}, 432 | ] 433 | packaging = [ 434 | {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, 435 | {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, 436 | ] 437 | pathspec = [ 438 | {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, 439 | {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, 440 | ] 441 | pefile = [ 442 | {file = "pefile-2021.9.3.tar.gz", hash = "sha256:344a49e40a94e10849f0fe34dddc80f773a12b40675bf2f7be4b8be578bdd94a"}, 443 | ] 444 | platformdirs = [ 445 | {file = "platformdirs-2.5.1-py3-none-any.whl", hash = "sha256:bcae7cab893c2d310a711b70b24efb93334febe65f8de776ee320b517471e227"}, 446 | {file = "platformdirs-2.5.1.tar.gz", hash = "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d"}, 447 | ] 448 | pluggy = [ 449 | {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, 450 | {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, 451 | ] 452 | pre-commit = [ 453 | {file = "pre_commit-2.17.0-py2.py3-none-any.whl", hash = "sha256:725fa7459782d7bec5ead072810e47351de01709be838c2ce1726b9591dad616"}, 454 | {file = "pre_commit-2.17.0.tar.gz", hash = "sha256:c1a8040ff15ad3d648c70cc3e55b93e4d2d5b687320955505587fd79bbaed06a"}, 455 | ] 456 | py = [ 457 | {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, 458 | {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, 459 | ] 460 | pyinstaller = [ 461 | {file = "pyinstaller-5.1-py3-none-macosx_10_13_universal2.whl", hash = "sha256:3c9bc373b4a4bf6d81b306b9918e290a3de582d7b50a0f6f4e837970a4db0c54"}, 462 | {file = "pyinstaller-5.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:ac003d49bbd62f6712b5631487049cee4f0a07445dd31c6d05e5724e33689fca"}, 463 | {file = "pyinstaller-5.1-py3-none-manylinux2014_i686.whl", hash = "sha256:f786cb013ebfd3fb23ddb46285902663e31256220b12b368f8c36e859a77ebc9"}, 464 | {file = "pyinstaller-5.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:d8bfe5c32b4b5aaa91a5c1c7912fe3e5d13e7671f3468ee7742ec71ca192a975"}, 465 | {file = "pyinstaller-5.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:c421b46ebf81ad4498de480640ce1a47ac15dfd3938dfc2e1c0846cb5cdb0c59"}, 466 | {file = "pyinstaller-5.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:588f2840c27a8a7e1b1fa361f107a1060c5096b36d3c94c243b23687ee41609d"}, 467 | {file = "pyinstaller-5.1-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:d3852c1f8fe6ca2f5c7942c845dc6d1f6140aa7860fe3500eac3ac88875d1dd4"}, 468 | {file = "pyinstaller-5.1-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:29b3f17d75ff58b12db7b0116df1b3382c8345aefd500a177987eaf0599dec03"}, 469 | {file = "pyinstaller-5.1-py3-none-win32.whl", hash = "sha256:a90ee3181c88aed0893bf537a50ad547655f5d555a5b731215a5fe1946d2f3db"}, 470 | {file = "pyinstaller-5.1-py3-none-win_amd64.whl", hash = "sha256:996e296d1dfe27c3e89efc26dbcc247f49c0801a9b5e22e2b39f5b983cf5c2f5"}, 471 | {file = "pyinstaller-5.1.tar.gz", hash = "sha256:9596c70c860cbce19537354db95b180351959b4cd14a70db6ab1d1432668c313"}, 472 | ] 473 | pyinstaller-hooks-contrib = [ 474 | {file = "pyinstaller-hooks-contrib-2022.5.tar.gz", hash = "sha256:90a05207ceea2f8c166f12c3add46e24c0ed6a78234e5f99320f8683d56e0dec"}, 475 | {file = "pyinstaller_hooks_contrib-2022.5-py2.py3-none-any.whl", hash = "sha256:d0ff4c13eda0104db6eb43c60641c19cdf2bc2d6a2a1f0855d26629c3dacc8b7"}, 476 | ] 477 | pyparsing = [ 478 | {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, 479 | {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, 480 | ] 481 | pytest = [ 482 | {file = "pytest-7.1.1-py3-none-any.whl", hash = "sha256:92f723789a8fdd7180b6b06483874feca4c48a5c76968e03bb3e7f806a1869ea"}, 483 | {file = "pytest-7.1.1.tar.gz", hash = "sha256:841132caef6b1ad17a9afde46dc4f6cfa59a05f9555aae5151f73bdf2820ca63"}, 484 | ] 485 | pywin32-ctypes = [ 486 | {file = "pywin32-ctypes-0.2.0.tar.gz", hash = "sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942"}, 487 | {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"}, 488 | ] 489 | pyyaml = [ 490 | {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, 491 | {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, 492 | {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, 493 | {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, 494 | {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, 495 | {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, 496 | {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, 497 | {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, 498 | {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, 499 | {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, 500 | {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, 501 | {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, 502 | {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, 503 | {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, 504 | {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, 505 | {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, 506 | {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, 507 | {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, 508 | {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, 509 | {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, 510 | {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, 511 | {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, 512 | {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, 513 | {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, 514 | {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, 515 | {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, 516 | {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, 517 | {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, 518 | {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, 519 | {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, 520 | {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, 521 | {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, 522 | {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, 523 | ] 524 | six = [ 525 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 526 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 527 | ] 528 | toml = [ 529 | {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, 530 | {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, 531 | ] 532 | tomli = [ 533 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 534 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 535 | ] 536 | virtualenv = [ 537 | {file = "virtualenv-20.14.0-py2.py3-none-any.whl", hash = "sha256:1e8588f35e8b42c6ec6841a13c5e88239de1e6e4e4cedfd3916b306dc826ec66"}, 538 | {file = "virtualenv-20.14.0.tar.gz", hash = "sha256:8e5b402037287126e81ccde9432b95a8be5b19d36584f64957060a3488c11ca8"}, 539 | ] 540 | --------------------------------------------------------------------------------