├── handler ├── data │ ├── __init__.py │ ├── data_handler.py │ ├── bincue_split.py │ ├── iso9660.py │ ├── manager.py │ ├── hxc_image.py │ ├── bincue.py │ ├── flux.py │ └── wav.py ├── media │ ├── __init__.py │ ├── media_handler.py │ ├── bd_redumper.py │ ├── dvd_redumper.py │ ├── dummy.py │ ├── ddisk.py │ ├── optical.py │ ├── dvd.py │ ├── manager.py │ ├── cd_redumper.py │ ├── floppy.py │ └── cd.py ├── controller │ ├── manager.py │ ├── gw.py │ ├── RoboRacerLS.py │ ├── controller_handler.py │ └── DiscRobotGeneric.py ├── util │ └── bincon.py └── handler.py ├── web ├── http │ ├── rip │ │ └── index.html │ ├── static │ │ ├── status.html │ │ ├── status-drives.html │ │ ├── style.css │ │ ├── status-view.css │ │ ├── status-drives.css │ │ ├── img │ │ │ ├── good.svg │ │ │ ├── warning.svg │ │ │ ├── idle.svg │ │ │ └── working.svg │ │ ├── status-drives.js │ │ ├── status-queue.js │ │ ├── script.js │ │ ├── jsonForm.js │ │ └── status-view.js │ └── home.html └── web.py ├── TODO.MD ├── config ├── mac.400.json ├── hp.lif.1651b.json ├── hp.lif.33dd.json ├── hp.lif.77dd.json ├── hp.lif.77hd.json ├── ibm.3740.json ├── jonos.35.json ├── sord.m23.35.json ├── ibm.1200.json ├── ibm.1440.json ├── ibm.360.json ├── ibm.720.json └── handler │ ├── flux │ └── diskdefs.cfg │ └── hxc_image │ └── config.script ├── LICENSE ├── .gitignore ├── README.md └── discrip.py /handler/data/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /handler/media/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /web/http/rip/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 |
Ripping
5 | 6 | 9 | 10 | -------------------------------------------------------------------------------- /TODO.MD: -------------------------------------------------------------------------------- 1 | ## MusicBrainz CD Stub 2 | 3 | Fix metadata output from MusicBrainz when it returns a CD stub. 4 | 5 | ## Visual Update to Drive Status 6 | 7 | Make drive status text always visible, not just as hover title text. 8 | -------------------------------------------------------------------------------- /config/mac.400.json: -------------------------------------------------------------------------------- 1 | { 2 | "Floppy": { 3 | "flux_output": "raw", 4 | "gw": { 5 | "tracks": "c=0-79" 6 | } 7 | }, 8 | "FLUX": { 9 | "convert_output": "dsk", 10 | "gw": { 11 | "tracks": "c=0-79", 12 | "format": "mac.400" 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /config/hp.lif.1651b.json: -------------------------------------------------------------------------------- 1 | { 2 | "Floppy": { 3 | "flux_output": "raw", 4 | "gw": { 5 | "tracks": "c=0-79" 6 | } 7 | }, 8 | "FLUX": { 9 | "convert_output": "img", 10 | "gw": { 11 | "tracks": "c=0-79", 12 | "format": "hp.lif.1651b" 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /config/hp.lif.33dd.json: -------------------------------------------------------------------------------- 1 | { 2 | "Floppy": { 3 | "flux_output": "raw", 4 | "gw": { 5 | "tracks": "c=0-32" 6 | } 7 | }, 8 | "FLUX": { 9 | "convert_output": "img", 10 | "gw": { 11 | "tracks": "c=0-32", 12 | "format": "hp.lif.33dd" 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /config/hp.lif.77dd.json: -------------------------------------------------------------------------------- 1 | { 2 | "Floppy": { 3 | "flux_output": "raw", 4 | "gw": { 5 | "tracks": "c=0-76" 6 | } 7 | }, 8 | "FLUX": { 9 | "convert_output": "img", 10 | "gw": { 11 | "tracks": "c=0-76", 12 | "format": "hp.lif.77dd" 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /config/hp.lif.77hd.json: -------------------------------------------------------------------------------- 1 | { 2 | "Floppy": { 3 | "flux_output": "raw", 4 | "gw": { 5 | "tracks": "c=0-76" 6 | } 7 | }, 8 | "FLUX": { 9 | "convert_output": "img", 10 | "gw": { 11 | "tracks": "c=0-76", 12 | "format": "hp.lif.77hd" 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /config/ibm.3740.json: -------------------------------------------------------------------------------- 1 | { 2 | "Floppy": { 3 | "flux_output": "raw", 4 | "gw": { 5 | "tracks": "c=0-76:h=0" 6 | } 7 | }, 8 | "FLUX": { 9 | "convert_output": "img", 10 | "gw": { 11 | "tracks": "c=0-76:h=0", 12 | "format": "ibm.3740" 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /config/jonos.35.json: -------------------------------------------------------------------------------- 1 | { 2 | "Floppy": { 3 | "flux_output": "raw", 4 | "gw": { 5 | "tracks": "c=0-69:h=0" 6 | } 7 | }, 8 | "FLUX": { 9 | "convert_output": "img", 10 | "gw": { 11 | "tracks": "c=0-69:h=0", 12 | "format": "jonos.35" 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /config/sord.m23.35.json: -------------------------------------------------------------------------------- 1 | { 2 | 
"Floppy": { 3 | "flux_output": "raw", 4 | "gw": { 5 | "tracks": "c=0-69:h=0" 6 | } 7 | }, 8 | "FLUX": { 9 | "convert_output": "img", 10 | "gw": { 11 | "tracks": "c=0-69:h=0", 12 | "format": "sord.m23.35" 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /web/http/static/status.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
10 |
11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /web/http/static/status-drives.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
-------------------------------------------------------------------------------- /web/http/static/style.css: -------------------------------------------------------------------------------- 1 | #media_sample_input 2 | { 3 | } 4 | 5 | .input_pair 6 | { 7 | margin: 0.25em 0; 8 | } 9 | .input_pair input, 10 | .input_pair select 11 | { 12 | float:right; 13 | } 14 | 15 | 16 | .object_form_data 17 | { 18 | margin: 0.25em 0; 19 | } 20 | .object_form_data textarea, 21 | .object_form_data input, 22 | .object_form_data select 23 | { 24 | float:right; 25 | } 26 | -------------------------------------------------------------------------------- /web/http/static/status-view.css: -------------------------------------------------------------------------------- 1 | .status-view 2 | { 3 | width:100%; 4 | margin-bottom: 1em; 5 | } 6 | 7 | .status-view img 8 | { 9 | max-width:200px; 10 | max-height:200px; 11 | height: auto; 12 | width:auto; 13 | } 14 | 15 | .status-view h3, 16 | .status-view h4 17 | { 18 | margin: 0px; 19 | } 20 | 21 | .status-view, 22 | .status-view td, 23 | .status-view th 24 | { 25 | border: 1px solid #000; 26 | border-spacing: 0px; 27 | } 28 | -------------------------------------------------------------------------------- /config/ibm.1200.json: -------------------------------------------------------------------------------- 1 | { 2 | "Floppy": { 3 | "flux_output": "raw", 4 | "gw": { 5 | "tracks": "c=0-79" 6 | } 7 | }, 8 | "FLUX": { 9 | "convert_output": "img", 10 | "gw": { 11 | "tracks": "c=0-79", 12 | "format": "ibm.1200" 13 | } 14 | }, 15 | "Virtual": { 16 | "Data": [ 17 | { 18 | "input_type_id":"BINARY", 19 | "output_type_id":"Z_FILES", 20 | "cmd":"mcopy -spi {input_file} ::*.* {data_dir}", 21 | "data_output": { 22 | "type_id": "Z_FILES", 23 | "processed_by": [], 24 | "data_dir": "FILES", 25 | "data_files": { 26 | "Z_FILES": "" 27 | } 28 | } 29 | } 30 | ] 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /config/ibm.1440.json: -------------------------------------------------------------------------------- 1 | { 2 | "Floppy": { 3 | "flux_output": "raw", 4 | "gw": { 5 | "tracks": "c=0-79" 6 | } 7 | }, 8 | "FLUX": { 9 | "convert_output": "img", 10 | "gw": { 11 | "tracks": "c=0-79", 12 | "format": "ibm.1440" 13 | } 14 | }, 15 | "Virtual": { 16 | "Data": [ 17 | { 18 | "input_type_id":"BINARY", 19 | "output_type_id":"Z_FILES", 20 | "cmd":"mcopy -spi {input_file} ::*.* {data_dir}", 21 | "data_output": { 22 | "type_id": "Z_FILES", 23 | "processed_by": [], 24 | "data_dir": "FILES", 25 | "data_files": { 26 | "Z_FILES": "" 27 | } 28 | } 29 | } 30 | ] 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /config/ibm.360.json: -------------------------------------------------------------------------------- 1 | { 2 | "Floppy": { 3 | "flux_output": "raw", 4 | "gw": { 5 | "tracks": "c=0-39" 6 | } 7 | }, 8 | "FLUX": { 9 | "convert_output": "img", 10 | "gw": { 11 | "tracks": "c=0-39", 12 | "format": "ibm.360" 13 | } 14 | }, 15 | "Virtual": { 16 | "Data": [ 17 | { 18 | "input_type_id":"BINARY", 19 | "output_type_id":"Z_FILES", 20 | "cmd":"mcopy -spi {input_file} ::*.* {data_dir}", 21 | "data_output": { 22 | "type_id": "Z_FILES", 23 | "processed_by": [], 24 | "data_dir": "FILES", 25 | "data_files": { 26 | "Z_FILES": "" 27 | } 28 | } 29 | } 30 | ] 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /config/ibm.720.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "Floppy": { 3 | "flux_output": "raw", 4 | "gw": { 5 | "tracks": "c=0-79" 6 | } 7 | }, 8 | "FLUX": { 9 | "convert_output": "img", 10 | "gw": { 11 | "tracks": "c=0-79", 12 | "format": "ibm.720" 13 | } 14 | }, 15 | "Virtual": { 16 | "Data": [ 17 | { 18 | "input_type_id":"BINARY", 19 | "output_type_id":"Z_FILES", 20 | "cmd":"mcopy -spi {input_file} ::*.* {data_dir}", 21 | "data_output": { 22 | "type_id": "Z_FILES", 23 | "processed_by": [], 24 | "data_dir": "FILES", 25 | "data_files": { 26 | "Z_FILES": "" 27 | } 28 | } 29 | } 30 | ] 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 Shelby Jueden 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /web/http/static/status-drives.css: -------------------------------------------------------------------------------- 1 | #status-drives .idle 2 | { 3 | color: #fff; 4 | background-color: #777; 5 | padding-left: 2em; 6 | padding-right: 2em; 7 | } 8 | 9 | #status-drives .good 10 | { 11 | color: #000; 12 | background-color: #5f5; 13 | padding-left: 2em; 14 | padding-right: 2em; 15 | background-image: url("/static/img/good.svg"); 16 | background-repeat: no-repeat; 17 | } 18 | 19 | #status-drives .working 20 | { 21 | color: #000; 22 | background-color: #ff5; 23 | padding-left: 2em; 24 | padding-right: 2em; 25 | background-image: url("/static/img/working.svg"); 26 | background-repeat: no-repeat; 27 | } 28 | 29 | #status-drives .attention 30 | { 31 | color: #000; 32 | background-color: #f00; 33 | padding-left: 2em; 34 | padding-right: 2em; 35 | animation: blinker 2s linear infinite; 36 | background-image: url("/static/img/warning.svg"); 37 | background-repeat: no-repeat; 38 | } 39 | @keyframes blinker { 40 | 50% { 41 | background-color: rgba(0,0,0,0); 42 | } 43 | } 44 | 45 | .status-view h3, 46 | .status-view h4 47 | { 48 | margin: 0px; 49 | } 50 | 51 | .status-view, 52 | .status-view td, 53 | .status-view th 54 | { 55 | border: 1px solid #000; 56 | border-spacing: 0px; 57 | } 58 | -------------------------------------------------------------------------------- /handler/controller/manager.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Controller manager for pyDiscRip. Matches controller hardware to the handler that drives it 4 | 5 | # External Modules 6 | import pyudev 7 | from pprint import pprint 8 | 9 | # Internal Modules 10 | from handler.controller.RoboRacerLS import ControllerRoboRacerLS 11 | from handler.controller.AutoPublisherLS import ControllerAutoPublisherLS 12 | from handler.controller.DiscRobotGeneric import ControllerDiscRobotGeneric 13 | from handler.controller.gw import ControllerGw 14 | # Testing only 15 | from handler.media.dummy import MediaHandlerDummy 16 | 17 | class ControllerHandlerManager(object): 18 | """Manager for controller handlers 19 | 20 | """ 21 | 22 | def __init__(self): 23 | """Constructor to setup basic data and config defaults 24 | 25 | """ 26 | # Call parent constructor 27 | super().__init__() 28 | 29 | # Add all supported controller types 30 | self.controller_types={} 31 | self.controller_types["RoboRacerLS"] = ControllerRoboRacerLS() 32 | self.controller_types["AutoPublisherLS"] = ControllerAutoPublisherLS() 33 | self.controller_types["DiscRobotGeneric"] = ControllerDiscRobotGeneric() 34 | self.controller_types["Greaseweazle"] = ControllerGw() 35 | 36 | def getController(self,controller_type): 37 | return(self.controller_types[controller_type]) 38 | -------------------------------------------------------------------------------- /handler/data/data_handler.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Base data handler for pyDiscRip.
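# NOTE (inferred from the manager code in this repo): data is passed between
# handlers as plain dicts; a handler consumes dicts whose type_id matches its
# own and emits new dicts whose type_ids it declares in data_outputs.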
4 | 5 | # Python System 6 | import sys, os 7 | import json 8 | from enum import Enum 9 | from datetime import datetime 10 | 11 | # Internal Modules 12 | from handler.handler import Handler 13 | 14 | 15 | class DataHandler(Handler): 16 | """Base class for Data Types to handle identification and conversion 17 | 18 | Data dict structure example: 19 | { 20 | type_id: Data.BINCUE, 21 | data_dir: "some-folder", 22 | data_processed: False, 23 | data_files: { 24 | "BIN": name.bin, 25 | "CUE": name.cue, 26 | "TOC": name.toc 27 | } 28 | } 29 | 30 | """ 31 | 32 | def __init__(self): 33 | """Constructor to setup basic data and config defaults 34 | 35 | """ 36 | # Call parent constructor 37 | super().__init__() 38 | # Set data type id for later use 39 | self.type_id=None 40 | # Set directory to work in 41 | self.project_dir="" 42 | # Get current datetime 43 | self.project_timestamp=str(datetime.now().isoformat()).replace(":","-") 44 | # Data types output for later use 45 | self.data_outputs=[] 46 | 47 | 48 | def dataMatch(self, data_sample=None): 49 | """Check if the data sample should be handled by this type""" 50 | return data_sample["type_id"] == self.type_id 51 | -------------------------------------------------------------------------------- /handler/controller/gw.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Greaseweazle controller module for pyDiscRip. Coordinates shared access to a floppy bus 4 | 5 | # Python System 6 | import os 7 | import json 8 | from pathlib import Path 9 | import time 10 | from pprint import pprint 11 | 12 | # External Modules 13 | import libdiscid 14 | import musicbrainzngs 15 | import pycdio, cdio 16 | 17 | # Internal Modules 18 | from handler.controller.controller_handler import ControllerHandler 19 | 20 | 21 | class ControllerGw(ControllerHandler): 22 | """Controller handler for Greaseweazle devices 23 | 24 | tracks whether the device bus is busy so only one rip uses it at a time 25 | """ 26 | 27 | def __init__(self): 28 | """Constructor to setup basic data and config defaults 29 | 30 | """ 31 | # Call parent constructor 32 | super().__init__() 33 | # Set controller type to handle 34 | self.type_id="Greaseweazle" 35 | # Default config data 36 | self.config_data=None 37 | # Data types output 38 | self.data_outputs=[] 39 | self.cd_tracks=0 40 | return 41 | 42 | def floppy_bus_check(self, state=None): 43 | """Waits while the bus is in use, or sets/clears a lock file in /tmp marking it busy 44 | 45 | """ 46 | print(f"Checking bus of : {self.controller_id}") 47 | 48 | tmp=self.ensureDir("/tmp/discrip/gw") 49 | if state is None: 50 | # read 51 | while os.path.isfile(f"{tmp}/{self.controller_id}"): 52 | time.sleep(3) 53 | else: 54 | # write 55 | print(f"Setting bus state: {self.controller_id} = {state}") 56 | if state: 57 | Path(f"{tmp}/{self.controller_id}").touch() 58 | else: 59 | os.remove(f"{tmp}/{self.controller_id}") 60 | -------------------------------------------------------------------------------- /web/http/static/img/good.svg: -------------------------------------------------------------------------------- (SVG markup stripped in capture) -------------------------------------------------------------------------------- /handler/data/bincue_split.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Split BINCUE merging module for pyDiscRip.
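# NOTE: a "split" rip is one .bin per track referenced from a single .cue, e.g.
# (illustrative names only):
#   FILE "disc (Track 1).bin" BINARY
#   FILE "disc (Track 2).bin" BINARY
# This handler merges those into a single disc.bin/disc.cue pair via bincon.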
4 | 5 | # Python System 6 | import os 7 | import glob 8 | import sys 9 | import json 10 | 11 | # Internal Modules 12 | from handler.data.data_handler import DataHandler 13 | from handler.util.bincon import cue_by_line 14 | 15 | 16 | class DataHandlerBINCUESPLIT(DataHandler): 17 | """Handler for split BINCUE data types 18 | 19 | Merges files using bincon 20 | """ 21 | 22 | def __init__(self): 23 | """Constructor to setup basic data and config defaults 24 | 25 | """ 26 | # Call parent constructor 27 | super().__init__() 28 | # Set handle ID 29 | self.handle_id="DataHandlerBINCUESPLIT" 30 | # Set data type to handle 31 | self.type_id="BINCUE_SPLIT" 32 | # Data types output 33 | self.data_outputs=["BINCUE"] 34 | 35 | 36 | def convertData(self,data_in): 37 | """Use bincon to merge a split BIN set into a single BIN/CUE 38 | 39 | """ 40 | # Build data output 41 | data = { 42 | "type_id": "BINCUE", 43 | "processed_by": [], 44 | "done": False, 45 | "data_dir": self.ensureDir(f"{self.getPath()}/BINCUE/"), 46 | "data_files": { 47 | "BIN": f"{data_in["data_files"]["CUE"].replace(".cue","")}.bin", 48 | "CUE": f"{data_in["data_files"]["CUE"].replace(".cue","")}.cue" 49 | } 50 | } 51 | 52 | # Merge BIN files 53 | cue_by_line(data_in["data_dir"]+"/"+data_in["data_files"]["CUE"], data_in["data_files"]["CUE"].replace(".cue",""),path=data["data_dir"]) 54 | 55 | 56 | # Get files in output directory 57 | bins = list(map(os.path.basename, glob.glob(f"{data["data_dir"]}/*.bin"))) 58 | # Sort bins so the file order makes sense 59 | bins.sort() 60 | data["data_files"]["BIN"] = bins 61 | 62 | 63 | # Return all generated data 64 | return [data] 65 | -------------------------------------------------------------------------------- /handler/data/iso9660.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # ISO9660 conversion module for pyDiscRip.
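# NOTE: each .iso in the incoming data dict is extracted with `7z x` into a
# directory named after the image (e.g. disc.iso -> disc/), which is emitted
# downstream as a Z_FILES data dict.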
4 | 5 | # Python System 6 | import os 7 | import glob 8 | import json 9 | 10 | # Internal Modules 11 | from handler.data.data_handler import DataHandler 12 | 13 | 14 | class DataHandlerISO9660(DataHandler): 15 | """Handler for ISO9660 data types 16 | 17 | Extracts files using 7zip 18 | """ 19 | 20 | def __init__(self): 21 | """Constructor to setup basic data and config defaults 22 | 23 | """ 24 | # Call parent constructor 25 | super().__init__() 26 | # Set handle ID 27 | self.handle_id="DataHandlerISO9660" 28 | # Set data type to handle 29 | self.type_id="ISO9660" 30 | # Data types output 31 | self.data_outputs=["Z_FILES"] 32 | 33 | 34 | def convertData(self,data): 35 | """Use 7-zip to extract files out of ISO 36 | 37 | """ 38 | 39 | # Go through all ISOs 40 | for iso in data["data_files"]["ISO"]: 41 | 42 | # Build data output files 43 | data_files = { 44 | "type_id": "Z_FILES", 45 | "processed_by": [], 46 | "data_dir": self.ensureDir(f"{data["data_dir"]}/{iso.replace(".iso","")}"), 47 | "data_files": { 48 | "Z_FILES": f"{iso.replace(".iso","")}" 49 | } 50 | } 51 | 52 | print(f"Working on: {iso} => {data["data_dir"]}") 53 | 54 | # Build 7z command to extract files 55 | cmd = [ 56 | "7z", 57 | "-y", 58 | "x", 59 | f"{data["data_dir"]}/{iso}", 60 | f"-o{data_files["data_dir"]}" 61 | ] 62 | 63 | # Run command 64 | result = self.osRun(cmd) 65 | self.log("7z_stdout",str(result.stdout.decode("utf-8"))) 66 | self.log("7z_stderr",str(result.stderr.decode("utf-8"))) 67 | 68 | return [data_files] 69 | 70 | # Only returned if an error happens 71 | return None 72 | -------------------------------------------------------------------------------- /web/http/static/img/warning.svg: -------------------------------------------------------------------------------- (SVG markup stripped in capture) -------------------------------------------------------------------------------- /handler/media/media_handler.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Base media handler for pyDiscRip.
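# NOTE: media_sample dicts passed into the handlers look roughly like this
# (keys as used throughout this repo; the exact set varies by media type):
#   {"media_type": "CD", "name": "my-disc", "drive": "/dev/sr0", "config_data": {...}}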
4 | 5 | # Python System 6 | import sys, os 7 | import json 8 | import shutil 9 | from enum import Enum 10 | from datetime import datetime 11 | 12 | # Internal Modules 13 | from handler.handler import Handler 14 | 15 | 16 | class MediaHandler(Handler): 17 | """Base class for Media Types to handle identification and ripping 18 | 19 | """ 20 | 21 | def __init__(self): 22 | """Constructor to setup basic data and config defaults 23 | 24 | """ 25 | # Call parent constructor 26 | super().__init__() 27 | # Set handler ID 28 | self.handler_id = None 29 | # Set media type id for later use 30 | self.type_id=None 31 | # Set directory to work in 32 | self.project_dir="" 33 | # Get current datetime 34 | self.project_timestamp=str(datetime.now().isoformat()).replace(":","-") 35 | # Data types output for later use 36 | self.data_outputs=[] 37 | # Set controller 38 | self.controller=None 39 | 40 | 41 | def mediaMatch(self, media_sample=None): 42 | """Check if the media sample should be handled by this type""" 43 | return media_sample["media_type"] == self.type_id 44 | 45 | def checkPhoto(self, media_sample): 46 | # Build path to check for image 47 | drivepath = self.cleanFilename(media_sample["drive"]) 48 | tmp="/tmp/discrip/photo/"+drivepath 49 | print(f"Looking for photo :{tmp}/photo.jpg") 50 | if os.path.isfile(f"{tmp}/photo.jpg"): 51 | 52 | data = { 53 | "type_id": "IMAGE", 54 | "processed_by": [], 55 | "data_dir": self.ensureDir(f"{self.getPath()}/status"), 56 | "data_files": { 57 | "JPG": f"media.jpg" # Reusing project dir for name 58 | } 59 | } 60 | 61 | dest=self.ensureDir(f"{self.getPath()}/status") 62 | print(f"Copying photo to :{dest}/media.jpg") 63 | shutil.copyfile(f"{tmp}/photo.jpg", dest+"/media.jpg") 64 | 65 | # Return all generated data 66 | return [data] 67 | else: 68 | print(f"No photo found") 69 | 70 | 71 | 72 | -------------------------------------------------------------------------------- /web/http/static/status-drives.js: -------------------------------------------------------------------------------- 1 | 2 | function drivesBuild(data) 3 | { 4 | var table = document.createElement("table"); 5 | table.classList.add("status-drives"); 6 | 7 | var tr = document.createElement("tr"); 8 | var tr_names = document.createElement("tr"); 9 | 10 | for (const [key, value] of Object.entries(data)) 11 | { 12 | var td = document.createElement("td"); 13 | console.log(key) 14 | 15 | td.innerText = value["name"]; 16 | td.id = "driveStatus_"+key; 17 | td.onclick = function() { drivesUpdateAction(key); }; 18 | tr.appendChild(td); 19 | 20 | td = document.createElement("td"); 21 | td.id = "driveStatus_media_"+key; 22 | td.innerText = value["media"]; 23 | tr_names.appendChild(td); 24 | } 25 | built=true; 26 | table.appendChild(tr); 27 | table.appendChild(tr_names); 28 | return table 29 | } 30 | built=false 31 | 32 | function drivesUpdateAction(drive) 33 | { 34 | var data = {"drive_status":{ 35 | [drive] : { 36 | "action": Math.floor(Date.now() / 1000) 37 | } 38 | }}; 39 | fetch("/update", { 40 | method: "POST", 41 | headers: {'Content-Type': 'application/json'}, 42 | body: JSON.stringify(data) 43 | }).then(res => { 44 | console.log("Request complete! 
response:", res); 45 | }); 46 | } 47 | 48 | function drivesLoadStatus(event) 49 | { 50 | fetch('/status/drive_status.json').then((response) => response.json()) 51 | .then((data) => 52 | { 53 | elm = document.getElementById("status-drives") 54 | if (!built) 55 | elm.appendChild(drivesBuild(data)); 56 | 57 | for (const [key, value] of Object.entries(data)) 58 | { 59 | drive=document.getElementById("driveStatus_"+key); 60 | drive.className = ''; 61 | switch(value["status"]) 62 | { 63 | case 0: 64 | drive.classList.add("idle"); 65 | break; 66 | case 1: 67 | drive.classList.add("good"); 68 | break; 69 | case 2: 70 | case 4: 71 | drive.classList.add("working"); 72 | break; 73 | case 3: 74 | drive.classList.add("attention"); 75 | break; 76 | } 77 | 78 | if ("title" in value) 79 | { 80 | drive.title = value["title"]; 81 | } 82 | drive=document.getElementById("driveStatus_media_"+key); 83 | drive.innerText = value["media"]; 84 | } 85 | } 86 | ); 87 | 88 | 89 | setTimeout(drivesLoadStatus, 3000); 90 | } 91 | window.addEventListener("load", drivesLoadStatus); 92 | -------------------------------------------------------------------------------- /web/http/static/img/idle.svg: -------------------------------------------------------------------------------- (SVG markup stripped in capture) -------------------------------------------------------------------------------- /handler/media/bd_redumper.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # BD ripping module for pyDiscRip. Can be used to rip a BD 4 | 5 | # Python System 6 | import os 7 | import json 8 | from pathlib import Path 9 | 10 | # Internal Modules 11 | from handler.media.media_handler import MediaHandler 12 | from handler.media.optical import MediaOptical 13 | 14 | 15 | class MediaHandlerBDRedumper(MediaOptical): 16 | """Handler for BD media types 17 | 18 | rips using a subprocess command to run `redumper` to create a disc image 19 | """ 20 | 21 | def __init__(self): 22 | """Constructor to setup basic data and config defaults 23 | 24 | """ 25 | # Call parent constructor 26 | super().__init__() 27 | # Set handler ID 28 | self.handler_id="bd_redumper" 29 | # Set media type to handle 30 | self.type_id="BD" 31 | # Data types output 32 | self.data_outputs=["BDIMG"] 33 | # DVD info to be collected 34 | self.dvd_partition_filesystem="" 35 | 36 | 37 | def ripBD(self, media_sample): 38 | """Use redumper to rip a BD with retries 39 | 40 | """ 41 | # TODO - Data is not always ISO9660, support for UDF is needed still 42 | data = { 43 | "type_id": "BDIMG", 44 | "processed_by": [], 45 | "done": False, 46 | "data_dir": self.ensureDir(f"{self.getPath()}/BDIMG/{media_sample["name"]}"), 47 | "data_files": { 48 | "ISO": [f"{media_sample["name"]}.iso"] 49 | } 50 | } 51 | self.status(data) 52 | 53 | # Don't re-rip image 54 | if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["ISO"][0]}"): 55 | # Build redumper command to read disc 56 | cmd = [ 57 | "redumper", 58 | "disc", 59 | "--retries=100", 60 | f"--drive={media_sample["drive"]}", 61 | f"--image-path={data["data_dir"]}" 62 | 63 | ] 64 | 65 | # Run command 66 | self.osRun(cmd) 67 | 68 | data["done"]=True 69 | self.status(data) 70 | # Return all generated data 71 | return data 72 | 73 | 74 | def rip(self, media_sample): 75 | """Rip BD with redumper 76 | 77 | """ 78 | print("Ripping as BD") 79 | print("WARNING: This is probably useless, enjoy!!!") 80 | # Setup rip output path 
81 | self.setProjectDir(media_sample["name"]) 82 | 83 | # Rip and return data 84 | return [self.ripBD(media_sample)] 85 | -------------------------------------------------------------------------------- /handler/media/dvd_redumper.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # DVD ripping module for pyDiscRip. Can be used to rip a DVD 4 | 5 | # Python System 6 | import os 7 | import json 8 | from pathlib import Path 9 | 10 | # Internal Modules 11 | from handler.media.media_handler import MediaHandler 12 | from handler.media.optical import MediaOptical 13 | 14 | 15 | class MediaHandlerDVDRedumper(MediaOptical): 16 | """Handler for DVD media types 17 | 18 | rips using a subprocess command to run `redumper` to create an ISO file 19 | """ 20 | 21 | def __init__(self): 22 | """Constructor to setup basic data and config defaults 23 | 24 | """ 25 | # Call parent constructor 26 | super().__init__() 27 | # Set handler ID 28 | self.handler_id="dvd_redumper" 29 | # Set media type to handle 30 | self.type_id="DVD" 31 | # Data types output 32 | self.data_outputs=["ISO9660"] 33 | # DVD info to be collected 34 | self.dvd_partition_filesystem="" 35 | 36 | 37 | def ripDVD(self, media_sample): 38 | """Use redumper to rip a DVD with retries 39 | 40 | """ 41 | # TODO - Data is not always ISO9660, support for UDF is needed still 42 | data = { 43 | "type_id": "ISO9660", 44 | "processed_by": [], 45 | "done": False, 46 | "data_dir": self.ensureDir(f"{self.getPath()}/ISO9660/{media_sample["name"]}"), 47 | "data_files": { 48 | "ISO": [f"{media_sample["name"]}.iso"] 49 | } 50 | } 51 | self.status(data) 52 | 53 | # Don't re-rip image 54 | if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["ISO"][0]}"): 55 | # Build redumper command to read disc 56 | cmd = [ 57 | "redumper", 58 | "disc", 59 | "--retries=100", 60 | f"--drive={media_sample["drive"]}", 61 | f"--image-path={data["data_dir"]}", 62 | f"--image-name={media_sample["name"]}" 63 | 64 | ] 65 | 66 | # Run command 67 | self.osRun(cmd) 68 | 69 | data["done"]=True 70 | self.status(data) 71 | # Return all generated data 72 | return data 73 | 74 | 75 | def rip(self, media_sample): 76 | """Rip DVD with redumper 77 | 78 | """ 79 | print("Ripping as DVD") 80 | print("WARNING: This software does not yet distinguish between ISO9660 and UDF filesystems") 81 | # Setup rip output path 82 | self.setProjectDir(media_sample["name"]) 83 | 84 | # Rip and return data 85 | return [self.ripDVD(media_sample)] 86 | -------------------------------------------------------------------------------- /web/http/static/status-queue.js: -------------------------------------------------------------------------------- 1 | 2 | function queueBuild(data) 3 | { 4 | var table = document.createElement("table"); 5 | table.classList.add("status-drives"); 6 | 7 | var tr = document.createElement("tr"); 8 | 9 | for (const [key, value] of Object.entries(data)) 10 | { 11 | var td = document.createElement("td"); 12 | console.log(key) 13 | 14 | td.innerText = value["name"]; 15 | td.id = "driveStatus_"+key; 16 | tr.appendChild(td); 17 | td.onclick = function() { drivesUpdateAction(key); }; 18 | } 19 | built=true; 20 | table.appendChild(tr); 21 | return table 22 | } 23 | built=false 24 | 25 | function drivesUpdateAction(drive) 26 | { 27 | var data = {"drive_status":{ 28 | [drive] : { 29 | "action": Math.floor(Date.now() / 1000) 30 | } 31 | }}; 32 | fetch("/update", { 33 | method: "POST", 34 | headers: 
{'Content-Type': 'application/json'}, 35 | body: JSON.stringify(data) 36 | }).then(res => { 37 | console.log("Request complete! response:", res); 38 | }); 39 | } 40 | 41 | function queueLoadStatus(event) 42 | { 43 | fetch('/status/queue.json').then((response) => response.json()) 44 | .then((data) => 45 | { 46 | document.getElementById("queue").replaceChildren(); 47 | 48 | var table = document.createElement("table"); 49 | var tr = document.createElement("tr"); 50 | var th = document.createElement("th"); 51 | th.innerText = "Queue"; 52 | th.colSpan = 2; 53 | tr.appendChild(th); 54 | table.appendChild(tr); 55 | 56 | tr = document.createElement("tr"); 57 | th = document.createElement("th"); 58 | th.innerText = "Sample"; 59 | tr.appendChild(th); 60 | th = document.createElement("th"); 61 | th.innerText = "Source"; 62 | tr.appendChild(th); 63 | table.appendChild(tr); 64 | 65 | for (i in data) 66 | { 67 | if (data[i]["done"]) continue; 68 | 69 | tr = document.createElement("tr"); 70 | var td = document.createElement("td"); 71 | td.innerText = data[i]["name"]; 72 | tr.appendChild(td); 73 | 74 | td = document.createElement("td"); 75 | if ("group" in data[i]) 76 | { 77 | td.innerText = data[i]["group"]; 78 | }else{ 79 | td.innerText = data[i]["drive"]; 80 | } 81 | tr.appendChild(td); 82 | table.appendChild(tr); 83 | } 84 | 85 | 86 | document.getElementById("queue").appendChild(table); 87 | } 88 | ); 89 | 90 | 91 | setTimeout(queueLoadStatus, 3000); 92 | } 93 | window.addEventListener("load", queueLoadStatus); 94 | 95 | -------------------------------------------------------------------------------- /web/http/static/script.js: -------------------------------------------------------------------------------- 1 | var settings={}; 2 | var default_config={} 3 | 4 | function buildOptionGroupList(data,id,name) 5 | { 6 | /* 7 | < div *class="input_pair"> 8 | 9 | 19 |
20 | */ 21 | select = document.createElement("select"); 22 | select.name=name; 23 | select.id=id; 24 | drivegroup = document.createElement("optgroup"); 25 | drivegroup.label="Groups"; 26 | groupcheck=[]; 27 | groups=false; 28 | for (const [key, value] of Object.entries(data)) 29 | { 30 | console.log(key); 31 | optgroup = document.createElement("optgroup"); 32 | optgroup.label=key; 33 | for (var i = 0; i < value.length; ++i) { 34 | option = document.createElement("option"); 35 | option.value=value[i]["drive"]; 36 | option.innerText=value[i]["name"]; 37 | optgroup.appendChild(option); 38 | if ("group" in value[i]) 39 | { 40 | if(!groupcheck.includes(value[i]["group"])) 41 | { 42 | groups=true; 43 | option = document.createElement("option"); 44 | option.value=value[i]["group"]; 45 | option.innerText=value[i]["group"]; 46 | drivegroup.appendChild(option); 47 | 48 | groupcheck.push(value[i]["group"]); 49 | } 50 | } 51 | } 52 | if(groups) select.appendChild(drivegroup); 53 | select.appendChild(optgroup); 54 | } 55 | return select 56 | } 57 | 58 | function markerCustomAdd(event) 59 | { 60 | fetch('/settings.json').then((response) => response.json()) 61 | .then((data) => 62 | { 63 | settings = data; document.getElementById('media_drive').replaceWith(buildOptionGroupList(settings["drives"],"media_drive","media_drive")); 64 | } 65 | ); 66 | } 67 | window.addEventListener("load", markerCustomAdd); 68 | 69 | 70 | jform = new jsonForm( 71 | '/config_data.json', 72 | document.getElementById('config_options'), 73 | 'Config Options', 74 | {"top_blank":true,"form_names":false}, 75 | {"FLUX|diskdefs-direct":"textarea"} 76 | ) 77 | 78 | function sendMediaForm() 79 | { 80 | jform.prepare(); 81 | const data = new URLSearchParams(); 82 | for (const pair of new FormData(document.getElementById('media_form'))) { 83 | data.append(pair[0], pair[1]); 84 | } 85 | 86 | fetch("/rip", { 87 | method: 'post', 88 | body: data, 89 | }).then(() => { 90 | document.getElementById('media_name').value = ""; 91 | document.getElementById('media_description').value = ""; 92 | }); 93 | } 94 | document.getElementById('send_media_form').onclick = function() { sendMediaForm(); }; 95 | -------------------------------------------------------------------------------- /handler/data/manager.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Data conversion manager for pyDiscRip. Matches data outputs to their conversion handlers 4 | 5 | from pprint import pprint 6 | 7 | # Internal Modules 8 | from handler.data.data_handler import DataHandler 9 | from handler.data.bincue import DataHandlerBINCUE 10 | from handler.data.bincue_split import DataHandlerBINCUESPLIT 11 | from handler.data.iso9660 import DataHandlerISO9660 12 | from handler.data.wav import DataHandlerWAV 13 | from handler.data.flux import DataHandlerFLUX 14 | from handler.data.hxc_image import DataHandlerHXCImage 15 | 16 | 17 | class DataHandlerManager(object): 18 | """Manager for data types 19 | 20 | Provides process control functions for converting different data types and 21 | setting configuration data.
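Virtual data formats declared under a config's "Virtual" key are registered at runtime via configVirtual().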
22 | """ 23 | 24 | def __init__(self): 25 | """Constructor to setup basic data and config defaults 26 | 27 | """ 28 | # Call parent constructor 29 | super().__init__() 30 | # Add all supported data types 31 | self.data_types={} 32 | self.data_types["BINCUE"] = DataHandlerBINCUE() 33 | self.data_types["BINCUE_SPLIT"] = DataHandlerBINCUESPLIT() 34 | self.data_types["ISO9660"] = DataHandlerISO9660() 35 | self.data_types["WAV"] = DataHandlerWAV() 36 | self.data_types["FLUX"] = DataHandlerFLUX() 37 | self.data_types["HXC"] = DataHandlerHXCImage() 38 | 39 | def configVirtual(self,config): 40 | """Configure a new handler to use as a virtual data format 41 | 42 | """ 43 | if "Virtual" in config: 44 | # Add all new virtual formats 45 | for data in config["Virtual"]["Data"]: 46 | # Create and configure new handler 47 | self.data_types[data["input_type_id"]] = DataHandler() 48 | self.data_types[data["input_type_id"]].prepareVirtualFormat(data) 49 | 50 | def findDataType(self,data): 51 | """Match data handler to type and return handler 52 | 53 | """ 54 | 55 | # Iterate through all handlers 56 | for type_id, data_handler in self.data_types.items(): 57 | if data_handler.dataMatch(data) and not data_handler.handle_id in data["processed_by"]: 58 | print(f"Found handler: {data_handler.handle_id}") 59 | return data_handler 60 | 61 | return None 62 | 63 | 64 | def configDump(self): 65 | """Get all config data for data handlers and dump it to json 66 | 67 | """ 68 | config_options={} 69 | # Iterate through all handlers 70 | for type_id, data_handler in self.data_types.items(): 71 | # Add all config options for handler 72 | config_options[type_id]=data_handler.configOptions() 73 | 74 | return config_options 75 | -------------------------------------------------------------------------------- /handler/media/dummy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Dummy media module for pyDiscRip.
Generates random test data instead of ripping a real disc 4 | 5 | # Python System 6 | import os 7 | import json 8 | from pathlib import Path 9 | import random 10 | import time 11 | 12 | # Internal Modules 13 | from handler.media.media_handler import MediaHandler 14 | 15 | 16 | class MediaHandlerDummy(MediaHandler): 17 | """Handler for DUMMY media types 18 | 19 | "rips" by running `dd` to generate a random binary image for testing 20 | """ 21 | 22 | def __init__(self): 23 | """Constructor to setup basic data and config defaults 24 | 25 | """ 26 | # Call parent constructor 27 | super().__init__() 28 | # Set media type to handle 29 | self.type_id="DUMMY" 30 | # Data types output 31 | self.data_outputs=["BINARY"] 32 | # DVD info to be collected 33 | self.dvd_partition_filesystem="" 34 | 35 | 36 | def ripDummy(self, media_sample): 37 | """Use dd to generate a random-sized dummy image 38 | 39 | """ 40 | data = { 41 | "type_id": "BINARY", 42 | "processed_by": [], 43 | "done": False, 44 | "data_dir": self.ensureDir(f"{self.getPath()}/BINARY/{media_sample["name"]}"), 45 | "data_files": { 46 | "BINARY": [f"{media_sample["name"]}.img"] 47 | } 48 | } 49 | #self.status(data) 50 | 51 | # Don't re-rip image 52 | # if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["BINARY"][0]}"): 53 | count=str(int(random.random()*100)) 54 | # dd writes a random number of 8M blocks of random data 55 | cmd1 = [ 56 | "dd", 57 | "bs=8M", 58 | f"count={count}", 59 | "if=/dev/random", 60 | f"of={data["data_dir"]}/{data["data_files"]["BINARY"][0]}" 61 | ] 62 | 63 | # Run command 64 | result = self.osRun(cmd1) 65 | #self.log("dd_out",str(result.stdout)) 66 | #self.log("dd_err",str(result.stderr)) 67 | 68 | data["done"]=True 69 | #self.status(data) 70 | # Return all generated data 71 | return data 72 | 73 | 74 | def rip(self, media_sample): 75 | """Rip dummy media by generating random data 76 | 77 | """ 78 | # Setup rip output path 79 | self.setProjectDir(self.project_timestamp+"_"+media_sample["name"]) 80 | 81 | # Rip and return data 82 | return [self.ripDummy(media_sample)] 83 | 84 | 85 | def load(self,media_sample,bypass=False): 86 | print(f"Dummy [{media_sample["name"]}] Loading to [{media_sample["drive"]}]") 87 | delay=int(random.random()*20) 88 | time.sleep(delay) 89 | 90 | 91 | def eject(self,media_sample): 92 | print(f"Dummy [{media_sample["name"]}] Done [{media_sample["drive"]}]") 93 | time.sleep(1) 94 | -------------------------------------------------------------------------------- /handler/data/hxc_image.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Flux conversion module for pyDiscRip.
Renders flux to a disk-surface image using the HxC tool (hxcfe) 4 | 5 | # Python System 6 | import os 7 | import json 8 | from pathlib import Path 9 | import importlib 10 | from pprint import pprint 11 | 12 | # External Modules 13 | # hxcfe is run as a subprocess; wand is imported inline below for BMP->PNG conversion 14 | 15 | # Internal Modules 16 | from handler.data.data_handler import DataHandler 17 | 18 | 19 | class DataHandlerHXCImage(DataHandler): 20 | """Handler for FLUX data types 21 | 22 | converts flux to a visual disk image by running hxcfe as a subprocess 23 | """ 24 | 25 | def __init__(self): 26 | """Constructor to setup basic data and config defaults 27 | 28 | """ 29 | # Call parent constructor 30 | super().__init__() 31 | # Set handle ID 32 | self.handle_id="DataHandlerHXCImage" 33 | # Set data type to handle 34 | self.type_id="FLUX" 35 | # Data types output 36 | self.data_outputs=["IMAGE"] 37 | 38 | 39 | def convertData(self, data_in): 40 | """Use hxcfe to render FLUX to a PNG disk image 41 | 42 | """ 43 | 44 | data = { 45 | "type_id": "IMAGE", 46 | "processed_by": [], 47 | "data_dir": self.ensureDir(f"{self.getPath()}/status"), 48 | "data_files": { 49 | "PNG": f"flux_image.png" # Reusing project dir for name 50 | } 51 | } 52 | 53 | print("Make image") 54 | 55 | # Don't re-render image 56 | if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["PNG"]}"): 57 | script=os.path.realpath(__file__).replace(os.path.basename(__file__),"")+"/../../config/handler/hxc_image/config.script" 58 | # Build hxcfe command 59 | cmd = [ 60 | "hxcfe", 61 | f"-script:{script}", 62 | f"-finput:{os.getcwd()}/{data_in["data_dir"]}/{data_in["data_files"]["flux"][0]}", 63 | f"-foutput:{os.getcwd()}/{data["data_dir"]}/{data["data_files"]["PNG"]}.bmp", 64 | "-conv:BMP_DISK_IMAGE" 65 | ] 66 | 67 | # Run command 68 | print("run Make image") 69 | self.log("hxcfe_cmd",str(cmd)) 70 | result = self.osRun(cmd) 71 | self.log("hxcfe_stdout",str(result.stdout.decode("utf-8"))) 72 | self.log("hxcfe_stderr",str(result.stderr.decode("utf-8"))) 73 | 74 | 75 | from wand.image import Image 76 | img = Image(filename=f"{os.getcwd()}/{data["data_dir"]}/{data["data_files"]["PNG"]}.bmp") 77 | img.format = 'png' 78 | img.save(filename=f"{os.getcwd()}/{data["data_dir"]}/{data["data_files"]["PNG"]}") 79 | os.remove(f"{os.getcwd()}/{data["data_dir"]}/{data["data_files"]["PNG"]}.bmp") 80 | 81 | # Return all generated data 82 | return [data] 83 | 84 | 85 | 86 | -------------------------------------------------------------------------------- /web/http/home.html: -------------------------------------------------------------------------------- (HTML markup stripped in capture; only the visible page text below survives)

"pyDiscRip" (page title), "Media Sample" (form legend)
-------------------------------------------------------------------------------- /config/handler/flux/diskdefs.cfg: -------------------------------------------------------------------------------- 1 | # Computer Devices Dot 2 | disk cdi.dot 3 | cyls = 70 4 | heads = 1 5 | tracks * ibm.mfm 6 | secs = 8 7 | bps = 512 8 | gap3 = 84 9 | rate = 250 10 | end 11 | end 12 | 13 | # Sord M23P Sony OA-D31V 14 | disk sord.m23.35 15 | cyls = 70 16 | heads = 1 17 | tracks * ibm.mfm 18 | secs = 16 19 | bps = 256 20 | interleave = 2 21 | end 22 | end 23 | 24 | # Jonos Escort CP/M 25 | 
disk jonos.35 26 | cyls = 70 27 | heads = 1 28 | tracks * ibm.mfm 29 | secs = 9 30 | bps = 512 31 | interleave = 2 32 | end 33 | end 34 | 35 | # HP LIF 3.5in Double Density/Double Sided for 1651b Logic Analyser 36 | disk hp.lif.1651b 37 | cyls = 79 38 | heads = 2 39 | tracks * ibm.mfm 40 | id = 1 41 | interleave = 1 42 | secs = 5 43 | bps = 1024 44 | end 45 | end 46 | 47 | # HP LIF 5.25in Double Density/Double Sided 48 | disk hp.lif.33dd 49 | cyls = 33 50 | heads = 2 51 | tracks * ibm.mfm 52 | id = 0 53 | interleave = 1 54 | secs = 16 55 | bps = 256 56 | end 57 | end 58 | 59 | # HP LIF 3.5in Double Density/Double Sided 60 | disk hp.lif.77dd 61 | cyls = 77 62 | heads = 2 63 | tracks * ibm.mfm 64 | interleave = 2 65 | secs = 16 66 | bps = 256 67 | end 68 | end 69 | 70 | # HP LIF 3.5in High Density/Double Sided 71 | disk hp.lif.77hd 72 | cyls = 77 73 | heads = 2 74 | tracks * ibm.mfm 75 | interleave = 2 76 | secs = 32 77 | bps = 256 78 | end 79 | end 80 | 81 | # IBM 3740 8in disk 82 | disk ibm.3740 83 | cyls = 77 84 | heads = 1 85 | tracks * ibm.fm 86 | secs = 26 87 | bps = 128 88 | rpm = 360 89 | end 90 | end 91 | 92 | # Kaypro IV 93 | disk kaypro.iv 94 | cyls = 40 95 | heads = 2 96 | tracks 0-39.0 ibm.mfm 97 | id=0 98 | secs = 10 99 | bps = 512 100 | interleave = 5 101 | end 102 | tracks 0-39.1 ibm.mfm 103 | id=10 104 | h = 0 105 | secs = 10 106 | bps = 512 107 | interleave = 5 108 | end 109 | end 110 | 111 | # CP/M-86 Boot 112 | disk cpm.86 113 | cyls = 70 114 | heads = 1 115 | tracks 0 ibm.fm 116 | secs = 26 117 | bps = 128 118 | rpm = 360 119 | end 120 | 121 | tracks 1-69 ibm.mfm 122 | secs = 8 123 | bps = 1024 124 | rpm = 360 125 | end 126 | end 127 | 128 | 129 | # CP/M-86 Boot (double sided) 130 | disk cpm.86ds 131 | cyls = 70 132 | heads = 2 133 | tracks 0.0 ibm.fm 134 | secs = 26 135 | bps = 128 136 | rpm = 360 137 | end 138 | 139 | tracks * ibm.mfm 140 | hskew = 2 141 | secs = 8 142 | bps = 1024 143 | rpm = 360 144 | end 145 | end 146 | 147 | # Magnavox VideoWriter 148 | disk maganavox.videowriter 149 | cyls = 80 150 | heads = 1 151 | tracks * ibm.mfm 152 | id = 0 153 | secs = 18 154 | bps = 256 155 | end 156 | end 157 | 158 | # ZOBEX DD-FDC DSDD 159 | disk zobex-dsdd 160 | cyls = 77 161 | heads = 2 162 | tracks * ibm.mfm 163 | secs = 16 164 | bps = 512 165 | rpm = 360 166 | interleave = 3 167 | id = 0 168 | end 169 | end 170 | 171 | # ZOBEX DD-FDC SSDD 172 | disk zobex-ssdd 173 | cyls = 77 174 | heads = 1 175 | tracks * ibm.mfm 176 | secs = 16 177 | bps = 512 178 | rpm = 360 179 | interleave = 3 180 | id = 0 181 | end 182 | end 183 | -------------------------------------------------------------------------------- /handler/media/ddisk.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Generic disk ripping module for pyDiscRip.
Can be used to image any readable disk device 4 | 5 | # Python System 6 | import os 7 | import json 8 | from pathlib import Path 9 | 10 | # Internal Modules 11 | from handler.media.media_handler import MediaHandler 12 | 13 | 14 | class MediaHandlerDDisk(MediaHandler): 15 | """Handler for DDISK (generic disk) media types 16 | 17 | rips using a subprocess command to run `ddrescue` to create a raw image file 18 | """ 19 | 20 | def __init__(self): 21 | """Constructor to setup basic data and config defaults 22 | 23 | """ 24 | # Call parent constructor 25 | super().__init__() 26 | # Set media type to handle 27 | self.type_id="DDISK" 28 | # Data types output 29 | self.data_outputs=["BINARY"] 30 | # DVD info to be collected 31 | self.dvd_partition_filesystem="" 32 | 33 | 34 | def ripDD(self, media_sample): 35 | """Use ddrescue to rip the disk with multiple passes and a mapfile 36 | 37 | """ 38 | data = { 39 | "type_id": "BINARY", 40 | "processed_by": [], 41 | "done": False, 42 | "data_dir": self.ensureDir(f"{self.getPath()}/BINARY/{media_sample["name"]}"), 43 | "data_files": { 44 | "BINARY": [f"{media_sample["name"]}.img"] 45 | } 46 | } 47 | self.status(data) 48 | 49 | # Don't re-rip image 50 | # if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["BINARY"][0]}"): 51 | 52 | # ddrescue is a multi step process that is run three times 53 | cmd1 = [ 54 | "ddrescue", 55 | "-b", 56 | "2048", 57 | "-n", 58 | "-v", 59 | f"{media_sample["drive"]}", 60 | f"{data["data_dir"]}/{data["data_files"]["BINARY"][0]}", 61 | f"{data["data_dir"]}/mapfile" 62 | ] 63 | cmd2 = [ 64 | "ddrescue", 65 | "-b", 66 | "2048", 67 | "-d", 68 | "-r", 69 | "3", 70 | "-v", 71 | f"{media_sample["drive"]}", 72 | f"{data["data_dir"]}/{data["data_files"]["BINARY"][0]}", 73 | f"{data["data_dir"]}/mapfile" 74 | ] 75 | cmd3 = [ 76 | "ddrescue", 77 | "-b", 78 | "2048", 79 | "-d", 80 | "-R", 81 | "-r", 82 | "3", 83 | "-v", 84 | f"{media_sample["drive"]}", 85 | f"{data["data_dir"]}/{data["data_files"]["BINARY"][0]}", 86 | f"{data["data_dir"]}/mapfile" 87 | ] 88 | 89 | # Run command 90 | result = self.osRun(cmd1) 91 | self.log("ddrescue_1-3_out",str(result.stdout)) 92 | self.log("ddrescue_1-3_err",str(result.stderr)) 93 | result = self.osRun(cmd2) 94 | self.log("ddrescue_2-3_out",str(result.stdout)) 95 | self.log("ddrescue_2-3_err",str(result.stderr)) 96 | result = self.osRun(cmd3) 97 | self.log("ddrescue_3-3_out",str(result.stdout)) 98 | self.log("ddrescue_3-3_err",str(result.stderr)) 99 | 100 | data["done"]=True 101 | self.status(data) 102 | # Return all generated data 103 | return data 104 | 105 | 106 | def rip(self, media_sample): 107 | """Rip generic disk with ddrescue 108 | 109 | """ 110 | print("Ripping as generic disk with ddrescue") 111 | # Setup rip output path 112 | self.setProjectDir(media_sample["name"]) 113 | 114 | # Rip and return data 115 | return [self.ripDD(media_sample)] 116 | 117 | -------------------------------------------------------------------------------- /handler/media/optical.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Optical media base module for pyDiscRip.
Provides shared load/eject and disc detection for the optical media handlers 4 | 5 | # Python System 6 | import os, sys 7 | import json 8 | from pathlib import Path 9 | import time 10 | from pprint import pprint 11 | from urllib import request, parse 12 | 13 | # External Modules 14 | try: 15 | import libdiscid 16 | except Exception as e: 17 | print("Need to install libdiscid system packages [libdiscid-dev build-essential python-dev-is-python3]") 18 | print("Need to install Python module [python-libdiscid]") 19 | sys.exit(1) 20 | try: 21 | import musicbrainzngs 22 | except Exception as e: 23 | print("Need to install Python module [musicbrainzngs]") 24 | sys.exit(1) 25 | try: 26 | import pycdio, cdio 27 | except Exception as e: 28 | print("Need to install pycdio system packages [libcdio-dev libiso9660-dev swig pkg-config build-essential python-dev-is-python3]") 29 | print("Need to install Python module [pycdio]") 30 | sys.exit(1) 31 | 32 | # Internal Modules 33 | from handler.media.media_handler import MediaHandler 34 | 35 | 36 | class MediaOptical(MediaHandler): 37 | """Base handler for optical media types 38 | 39 | provides load, eject, and disc detection via libcdio for the CD/DVD/BD handlers 40 | """ 41 | 42 | def __init__(self): 43 | """Constructor to setup basic data and config defaults 44 | 45 | """ 46 | # Call parent constructor 47 | super().__init__() 48 | # Set media type to handle 49 | self.type_id="OPTICAL" 50 | # Default config data 51 | self.config_data=None 52 | # Data types output 53 | self.data_outputs=[] 54 | self.cd_tracks=0 55 | 56 | 57 | def load(self,media_sample,bypass=False): 58 | """Load media before continuing. 59 | 60 | Default method call waits for user to press enter 61 | 62 | Overload with automatic methods where possible. 63 | """ 64 | if self.controller is not None: 65 | self.controller.load(media_sample["drive"]) 66 | 67 | error_count=0 68 | print(f"Please insert [{media_sample["name"]}] into [{media_sample["drive"]}]") 69 | wait_load=0 70 | while(True): 71 | try: 72 | time.sleep(wait_load) 73 | d=cdio.Device(media_sample["drive"]) 74 | tracks = d.get_num_tracks() 75 | print(f"Found disc with {tracks} tracks") 76 | return True 77 | except cdio.TrackError: 78 | print(f"Please insert [{media_sample["name"]}] into [{media_sample["drive"]}]") 79 | 80 | if self.controller is None: 81 | self.eject(media_sample) 82 | self.web_update({"drive_status":{media_sample["drive"]:{"status":3,"title":f"Please insert [{media_sample["name"]}] into [{media_sample["drive"]}]"}}},media_sample["config_data"]) 83 | wait_load=10 84 | error_count+=1 85 | if self.controller is not None and error_count > 10: 86 | return False 87 | 88 | 89 | def eject(self,media_sample, controller=None): 90 | """Eject drive tray 91 | """ 92 | print("OPTICAL EJECT") 93 | if self.controller is not None: 94 | controller = self.controller 95 | if controller is not None: 96 | print("Controller EJECT") 97 | if controller.eject(media_sample["drive"]): 98 | return 99 | print("EJECTING...") 100 | d=cdio.Device(media_sample["drive"]) 101 | d.eject_media() 102 | time.sleep(3) 103 | -------------------------------------------------------------------------------- /handler/media/dvd.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # DVD ripping module for pyDiscRip.
Can be used to rip a DVD 4 | 5 | # Python System 6 | import os 7 | import json 8 | from pathlib import Path 9 | 10 | # Internal Modules 11 | from handler.media.media_handler import MediaHandler 12 | from handler.media.optical import MediaOptical 13 | 14 | 15 | class MediaHandlerDVD(MediaOptical): 16 | """Handler for DVD media types 17 | 18 | rips using a subprocess command to run `ddrescue` to create an ISO file 19 | """ 20 | 21 | def __init__(self): 22 | """Constructor to setup basic data and config defaults 23 | 24 | """ 25 | # Call parent constructor 26 | super().__init__() 27 | # Set handler ID 28 | self.handler_id="dvd_ddrescue" 29 | # Set media type to handle 30 | self.type_id="DVD" 31 | # Data types output 32 | self.data_outputs=["ISO9660"] 33 | # DVD info to be collected 34 | self.dvd_partition_filesystem="" 35 | 36 | 37 | def ripDVD(self, media_sample): 38 | """Use ddrescue to rip DVD with multiple passes and mapfile 39 | 40 | """ 41 | # TODO - Data is not always ISO9660, support for UDF is needed still 42 | data = { 43 | "type_id": "ISO9660", 44 | "processed_by": [], 45 | "done": False, 46 | "data_dir": self.ensureDir(f"{self.getPath()}/ISO9660/{media_sample["name"]}"), 47 | "data_files": { 48 | "ISO": [f"{media_sample["name"]}.iso"] 49 | } 50 | } 51 | self.status(data) 52 | 53 | # Don't re-rip ISO 54 | if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["ISO"][0]}"): 55 | 56 | # ddrescue is a multi step process that is run three times 57 | cmd1 = [ 58 | "ddrescue", 59 | "-b", 60 | "2048", 61 | "-n", 62 | "-v", 63 | f"{media_sample["drive"]}", 64 | f"{data["data_dir"]}/{data["data_files"]["ISO"][0]}", 65 | f"{data["data_dir"]}/mapfile" 66 | ] 67 | cmd2 = [ 68 | "ddrescue", 69 | "-b", 70 | "2048", 71 | "-d", 72 | "-r", 73 | "3", 74 | "-v", 75 | f"{media_sample["drive"]}", 76 | f"{data["data_dir"]}/{data["data_files"]["ISO"][0]}", 77 | f"{data["data_dir"]}/mapfile" 78 | ] 79 | cmd3 = [ 80 | "ddrescue", 81 | "-b", 82 | "2048", 83 | "-d", 84 | "-R", 85 | "-r", 86 | "3", 87 | "-v", 88 | f"{media_sample["drive"]}", 89 | f"{data["data_dir"]}/{data["data_files"]["ISO"][0]}", 90 | f"{data["data_dir"]}/mapfile" 91 | ] 92 | 93 | # Run command 94 | result = self.osRun(cmd1) 95 | self.log("ddrescue_stdout",str(result.stdout)) 96 | self.log("ddrescue_stderr",str(result.stderr)) 97 | 98 | self.osRun(cmd2) 99 | self.osRun(cmd3) 100 | 101 | data["done"]=True 102 | self.status(data) 103 | # Return all generated data 104 | return data 105 | 106 | 107 | def rip(self, media_sample): 108 | """Rip DVD with ddrescue 109 | 110 | """ 111 | print("Ripping as DVD") 112 | print("WARNING: This software does not yet distinguish between ISO9660 and UDF filesystems") 113 | # Setup rip output path 114 | self.setProjectDir(media_sample["name"]) 115 | 116 | # Rip and return data 117 | return [self.ripDVD(media_sample)] 118 | 119 | -------------------------------------------------------------------------------- /web/http/static/img/working.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 16 | 36 | 38 | 42 | 47 | 51 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 
| lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # UV 98 | # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | #uv.lock 102 | 103 | # poetry 104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 105 | # This is especially recommended for binary packages to ensure reproducibility, and is more 106 | # commonly ignored for libraries. 107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 108 | #poetry.lock 109 | 110 | # pdm 111 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 112 | #pdm.lock 113 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 114 | # in version control. 115 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 116 | .pdm.toml 117 | .pdm-python 118 | .pdm-build/ 119 | 120 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 121 | __pypackages__/ 122 | 123 | # Celery stuff 124 | celerybeat-schedule 125 | celerybeat.pid 126 | 127 | # SageMath parsed files 128 | *.sage.py 129 | 130 | # Environments 131 | .env 132 | .venv 133 | env/ 134 | venv/ 135 | ENV/ 136 | env.bak/ 137 | venv.bak/ 138 | 139 | # Spyder project settings 140 | .spyderproject 141 | .spyproject 142 | 143 | # Rope project settings 144 | .ropeproject 145 | 146 | # mkdocs documentation 147 | /site 148 | 149 | # mypy 150 | .mypy_cache/ 151 | .dmypy.json 152 | dmypy.json 153 | 154 | # Pyre type checker 155 | .pyre/ 156 | 157 | # pytype static type analyzer 158 | .pytype/ 159 | 160 | # Cython debug symbols 161 | cython_debug/ 162 | 163 | # PyCharm 164 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 165 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 166 | # and can be added to the global gitignore or merged into this file. For a more nuclear 167 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 168 | #.idea/ 169 | 170 | # PyPI configuration file 171 | .pypirc 172 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pyDiscRip 2 | Automate ripping optical discs and extracting data 3 | 4 | ## Usage 5 | 6 | ``` 7 | usage: pyDiscRip [-h] [-c CSV] [-f CONFIG] [-d [CONFIGDUMP]] [-o OUTPUT] 8 | 9 | Media ripping manager program 10 | 11 | options: 12 | -h, --help show this help message and exit 13 | -c, --csv CSV CSV file in `Drive,Name,Description` format 14 | -f, --config CONFIG Config file for ripping 15 | -d, --configdump [CONFIGDUMP] 16 | Dump all config options. Optional filename to output to. 17 | -o, --output OUTPUT Directory to save data in 18 | 19 | By Shelby Jueden 20 | ``` 21 | ### Rip List CSV 22 | This program takes a CSV file as a parameter that describes which drive each media sample is in, along with a name and description. The CSV may optionally tell the software what format the media is if it is not an optical disc. 23 | 24 | The headers for the CSV are almost all required, but the order is not critical. The headers are: 25 | ``` 26 | Drive,Name,Description,media_type 27 | ``` 28 | 29 | The header `media_type` is not required for optical discs, but is required for other formats. Omitting the `media_type` header is the same as setting the `media_type` to `auto`. 30 | 31 | #### Valid Media Types 32 | 33 | - CD 34 | - DVD 35 | - Some Blu-ray 36 | - Floppy 37 | 38 | #### CSV Line Examples 39 | 40 | - **Ripping a CD with automatic format detection:** `/dev/sr0, StAnger, Metallica - St. Anger` 41 | - **Ripping a CD with manual format specification:** `CD, /dev/sr0, StAnger, Metallica - St. Anger` 42 | - **Ripping a Floppy in Drive A with a Greaseweazle:** `floppy, a, doomsharev1.1_1-2, Doom Shareware v1.1 Disk 1 of 2` 43 | 44 | ### Config File 45 | 46 | A JSON configuration file may be used to change some parameters of the rip, such as the `cdrdao` driver or the format the Greaseweazle `convert` function will use to decode flux. You can have all possible configuration values dumped to a file with the `-d` parameter; a filename may optionally be specified to write them to. 47 | 48 | ### Virtual Data Formats 49 | Virtual data formats may be specified in config files. This allows you to add additional conversion steps using only JSON, as sketched below. 
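The substitution itself is plain placeholder templating on the `"cmd"` field shown in the example that follows. As a rough sketch of the semantics (illustrative only — the paths below are made up and the real substitution happens inside pyDiscRip; only `{input_file}` and `{data_dir}` are documented placeholders):

```
# Hypothetical illustration of how a virtual format's "cmd" is expanded
# before execution. pyDiscRip performs the real substitution internally.
cmd_template = "mcopy -spi {input_file} ::*.* {data_dir}"
cmd = cmd_template.format(
    input_file="BINARY/disk1.img",  # made-up input path
    data_dir="FILES",               # made-up output directory
).split()
# cmd == ['mcopy', '-spi', 'BINARY/disk1.img', '::*.*', 'FILES']
```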
50 | 51 | Here is an example of a virtual format for extracting the contents of a binary file that contains a FAT12 filesystem using `mtools`: 52 | 53 | ``` 54 | "Virtual": { 55 | "Data": [ 56 | { 57 | "input_type_id":"BINARY", 58 | "output_type_id":"Z_FILES", 59 | "cmd":"mcopy -spi {input_file} ::*.* {data_dir}", 60 | "data_output": { 61 | "type_id": "Z_FILES", 62 | "processed_by": [], 63 | "data_dir": "FILES", 64 | "data_files": { 65 | "Z_FILES": "" 66 | } 67 | } 68 | } 69 | ] 70 | } 71 | ``` 72 | 73 | The `{input_file}` and `{data_dir}` parts of the `"cmd"` string get substituted before execution. 74 | 75 | ## Installation 76 | 77 | ### System 78 | Some features of this software rely on executing system-level programs. In the future it would be preferred to replace these with native Python packages, but that isn't currently possible. 79 | 80 | You will need the following system packages: 81 | 82 | cdrdao bchunk ddrescue 7z libdiscid-dev python-dev-is-python3 libcdio-dev libiso9660-dev swig pkg-config libcdio-utils 83 | 84 | 85 | ### pip 86 | Make sure to install the system packages first, as some pip packages use them to build modules 87 | 88 | flask pyudev python-libdiscid musicbrainzngs pycdio unidecode ffmpeg-python pyserial 89 | 90 | For floppy media reading you will also need the Greaseweazle software installed. You will most likely have it installed already if you have used a Greaseweazle before. But if you want to directly install the latest version you can use the following command: 91 | 92 | pip install git+https://github.com/keirf/greaseweazle@latest --force 93 | 94 | ## Roadmap 95 | 96 | 97 | ### Format: CD 98 | - Pre-gap detection and ripping (would be audio only so can go direct to WAV) 99 | 100 | ### Data: FLAC + Musicbrainz 101 | 102 | - **Mixed Mode Discs:** A disc that has Data tracks mixed with Audio may return metadata that includes the data tracks. This currently causes off-by-one errors. 103 |   - A possible solution would be to look at a BINCUE data set and determine the index positions of Data tracks and skip those in the tagging step. There is no way to cleanly associate an ISO with a BINCUE though. 104 | 105 | 106 | -------------------------------------------------------------------------------- /handler/controller/controller_handler.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Base controller handler for pyDiscRip. 
4 | 5 | # Python System 6 | import sys, os 7 | import json 8 | import time 9 | from enum import Enum 10 | from datetime import datetime 11 | 12 | # Internal Modules 13 | try: 14 | from handler.handler import Handler 15 | except Exception as e: 16 | # Probably running directly 17 | sys.path.append('../../handler') 18 | from handler import Handler 19 | try: 20 | from wand.image import Image 21 | except Exception as e: 22 | print("Need to install Python module [wand]") 23 | sys.exit(1) 24 | 25 | class ControllerHandler(Handler): 26 | """Base class for controllers that handle loading, ejecting, and photographing media 27 | 28 | """ 29 | 30 | def __init__(self): 31 | """Constructor to setup basic data and config defaults 32 | 33 | """ 34 | # Call parent constructor 35 | super().__init__() 36 | # Set media type id for later use 37 | self.type_id=None 38 | # Set id to match against 39 | self.controller_id=None 40 | # Set directory to work in 41 | self.project_dir="" 42 | # Get current datetime 43 | self.project_timestamp=str(datetime.now().isoformat()).replace(":","-") 44 | # Data types output for later use 45 | self.data_outputs=[] 46 | # Camera setting values 47 | self.camera_defaults={ 48 | "video_id":-1, # The /dev/video# id for the camera to use 49 | "camera_x":1920, 50 | "camera_y":1080, 51 | "crop_x0":0, 52 | "crop_y0":0, 53 | "crop_x1":1920, 54 | "crop_y1":1080, 55 | "focus":0 56 | } 57 | 58 | 59 | def initialize(self): 60 | return 61 | 62 | 63 | def controllerMatch(self, media_sample=None): 64 | """Check if the media sample should be handled by this controller""" 65 | return media_sample["controller_type"] == self.type_id 66 | 67 | 68 | def load_hold(self,callback=None,callback_arg=None): 69 | if callback is not None: 70 | callback(callback_arg) 71 | 72 | 73 | def photoDrive(self,driveName, focus=None): 74 | """ Take a photo of media related to drive """ 75 | 76 | # Check if camera is configured 77 | if self.config_data["camera"]["video_id"] == -1: 78 | return False 79 | 80 | # Find focus value 81 | if focus is None: 82 | # Use default focus 83 | focus = self.config_data["camera"]["focus"] 84 | # Handle given drive name 85 | drivepath=driveName+"/" 86 | 87 | print("Taking photo of media") 88 | from linuxpy.video.device import Device, MenuControl, VideoCapture, BufferType 89 | # Init camera device 90 | cam = Device.from_id(self.config_data["camera"]["video_id"]) 91 | cam.open() 92 | # set camera data format 93 | capture = VideoCapture(cam) 94 | capture.set_format( 95 | self.config_data["camera"]["camera_x"], 96 | self.config_data["camera"]["camera_y"], 97 | "YUYV" 98 | ) 99 | cam.controls["focus_automatic_continuous"].value=False 100 | cam.controls["focus_absolute"].value=focus 101 | time.sleep(3) 102 | 103 | # get frame from camera 104 | img = None 105 | for i, frame in enumerate(cam): 106 | if i > 30: 107 | img = frame 108 | break 109 | 110 | # extract raw data from frame 111 | raw_yuv = list(img.data) 112 | 113 | # Byteswap for wand 114 | hold = None 115 | for i in range(0,len(raw_yuv),2): 116 | hold = raw_yuv[i] 117 | raw_yuv[i] = raw_yuv[i+1] 118 | raw_yuv[i+1] = hold 119 | data = bytes(raw_yuv) 120 | cam.close() 121 | 122 | with Image(blob=data, format='UYVY',width=self.config_data["camera"]["camera_x"],height=self.config_data["camera"]["camera_y"],depth=8,colorspace="yuv") as image: 123 | # Build path to save image 124 | tmp=self.ensureDir("/tmp/discrip/photo/"+drivepath) 125 | # Apply crop 126 | image.crop( 127 | self.config_data["camera"]["crop_x0"], 128 | self.config_data["camera"]["crop_y0"], 129 | 
self.config_data["camera"]["crop_x1"], 130 | self.config_data["camera"]["crop_y1"], 131 | ) 132 | 133 | image.save(filename=tmp+"photo.jpg") 134 | 135 | 136 | def load(self, drive): 137 | return False 138 | 139 | 140 | def eject(self, drive): 141 | return False 142 | -------------------------------------------------------------------------------- /handler/data/bincue.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # BINCUE conversion module for pyDiscRip. 4 | 5 | # Python System 6 | import os 7 | import glob 8 | import sys 9 | import json 10 | 11 | # Internal Modules 12 | from handler.data.data_handler import DataHandler 13 | 14 | 15 | class DataHandlerBINCUE(DataHandler): 16 | """Handler for BINCUE data types 17 | 18 | Extracts files using bchunk 19 | """ 20 | 21 | def __init__(self): 22 | """Constructor to setup basic data and config defaults 23 | 24 | """ 25 | # Call parent constructor 26 | super().__init__() 27 | # Set handle ID 28 | self.handle_id="DataHandlerBINCUE" 29 | # Set data type to handle 30 | self.type_id="BINCUE" 31 | # Data types output 32 | self.data_outputs=["WAV","ISO9660"] 33 | 34 | 35 | def convertData(self,data_in): 36 | """Use bchunk to extract all WAVs and ISOs from BINCUE 37 | 38 | """ 39 | 40 | if type(data_in["data_files"]["BIN"]) is list: 41 | bin_path = data_in["data_files"]["BIN"][0].replace(".bin","") 42 | else: 43 | bin_path = data_in["data_files"]["BIN"].replace(".bin","") 44 | 45 | 46 | # Build data output for WAV 47 | data_wav = { 48 | "type_id": "WAV", 49 | "processed_by": [], 50 | "data_dir": self.ensureDir(f"{self.getPath()}/WAV/{bin_path}"), 51 | "data_files": { 52 | "WAV": [] 53 | } 54 | } 55 | 56 | # Build data output ISO 57 | data_iso = { 58 | "type_id": "ISO9660", 59 | "processed_by": [], 60 | "data_dir": self.ensureDir(f"{self.getPath()}/ISO9660/{bin_path}"), 61 | "data_files": { 62 | "ISO": [] 63 | } 64 | } 65 | 66 | # Check for files in ouput directory 67 | wavs = glob.glob(f"{data_wav["data_dir"]}/*.wav") 68 | isos = glob.glob(f"{data_iso["data_dir"]}/*.iso") 69 | 70 | # Don't re-convert if files exist 71 | if len(wavs) == 0 and len(isos) == 0 : 72 | 73 | if type(data_in["data_files"]["BIN"]) is list: 74 | if len(data_in["data_files"]["BIN"]) > 1: 75 | 76 | with open(f"{data_in["data_dir"]}/{data_in["data_files"]["CUE"]}") as in_cue: 77 | with open(f"{data_in["data_dir"]}/{data_in["data_files"]["CUE"]}-s1.cue", 'w') as out_cue: 78 | for line in in_cue: 79 | if not "SESSION 02" in line: 80 | out_cue.write(line+"\n") 81 | else: 82 | break 83 | # Build bchunk command to generate CUE 84 | cmd = [ 85 | "bchunk", 86 | "-w", 87 | f"{data_in["data_dir"]}/{data_in["data_files"]["BIN"][0]}", 88 | f"{data_in["data_dir"]}/{data_in["data_files"]["CUE"]}-s1.cue", 89 | f"{data_wav["data_dir"]}/track" 90 | ] 91 | 92 | 93 | else: 94 | # Build bchunk command to generate CUE 95 | cmd = [ 96 | "bchunk", 97 | "-w", 98 | f"{data_in["data_dir"]}/{data_in["data_files"]["BIN"][0]}", 99 | f"{data_in["data_dir"]}/{data_in["data_files"]["CUE"]}", 100 | f"{data_wav["data_dir"]}/track" 101 | ] 102 | 103 | 104 | else: 105 | # Build bchunk command to generate CUE 106 | cmd = [ 107 | "bchunk", 108 | "-w", 109 | f"{data_in["data_dir"]}/{data_in["data_files"]["BIN"]}", 110 | f"{data_in["data_dir"]}/{data_in["data_files"]["CUE"]}", 111 | f"{data_wav["data_dir"]}/track" 112 | ] 113 | 114 | # Run command 115 | result = self.osRun(cmd) 116 | self.log("bchunk_stdout",str(result.stdout)) 117 | 
self.log("bchunk_stderr",str(result.stderr)) 118 | 119 | 120 | # Get files in ouput directory 121 | wavs = glob.glob(f"{data_wav["data_dir"]}/*.wav") 122 | # Sort wavs to have file order make sense 123 | wavs.sort() 124 | 125 | # Build data output if WAVs were converted 126 | if len(wavs) > 0: 127 | 128 | # Add file paths to data output for all WAVs 129 | for wav in wavs: 130 | print(f"Working on: {wav}") 131 | data_wav["data_files"]["WAV"].append(f"{wav.replace(data_wav["data_dir"]+"/","")}") 132 | 133 | # Build data output if ISOs were converted 134 | isos = glob.glob(f"{data_wav["data_dir"]}/*.iso") + glob.glob(f"{data_iso["data_dir"]}/*.iso") 135 | if len(isos) > 0: 136 | 137 | # Add file paths to data output for all ISOs 138 | for iso in isos: 139 | print(f"Working on: {iso}") 140 | # The file paths get weird, this is a fix for it 141 | if "WAV" in iso: 142 | os.rename( 143 | iso, 144 | f"{data_iso["data_dir"]}/{iso.replace(data_wav["data_dir"]+"/","")}") 145 | iso = f"{data_iso["data_dir"]}/{iso.replace(data_wav["data_dir"]+"/","")}" 146 | data_iso["data_files"]["ISO"].append(f"{iso.replace(data_iso["data_dir"]+"/","")}") 147 | 148 | # Clear WAV data if no WAVs were created 149 | if len(wavs) == 0: 150 | data_wav = None 151 | 152 | # Clear ISO data if no ISOs were created 153 | if len(isos) == 0: 154 | data_iso = None 155 | 156 | # Return all generated data 157 | return [data_wav,data_iso] 158 | 159 | -------------------------------------------------------------------------------- /handler/util/bincon.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Python System 4 | import argparse 5 | import sys 6 | import os 7 | import re 8 | from enum import Enum 9 | 10 | class CD_MODE_SECTORS(Enum): 11 | AUDIO = 2352 12 | CDG = 2448 13 | MODE1_RAW = 2352 14 | MODE1_2048 = 2048 15 | MODE1_2352 = 2352 16 | MODE2_RAW = 2352 17 | MODE2_2048 = 2048 18 | MODE2_2324 = 2324 19 | MODE2_2336 = 2336 20 | MODE2_2352 = 2352 21 | CDI_2336 = 2336 22 | CDI_2352 = 2352 23 | 24 | 25 | def msf2sector(msf): 26 | sector=0 27 | sector+=int(msf.split(":")[0])*60*75 28 | sector+=int(msf.split(":")[1])*75 29 | sector+=int(msf.split(":")[2]) 30 | 31 | return sector 32 | 33 | def sector2msf(sector): 34 | msf="" 35 | m=sector // (60*75) 36 | msf+=str(int(m)).zfill(2)+":" 37 | s=(sector-(m*(60*75))) // (75) 38 | msf+=str(int(s)).zfill(2)+":" 39 | f=sector % 75 40 | msf+=str(int(f)).zfill(2) 41 | 42 | return msf 43 | 44 | def cue_by_line(cue_file, bin_out,path="./"): 45 | 46 | # Create output folder if it doesn't exist 47 | if not os.path.exists(path): 48 | os.makedirs(path) 49 | 50 | # Load CUE file 51 | cue_lines=None 52 | cue_dir=os.path.dirname(cue_file) if os.path.dirname(cue_file) != "" else "./" 53 | with open(cue_file) as file: 54 | cue_lines = [line.rstrip() for line in file] 55 | 56 | # Count sessions to know if is multisession disc image 57 | session_total=0 58 | 59 | # Check all BIN files exist 60 | for line in cue_lines: 61 | if "SESSION" in line: 62 | session_total+=1 63 | if "FILE" in line: 64 | # Exist if file not found 65 | if not os.path.exists(cue_dir+"/"+re.search(r'FILE "?(.*?)"? BINARY', line).group(1)): 66 | print(f'BIN file [{re.search(r'FILE "?(.*?)"? 
BINARY', line).group(1)}] from CUE not found.') 67 | sys.exit(1) 68 | 69 | # Setup runtime 70 | mode_size=2352 71 | session=1 72 | session_post="" if session_total == 0 or session_total == 1 else f'-s{session}' 73 | track=0 74 | # Track position in data with sector position relative to bin data 75 | sector=0 76 | file_size_full=0 77 | file_size_used=0 78 | 79 | # Prepare output files 80 | if bin_out: 81 | output = open(f'{path}/{bin_out+session_post}.bin', "w+b") 82 | cue = open(f'{path}/{bin_out}.cue', 'w') 83 | 84 | # Main CUE loop 85 | for line in cue_lines: 86 | # Reset on new session and start new file 87 | if "SESSION" in line: 88 | result=re.search(r'REM SESSION ([0-9]+)', line) 89 | sector=0 90 | session=int(result.group(1)) 91 | file_size_full=0 92 | file_size_used=0 93 | session_post=f'-s{session}' 94 | if bin_out: 95 | output.close() 96 | output = open(f'{path}/{bin_out+session_post}.bin', "w+b") 97 | 98 | # Use track to get sector size for upcoming data 99 | if "TRACK" in line: 100 | result=re.search(r'TRACK ([0-9]+) (.*)', line) 101 | if result is not None: 102 | track=result.group(1) 103 | mode_size=CD_MODE_SECTORS[result.group(2).replace("/","_")].value 104 | 105 | # Get size of files to calculate length of tracks using sector size 106 | if "FILE" in line: 107 | if file_size_full == 0: 108 | if bin_out: 109 | cue.write(f'FILE "{bin_out+session_post}.bin" BINARY'+"\n") 110 | 111 | # Copy bin file into output 112 | if bin_out: 113 | with open(cue_dir+"/"+re.search(r'FILE "?(.*?)"? BINARY', line).group(1), "rb") as r: 114 | output.write(r.read()) 115 | 116 | # Add any unaccounted for data to sector position 117 | sector+=file_size_full 118 | 119 | # Reset size 120 | file_size_used=0 121 | file_size_full=os.path.getsize(cue_dir+"/"+re.search(r'FILE "?(.*?)"? BINARY', line).group(1))/mode_size 122 | 123 | # Check for MSF times in INDEXes 124 | if "INDEX" in line: 125 | result = re.search(r'[0-9]+:[0-9]+:[0-9]+', line) 126 | if result is not None: 127 | # Consume current file data 128 | if file_size_full != 0: 129 | file_size_used=msf2sector(result.group(0)) 130 | 131 | # Update MSF in line 132 | line=line.replace(result.group(0),sector2msf(sector+file_size_used)) 133 | 134 | # Pass all lines to new CUE except old FILE lines 135 | if "FILE" not in line: 136 | print(line) 137 | if bin_out: 138 | cue.write(line+"\n") 139 | 140 | # Close new files 141 | if bin_out: 142 | cue.close() 143 | output.close() 144 | 145 | 146 | if __name__ == "__main__": 147 | """ Run directly 148 | 149 | """ 150 | parser = argparse.ArgumentParser( 151 | prog='bincon', 152 | description='BIN/CUE bin concatenation tool to combine multiple BIN files into one.', 153 | epilog='By Shelby Jueden') 154 | parser.add_argument('-d', '--debug', help="Only print CUE, don't write files", action='store_true') 155 | parser.add_argument('-o', '--output-folder', help="Path to output files to", default="./") 156 | parser.add_argument('filenames', help="", default=None, nargs=argparse.REMAINDER) 157 | args = parser.parse_args() 158 | 159 | 160 | if len(args.filenames) < 1: 161 | print("Please provide a CUE file to work on. 
And optionally an output BIN name.") 162 | sys.exit(1) 163 | 164 | # Allow sloppy file name parameters based on file existing or not 165 | cue=None 166 | bin_out="data" 167 | for check in args.filenames: 168 | if not os.path.exists(check): 169 | bin_out=check 170 | else: 171 | cue=check 172 | 173 | if args.debug: 174 | bin_out = None 175 | 176 | # Check CUE was passed and begin parsing 177 | if cue is None: 178 | print("Make sure CUE file exists.") 179 | sys.exit(1) 180 | else: 181 | print(f'Working on {cue}') 182 | cue_by_line(cue,bin_out,args.output_folder) 183 | -------------------------------------------------------------------------------- /handler/data/flux.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Flux conversion module for pyDiscRip. Uses greaseweazle software 4 | 5 | # Python System 6 | import os 7 | import json 8 | from pathlib import Path 9 | import importlib 10 | from pprint import pprint 11 | 12 | # External Modules 13 | # Directly imports from greaseweazle module in code 14 | 15 | # Internal Modules 16 | from handler.data.data_handler import DataHandler 17 | 18 | 19 | class DataHandlerFLUX(DataHandler): 20 | """Handler for FLUX data types 21 | 22 | converts using greaseweazle software by directly accessing python code 23 | """ 24 | 25 | def __init__(self): 26 | """Constructor to setup basic data and config defaults 27 | 28 | """ 29 | # Call parent constructor 30 | super().__init__() 31 | # Set handle ID 32 | self.handle_id="DataHandlerFLUX" 33 | # Set data type to handle 34 | self.type_id="FLUX" 35 | # Default config data 36 | self.config_data={ 37 | "convert_output":"img", 38 | "gw":{ 39 | "tracks": None, 40 | "hard-sectors": None, 41 | "pll": None, 42 | "reverse": None, 43 | "diskdefs": None, 44 | "format": "ibm.1440" 45 | }, 46 | "diskdefs-direct": None 47 | } 48 | # Data types output 49 | self.data_outputs=["BINARY"] 50 | 51 | def buildArgs(self,data_in, data,default_diskdef=True): 52 | # gw modules individually parse arguments to control rip process. 
This 53 | # builds fake arguments to pass to module 54 | # For more information on gw parameters run `gw convert --help` 55 | args=[] 56 | args.append("pyDiscRip") # Not actually used but index position is needed 57 | args.append("convert") # Not actually used but index position is needed 58 | 59 | # Process all config options to build parameters for gw module 60 | if "diskdefs-direct" in self.config_data and self.config_data["diskdefs-direct"] is not None: 61 | 62 | with open(f"/tmp/discrip/{self.project_timestamp}_diskdefs.cfg", 'w', encoding="utf-8") as output: 63 | output.write(self.config_data["diskdefs-direct"]) 64 | args.append("--diskdefs") 65 | args.append(f"/tmp/discrip/{self.project_timestamp}_diskdefs.cfg") 66 | else: 67 | if "diskdefs" in self.config_data["gw"] and self.config_data["gw"]["diskdefs"] is not None: 68 | args.append("--diskdefs") 69 | args.append(str(self.config_data["gw"]["diskdefs"])) 70 | else: 71 | if not default_diskdef: 72 | args.append("--diskdefs") 73 | args.append(os.path.realpath(__file__).replace(os.path.basename(__file__),"")+"/../../config/handler/flux/diskdefs.cfg") 74 | if "format" in self.config_data["gw"] and self.config_data["gw"]["format"] is not None: 75 | args.append("--format") 76 | args.append(str(self.config_data["gw"]["format"])) 77 | if "tracks" in self.config_data["gw"] and self.config_data["gw"]["tracks"] is not None: 78 | args.append("--tracks") 79 | args.append(str(self.config_data["gw"]["tracks"])) 80 | if "seek-retries" in self.config_data["gw"] and self.config_data["gw"]["seek-retries"] is not None: 81 | args.append("--seek-retries") 82 | args.append(str(self.config_data["gw"]["seek-retries"])) 83 | if "pll" in self.config_data["gw"] and self.config_data["gw"]["pll"] is not None: 84 | args.append("--pll") 85 | args.append(self.config_data["gw"]["pll"]) 86 | if "hard-sectors" in self.config_data["gw"] and self.config_data["gw"]["hard-sectors"] is not None: 87 | args.append("--hard-sectors") 88 | if "reverse" in self.config_data["gw"] and self.config_data["gw"]["reverse"] is not None: 89 | args.append("--reverse") 90 | 91 | # Add the file input as parameter 92 | if isinstance(data_in["data_files"]["flux"], list): 93 | args.append(f"{data_in["data_dir"]}/{data_in["data_files"]["flux"][0]}") 94 | else: 95 | args.append(f"{data_in["data_dir"]}/{data_in["data_files"]["flux"]}") 96 | 97 | # Add the file output as final parameter 98 | args.append(f"{data["data_dir"]}/{data["data_files"]["BINARY"]}") 99 | 100 | # Log all parameters to be passed to gw convert 101 | self.log("floppy_gw_args",args,json_output=True) 102 | 103 | return args 104 | 105 | def convertData(self, data_in): 106 | """Use gw python modules to convert FLUX to BINARY 107 | 108 | """ 109 | 110 | if self.config_data["convert_output"] == "img": 111 | data = { 112 | "type_id": "BINARY", 113 | "processed_by": [], 114 | "data_dir": self.ensureDir(f"{self.getPath()}/BINARY"), 115 | "data_files": { 116 | "BINARY": f"{self.project_dir}.img" # Reusing project dir for name 117 | } 118 | } 119 | else: 120 | data = { 121 | "type_id": "BINARY", 122 | "processed_by": [], 123 | "data_dir": self.ensureDir(f"{self.getPath()}/BINARY"), 124 | "data_files": { 125 | "BINARY": f"{self.project_dir}.{self.config_data["convert_output"]}" # Reusing project dir for name 126 | } 127 | } 128 | 129 | 130 | # Import the greaseweazle convert module to decode flux 131 | mod = importlib.import_module('greaseweazle.tools.convert') 132 | main = mod.__dict__['main'] 133 | 134 | 135 | 136 | # Don't re-convert flux 
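# (an output file that already exists is treated as a finished conversion and reused as-is)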
if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["BINARY"]}"): 138 | # Run the gw read process using arguments 139 | try: 140 | # Use default diskdef 141 | args = self.buildArgs(data_in, data) 142 | res = main(args) 143 | except Exception as e: 144 | # Use repo diskdef 145 | args = self.buildArgs(data_in, data,default_diskdef=False) 146 | res = main(args) 147 | 148 | # Return all generated data 149 | return [data] 150 | 151 | 152 | 153 | -------------------------------------------------------------------------------- /handler/media/manager.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Media ripping manager for pyDiscRip. Can be used to rip a CD and fetch metadata 4 | 5 | # External Modules 6 | import time, sys 7 | import json 8 | from pprint import pprint 9 | import pathlib 10 | try: 11 | import pyudev 12 | except Exception as e: 13 | print("Need to install Python module [pyudev]") 14 | sys.exit(1) 15 | 16 | # Internal Modules 17 | from handler.media.optical import MediaOptical 18 | from handler.media.cd import MediaHandlerCD 19 | from handler.media.cd_redumper import MediaHandlerCDRedumper 20 | from handler.media.dvd import MediaHandlerDVD 21 | from handler.media.dvd_redumper import MediaHandlerDVDRedumper 22 | from handler.media.bd_redumper import MediaHandlerBDRedumper 23 | from handler.media.ddisk import MediaHandlerDDisk 24 | from handler.media.floppy import MediaHandlerFloppy 25 | # Testing only 26 | from handler.media.dummy import MediaHandlerDummy 27 | 28 | class MediaHandlerManager(object): 29 | """Manager for media types 30 | 31 | Provides process control functions for ripping different media types and 32 | setting configuration data. 33 | """ 34 | 35 | def __init__(self): 36 | """Constructor to setup basic data and config defaults 37 | 38 | """ 39 | # Call parent constructor 40 | super().__init__() 41 | 42 | # Add all supported media types 43 | self.media_types={} 44 | self.media_types["OPTICAL"] = MediaOptical() 45 | self.media_types["CD_cdrdao"] = MediaHandlerCD() 46 | self.media_types["CD_redumper"] = MediaHandlerCDRedumper() 47 | self.media_types["DVD"] = MediaHandlerDVD() 48 | self.media_types["DVD_redumper"] = MediaHandlerDVDRedumper() 49 | self.media_types["BD_redumper"] = MediaHandlerBDRedumper() 50 | self.media_types["DDISK"] = MediaHandlerDDisk() 51 | self.media_types["FLOPPY"] = MediaHandlerFloppy() 52 | # Testing only 53 | self.media_types["DUMMY"] = MediaHandlerDummy() 54 | 55 | def loadMediaType(self,media_sample,bypass=False,controller=None): 56 | """Match media handler to type and return handler 57 | 58 | """ 59 | # Iterate through all handlers 60 | for type_id, media_type in self.media_types.items(): 61 | # If handler can proccess media return it 62 | if media_type.mediaMatch(media_sample): 63 | # Set controller 64 | media_type.controller = controller 65 | return media_type.load(media_sample,bypass) 66 | 67 | # No handlers found 68 | 69 | def ejectMediaType(self,media_sample,controller=None): 70 | """Match media handler to type and return handler 71 | 72 | """ 73 | print("Ejecting through manager") 74 | # Iterate through all handlers 75 | for type_id, media_type in self.media_types.items(): 76 | # If handler can proccess media return it 77 | if media_type.mediaMatch(media_sample): 78 | print(f"Matched: {type_id}") 79 | # Set controller 80 | media_type.controller = controller 81 | media_type.eject(media_sample) 82 | return 83 | 84 | # Generic optical 85 | print("No match 
found, attempting generic optical") 86 | if self.typeIsOptical(media_sample): 87 | self.media_types["OPTICAL"].eject(media_sample) 88 | # No handlers found 89 | return 90 | 91 | 92 | def findMediaType(self,media_sample,config_data): 93 | """Match media handler to type and return handler 94 | 95 | """ 96 | # Check if a media type was provided 97 | if "media_type" not in media_sample or media_sample["media_type"].upper() == "OPTICAL": 98 | # Access the drive associated to the media to determine the type 99 | print("Finding media type") 100 | media_sample["media_type"] = self.guessMediaType(media_sample["drive"]) 101 | 102 | # Iterate through all handlers 103 | for type_id, media_type in self.media_types.items(): 104 | # If handler can process media return it 105 | if media_type.mediaMatch(media_sample): 106 | if media_type.handler_id is None: 107 | return media_type 108 | if config_data["settings"]["media_handlers"][media_sample["media_type"]] == media_type.handler_id: 109 | return media_type 110 | 111 | # No handlers found 112 | print("No handlers found for the following media sample:") 113 | pprint(media_sample) 114 | return None 115 | 116 | 117 | def configDump(self): 118 | """Get all config data for media handlers and dump it to json 119 | 120 | """ 121 | config_options={} 122 | # Iterate through all handlers 123 | for type_id, media_type in self.media_types.items(): 124 | # Add all config options for handler 125 | config_options[type_id]=media_type.configOptions() 126 | 127 | return config_options 128 | 129 | 130 | def guessMediaType(self,drivepath=None): 131 | """ Guess media type in drive which will determine how it is ripped 132 | 133 | Only useful for optical discs. 134 | """ 135 | 136 | # Init udev interface to access drive 137 | context = pyudev.Context() 138 | 139 | # Countdown to assume it's a weird CD 140 | countdown = 10 141 | 142 | # Get info from device 143 | output = True 144 | while(output): 145 | print("FIND A DISC TYPE") 146 | #print(f"Drive path: {drivepath}") 147 | 148 | # Resolve any symlinks to the standard drive path 149 | drivepath=str(pathlib.Path(drivepath).resolve()) 150 | # NOTE: Returns as list but we are accessing a specific device 151 | devices = context.list_devices(sys_name=drivepath.replace("/dev/","")) 152 | dev = next(iter(devices)) 153 | 154 | #print(json.dumps(dict(dev.properties),indent=4)) 155 | # Determine media type by ID 156 | if dev.properties.get("ID_CDROM_MEDIA_CD", False) or dev.properties.get("ID_CDROM_MEDIA_CD_R", False): 157 | media_type="CD" 158 | output = False 159 | print("Is CD") 160 | elif dev.properties.get("ID_CDROM_MEDIA_DVD", False): 161 | media_type="DVD" 162 | output = False 163 | print("Is DVD") 164 | elif dev.properties.get("ID_CDROM_MEDIA_BD", False): 165 | media_type="BD" 166 | output = False 167 | 168 | if output: 169 | countdown-=1 170 | if not countdown: 171 | media_type="CD" 172 | output = False 173 | print("Is probably a weird CD") 174 | 175 | #print(json.dumps(dict(dev.properties),indent=4)) 176 | time.sleep(3) 177 | 178 | return media_type 179 | 180 | def typeIsOptical(self, media_sample): 181 | 182 | # Generic optical 183 | match media_sample["media_type"]: 184 | case "CD" | "DVD" | "BD" | "CD_cdrdao" | "CD_redumper": 185 | return True 186 | case _: 187 | return False 188 | 189 | 190 | -------------------------------------------------------------------------------- /web/http/static/jsonForm.js: -------------------------------------------------------------------------------- 1 | /* jsonForm 2 | * 3 | * Takes a json url or data object 
and creates a form out of named keys. 4 | * 5 | * call prepare() on your instance of this class before submitting the form to build the data. 6 | * 7 | * Example: 8 | *
9 | * Note: `return 1` could be client side input validation instead 10 | */ 11 | class jsonForm 12 | { 13 | 14 | constructor(dataSource=null, element=null,title="Form",options=null,types=null) 15 | { 16 | options ||= {"top_blank":false,"form_names":false}; 17 | this.element = element; 18 | this.id = this.element.id; 19 | this.title = title; 20 | this.options = options; 21 | this.textarea = null; 22 | this.types = types; 23 | if (typeof dataSource == "object") 24 | { 25 | // Is data 26 | this.defaultData = dataSource; 27 | this.objectToForm(this.defaultData,title); 28 | 29 | }else if(typeof dataSource == "string") 30 | { 31 | // Is URL 32 | fetch(dataSource).then((response) => response.json()) 33 | .then((data) => 34 | { 35 | this.defaultData = data; 36 | this.objectToForm(this.defaultData,title); 37 | } 38 | ); 39 | } 40 | } 41 | 42 | 43 | objectToForm(data,title=null) 44 | { 45 | // WARNING RECURSIVE 46 | if (data == null) return null; 47 | 48 | // Wipe out element 49 | this.element.replaceChildren(); 50 | 51 | var options = document.createElement("div"); 52 | 53 | if (title != null) 54 | { 55 | var formtitle = document.createElement("h2"); 56 | formtitle.innerText=title; 57 | this.element.append(formtitle); 58 | } 59 | this.element.append(...this.objectHTML(data,null).children); 60 | var controls = document.createElement("div"); 61 | controls.classList.add("objectform_controls"); 62 | // 63 | var btn_save = document.createElement("input"); 64 | btn_save.type = "button"; 65 | btn_save.id = this.id+"_download"; 66 | btn_save.value = "Save Form Data"; 67 | btn_save.addEventListener('click', this.download.bind(this)); 68 | 69 | controls.append(btn_save); 70 | 71 | var btn_file_upload = document.createElement("input"); 72 | btn_file_upload.type = "file"; 73 | btn_file_upload.id = this.id+"_file_upload"; 74 | btn_file_upload.addEventListener('change', this.upload.bind(this)); 75 | controls.append(btn_file_upload); 76 | 77 | this.textarea = document.createElement("textarea"); 78 | this.textarea.name = this.id+"_json_data"; 79 | this.textarea.id = this.id+"_json_data"; 80 | this.textarea.style = "display:none;"; 81 | controls.append(this.textarea); 82 | 83 | this.element.append(controls); 84 | } 85 | 86 | objectHTML(data,prefix="") 87 | { 88 | // WARNING RECURSIVE 89 | if (data == null) return null; 90 | 91 | // Wipe out element 92 | 93 | var options = document.createElement("div"); 94 | var prefix_str = ""; 95 | 96 | for (const [key, value] of Object.entries(data)) 97 | { 98 | if(value == null || typeof value != "object") 99 | { 100 | // Option to skip top level unused settings 101 | if (prefix == null && this.options.top_blank) continue; 102 | 103 | var pair = document.createElement("div"); 104 | pair.classList.add("object_form_data"); 105 | // Add Label with key 106 | var label = document.createElement("label"); 107 | label.innerText=key.substring(0,1).toUpperCase()+key.substring(1).toLowerCase(); 108 | label.htmlFor=prefix+"|"+key; 109 | // Add input with name for value 110 | var elm = "input"; 111 | var type = "text"; 112 | if (this.types != null && prefix+"|"+key in this.types) 113 | { 114 | if(this.types[prefix+"|"+key] == "textarea") 115 | { 116 | elm = "textarea"; 117 | type=null; 118 | }else{ 119 | type=this.types[prefix+"|"+key]; 120 | } 121 | } 122 | 123 | var input = document.createElement(elm); 124 | if (this.options.form_names) 125 | input.name=prefix+"|"+key; 126 | input.id=prefix+"|"+key; 127 | input.value=value; 128 | if (type != null) input.type=type; 129 | 
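// Attach the label and input to the pair, then add the pair to the options container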
pair.appendChild(label); 130 | pair.appendChild(input); 131 | options.appendChild(pair); 132 | }else{ 133 | // New fieldset 134 | prefix_str = prefix == null ? "" : prefix+"|"; 135 | 136 | var fieldset = document.createElement("fieldset"); 137 | // Add legend with key 138 | var legend = document.createElement("legend"); 139 | legend.id=prefix_str+key; 140 | legend.innerText=key.substring(0,1).toUpperCase()+key.substring(1).toLowerCase(); 141 | fieldset.appendChild(legend); 142 | // Go deeper 143 | var child=this.objectHTML(value,prefix_str+key); 144 | if (child != null) fieldset.appendChild(child); 145 | 146 | options.appendChild(fieldset); 147 | } 148 | 149 | } 150 | return options; 151 | } 152 | 153 | // TODO - Use formToObject to submit data. Also allows uploading a json file instead, and the user could save a json file to reuse 154 | formToObject() 155 | { 156 | var data={}; 157 | // Get all inputs in form object from ID 158 | var sel="#"+this.id+" .object_form_data input, #"+this.id+" .object_form_data textarea"; 159 | var base=document.querySelectorAll(sel); 160 | // Handle each input 161 | for (const child of base) 162 | { 163 | // If there is no data, skip 164 | //if ( child.value == "") continue; 165 | 166 | // Copy data reference 167 | var walk=data; 168 | // Get array of keys from ID 169 | var keys = child.id.split("|") 170 | // Store the last key, which will hold the value 171 | var lastkey=keys.pop(); 172 | for (const key of keys) 173 | { 174 | // instantiate new object if undefined (falsy) 175 | walk[key] ||= {}; 176 | // Move walk reference to lower key 177 | walk = walk[key]; 178 | } 179 | // Set final key to value 180 | if (child.value == "") 181 | { 182 | walk[lastkey] = null; 183 | }else{ 184 | walk[lastkey] = child.value; 185 | } 186 | } 187 | return data; 188 | } 189 | 190 | objectUpdate(base,update) 191 | { 192 | // WARNING RECURSIVE 193 | 194 | for (const [key, value] of Object.entries(update)) 195 | { 196 | if(typeof value != "object") 197 | { 198 | try{ 199 | base[key] = value; 200 | } catch (error) { 201 | console.error(error); 202 | } 203 | }else{ 204 | base[key] = this.objectUpdate(base[key],value); 205 | } 206 | } 207 | return base; 208 | } 209 | 210 | prepare() 211 | { 212 | this.textarea.value = JSON.stringify(this.formToObject()); 213 | } 214 | 215 | // Download 216 | download(filename, text) { 217 | this.prepare(); 218 | 219 | var element = document.createElement('a'); 220 | element.setAttribute('href', 'data:text/plain;charset=utf-8,' + encodeURIComponent(this.textarea.value)); 221 | element.setAttribute('download', "config_data.json"); 222 | 223 | element.style.display = 'none'; 224 | document.body.appendChild(element); 225 | 226 | element.click(); 227 | 228 | document.body.removeChild(element); 229 | } 230 | //document.getElementById("config_download").addEventListener('click', download); 231 | 232 | // Upload 233 | async upload(filename, text) { 234 | 235 | const [file] = document.getElementById(this.id+"_file_upload").files; 236 | 237 | if (file) { 238 | var loadData = JSON.parse( await file.text() ); 239 | loadData = this.objectUpdate(this.defaultData,loadData) 240 | 241 | //data = { ...default_config, ...loadData }; 242 | var form=document.getElementById('config_options'); 243 | this.objectToForm(loadData,this.title); 244 | } 245 | } 246 | //document.getElementById("completeLoad").addEventListener('change', upload); 247 | 248 | 249 | } 250 | -------------------------------------------------------------------------------- /web/http/static/status-view.js: 
-------------------------------------------------------------------------------- 1 | function mediaSampleView(media_sample) 2 | { 3 | var table = document.createElement("table"); 4 | table.classList.add("status-view"); 5 | 6 | var tr = document.createElement("tr"); 7 | 8 | // ROW1 - Name Header 9 | var h3 = document.createElement("h3"); 10 | var th = document.createElement("th"); 11 | h3.innerText=media_sample["name"]; 12 | th.colSpan = 100; 13 | th.appendChild(h3); 14 | tr.appendChild(th); 15 | 16 | table.appendChild(tr); 17 | 18 | 19 | // ROW2 - Preview image 20 | tr = document.createElement("tr"); 21 | var td = document.createElement("td"); 22 | var row =3; 23 | if ("time_added" in media_sample) row+=1; 24 | if ("time_start" in media_sample) row+=1; 25 | if ("time_end" in media_sample) row+=1; 26 | td.rowSpan = row; 27 | var img = document.createElement("img"); 28 | if (media_sample["media_type"] == "FLOPPY" && !( media_sample["data"] === undefined)) 29 | { 30 | for (const data of media_sample["data"]) 31 | { 32 | if (data["type_id"]=="IMAGE") 33 | { 34 | img.src = "/output/"+data["data_dir"]+"/"+data["data_files"]["PNG"] 35 | } 36 | } 37 | } 38 | if (media_sample["media_type"] == "CD" && !( media_sample["data"] === undefined)) 39 | { 40 | for (const data of media_sample["data"]) 41 | { 42 | if (data["type_id"]=="IMAGE") 43 | { 44 | img.src = "/output/"+data["data_dir"]+"/"+data["data_files"]["JPG"] 45 | } 46 | } 47 | } 48 | if (media_sample["media_type"] == "DVD" && !( media_sample["data"] === undefined)) 49 | { 50 | for (const data of media_sample["data"]) 51 | { 52 | if (data["type_id"]=="IMAGE") 53 | { 54 | img.src = "/output/"+data["data_dir"]+"/"+data["data_files"]["JPG"] 55 | } 56 | } 57 | } 58 | if (media_sample["media_type"] == "BD" && !( media_sample["data"] === undefined)) 59 | { 60 | for (const data of media_sample["data"]) 61 | { 62 | if (data["type_id"]=="IMAGE") 63 | { 64 | img.src = "/output/"+data["data_dir"]+"/"+data["data_files"]["JPG"] 65 | } 66 | } 67 | } 68 | td.appendChild(img); 69 | tr.appendChild(td); 70 | 71 | // ROW2 - Description 72 | td = document.createElement("td"); 73 | td.innerText="Description"; 74 | tr.appendChild(td); 75 | td = document.createElement("td"); 76 | td.innerText=media_sample["description"]; 77 | tr.appendChild(td); 78 | 79 | table.appendChild(tr); 80 | 81 | // ROW3 - Media Type 82 | tr = document.createElement("tr"); 83 | td = document.createElement("td"); 84 | td.innerText="Media Type"; 85 | tr.appendChild(td); 86 | td = document.createElement("td"); 87 | td.innerText=media_sample["media_type"]; 88 | tr.appendChild(td); 89 | 90 | table.appendChild(tr); 91 | 92 | // ROW4 - Drive 93 | tr = document.createElement("tr"); 94 | td = document.createElement("td"); 95 | td.innerText="Drive"; 96 | tr.appendChild(td); 97 | td = document.createElement("td"); 98 | td.innerText=media_sample["drive"]; 99 | tr.appendChild(td); 100 | 101 | table.appendChild(tr); 102 | 103 | // ROW5 - Drive 104 | if ("time_added" in media_sample) 105 | { 106 | tr = document.createElement("tr"); 107 | td = document.createElement("td"); 108 | td.innerText="Added"; 109 | tr.appendChild(td); 110 | td = document.createElement("td"); 111 | td.innerText=media_sample["time_added"]; 112 | tr.appendChild(td); 113 | 114 | table.appendChild(tr); 115 | } 116 | 117 | // ROW6 - Drive 118 | if ("time_start" in media_sample) 119 | { 120 | tr = document.createElement("tr"); 121 | td = document.createElement("td"); 122 | td.innerText="Start"; 123 | tr.appendChild(td); 124 | td = 
document.createElement("td"); 125 | td.innerText=media_sample["time_start"]; 126 | tr.appendChild(td); 127 | 128 | table.appendChild(tr); 129 | } 130 | 131 | // ROW7- Drive 132 | if ("time_end" in media_sample) 133 | { 134 | tr = document.createElement("tr"); 135 | td = document.createElement("td"); 136 | td.innerText="End"; 137 | tr.appendChild(td); 138 | td = document.createElement("td"); 139 | td.innerText=media_sample["time_end"]; 140 | tr.appendChild(td); 141 | 142 | table.appendChild(tr); 143 | } 144 | 145 | if ( media_sample["data"] === undefined) return table; 146 | // ROW8 - Data Header 147 | tr = document.createElement("tr"); 148 | var h4 = document.createElement("h4"); 149 | th = document.createElement("th"); 150 | h4.innerText="Data Outputs"; 151 | th.colSpan = 100; 152 | th.appendChild(h4); 153 | tr.appendChild(th); 154 | 155 | table.appendChild(tr); 156 | 157 | // ROW8+N - Data Outputs 158 | for (const data of media_sample["data"]) 159 | { 160 | tr = document.createElement("tr"); 161 | td = document.createElement("td"); 162 | td.innerText=data["type_id"]; 163 | data_files_array = Object.entries(data["data_files"]) 164 | td.rowSpan=data_files_array.length; 165 | tr.appendChild(td); 166 | 167 | 168 | row_left=data_files_array.length; 169 | for (const [key, value] of data_files_array) 170 | { 171 | td = document.createElement("td"); 172 | td.innerText=key; 173 | tr.appendChild(td); 174 | 175 | td = document.createElement("td"); 176 | if ( Array.isArray(value) ) 177 | { 178 | var ul = document.createElement("ul"); 179 | var limit=20; 180 | for (const file of value) 181 | { 182 | var li = document.createElement("li"); 183 | li.innerText=file; 184 | ul.appendChild(li); 185 | limit-=1; 186 | if(!limit) 187 | { 188 | var li = document.createElement("li"); 189 | li.innerText="..."; 190 | ul.appendChild(li); 191 | break; 192 | } 193 | } 194 | td.appendChild(ul); 195 | 196 | } else { 197 | if(typeof value === "object") 198 | { 199 | var ul = document.createElement("ul"); 200 | var limit=20; 201 | for (const [type, path] of Object.entries(value)) 202 | { 203 | var li = document.createElement("li"); 204 | li.innerText=type+":"+path; 205 | ul.appendChild(li); 206 | limit-=1; 207 | if(!limit) 208 | { 209 | var li = document.createElement("li"); 210 | li.innerText="..."; 211 | ul.appendChild(li); 212 | break; 213 | } 214 | } 215 | td.appendChild(ul); 216 | 217 | }else if(typeof value === "string") 218 | { 219 | td.innerText=value; 220 | } 221 | } 222 | tr.appendChild(td); 223 | 224 | if(row_left > 1) 225 | { 226 | row_left-=1; 227 | table.appendChild(tr); 228 | tr = document.createElement("tr"); 229 | } 230 | } 231 | table.appendChild(tr); 232 | } 233 | 234 | 235 | return table; 236 | } 237 | 238 | 239 | function loadStatus(event) 240 | { 241 | fetch('/status/status.json').then((response) => response.json()) 242 | .then((data) => 243 | { 244 | document.getElementById("status").replaceChildren(); 245 | 246 | data.sort(function(a, b) { 247 | var adate = a.time_added.split('T')[0]+"T"+a.time_added.split('T')[1].replaceAll("-",":"); 248 | var bdate = b.time_added.split('T')[0]+"T"+b.time_added.split('T')[1].replaceAll("-",":"); 249 | var keyA = new Date(adate), 250 | keyB = new Date(bdate); 251 | // Compare the 2 dates 252 | if (keyA < keyB) return 1; 253 | if (keyA > keyB) return -1; 254 | return 0; 255 | }); 256 | 257 | for (const media_sample of data) 258 | { 259 | table=mediaSampleView(media_sample); 260 | document.getElementById("status").appendChild(table); 261 | } 262 | } 263 | ); 264 | 
265 | 266 | setTimeout(loadStatus, 3000); 267 | } 268 | window.addEventListener("load", loadStatus); 269 | 270 | -------------------------------------------------------------------------------- /handler/data/wav.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # WAV conversion module for pyDiscRip. 4 | 5 | # Python System 6 | import os, sys 7 | import glob 8 | import json 9 | 10 | # External Modules 11 | try: 12 | import ffmpeg 13 | if not "input" in dir(ffmpeg): 14 | print("You have the wrong ffmpeg python module installed.") 15 | print("Need to install Python module [ffmpeg-python]") 16 | sys.exit(1) 17 | 18 | except Exception as e: 19 | print("Need to install Python module [ffmpeg-python]") 20 | sys.exit(1) 21 | 22 | # Internal Modules 23 | from handler.data.data_handler import DataHandler 24 | 25 | 26 | class DataHandlerWAV(DataHandler): 27 | """Handler for WAV data types 28 | 29 | Converts files using ffmpeg 30 | """ 31 | 32 | def __init__(self): 33 | """Constructor to setup basic data and config defaults 34 | 35 | """ 36 | # Call parent constructor 37 | super().__init__() 38 | # Set handle ID 39 | self.handle_id="DataHandlerWAV" 40 | # Set data type to handle 41 | self.type_id="WAV" 42 | # Data types output 43 | self.data_outputs=["FLAC"] 44 | 45 | 46 | def convertWAV(self,data,data_meta=None): 47 | """Use ffmpeg to convert WAVs to FLAC 48 | 49 | Optionally uses metadata to tag files 50 | 51 | TODO - I don't think this can handle CDs that did not find metadata 52 | 53 | """ 54 | # Track info from metadata 55 | tracks = None 56 | # Metadata object for ffmpeg 57 | metadata = {} 58 | # Data files to process 59 | data_files = {} 60 | # Matching release for metadata 61 | release = {} 62 | metadata = None 63 | # Open metadata json 64 | if data_meta is not None: 65 | with open(f"{data_meta["data_dir"]}/{data_meta["data_files"]["JSON"]}", encoding="utf-8") as f: 66 | json_data = json.load(f) 67 | 68 | # Get discid for sample ripped 69 | if json_data.get("disc"): 70 | discid = json_data["disc"]["id"] 71 | 72 | # Iterate through all releases in metadata 73 | for medium in json_data["disc"]["release-list"][0]["medium-list"]: 74 | for disc in medium["disc-list"]: 75 | # Find matching release 76 | if disc["id"] == discid: 77 | release = medium 78 | # break? 
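# (without a break the scan continues, so the last matching medium wins)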
79 | 80 | # Get track data for release 81 | tracks = list(map((lambda t: t["recording"]["title"]), release["track-list"])) 82 | artist = json_data["disc"]["release-list"][0]["artist-credit-phrase"] 83 | album = json_data["disc"]["release-list"][0]["title"] 84 | year = int(json_data["disc"]["release-list"][0]["date"][0:4]) 85 | 86 | # Handle a CD stub result 87 | elif json_data.get("cdstub"): 88 | # Get track data for release 89 | tracks = list(map((lambda t: t["title"]), json_data["cdstub"]["track-list"])) 90 | artist = json_data["cdstub"]["artist"] 91 | album = json_data["cdstub"]["title"] 92 | year = 0 93 | 94 | # Build metadata for ffmpeg 95 | metadata = { 96 | "metadata:g:1": f"artist={artist}", 97 | "metadata:g:2": f"album={album}", 98 | "metadata:g:3": f"date={year}" 99 | } 100 | # Build data output for FLAC 101 | data_files = { 102 | "type_id": "FLAC", 103 | "processed_by": [], 104 | "data_dir": self.ensureDir(f"{self.getPath()}/FLAC/{data["data_dir"].split("WAV/",1)[1]}/{artist}/{year} - {album}"), 105 | "data_files": { 106 | "FLAC": [] 107 | } 108 | } 109 | else: 110 | # Build metadata for ffmpeg 111 | metadata = { 112 | "metadata:g:1": f"artist=Unknown Artist", 113 | "metadata:g:2": f"album=Unknown Album", 114 | "metadata:g:3": f"date=0" 115 | } 116 | # Build data output for FLAC 117 | data_files = { 118 | "type_id": "FLAC", 119 | "processed_by": [], 120 | "data_dir": self.ensureDir(f"{self.getPath()}/FLAC/{data["data_dir"].split("WAV/",1)[1]}"), 121 | "data_files": { 122 | "FLAC": [] 123 | } 124 | } 125 | 126 | 127 | 128 | # Iterate over WAV files 129 | for i,v in enumerate(data["data_files"]["WAV"]): 130 | if data_meta is not None: 131 | print(f"Working on: {data["data_files"]["WAV"][i]}: {self.cleanFilename(tracks[i])}") 132 | # Set track title in ffmpeg metadata 133 | metadata["metadata:g:0"] = f"title={tracks[i]}" 134 | # Run ffmpeg to convert WAV to FLAC 135 | (ffmpeg 136 | .input(f"{data["data_dir"]}/{data["data_files"]["WAV"][i]}") 137 | .output(f"{data_files["data_dir"]}/{(i+1):02} - {self.cleanFilename(tracks[i])}.flac", **metadata) 138 | .overwrite_output() 139 | .run(capture_stdout=True, capture_stderr=True) 140 | ) 141 | else: 142 | print(f"Working on: {data["data_files"]["WAV"][i]}") 143 | metadata["metadata:g:0"] = f"title=Track - {(i+1):02}" 144 | (ffmpeg 145 | .input(f"{data["data_dir"]}/{data["data_files"]["WAV"][i]}") 146 | .output(f"{data_files["data_dir"]}/{(i+1):02} - Track.flac", **metadata) 147 | .overwrite_output() 148 | .run(capture_stdout=True, capture_stderr=True) 149 | ) 150 | 151 | 152 | # Get FLAC files 153 | flacs = glob.glob(f"{data_files["data_dir"]}/*.flac") 154 | # If FLACs were created add them to data output 155 | if len(flacs) > 0: 156 | for flac in flacs: 157 | data_files["data_files"]["FLAC"].append(f"{flac.replace(data_files["data_dir"]+"/","")}") 158 | return data_files 159 | 160 | 161 | def convert(self, media_sample): 162 | """Take in WAV and convert to FLACs with tagging if available 163 | 164 | """ 165 | 166 | # Setup rip output path 167 | self.setProjectDir(media_sample["name"]) 168 | 169 | # Go through all data in media sample 170 | for data in media_sample["data"]: 171 | # Check handler can work on data 172 | if data["type_id"] == self.type_id: 173 | # Check if handler has already worked on data 174 | if self.type_id not in data["processed_by"]: 175 | print("Convert WAV to FLAC") 176 | 177 | # Check for metadata 178 | data_meta=None 179 | for data_sup in media_sample["data"]: 180 | if data_sup["type_id"] == "MUSICBRAINZ": 181 | 
/handler/media/cd_redumper.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # CD ripping module for pyDiscRip. Can be used to rip a CD and fetch metadata 4 | 5 | # Python System 6 | import os 7 | import json 8 | from pathlib import Path 9 | import time 10 | import glob 11 | from datetime import datetime 12 | 13 | # External Modules 14 | import libdiscid 15 | import musicbrainzngs 16 | import pycdio, cdio 17 | 18 | # Internal Modules 19 | from handler.media.media_handler import MediaHandler 20 | from handler.media.optical import MediaOptical 21 | 22 | 23 | class MediaHandlerCDRedumper(MediaOptical): 24 | """Handler for CD media types 25 | 26 | rips using a subprocess command to run `redumper` to create a BIN/CUE 27 | """ 28 | 29 | def __init__(self): 30 | """Constructor to setup basic data and config defaults 31 | 32 | """ 33 | # Call parent constructor 34 | super().__init__() 35 | # Set handler ID 36 | self.handler_id="cd_redumper" 37 | # Set media type to handle 38 | self.type_id="CD" 39 | # Default config data 40 | self.config_data={} 41 | # Data types output 42 | self.data_outputs=["BINCUE_SPLIT","MUSICBRAINZ"] 43 | 44 | 45 | 46 | def ripBinCue(self, media_sample): 47 | """Use redumper to rip a CD to a BIN/CUE image set 48 | 49 | """ 50 | 51 | # Data types to return to be processed after rip 52 | datas=[] 53 | # Start ripping all sessions 54 | sessions = 1 55 | print(f"Rip to: {self.getPath()}") 56 | # Build data output 57 | data = { 58 | "type_id": "BINCUE_SPLIT", 59 | "processed_by": [], 60 | "done": False, 61 | "data_dir": self.ensureDir(f"{self.getPath()}/BINCUE_SPLIT/{media_sample["name"]}-S{sessions}"), 62 | "data_files": { 63 | "BIN": f"{media_sample["name"]}.bin", 64 | "CUE": f"{media_sample["name"]}.cue" 65 | } 66 | } 67 | 68 | self.status(data) 69 | 70 | # Don't re-rip BIN/CUE 71 | if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["CUE"]}"): 72 | # Build redumper command to read CD 73 | cmd = [ 74 | "redumper", 75 | "disc", 76 | "--retries=100", 77 | f"--drive={media_sample["drive"]}", 78 | f"--image-path={data["data_dir"]}", 79 | f"--image-name={media_sample["name"]}" 80 | 81 | ] 82 | 83 | # Run command 84 | self.osRun(cmd) 85 | 86 | 87 | # Get files in output directory 88 | bins = list(map(os.path.basename, glob.glob(f"{data["data_dir"]}/*.bin"))) 89 | # Sort bins so the file order makes sense 90 | bins.sort() 91 | data["data_files"]["BIN"] = bins 92 | 93 | # Mark rip as done 94 | data["done"]=True 95 | self.status(data) 96 | # Add generated data to output 97 | datas.append(data) 98 | 99 | # Return all generated data 100 | return datas 101 | 102 | 103 | def fetchMetadata(self,media_sample): 104 | """Use musicbrainzngs to fetch Audio CD metadata 105 | 106 | """ 107 | data = { 108 | "type_id": "MUSICBRAINZ", 109 | "processed_by": [], 110 | "data_dir": self.ensureDir(f"{self.getPath()}/MUSICBRAINZ"), 111 | "data_files": { 112 | "JSON": f"{media_sample["name"]}-musicbrainz.json" 113 | } 114 | } 115 | 116 |
117 | # Wait for 30 seconds between requests to not get blocked 118 | tmp=self.ensureDir("/tmp/discrip") 119 | wait=True 120 | while(wait): 121 | if not os.path.isfile(f"{tmp}/musicbrainz.json"): 122 | wait=False 123 | else: 124 | with open(f"{tmp}/musicbrainz.json", newline='') as output: 125 | tdata = json.load(output) 126 | 127 | if tdata["next"] < int(datetime.now().timestamp()): 128 | wait=False 129 | else: 130 | print(f"Waiting: {tdata["next"]} < {int(datetime.now().timestamp())}") 131 | time.sleep(10) 132 | state_store={} 133 | state_store["next"]=int(datetime.now().timestamp()) + 30 134 | 135 | with open(f"{tmp}/musicbrainz.json", 'w', encoding="utf-8") as output: 136 | output.write(json.dumps(state_store, indent=4)) 137 | 138 | 139 | # Don't re-download data if exists 140 | if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["JSON"]}"): 141 | # https://python-discid.readthedocs.io/en/latest/usage/#fetching-metadata 142 | musicbrainzngs.set_useragent("AkBKukU: pyDiscRip", "0.1", "akbkuku@akbkuku.com") 143 | 144 | try: 145 | # Get calculated discid for CD 146 | # NOTE - This process is not foolproof and can result in discid collisions 147 | disc = libdiscid.read(device=media_sample["drive"]) 148 | self.log("disc.id",disc.id) 149 | except libdiscid.exceptions.DiscError: 150 | print("no actual audio tracks on disc: CDROM or DVD?") 151 | return None 152 | try: 153 | # Fetch metadata using discid 154 | result = musicbrainzngs.get_releases_by_discid(disc.id, 155 | includes=["artists", "recordings"]) 156 | except musicbrainzngs.ResponseError: 157 | print("disc not found or bad response") 158 | return None 159 | else: 160 | # Received metadata 161 | if result.get("disc"): 162 | # Write data to json 163 | self.ensureDir(data["data_dir"]) 164 | with open(f"{data["data_dir"]}/{data["data_files"]["JSON"]}", 'w', encoding="utf-8") as output: 165 | output.write(json.dumps(result, indent=4)) 166 | 167 | elif result.get("cdstub"): 168 | with open(f"{data["data_dir"]}/{data["data_files"]["JSON"]}", 'w', encoding="utf-8") as output: 169 | output.write(json.dumps(result, indent=4)) 170 | print("Warning: Musicbrainz returned a CD stub which is not as good as a full entry.") 171 | return data 172 | 173 | return data 174 | 175 | 176 | def rip(self, media_sample): 177 | """Rip CD with redumper and fetch metadata with musicbrainzngs 178 | 179 | redumper writes both the BIN and CUE itself, so no separate toc2cue step is needed 180 | 181 | """ 182 | 183 | # Data types to return 184 | datas=[] 185 | 186 | # Setup rip output path 187 | self.setProjectDir(media_sample["name"]) 188 | 189 | try: 190 | # Get metadata for audio CD 191 | data_output = self.fetchMetadata(media_sample) 192 | 193 | # Add metadata if was found 194 | if data_output is not None: 195 | datas.append(data_output) 196 | except Exception as e: 197 | print("Musicbrainz error") 198 | 199 | # cd-info log 200 | # result = self.osRun(["cd-info", f"{media_sample["drive"]}"]) 201 | # self.log("cd-info_stdout",str(result.stdout.decode("ascii"))) 202 | # self.log("cd-info_stderr",str(result.stderr.decode("ascii"))) 203 | 204 | # Rip all sessions on CD 205 | data_outputs=self.ripBinCue(media_sample) 206 | 207 | 208 | # Add all session rips 209 | if data_outputs is not None: 210 | for data in data_outputs: 211 | datas.append(data) 212 | 213 | # Return ripped data 214 | return datas 215 | 216 | --------------------------------------------------------------------------------
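Both CD handlers gate MusicBrainz lookups through a shared timestamp file in /tmp so that concurrent rips stay at least 30 seconds apart. A condensed, self-contained sketch of that throttle pattern (the state path and interval here are illustrative):

import json, os, time
from datetime import datetime

STATE = "/tmp/discrip/musicbrainz.json"

def wait_for_request_slot(min_interval=30):
    """Block until the timestamp recorded by the previous request has passed."""
    while os.path.isfile(STATE):
        with open(STATE) as f:
            state = json.load(f)
        if state["next"] < int(datetime.now().timestamp()):
            break  # our turn
        time.sleep(10)
    # Claim the next slot so other processes wait for us
    os.makedirs(os.path.dirname(STATE), exist_ok=True)
    with open(STATE, "w", encoding="utf-8") as f:
        json.dump({"next": int(datetime.now().timestamp()) + min_interval}, f, indent=4)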
/handler/media/floppy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Floppy ripping module for pyDiscRip. Uses greaseweazle hardware 4 | 5 | # Python System 6 | import os, sys 7 | import json 8 | import glob 9 | from pathlib import Path 10 | import importlib 11 | from pprint import pprint 12 | import time 13 | 14 | # External Modules 15 | # Directly imports from greaseweazle module in code 16 | 17 | # Internal Modules 18 | from handler.media.media_handler import MediaHandler 19 | 20 | # from handler.controller.gw import ControllerGw 21 | 22 | class MediaHandlerFloppy(MediaHandler): 23 | """Handler for Floppy media types 24 | 25 | rips using greaseweazle floppy interface and directly accessing python code 26 | """ 27 | 28 | def __init__(self): 29 | """Constructor to setup basic data and config defaults 30 | 31 | """ 32 | # Call parent constructor 33 | super().__init__() 34 | # Set media type to handle 35 | self.type_id="FLOPPY" 36 | # Default config data 37 | self.config_data={ 38 | "flux_output":"raw", 39 | "gw":{ 40 | "revs": None, 41 | "tracks": None, 42 | "hard-sectors": None, 43 | "seek-retries": None, 44 | "pll": None, 45 | "densel": None, 46 | "reverse": None 47 | } 48 | } 49 | # Data types output 50 | self.data_outputs=["FLUX"] 51 | 52 | 53 | def ripToFlux(self, media_sample): 54 | """Use gw python modules to rip floppy directly 55 | 56 | """ 57 | 58 | # Data types to return to be processed after rip 59 | datas=[] 60 | 61 | if self.config_data["flux_output"] == "raw": 62 | data = { 63 | "type_id": "FLUX", 64 | "processed_by": [], 65 | "done": False, 66 | "data_dir": self.ensureDir(f"{self.getPath()}/FLUX"), 67 | "data_files": { 68 | "flux": f"track00.0.raw" 69 | } 70 | } 71 | self.status(data) 72 | 73 | try: 74 | # Import greaseweazle read module to access hardware 75 | mod = importlib.import_module('greaseweazle.tools.read') 76 | except Exception as e: 77 | print("Need to install greaseweazle software:") 78 | print("pip install git+https://github.com/keirf/greaseweazle@latest --force") 79 | sys.exit(1) 80 | 81 | main = mod.__dict__['main'] 82 | 83 | # gw modules individually parse arguments to control rip process. This 84 | # builds fake arguments to pass to module 85 | # For more information on gw parameters run `gw read --help` 86 | args=[] 87 | args.append("pyDiscRip") # Not actually used but index position is needed 88 | args.append("read") # Not actually used but index position is needed 89 | args.append("--drive") 90 | args.append(media_sample["drive"].split("@")[0]) 91 | 92 | # Process all config options to build parameters for gw module 93 | if "revs" in self.config_data["gw"] and self.config_data["gw"]["revs"] is not None: 94 | args.append("--revs") 95 | args.append(str(self.config_data["gw"]["revs"])) 96 | if "tracks" in self.config_data["gw"] and self.config_data["gw"]["tracks"] is not None: 97 | args.append("--tracks") 98 | args.append(str(self.config_data["gw"]["tracks"])) 99 | if "seek-retries" in self.config_data["gw"] and self.config_data["gw"]["seek-retries"] is not None: 100 | args.append("--seek-retries") 101 | args.append(str(self.config_data["gw"]["seek-retries"])) 102 | if "pll" in self.config_data["gw"] and self.config_data["gw"]["pll"] is not None: 103 | args.append("--pll") 104 | args.append(self.config_data["gw"]["pll"]) 105 | if "@" in media_sample["drive"]: 106 | args.append("--device") 107 | args.append(media_sample["drive"].split("@")[1]) 108 | if "densel" in self.config_data["gw"] and self.config_data["gw"]["densel"] is not None: 109 | args.append("--densel") 110 | args.append(self.config_data["gw"]["densel"]) 111 | if "hard-sectors" in self.config_data["gw"] and self.config_data["gw"]["hard-sectors"] is not None: 112 | args.append("--hard-sectors") 113 | if "reverse" in self.config_data["gw"] and self.config_data["gw"]["reverse"] is not None: 114 | args.append("--reverse") 115 | 116 | # Add the file output as final parameter 117 | args.append(f"{data["data_dir"]}/{data["data_files"]["flux"]}") 118 | 119 | # Log all parameters to be passed to gw read 120 | self.log("floppy_gw_args",args,json_output=True)
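# For example (illustrative values, assuming a config with "tracks": "c=0-79"
# and a drive string of "a@/dev/ttyACM0"), the list handed to gw's main()
# would be:
#
#   ["pyDiscRip", "read",
#    "--drive", "a",
#    "--tracks", "c=0-79",
#    "--device", "/dev/ttyACM0",
#    "<output>/FLUX/track00.0.raw"]
#
# i.e. exactly what a shell invocation of
# `gw read --drive a --tracks c=0-79 --device /dev/ttyACM0 <output>/FLUX/track00.0.raw`
# would parse.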
121 | 122 | # Don't re-rip Floppy 123 | if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["flux"]}"): 124 | # Run the gw read process using arguments 125 | try: 126 | main(args) 127 | except Exception as e: 128 | print("GW FAIL - Possibly not connected?") 129 | 130 | # Get flux files 131 | fluxs = glob.glob(f"{data["data_dir"]}/*.raw") 132 | # If flux files were created add them to data output 133 | if len(fluxs) > 0: 134 | data["data_files"]["flux"]=[] 135 | for flux in fluxs: 136 | data["data_files"]["flux"].append(f"{flux.replace(data["data_dir"]+"/","")}") 137 | 138 | 139 | data["done"]=True 140 | self.status(data) 141 | # Return all generated data 142 | return data 143 | 144 | 145 | 146 | def rip(self, media_sample): 147 | """Rip Floppy with greaseweazle hardware using gw software as python 148 | modules 149 | 150 | Only rips to flux, the convert step later can be used to decode flux 151 | 152 | """ 153 | # Lock drive bus 154 | if self.controller is not None: 155 | self.controller.floppy_bus_check(True) 156 | 157 | # Setup rip output path 158 | self.setProjectDir(media_sample["name"]) 159 | 160 | # Rip to flux 161 | data = self.ripToFlux(media_sample) 162 | 163 | # Unlock drive bus 164 | if self.controller is not None: 165 | self.controller.floppy_bus_check(False) 166 | 167 | # Return ripped data 168 | return [data] 169 | 170 | 171 | def eject(self,media_sample): 172 | """Eject drive tray 173 | """ 174 | time.sleep(1) 175 | 176 | def load(self,media_sample,bypass=False): 177 | """Load media before continuing.
178 | 179 | Default method call waits for user to press enter 180 | 181 | Overload with automatic methods where possible. 182 | """ 183 | config_data=media_sample["config_data"] 184 | print(f"Please load [{media_sample["name"]}] into [{media_sample["drive"]}]") 185 | if self.controller is not None: 186 | self.web_update({"drive_status":{media_sample["drive"]:{"status":3,"title":f"Please load [{media_sample["name"]}] into [{media_sample["drive"]}]"}}},media_sample["config_data"]) 187 | self.controller.load_hold(callback=MediaHandler.web_after_action,callback_arg={"url":f"http://{config_data["settings"]["web"]["ip"]}:{config_data["settings"]["web"]["port"]}/status/drive_status.json","drive":media_sample["drive"]}) 188 | 189 | self.web_update({"drive_status":{media_sample["drive"]:{"status":2,"title":f"Waiting for bus to be free"}}},media_sample["config_data"]) 190 | self.controller.floppy_bus_check() 191 | return 192 | 193 | if bypass: 194 | # Allow skipping blocking to handle externally 195 | return 196 | input(f"Please load [{media_sample["name"]}] into [{media_sample["drive"]}]") 197 | -------------------------------------------------------------------------------- /web/web.py: -------------------------------------------------------------------------------- 1 | 2 | # Python System 3 | from pprint import pprint 4 | import os, sys 5 | import json 6 | 7 | import logging 8 | 9 | try: 10 | # External Modules 11 | from flask import Flask 12 | from flask import request 13 | from flask import send_file 14 | from flask import redirect 15 | from flask import make_response 16 | from flask import send_from_directory 17 | except Exception as e: 18 | print("Need to install Python module [flask]") 19 | sys.exit(1) 20 | 21 | from multiprocessing import Process 22 | 23 | # Internal Modules 24 | from handler.mediareader import MediaReader 25 | 26 | 27 | class WebInterface(object): 28 | """Web interface for managing rips 29 | 30 | """ 31 | 32 | def __init__(self,settings=None): 33 | 34 | 35 | self.host_dir=os.path.realpath(__file__).replace(os.path.basename(__file__),"") 36 | 37 | self.app = Flask("PyDiscRip") 38 | self.app.logger.disabled = True 39 | #log = logging.getLogger('werkzeug') 40 | #log.disabled = True 41 | 42 | # Static content 43 | self.app.static_folder=self.host_dir+"http/static" 44 | self.app.static_url_path='/static/' 45 | # Define routes in class to use with flask 46 | self.app.add_url_rule('/','home', self.index) 47 | self.app.add_url_rule('/settings.json','settings_json', self.settings_json) 48 | self.app.add_url_rule('/config_data.json','config_data_json', self.config_data_json) 49 | self.app.add_url_rule('/rip','rip', self.web_rip,methods=["POST"]) 50 | self.app.add_url_rule('/output/','rip_data', self.web_rip_data) 51 | self.app.add_url_rule('/status/status.json','output_status_json', self.output_status_json) 52 | self.app.add_url_rule('/status/drive_status.json','drive_status_json', self.drive_status_json) 53 | self.app.add_url_rule('/status/queue.json','queue_json', self.queue_json) 54 | self.app.add_url_rule('/status/file','settings_json', self.settings_json) 55 | self.app.add_url_rule('/update','update', self.update,methods=["POST"]) 56 | 57 | # Callback data 58 | self.drive_status={} 59 | self.queue=[] 60 | 61 | # Set headers for server 62 | self.app.after_request(self.add_header) 63 | 64 | if settings is not None: 65 | self.settings=settings 66 | self.host = settings["web"]["ip"] 67 | self.port = settings["web"]["port"] 68 | 69 | 70 | 71 | def set_host(self,host_ip): 72 | 
self.host = host_ip 73 | 74 | def set_port(self,host_port): 75 | self.port = host_port 76 | 77 | 78 | def add_header(self,r): 79 | """ 80 | Force the page cache to be reloaded each time 81 | """ 82 | r.headers["Cache-Control"] = "no-cache, no-store, must-revalidate" 83 | r.headers["Pragma"] = "no-cache" 84 | r.headers["Expires"] = "0" 85 | r.headers['Cache-Control'] = 'public, max-age=0' 86 | return r 87 | 88 | def static_files(self,name): 89 | print(f"static: {name}") 90 | return send_from_directory(self.host_dir+"http/static", name) 91 | 92 | 93 | def index(self): 94 | """ Simple class function to send HTML to browser """ 95 | return send_file(self.host_dir+"http/home.html") 96 | 97 | def settings_json(self): 98 | """ Simple class function to send HTML to browser """ 99 | return json.dumps(self.settings), 200, {'Content-Type': 'application/json; charset=utf-8'} 100 | 101 | def config_data_json(self): 102 | """ Simple class function to send HTML to browser """ 103 | return json.dumps(MediaReader.getConfigOptions()), 200, {'Content-Type': 'application/json; charset=utf-8'} 104 | 105 | def web_rip(self): 106 | """ Simple class function to send HTML to browser """ 107 | pprint(request.form.to_dict()) 108 | 109 | media_sample={} 110 | media_sample["name"] = request.form['media_name'] 111 | media_sample["media_type"] = request.form['media_type'] 112 | if(MediaReader.isGroup(self.settings["drives"],request.form['media_drive'])): 113 | media_sample["group"] = request.form['media_drive'] 114 | else: 115 | media_sample["drive"] = request.form['media_drive'] 116 | media_sample["description"] = request.form['media_description'] 117 | media_sample["config_data"] = json.loads(request.form['config_options_json_data']) 118 | #return pprint(request.form) 119 | 120 | if not os.path.exists(f"{self.settings["watch"]}"): 121 | os.makedirs(f"{self.settings["watch"]}") 122 | 123 | with open(f"{self.settings["watch"]}/{media_sample["name"]}.json", 'w', encoding="utf-8") as output: 124 | output.write(json.dumps(media_sample, indent=4)) 125 | 126 | # self.rip_thread = Process(target=MediaReader.rip,kwargs={"media_sample":media_sample,"config_data":{},"callback_update":self.media_sample_status}) 127 | # self.rip_thread.start() 128 | #return pprint(request.form) 129 | return send_file(self.host_dir+"http/rip/index.html") 130 | 131 | def update(self): 132 | """ Simple class function to send HTML to browser """ 133 | 134 | print("Updating API") 135 | pprint(request.form.to_dict()) 136 | data_dict = request.form.to_dict() 137 | if not data_dict: 138 | pprint(request.get_data()) 139 | data = json.loads(request.get_data()) 140 | else: 141 | data = json.loads(next(iter(data_dict))) 142 | pprint(data) 143 | 144 | if "drive_status" in data: 145 | print("Updating drive status") 146 | for drive, update in data["drive_status"].items(): 147 | if drive not in self.drive_status: 148 | self.drive_status[drive]={} 149 | for key, value in update.items(): 150 | self.drive_status[drive][key] = value 151 | 152 | 153 | if "queue" in data: 154 | # support list or not 155 | if not isinstance(data["queue"], list): 156 | data["queue"] = [data["queue"]] 157 | for media_sample in data["queue"]: 158 | media_sample.pop('config_data', None) 159 | media_sample.pop('settings', None) 160 | match = next((i for i, item in enumerate(self.queue) if item["name"] == media_sample["name"]), None) 161 | if match is not None: 162 | self.queue[match] = media_sample 163 | else: 164 | self.queue.append(media_sample) 165 | 166 | 167 | 168 | return "thx" 169 | 
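    # Any process can push state to the /update endpoint above. An
    # illustrative client-side call (this is what Handler.web_update() does on
    # the ripping side; host and port are whatever the settings file provides):
    #
    #   import json
    #   from urllib import request
    #   payload = json.dumps({"drive_status": {"/dev/sr0": {"status": 1, "title": "Ripping"}}}).encode("utf-8")
    #   request.urlopen(request.Request("http://127.0.0.1:5000/update", data=payload))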
170 | def media_sample_status(self,media_sample): 171 | filepath=media_sample["name"]+"/media_sample.json" 172 | print(f"writing: {filepath}") 173 | # Write data 174 | with open(filepath, 'w', encoding="utf-8") as output: 175 | output.write(json.dumps(media_sample, indent=4)) 176 | 177 | def web_rip_data(self,name): 178 | print(f"web_rip_data: {name}") 179 | return send_file(f"{name}") 180 | 181 | def output_status_json(self): 182 | done = request.args.get('done')=="true" 183 | outputs=[] 184 | names=None 185 | if (request.args.get('names') is not None): 186 | names=json.loads(request.args.get('names')) 187 | 188 | for root, dirs, files in os.walk(self.settings["output"]): 189 | for output in dirs: 190 | 191 | if os.path.exists(f"{self.settings["output"]}/{output}/status/status.json"): 192 | 193 | with open(f"{self.settings["output"]}/{output}/status/status.json", newline='') as jsonfile: 194 | status = json.load(jsonfile) 195 | # Filter by status 196 | if request.args.get('done') is not None and status["done"] != done: 197 | continue 198 | # Filter by name 199 | if names is not None and status["name"] not in names: 200 | continue 201 | 202 | outputs.append(status) 203 | 204 | return json.dumps(outputs), 200, {'Content-Type': 'application/json; charset=utf-8'} 205 | 206 | 207 | def drive_status_json(self): 208 | """ Send the current drive status as JSON """ 209 | return json.dumps(self.drive_status), 200, {'Content-Type': 'application/json; charset=utf-8'} 210 | 211 | def queue_json(self): 212 | """ Send the rip queue as JSON """ 213 | return json.dumps(self.queue), 200, {'Content-Type': 'application/json; charset=utf-8'} 214 | 215 | def callback_update(self,data): 216 | if "drive_status" in data: 217 | print("Updating drive status") 218 | pprint(data["drive_status"]) 219 | self.drive_status = data["drive_status"] 220 | 221 | return None 222 | 223 | 224 | async def start(self): 225 | """ Run Flask in a process thread that is non-blocking """ 226 | print("Starting Flask") 227 | self.web_thread = Process(target=self.app.run, 228 | kwargs={ 229 | "host":self.host, 230 | "port":self.port, 231 | "debug":True, 232 | "use_reloader":False 233 | } 234 | ) 235 | self.web_thread.start() 236 | 237 | # Pass settings 238 | config_data={} 239 | config_data["settings"] = self.settings 240 | 241 | self.rip_thread = Process(target=MediaReader.rip_queue_groups, 242 | kwargs={ 243 | "media_samples":[], 244 | "config_data":config_data, 245 | "callback_update":self.callback_update 246 | } 247 | ) 248 | self.rip_thread.start() 249 | 250 | def stop(self): 251 | """ Send SIGKILL and join thread to end Flask server """ 252 | if hasattr(self, "web_thread") and self.web_thread is not None: 253 | self.web_thread.terminate() 254 | self.web_thread.join() 255 | if hasattr(self, "rip_thread"): 256 | self.rip_thread.terminate() 257 | self.rip_thread.join() 258 | --------------------------------------------------------------------------------
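The status endpoints above take simple query filters: `done` is the string "true" or "false", and `names` is a JSON-encoded list of sample names. A minimal client-side sketch (host, port, and sample name are illustrative):

import json
from urllib import request, parse

params = parse.urlencode({"done": "false", "names": json.dumps(["disc001"])})
with request.urlopen(f"http://127.0.0.1:5000/status/status.json?{params}") as resp:
    unfinished = json.load(resp)  # list of status.json dicts, filtered server-side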
/handler/media/cd.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # CD ripping module for pyDiscRip. Can be used to rip a CD and fetch metadata 4 | 5 | # Python System 6 | import os 7 | import json 8 | from pathlib import Path 9 | import time 10 | from datetime import datetime 11 | 12 | # External Modules 13 | import libdiscid 14 | import musicbrainzngs 15 | import pycdio, cdio 16 | 17 | # Internal Modules 18 | from handler.media.media_handler import MediaHandler 19 | from handler.media.optical import MediaOptical 20 | 21 | 22 | class MediaHandlerCD(MediaOptical): 23 | """Handler for CD media types 24 | 25 | rips using a subprocess command to run `cdrdao` to create a BIN/CUE 26 | """ 27 | 28 | def __init__(self): 29 | """Constructor to setup basic data and config defaults 30 | 31 | """ 32 | # Call parent constructor 33 | super().__init__() 34 | # Set handler ID 35 | self.handler_id="cdrdao" 36 | # Set media type to handle 37 | self.type_id="CD" 38 | # Default config data 39 | self.config_data={ 40 | "cdrdao_driver":"generic-mmc-raw:0x20000", 41 | "paranoia_mode":"3", 42 | } 43 | # Data types output 44 | self.data_outputs=["BINCUE","MUSICBRAINZ"] 45 | # CD info to be collected 46 | self.cd_sessions=0 47 | self.cd_tracks=0 48 | 49 | 50 | def countSessions(self,media_sample): 51 | """Use cdrdao to count the number of sessions on a CD 52 | 53 | CDs may contain multiple sessions which will each be ripped into 54 | separate BIN/CUE files. 55 | """ 56 | # Sample output 57 | # cdrdao disk-info --device /dev/sr1 58 | # Cdrdao version 1.2.4 - (C) Andreas Mueller 59 | # /dev/sr1: HL-DT-ST BD-RE WP50NB40 Rev: 1.03 60 | # Using driver: Generic SCSI-3/MMC - Version 2.0 (options 0x0000) 61 | # 62 | # That data below may not reflect the real status of the inserted medium 63 | # if a simulation run was performed before. Reload the medium in this case. 64 | # 65 | # CD-RW : no 66 | # Total Capacity : n/a 67 | # CD-R medium : n/a 68 | # Recording Speed : n/a 69 | # CD-R empty : no 70 | # Toc Type : CD-DA or CD-ROM 71 | # Sessions : 2 72 | # Last Track : 23 73 | # Appendable : no 74 | 75 | # Run command 76 | result = self.osRun(["cdrdao", "disk-info", "--device", f"{media_sample["drive"]}"]) 77 | 78 | # Parse output to find session count 79 | self.log("cdrdao-disk-info",result.stdout.decode("utf-8")) 80 | self.cd_sessions=int(result.stdout.decode("utf-8").split("Sessions : ")[1][:1]) 81 | print(f"Sessions Found: {self.cd_sessions}") 82 | 83 | 84 | def ripBinCue(self, media_sample): 85 | """Use cdrdao to rip all sessions on a CD and toc2cue to get cue file 86 | 87 | """ 88 | 89 | # Data types to return to be processed after rip 90 | datas=[] 91 | # Start ripping all sessions 92 | sessions = 1 93 | while sessions <= self.cd_sessions: 94 | print(f"Rip session: {sessions}") 95 | print(f"Rip to: {self.getPath()}") 96 | # Build data output 97 | data = { 98 | "type_id": "BINCUE", 99 | "processed_by": [], 100 | "done": False, 101 | "data_dir": self.ensureDir(f"{self.getPath()}/BINCUE/{media_sample["name"]}-S{sessions}"), 102 | "data_files": { 103 | "BIN": f"{media_sample["name"]}-S{sessions}.bin", 104 | "CUE": f"{media_sample["name"]}-S{sessions}.cue", 105 | "TOC": f"{media_sample["name"]}-S{sessions}.toc" 106 | } 107 | } 108 | 109 | self.status(data) 110 | 111 | # Don't re-rip BIN/TOC 112 | if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["BIN"]}"): 113 | # Build cdrdao command to read CD 114 | cmd = [ 115 | "cdrdao", 116 | "read-cd", 117 | "--paranoia-mode", 118 | f"{self.config_data["paranoia_mode"]}", 119 | "--read-raw", 120 | "--datafile", 121 | f"{data["data_dir"]}/{data["data_files"]["BIN"]}",
122 | "--device", 123 | f"{media_sample["drive"]}", 124 | "--session", 125 | f"{sessions}", 126 | "--driver", 127 | f"{self.config_data["cdrdao_driver"]}", 128 | f"{data["data_dir"]}/{data["data_files"]["TOC"]}", 129 | ] 130 | 131 | # Run command 132 | self.osRun(cmd) 133 | 134 | 135 | # Don't re-convert CUE 136 | if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["CUE"]}"): 137 | # Build toc2cue command to generate CUE 138 | cmd = [ 139 | "toc2cue", 140 | f"{data["data_dir"]}/{data["data_files"]["TOC"]}", 141 | f"{data["data_dir"]}/{data["data_files"]["CUE"]}" 142 | ] 143 | 144 | # Run command 145 | result = self.osRun(cmd) 146 | self.log("cdrdao_stdout",str(result.stdout)) 147 | self.log("cdrdao_stderr",str(result.stderr)) 148 | 149 | # Continue to next session 150 | sessions += 1 151 | data["done"]=True 152 | self.status(data) 153 | # Add generated data to output 154 | datas.append(data) 155 | 156 | # Return all generated data 157 | return datas 158 | 159 | 160 | def fetchMetadata(self,media_sample): 161 | """Use musicbrainzngs to fetch Audio CD metadata 162 | 163 | """ 164 | data = { 165 | "type_id": "MUSICBRAINZ", 166 | "processed_by": [], 167 | "data_dir": self.ensureDir(f"{self.getPath()}/MUSICBRAINZ"), 168 | "data_files": { 169 | "JSON": f"{media_sample["name"]}-musicbrainz.json" 170 | } 171 | } 172 | 173 | 174 | # Wait for 30 seconds between requests to not get blocked 175 | tmp=self.ensureDir("/tmp/discrip") 176 | wait=True 177 | while(wait): 178 | if not os.path.isfile(f"{tmp}/musicbrainz.json"): 179 | wait=False 180 | else: 181 | with open(f"{tmp}/musicbrainz.json", newline='') as output: 182 | tdata = json.load(output) 183 | 184 | if tdata["next"] < int(datetime.now().timestamp()): 185 | wait=False 186 | else: 187 | print(f"Waiting: {tdata["next"]} < {int(datetime.now().timestamp())}") 188 | time.sleep(10) 189 | state_store={} 190 | state_store["next"]=int(datetime.now().timestamp()) + 30 191 | 192 | with open(f"{tmp}/musicbrainz.json", 'w', encoding="utf-8") as output: 193 | output.write(json.dumps(state_store, indent=4)) 194 | 195 | 196 | # Don't re-download data if exists 197 | if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["JSON"]}"): 198 | # https://python-discid.readthedocs.io/en/latest/usage/#fetching-metadata 199 | musicbrainzngs.set_useragent("AkBKukU: pyDiscRip", "0.1", "akbkuku@akbkuku.com") 200 | 201 | try: 202 | # Get calculated discid for CD 203 | # NOTE - This process is not failureproof and can result in discid collisions 204 | disc = libdiscid.read(device=media_sample["drive"]) 205 | self.log("disc.id",disc.id) 206 | except libdiscid.exceptions.DiscError: 207 | print("no actual audio tracks on disc: CDROM or DVD?") 208 | return None 209 | try: 210 | # Fetch metadata using discid 211 | result = musicbrainzngs.get_releases_by_discid(disc.id, 212 | includes=["artists", "recordings"]) 213 | except musicbrainzngs.ResponseError: 214 | print("disc not found or bad response") 215 | return None 216 | else: 217 | # Received metadata 218 | if result.get("disc"): 219 | # Write data to json 220 | self.ensureDir(data["data_dir"]) 221 | with open(f"{data["data_dir"]}/{data["data_files"]["JSON"]}", 'w', encoding="utf-8") as output: 222 | output.write(json.dumps(result, indent=4)) 223 | 224 | elif result.get("cdstub"): 225 | with open(f"{data["data_dir"]}/{data["data_files"]["JSON"]}", 'w', encoding="utf-8") as output: 226 | output.write(json.dumps(result, indent=4)) 227 | print("Waring: Musicbrainz returned a CD stub which is not as good as a full 
entry.") 228 | 229 | return data 230 | return data 231 | 232 | 233 | def rip(self, media_sample): 234 | """Rip CD with cdrdao and fetch metadata with musicbrainzngs 235 | 236 | Will automatically generate cue for bin using toc2cue 237 | 238 | """ 239 | 240 | # Data types to return 241 | datas=[] 242 | 243 | # Setup rip output path 244 | self.setProjectDir(media_sample["name"]) 245 | 246 | # Determine number of seesions to rip 247 | self.countSessions(media_sample) 248 | try: 249 | # Get metadata for audio CD 250 | data_output = self.fetchMetadata(media_sample) 251 | 252 | # Add metadata if was found 253 | if data_output is not None: 254 | datas.append(data_output) 255 | except Exception as e: 256 | print("Musicbrainz error") 257 | 258 | # cd-info log 259 | result = self.osRun(["cd-info", f"{media_sample["drive"]}"]) 260 | self.log("cd-info_stdout",str(result.stdout.decode("ascii"))) 261 | self.log("cd-info_stderr",str(result.stderr.decode("ascii"))) 262 | 263 | # Rip all sessions on CD 264 | data_outputs=self.ripBinCue(media_sample) 265 | 266 | 267 | # Add all session rips 268 | if data_outputs is not None: 269 | for data in data_outputs: 270 | datas.append(data) 271 | 272 | # Return ripped data 273 | return datas 274 | 275 | -------------------------------------------------------------------------------- /handler/handler.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Base handler for pyDiscRip. 4 | 5 | # Python System 6 | import sys, os,re 7 | import json 8 | import time 9 | from enum import Enum 10 | from datetime import datetime 11 | import subprocess 12 | from pprint import pprint 13 | from urllib import request, parse 14 | 15 | # External Modules 16 | try: 17 | import unidecode 18 | except Exception as e: 19 | print("Need to install Python module [unidecode]") 20 | sys.exit(1) 21 | 22 | 23 | class Handler(object): 24 | """Base handler for media and data samples 25 | 26 | 27 | """ 28 | def __init__(self): 29 | """Constructor to setup basic data and config defaults 30 | 31 | """ 32 | # Set handle ID 33 | self.handle_id=None 34 | # Set data type to handle 35 | self.type_id=None # TODO - Genericize media and data IDs 36 | # Set main directory to work in 37 | self.output_dir="./" 38 | # Set sub directory to work in 39 | self.project_dir="" 40 | # Get current datetime 41 | self.project_timestamp=str(datetime.now().isoformat()).replace(":","-") 42 | # Data types output for later use 43 | self.data_outputs=[] 44 | # Default config data 45 | self.config_data=None 46 | 47 | # Virtual Handler Setup 48 | self.virt_cmd=None 49 | # Data format 50 | self.data_output_format=None 51 | # Temp Media Sample Holder 52 | self.media_sample=None 53 | 54 | 55 | def prepareVirtualFormat(self,setup): 56 | """Setup handler for a virtual format 57 | 58 | """ 59 | # Type to act on 60 | self.type_id=setup["input_type_id"] 61 | # Type to output 62 | self.data_outputs=[setup["output_type_id"]] 63 | # Command to run 64 | self.virt_cmd=setup["cmd"] 65 | # Data format 66 | self.data_output_format=setup["data_output"] 67 | 68 | def web_after_action(data): 69 | wait_time=time.time() 70 | action_time=0 71 | while(wait_time>action_time): 72 | print(f"{wait_time} > {action_time}") 73 | with request.urlopen(data["url"]) as url: 74 | drive_status = json.load(url) 75 | pprint(drive_status[data["drive"]]) 76 | if "action" in drive_status[data["drive"]]: 77 | print("HAS ACTION") 78 | action_time=drive_status[data["drive"]]["action"] 79 | print(action_time) 
80 | time.sleep(5) 81 | return not wait_time>action_time 82 | 83 | def web_update(self,data, config_data): 84 | 85 | try: 86 | # Post Method is invoked if data != None 87 | endpoint=f"http://{config_data["settings"]["web"]["ip"]}:{config_data["settings"]["web"]["port"]}/update" 88 | data=json.dumps(data).encode("utf-8") 89 | req = request.Request(endpoint, data=data) 90 | 91 | # Response 92 | resp = request.urlopen(req) 93 | except Exception as e: 94 | # Web server probably isn't running, fail silently 95 | return 96 | 97 | 98 | 99 | def cleanFilename(self, filename_raw): 100 | """Replace characters that are not available for filenames in some filesystems 101 | 102 | """ 103 | return re.sub("[\\/\\\\\\&:\\\"<>\*|]","-", unidecode.unidecode(filename_raw)) 104 | 105 | def setProjectDir(self,project_dir=""): 106 | """Update project dir path 107 | 108 | """ 109 | self.ensureDir(self.output_dir+"/"+project_dir) 110 | self.project_dir=project_dir 111 | 112 | def setOutputDir(self,output_dir="./"): 113 | """Update project dir path 114 | 115 | """ 116 | self.output_dir=self.ensureDir(output_dir) 117 | 118 | def getPath(self): 119 | """Update project dir path 120 | 121 | """ 122 | return self.output_dir+"/"+self.project_dir 123 | 124 | def ensureDir(self,path): 125 | """Ensured that a path exists by attempting to create it or throwing an error 126 | 127 | """ 128 | try: 129 | if not os.path.exists(path): 130 | os.makedirs(path) 131 | except Exception as e: 132 | print(f"Error making directory: {path}") 133 | sys.exit(1) 134 | return path 135 | 136 | 137 | def status(self,data): 138 | """Log status from processes 139 | 140 | """ 141 | # Setup rip output path 142 | if "name" in data: 143 | self.setProjectDir(data["name"]) 144 | # Set filepath for status 145 | status_path=Handler.ensureDir(None,f"{self.getPath()}/status") 146 | 147 | # Build filename 148 | if "type_id" in data: 149 | filepath=f"{status_path}/{data["type_id"]}.json" 150 | else: 151 | filepath=f"{status_path}/status.json" 152 | 153 | # Write data 154 | with open(filepath, 'w', encoding="utf-8") as output: 155 | output.write(json.dumps(data, indent=4)) 156 | return 157 | 158 | 159 | def log(self,action_name,text,json_output=False): 160 | """Log data from processes 161 | 162 | Supports JSON as an output format 163 | 164 | """ 165 | # Set filepath for log 166 | log_path=self.ensureDir(f"{self.getPath()}/log") 167 | 168 | # Build filename 169 | if json_output: 170 | filepath=f"{log_path}/{self.project_timestamp}_{action_name}.json" 171 | else: 172 | filepath=f"{log_path}/{self.project_timestamp}_{action_name}.log" 173 | 174 | # Write data 175 | with open(filepath, 'w', encoding="utf-8") as output: 176 | if json_output: 177 | output.write(json.dumps(text, indent=4)) 178 | else: 179 | if isinstance(text, (bytes, bytearray)): 180 | output.write(str(text.decode("utf-8"))) 181 | else: 182 | output.write(str(text)) 183 | 184 | return 185 | 186 | def config(self, config_data): 187 | """Set configuration data for handler by matching ID 188 | 189 | """ 190 | 191 | # Check for config data for handler 192 | if self.type_id in config_data: 193 | # Iterate over all top level config values 194 | for key, value in config_data[self.type_id].items(): 195 | if value is not None: 196 | # Set all config values 197 | self.config_data[key] = value 198 | 199 | if config_data["settings"]["output"] != "": 200 | self.setOutputDir(config_data["settings"]["output"]) 201 | 202 | def configDirect(self, config_data): 203 | """Set configuration data for handler by 
matching ID 204 | 205 | """ 206 | if self.config_data is None: 207 | return 208 | print(f"ConfigureDirect: {self.type_id}") 209 | # Iterate over all top level config values 210 | pprint(config_data) 211 | for key, value in config_data.items(): 212 | if value is not None and key in self.config_data: 213 | # Set all config values 214 | self.config_data[key] = value 215 | 216 | 217 | def configOptions(self): 218 | """Return all configuration options""" 219 | 220 | return self.config_data 221 | 222 | 223 | def osRun(self, cmd): 224 | """Runs a command at the OS level and returns stdout and stderr""" 225 | try: 226 | # Run command and store output 227 | # stack = ''.join(traceback.format_stack()) 228 | # self.log("stack",str(stack)) 229 | self.log(cmd[0],' '.join(cmd)) 230 | result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 231 | 232 | return result 233 | 234 | except subprocess.CalledProcessError as exc: 235 | print("Status : FAIL", exc.returncode, exc.output) 236 | 237 | 238 | def convertData(self, data_in): 239 | """Generic convert command for use with virtual data formats""" 240 | 241 | # Copy default format 242 | data = self.data_output_format 243 | 244 | # Create and set output dir 245 | data["data_dir"]=self.ensureDir(self.getPath()+"/"+data["data_dir"]) 246 | 247 | # Format command 248 | print("Closed until further notice") 249 | sys.exit(1) 250 | cmd = self.virt_cmd.format( 251 | input_file=data_in["data_dir"]+"/"+data_in["data_files"]["BINARY"], 252 | data_dir=data["data_dir"] 253 | ) 254 | 255 | # log command 256 | self.log(f"{self.type_id}_cmd",str(cmd)) 257 | 258 | # Run command 259 | result = self.osRun(cmd) 260 | 261 | 262 | return [data] 263 | 264 | def convert(self, media_sample): 265 | """Generic convert process for one data output 266 | 267 | """ 268 | 269 | # Setup rip output path 270 | self.setProjectDir(media_sample["name"]) 271 | 272 | # Go through all data in media sample 273 | for data in media_sample["data"]: 274 | # Check handler can work on data 275 | if data["type_id"] == self.type_id: 276 | # Check if handler has already worked on data 277 | if self.type_id not in data["processed_by"]: 278 | # Convert data 279 | print(f"Converting {data["type_id"]} to {self.data_outputs[0]}") 280 | data_outputs = self.convertData(data) 281 | 282 | if data_outputs is not None: 283 | # Mark data as processed 284 | data["processed_by"].append(self.handle_id) 285 | # Add new data to media sample 286 | for data_new in data_outputs: 287 | if data_new is not None: 288 | media_sample["data"].append(data_new) 289 | 290 | # Return media sample with new data 291 | return media_sample 292 | 293 | def load(self,media_sample,bypass=False): 294 | """Load media before continuing. 295 | 296 | Default method call waits for user to press enter 297 | 298 | Overload with automatic methods where possible. 299 | """ 300 | 301 | if bypass: 302 | # Allow skipping blocking to handle externally 303 | return 304 | input(f"Please load [{media_sample["name"]}] into [{media_sample["drive"]}]") 305 | 306 | 307 | def eject(self,media_sample): 308 | """Remove media before continuing. 309 | Overload with automatic methods where possible.
310 | """ 311 | print(f"Please remove [{media_sample["name"]}] from [{media_sample["drive"]}]") 312 | -------------------------------------------------------------------------------- /discrip.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # discrip.py 4 | # This is a CLI interface to the modules capable of ripping and converting data 5 | # from defined media types. It can take a list of media samples to rip in batch 6 | # and a configuration json to change some settings. 7 | 8 | # Python System 9 | import argparse 10 | import csv 11 | import json 12 | import sys 13 | import os 14 | import shutil 15 | from pprint import pprint 16 | 17 | # External Modules 18 | import asyncio 19 | import signal 20 | 21 | # Internal Modules 22 | from handler.mediareader import MediaReader 23 | from handler.media.manager import MediaHandlerManager 24 | from handler.data.manager import DataHandlerManager 25 | 26 | 27 | def rip_list_read(filepath=None): 28 | """ Read a CSV with drive paths, BIN names, and full media_sample names 29 | 30 | CSVs may optionally provide a `media_type` which will be used to bypass 31 | automatic media type detection. If mixing known and unknown media types 32 | you can set media_type to "auto" as well. 33 | """ 34 | 35 | # Open CSV with media samples to rip 36 | media_samples=[] 37 | with open(filepath, newline='') as csvfile: 38 | reader = csv.DictReader(csvfile, skipinitialspace=True) 39 | # Make all CSV headers lowercase 40 | for index, name in enumerate(reader.fieldnames): 41 | reader.fieldnames[index]=name.lower() 42 | 43 | for row in reader: 44 | # Convert media types to upper case if present 45 | if "media_type" in row: 46 | row["media_type"] = row["media_type"].upper() 47 | media_samples.append(row) 48 | 49 | # Return a dict of media_sample information to rip 50 | return media_samples 51 | 52 | 53 | def config_read(filepath=None): 54 | """ Read a JSON with config parameters for media and data handlers 55 | 56 | """ 57 | # Veryfiy config file exists 58 | if not os.path.exists(filepath): 59 | config_local = os.path.realpath(__file__).replace(os.path.basename(__file__),"")+"config/"+filepath 60 | # Check for config file next to script 61 | if not os.path.exists(config_local): 62 | # Check for config file next to script without extension 63 | if not os.path.exists(config_local+".json"): 64 | print(f"Config file \"{filepath}\" not found.") 65 | sys.exit(1) 66 | else: 67 | filepath = config_local+".json" 68 | else: 69 | filepath = config_local 70 | 71 | # Open JSON to read config data 72 | config_data={} 73 | with open(filepath, newline='') as jsonfile: 74 | config_data = json.load(jsonfile) 75 | 76 | # Return a dict of config data 77 | return config_data 78 | 79 | 80 | 81 | 82 | def config_dump(filename): 83 | """ Save a JSON with all config parameter options for media and data handlers 84 | 85 | """ 86 | 87 | # Save config data to JSON 88 | with open(filename, 'w') as f: 89 | json.dump(MediaReader.getConfigOptions(), f, indent=4) 90 | 91 | 92 | global loop_state 93 | global server 94 | loop_state = True 95 | server = None 96 | 97 | 98 | async def asyncLoop(): 99 | """ Blocking main loop to provide time for async tasks to run""" 100 | print('Blocking main loop') 101 | global loop_state 102 | while loop_state: 103 | await asyncio.sleep(1) 104 | 105 | 106 | def exit_handler(sig, frame): 107 | """ Handle CTRL-C to gracefully end program and API connections """ 108 | global loop_state 109 | print('You pressed 
51 | 52 | 53 | def config_read(filepath=None): 54 | """ Read a JSON with config parameters for media and data handlers 55 | 56 | """ 57 | # Verify config file exists 58 | if not os.path.exists(filepath): 59 | config_local = os.path.realpath(__file__).replace(os.path.basename(__file__),"")+"config/"+filepath 60 | # Check for config file next to script 61 | if not os.path.exists(config_local): 62 | # Check for config file next to script without extension 63 | if not os.path.exists(config_local+".json"): 64 | print(f"Config file \"{filepath}\" not found.") 65 | sys.exit(1) 66 | else: 67 | filepath = config_local+".json" 68 | else: 69 | filepath = config_local 70 | 71 | # Open JSON to read config data 72 | config_data={} 73 | with open(filepath, newline='') as jsonfile: 74 | config_data = json.load(jsonfile) 75 | 76 | # Return a dict of config data 77 | return config_data 78 | 79 | 80 | 81 | 82 | def config_dump(filename): 83 | """ Save a JSON with all config parameter options for media and data handlers 84 | 85 | """ 86 | 87 | # Save config data to JSON 88 | with open(filename, 'w') as f: 89 | json.dump(MediaReader.getConfigOptions(), f, indent=4) 90 | 91 | 92 | global loop_state 93 | global server 94 | loop_state = True 95 | server = None 96 | 97 | 98 | async def asyncLoop(): 99 | """ Blocking main loop to provide time for async tasks to run""" 100 | print('Blocking main loop') 101 | global loop_state 102 | while loop_state: 103 | await asyncio.sleep(1) 104 | 105 | 106 | def exit_handler(sig, frame): 107 | """ Handle CTRL-C to gracefully end program and API connections """ 108 | global loop_state 109 | print('You pressed Ctrl+C!') 110 | loop_state = False 111 | server.stop() 112 | 113 | 114 | async def startWeb(settings=None): 115 | 116 | # Internal Modules 117 | from web.web import WebInterface 118 | global server 119 | server = WebInterface(settings) 120 | 121 | """ Start connections to async modules """ 122 | 123 | # Setup CTRL-C signal to end program 124 | signal.signal(signal.SIGINT, exit_handler) 125 | print('Press Ctrl+C to exit program') 126 | 127 | # Start async modules 128 | L = await asyncio.gather( 129 | server.start(), 130 | asyncLoop() 131 | ) 132 | 133 | 134 | 135 | def main(): 136 | """ Execute as a CLI and process parameters to rip and convert 137 | 138 | """ 139 | 140 | # Clean up old temp data 141 | if os.path.exists("/tmp/discrip"): 142 | shutil.rmtree("/tmp/discrip") 143 | 144 | # Setup CLI arguments 145 | parser = argparse.ArgumentParser( 146 | prog="pyDiscRip", 147 | description='Media ripping manager program', 148 | epilog='By Shelby Jueden') 149 | parser.add_argument('-c', '--csv', help="CSV file in `Drive,Name,Description` format", default=None) 150 | parser.add_argument('-f', '--config', help="Config file for ripping", default=None) 151 | parser.add_argument('-d', '--configdump', help="Dump all config options. Optional filename to output to.", 152 | nargs='?', default=None, const='config_options.json') 153 | parser.add_argument('-o', '--output', help="Directory to save data in", default="") 154 | parser.add_argument('-j', '--json-watch', help="Directory to watch for JSON sample files", default=None) 155 | parser.add_argument('-p', '--preserve-order', help="Maintain FIFO rip order for group ripping", action='store_true') 156 | parser.add_argument('-w', '--web', help="Start web server (Clears watch folder on launch)", action='store_true') 157 | parser.add_argument('-s', '--settings', help="Settings file for web", default=None) 158 | args = parser.parse_args() 159 | 160 | # Default settings, used when no settings file is provided 161 | settings={ 162 | "drives": { 163 | "Optical":[ 164 | { 165 | "name":"Laptop CD/DVD", 166 | "drive":"/dev/sr0", 167 | "group":"CD-DVD", 168 | "type":"OPTICAL", 169 | } 170 | ], 171 | "Greaseweazle": [ 172 | { 173 | "name":"Floppy A", 174 | "drive": "a@/dev/ttyACM0", 175 | "group":"3.5in", 176 | "type":"FLOPPY", 177 | "controller_id":"gw1" 178 | }, 179 | { 180 | "name":"Floppy B", 181 | "drive": "b@/dev/ttyACM0", 182 | "group":"5.25in", 183 | "type":"FLOPPY", 184 | "controller_id":"gw1" 185 | }, 186 | 187 | ], 188 | "Dummy": [ 189 | { 190 | "name":"Random A", 191 | "drive": "/dev/random1", 192 | "group":"dummy", 193 | "type":"DUMMY" 194 | }, 195 | { 196 | "name":"Random B", 197 | "drive": "/dev/random2", 198 | "group":"dummy", 199 | "type":"DUMMY" 200 | }, 201 | { 202 | "name":"Random C", 203 | "drive": "/dev/random3", 204 | "group":"dummy", 205 | "type":"DUMMY" 206 | } 207 | ], 208 | }, 209 | "controllers":[ 210 | { 211 | "controller_type":"RoboRacerLS", 212 | "id": "stacker", 213 | "serial_port":"/dev/ttyUSB0" 214 | }, 215 | { 216 | "controller_type":"AutoPublisherLS", 217 | "id": "apls", 218 | "bin":1, 219 | "serial_port":"/dev/ttyUSB1", 220 | "drives":[], # media_name, open 221 | "cal": 222 | { 223 | "BIN_1":637, 224 | "BIN_2":1356, 225 | "BIN_3":2106, 226 | "BIN_5":-140, 227 | "ARM_BOTTOM":1832, 228 | "DRIVE_1":575, 229 | "DRIVE_2":1050, 230 | "DRIVE_3":1450, 231 | "TRAY_SLIDE":50, 232 | "TRAY_ANGLE":-125 233 | } 234 | }, 235 | { 236 | "controller_type": "Greaseweazle", 237 | "id": "gw1" 238 | } 239 | ], 240 | "web" : { 241 | "port": 5000, 242 | "ip": "0.0.0.0" 243 | },
| "media_handlers": 245 | { 246 | "CD":"cdrdao", 247 | "DVD":"dvd_ddrescue", 248 | "BD":"bd_redumper" 249 | }, 250 | "output": "", 251 | "watch": None, 252 | "fifo": False 253 | } 254 | if args.settings is not None: 255 | if args.settings == "": 256 | print(json.dumps(settings, indent=4)) 257 | sys.exit(0) 258 | else: 259 | settings = config_read(args.settings) 260 | 261 | # Output folder 262 | if args.output != "": 263 | settings["output"]=args.output 264 | if args.preserve_order: 265 | settings["fifo"]=args.preserve_order 266 | if args.json_watch is not None: 267 | settings["watch"]=args.json_watch 268 | 269 | 270 | # Dump config options and exit 271 | if args.configdump is not None: 272 | config_dump(args.configdump) 273 | sys.exit(0) 274 | 275 | # If CSV is none exit 276 | if args.csv == None and settings["watch"] is None: 277 | parser.print_help() 278 | sys.exit(0) 279 | 280 | # If CSV is blank return only CSV header and exit 281 | if args.csv == "": 282 | print("Media_Type,Drive,Name,Description") 283 | sys.exit(0) 284 | 285 | # Load optional config file 286 | if args.config is not None: 287 | config_data = config_read(args.config) 288 | else: 289 | config_data = {} 290 | 291 | # Pass settings 292 | config_data["settings"] = settings 293 | 294 | # Read media samples to rip from CSV file 295 | media_samples =[] 296 | if args.csv is not None: 297 | media_samples = rip_list_read(args.csv) 298 | MediaReader.rip_queue_drives(media_samples,config_data) 299 | sys.exit(0) 300 | 301 | # Watch folder 302 | if settings["watch"] is not None: 303 | if not args.web: 304 | MediaReader.rip_queue_groups(media_samples,config_data) 305 | sys.exit(0) 306 | 307 | # Run web server 308 | if args.web: 309 | if settings["watch"] is None: 310 | print("Must provide -j/--json-watch or settings file for rip watch folder") 311 | sys.exit(1) 312 | else: 313 | if os.path.exists(settings["watch"]): 314 | shutil.rmtree(settings["watch"]) 315 | asyncio.run(startWeb(settings)) 316 | sys.exit(0) 317 | 318 | # Begin ripping all media samples provided 319 | rip_count = 1 320 | for media_sample in media_samples: 321 | MediaReader.rip(media_sample,config_data) 322 | 323 | # If there are more media samples to rip, wait while user changes samples 324 | if rip_count < len(media_samples): 325 | rip_count+=1 326 | input("Change media_samples and press Enter to continue...") 327 | 328 | 329 | if __name__ == "__main__": 330 | main() 331 | -------------------------------------------------------------------------------- /handler/controller/DiscRobotGeneric.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | 4 | """ 5 | Com port setup - 9600 (38400 for some) baud, 8N1, No flow control. 6 | 7 | 8 | Response Codes: 9 | Success - X TODO 10 | Failure - E TODO 11 | 12 | 13 | Core Commands: 14 | V - Signup or version (sometimes required as first command) 15 | C - Reset or Calibrate unit TODO 16 | I - Input disc from bin to drive TODO 17 | A - Accept disc from drive to output bin TODO 18 | G - Get disc from drive and hold in picker (required before R and sometimes A) 19 | R - Move disc from picker to Reject bin 20 | S - Status of mechanism 21 | 22 | Examples of non-standard Commands: 23 | ctrl-C - Cancel operation 24 | K - Sometimes a variant for G above. 25 | j - Shake-based Insert 26 | F - Reset flags (aka set input bin #1 as the staring bin) 27 | P - Pick (before I/? 
34 | 35 | # Python System 36 | import os 37 | import sys 38 | import json 39 | import subprocess 40 | from datetime import datetime 41 | from pathlib import Path 42 | import time 43 | from pprint import pprint 44 | 45 | # External Modules 46 | try: 47 | import serial 48 | except Exception as e: 49 | print("Need to install Python module [pyserial]") 50 | sys.exit(1) 51 | 52 | # Internal Modules 53 | if __name__ == "__main__": 54 | class ControllerHandler(object): 55 | def __init__(self): 56 | print("Totally a real class") 57 | def osRun(self, cmd): 58 | try: 59 | result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 60 | return result 61 | except subprocess.CalledProcessError as exc: 62 | print("Status : FAIL", exc.returncode, exc.output) 63 | 64 | def ensureDir(self,path): 65 | try: 66 | if not os.path.exists(path): 67 | os.makedirs(path) 68 | except Exception as e: 69 | sys.exit(1) 70 | return path 71 | 72 | else: 73 | from handler.controller.controller_handler import ControllerHandler 74 | 75 | 76 | class ControllerDiscRobotGeneric(ControllerHandler): 77 | """Handler for generic disc changing robot 78 | 79 | Command reference: http://hyperdiscs.pbworks.com/w/page/19778461/Command%20Sets%20--%20Generic 80 | 81 | Intended to be stateless and run from different processes. Uses a JSON data 82 | file to store the state data needed.
83 | """ 84 | 85 | def __init__(self): 86 | """Constructor to setup basic data and config defaults 87 | 88 | """ 89 | # Call parent constructor 90 | super().__init__() 91 | # Set media type to handle 92 | self.type_id="DiscRobotGeneric" 93 | # Default id 94 | self.controller_id = "changer" 95 | # Default config data 96 | self.config_data={ 97 | "debug_print":True, 98 | "serial_port":None, 99 | "drives":[] 100 | } 101 | 102 | # Cross instance data 103 | self.instance_data_init={ 104 | "drive_open":[], 105 | "active":False 106 | } 107 | self.instance_data={} 108 | 109 | # Device commands 110 | self.cmd = { 111 | # Disc move commands 112 | "LOAD":"I", 113 | "UNLOAD":"A", 114 | # Calibration 115 | "INIT":"C", 116 | } 117 | 118 | def instance_save(self, instance): 119 | """ Save instance state to JSON file 120 | 121 | """ 122 | 123 | tmp=self.ensureDir("/tmp/discrip/cdchanger/"+self.controller_id) 124 | # If instance is None delete existing file 125 | if instance is None: 126 | if os.path.isfile(f"{tmp}/instance.json"): 127 | os.remove(f"{tmp}/instance.json") 128 | return 129 | 130 | with open(f"{tmp}/instance.json", 'w', encoding="utf-8") as output: 131 | print("saving file?") 132 | output.write(json.dumps(instance, indent=4)) 133 | 134 | 135 | def instance_get(self): 136 | """ Load instance state from JSON file 137 | 138 | """ 139 | 140 | print("instance_get") 141 | tmp=self.ensureDir("/tmp/discrip/cdchanger/"+self.controller_id) 142 | if os.path.isfile(f"{tmp}/instance.json"): 143 | print("found file?") 144 | with open(f"{tmp}/instance.json", newline='') as output: 145 | return json.load(output) 146 | else: 147 | print("No instance data") 148 | return self.instance_data_init 149 | 150 | 151 | def active(self,state=None): 152 | """ Manage active state to prevent multiple process from trying to 153 | use robot at once 154 | 155 | """ 156 | 157 | # Block execution until robot is inactive 158 | if state is None: 159 | # Wait if the arm is doing another task 160 | while self.instance_data["active"]: 161 | time.sleep(1) 162 | #TODO - reload json data 163 | self.instance_data = self.instance_get() 164 | 165 | # Claim active status and perform action 166 | self.instance_data["active"]=True 167 | self.instance_save(self.instance_data) 168 | return 169 | else: 170 | # Set active status to provided value 171 | self.instance_data["active"]=state 172 | self.instance_save(self.instance_data) 173 | 174 | 175 | def initialize(self): 176 | """ Configure machine and get all hardware and parameters into 177 | default state 178 | 179 | """ 180 | 181 | # Clear existing instance data 182 | self.instance_save(None) 183 | self.instance_data=self.instance_data_init 184 | 185 | # Begin init 186 | self.cmdSend(self.cmd["INIT"]) 187 | 188 | # Close all trays and save status 189 | for i in range(0, len(self.config_data["drives"])): 190 | self.instance_data["drive_open"].append(False) 191 | self.drive_trayClose(self.config_data["drives"][i]) 192 | 193 | self.instance_save(self.instance_data) 194 | return False 195 | 196 | 197 | def cmdSend(self, cmd_line): 198 | """ Send standard command to robot and validate response 199 | 200 | """ 201 | 202 | # Open serial port 203 | try: 204 | ser = serial.Serial(self.config_data["serial_port"],9600,timeout=30,parity=serial.PARITY_NONE,) 205 | # Prepare 206 | ser.reset_input_buffer() 207 | ser.reset_output_buffer() 208 | 209 | # Send command 210 | ser.write( bytes(cmd_line,'ascii',errors='ignore') ) 211 | 212 | # Read response 213 | cmd_stat=True 214 | while(cmd_stat): 215 | response 
= ser.read().decode("ascii") 216 | 217 | # Generic universal response that seems fine 218 | if "X" in response: 219 | cmd_stat=False 220 | 221 | if "E" in response: 222 | cmd_stat=False 223 | print("Error response returned, exiting to protect hardware") 224 | sys.exit(0) 225 | 226 | # No valid response 227 | if response is None or response == "" or response == '\x15': 228 | print("Assuming error") 229 | cmd_stat=False 230 | 231 | ser.close() 232 | return response 233 | 234 | except Exception as e: 235 | print("Totally sending command:") 236 | print(cmd_line) 237 | return "E" 238 | 239 | def drive_trayOpen(self,drive): 240 | """ Open tray 241 | 242 | """ 243 | if self.config_data["debug_print"]: 244 | print(f"Ejecting: {drive}") 245 | 246 | self.osRun(["eject", drive]) 247 | 248 | def drive_trayClose(self,drive): 249 | """ Close tray 250 | 251 | """ 252 | if self.config_data["debug_print"]: 253 | print(f"Closing: {drive}") 254 | 255 | self.osRun(["eject","-t", drive]) 256 | 257 | 258 | def load(self, drive): 259 | """ Managed load into drive 260 | Takes drive path and loads next available disc into it 261 | 262 | """ 263 | #Read instance data from JSON 264 | self.instance_data = self.instance_get() 265 | # Wait until inactive 266 | self.active() 267 | 268 | # Get drive ID from drive path 269 | drive_load=self.config_data["drives"].index(drive) 270 | # Close all other trays if open 271 | for i in range(0, len(self.config_data["drives"])): 272 | if i != drive_load: 273 | self.drive_trayClose(self.config_data["drives"][i]) 274 | self.instance_data["drive_open"][i]=False 275 | # Check if tray was only left open (internally tracked) 276 | if not self.instance_data["drive_open"][drive_load]: 277 | # False: closed 278 | self.drive_trayOpen(drive) 279 | self.instance_data["drive_open"][drive_load]=True 280 | time.sleep(5) 281 | 282 | # Save tray status 283 | self.instance_save(self.instance_data) 284 | 285 | # Attempt load 286 | if "X" in self.cmdSend(self.cmd["LOAD"]): 287 | # Loaded disc 288 | if self.config_data["debug_print"]: 289 | print("Disc loaded") 290 | else: 291 | # Load failed 292 | print("No discs found") 293 | sys.exit(0) 294 | 295 | # Close tray for reading 296 | self.drive_trayClose(drive) 297 | self.instance_data["drive_open"][drive_load]=False 298 | # Release active state 299 | self.active(False) 300 | return False 301 | 302 | 303 | 304 | def eject(self, drive): 305 | """ Managed unload from drive 306 | Takes drive path and unloads to output hopper 307 | 308 | """ 309 | try: 310 | #Read instance data from JSON 311 | self.instance_data = self.instance_get() 312 | # Wait until inactive 313 | self.active() 314 | 315 | # Get drive ID from drive path 316 | drive_unload=self.config_data["drives"].index(drive) 317 | # Close all other trays if open 318 | for i in range(0, len(self.config_data["drives"])): 319 | if i != drive_unload: 320 | self.drive_trayClose(self.config_data["drives"][i]) 321 | self.instance_data["drive_open"][i]=False 322 | # eject tray 323 | self.drive_trayOpen(drive) 324 | # leave tray open for quick loading 325 | self.instance_data["drive_open"][drive_unload]=True 326 | self.instance_save(self.instance_data) 327 | time.sleep(5) # Wait for tray action 328 | 329 | # Run unload command 330 | if "X" in self.cmdSend(self.cmd["UNLOAD"]): 331 | # Loaded disc 332 | if self.config_data["debug_print"]: 333 | print("Disc unloaded") 334 | else: 335 | # Load failed 336 | print("Disc unload fail") 337 | sys.exit(0) 338 | # Release active state 339 | self.active(False) 340 | return 
341 |
342 |         except Exception as e:
343 |             print("EMERGENCY STOP - ERROR UNLOADING CD CHANGER:", e)
344 |             sys.exit(1)
345 |
346 |
347 |
348 |
349 | if __name__ == "__main__":
350 |     """ Test routine
351 |
352 |     Will attempt to load and remove three discs from hopper 1
353 |
354 |     """
355 |     controller = ControllerDiscRobotGeneric()
356 |     controller.config_data["serial_port"] = "/dev/ttyUSB0"
357 |     controller.config_data["drives"] = [
358 |         "/dev/sr2",
359 |         "/dev/sr3"
360 |     ]
361 |     controller.initialize()
362 |
363 |     count = 3
364 |     while count:
365 |         controller.load("/dev/sr2")
366 |         time.sleep(3)
367 |         controller.eject("/dev/sr2")
368 |         count -= 1
369 |
370 |
--------------------------------------------------------------------------------
/config/handler/hxc_image/config.script:
--------------------------------------------------------------------------------
1 | #
2 | # libhxcfe init script
3 | #
4 | # This script is built into the HxC library and is executed at startup to apply
5 | # the default variable values.
6 | #
7 | # You can put a "config.script" file in the HxC Software folder to change one
8 | # or more of these variables if needed.
9 | # Your "config.script" will be executed right after this script at startup.
10 | # The syntax to use in your script is exactly the same.
11 | #
12 | # In the same way, some image/stream loaders support script execution and
13 | # try to load and execute a "config.script" script from the image folder.
14 | # This allows you to apply specific settings for each of your streams if
15 | # needed. The loaders supporting this are currently the KryoFlux raw
16 | # loader (*.raw), the SCP loader (*.scp), the HxC stream loader
17 | # (*.hxcstream / Pauline stream dump) and the stream hfe loader (*.hfe / High
18 | # resolution stream floppy images for Pauline).
19 | #
20 | # Note : In the HxC software you can view and modify all these variables in
21 | # the "Settings" -> "Internal Parameters" window.
22 | #
23 |
24 | # ----------------------------------------------------------------------------
25 | # ----------------------------------------------------------------------------
26 |
27 | #
28 | # ------------------ Stream analyzer parameters ------------------
29 | #
30 | # These parameters are used for stream based images/flux
31 | # (Apply to KryoFlux raw files, SCP files, HxC stream files,
32 | # stream hfe files...)
33 | #
34 |
35 | #
36 | # Stream BMP export resolution and settings
37 | #
38 | # (Done before any processing if enabled, see
39 | # STREAMHFELOADER_BMPEXPORT, HXCSTREAMLOADER_BMPEXPORT,
40 | # KFRAWLOADER_BMPEXPORT, SCPLOADER_BMPEXPORT to enable
41 | # the bmp stream export)
42 | #
43 |
44 | set BMPEXPORT_STREAM_DEFAULT_XSIZE 8192
45 | set BMPEXPORT_STREAM_DEFAULT_YSIZE 480
46 | set BMPEXPORT_STREAM_DEFAULT_XTOTALTIME 1200000 # 200*1000*6 -> 200ms * 6 -> 6 revolutions
47 | set BMPEXPORT_STREAM_DEFAULT_YTOTALTIME 16 # 16us
48 | set BMPEXPORT_STREAM_BIG_DOTS 1 # Set it to be able to see
49 |                                 # a single bit error.
50 | set BMPEXPORT_STREAM_HIGHCONTRAST 1 # Increase the contrast.
51 |
52 | #
53 | # FLUXSTREAM_RPMFIX - Change the stream replay speed.
54 | #
55 | # NONE :
56 | # No stream speed change.
57 | #
58 | # 360TO300RPM :
59 | # 300 RPM disk dumped on a 360 RPM drive : Slow down the stream.
60 | #
61 | # 300TO360RPM :
62 | # 360 RPM disk dumped on a 300 RPM drive : Speed up the stream.
63 | #
64 |
65 | set FLUXSTREAM_RPMFIX NONE # NONE / 360TO300RPM / 300TO360RPM
66 |
67 | #
68 | # FLUXSTREAM_IMPORT_PCCAV_TO_MACCLV - Enable the Macintosh CLV mode.
69 | #
70 | # When enabled the stream speed is modified according to the
71 | # track number. Set the drive RPM used to make the dump.
72 | # (example : 300)
73 | #
74 |
75 | set FLUXSTREAM_IMPORT_PCCAV_TO_MACCLV 0
76 |
77 | #
78 | # FLUXSTREAM_IMPORT_PCCAV_TO_C64CLV - Enable the C64 CLV mode.
79 | #
80 | # When enabled the stream speed is modified according to the
81 | # track number. Set the drive RPM used to make the dump.
82 | # (example : 300)
83 | #
84 |
85 | set FLUXSTREAM_IMPORT_PCCAV_TO_C64CLV 0
86 |
87 | set FLUXSTREAM_IMPORT_PCCAV_TO_VICTOR9KCLV 0
88 |
89 | #
90 | # FLUXSTREAM_SKIPBLOCKSDETECTION
91 | #
92 | # Set 1 here to disable the pulse analysis used to link pulses between the
93 | # revolutions and find the exact track overlap.
94 | # This part can take a lot of CPU time, so if you just want to have
95 | # a quick preview set this to 1.
96 | #
97 |
98 | set FLUXSTREAM_SKIPBLOCKSDETECTION 0
99 |
100 | #
101 | # Exact track revolution overlap search depth (index-to-index length percent
102 | # variation).
103 | # With an unstable index signal you may need to increase this value.
104 | # (If you get some tracks with all sectors fine but the whole track marked as
105 | # flakey/weak you probably have an unstable index signal - try increasing
106 | # this value to 0.02 or 0.03.)
107 | #
108 |
109 | set FLUXSTREAM_OVERLAPSEARCHDEPTH 0.015
110 |
111 | #
112 | # Experimental :
113 | # Ignore the index and treat the whole stream as one revolution.
114 | #
115 |
116 | set FLUXSTREAM_ALL_REVOLUTIONS_IN_ONE 0
117 |
118 | #
119 | # PLL settings
120 | #
121 |
122 | # Set to 1 to skip the PLL pre-sync.
123 |
124 | set FLUXSTREAM_PLL_NO_PRESYNC 0
125 |
126 | # PLL phase error to phase
127 | # correction divisor.
128 |
129 | set FLUXSTREAM_PLL_PHASE_CORRECTION_DIVISOR 8
130 |
131 | # To force the PLL initial bitrate.
132 | # 0 -> AUTO
133 |
134 | set FLUXSTREAM_PLL_INITIAL_BITRATE 0
135 |
136 | # Set 1 to enable the GCR inter-bands rejection.
137 | # This appears to provide better results with Mac AppleSauce A2R dumps.
138 |
139 | set FLUXSTREAM_PLL_GCR_INTER_BANDS_REJECTION 0
140 |
141 | # Set 1 to enable the FM inter-bands rejection.
142 | # This appears to provide better results with noisy FM disks.
143 |
144 | set FLUXSTREAM_PLL_FM_INTER_BANDS_REJECTION 0
145 |
146 | #
147 | # "Expert" PLL settings zone
148 | # Unless you know what you are doing, keep the default values.
149 | #
150 |
151 | set FLUXSTREAM_PLL_TICK_FREQUENCY 250000000 # Internal bitstream tick frequency.
152 | set FLUXSTREAM_PLL_MIN_MAX_PERCENT 18 # Min-max PLL deviation in percent versus the central frequency.
153 |
154 | set FLUXSTREAM_PLL_MAX_ERROR_NS 680 # Max PLL error trigger level (in ns).
155 |                                     # If the error is above this level, the corresponding
156 |                                     # bit is marked as "flakey".
157 |
158 | # PLL correction ratio ("Fast" -> Used when getting closer to the central frequency)
159 |
160 | set FLUXSTREAM_PLL_FAST_CORRECTION_RATIO_N 15
161 | set FLUXSTREAM_PLL_FAST_CORRECTION_RATIO_D 16
162 |
163 | # PLL correction ratio ("Slow" -> Used when getting far from the central frequency)
164 |
165 | set FLUXSTREAM_PLL_SLOW_CORRECTION_RATIO_N 31
166 | set FLUXSTREAM_PLL_SLOW_CORRECTION_RATIO_D 32
167 |
168 | set FLUXSTREAM_ANALYSIS_WINDOW_SIZE_US 1000 # Analysis window size in us
169 | set FLUXSTREAM_ANALYSIS_REV2REV_MAX_PULSES_JITTER 10 # Max pulse duration jitter in percent
170 |
171 | #
172 | # Output bitrate filter settings
173 | #
174 |
175 | set FLUXSTREAM_BITRATE_FILTER_PASSES 2 # Number of filter passes.
176 | set FLUXSTREAM_BITRATE_FILTER_WINDOW 24 # Filter window size.
177 |
178 | #
179 | # Post analysis default index length setting
180 | #
181 |
182 | set FLUXSTREAM_ANALYSIS_OUT_INDEX_LENGTH -1 # -1 : Auto, > 0 : index length in us
183 |
184 | #
185 | # Weak bits cells threshold
186 | #
187 |
188 | set FLUXSTREAM_PLL_WEAKBITS_CELLCNT_THRESHOLD 6
189 |
190 | #
191 | # Victor 9000 bands mode PLL
192 | #
193 |
194 | set FLUXSTREAM_ANALYSIS_USE_VICTOR_TIMING 0
195 |
196 | #
197 | # Stream files (SCP/KryoFlux...) export mode
198 | #
199 |
200 | set FLUXSTREAM_STREAM_TO_STREAM_CONVERT 1 # 1 - Stream to stream support enabled.
201 |                                           # 0 - Stream to stream support disabled (old behavior).
202 |
203 | #
204 | # ---- Specific raw format parameters ----
205 | #
206 |
207 | #
208 | # HFE stream loader parameters
209 | # (Pauline floppy simulation files)
210 | #
211 |
212 | set STREAMHFELOADER_BITRATE 0 # Force the bitrate. (no auto-detection)
213 | set STREAMHFELOADER_BMPEXPORT 0 # Export/plot the input stream tracks to BMP images.
214 |
215 | #
216 | # HxC Stream loader parameters
217 | #
218 |
219 | set HXCSTREAMLOADER_DOUBLE_STEP 0 # Skip the odd files/tracks.
220 | set HXCSTREAMLOADER_SINGLE_SIDE 0 # Skip the side 1 files.
221 | set HXCSTREAMLOADER_BITRATE 0 # Force the bitrate. (no auto-detection)
222 | set HXCSTREAMLOADER_BMPEXPORT 0 # Export/plot the input stream tracks to BMP images.
223 |
224 | #
225 | # KryoFlux raw loader parameters
226 | #
227 |
228 | set KFRAWLOADER_DOUBLE_STEP 0 # Skip the odd files/tracks.
229 | set KFRAWLOADER_SINGLE_SIDE 0 # Skip the side 1 files.
230 | set KFRAWLOADER_BITRATE 0 # Force the bitrate. (no auto-detection)
231 | set KFRAWLOADER_BMPEXPORT 0 # Export/plot the input stream tracks to BMP images.
232 |
233 | #
234 | # SuperCard Pro SCP Stream loader parameters
235 | #
236 |
237 | set SCPLOADER_DOUBLE_STEP 0 # Skip the odd tracks.
238 | set SCPLOADER_SINGLE_SIDE 0 # Skip side 1.
239 | set SCPLOADER_BITRATE 0 # Force the bitrate. (no auto-detection)
240 | set SCPLOADER_BMPEXPORT 0 # Export/plot the input stream tracks to BMP images.
241 |
242 | #
243 | # DiscFerret DFI Stream loader parameters
244 | #
245 |
246 | set DFILOADER_DOUBLE_STEP 0 # Skip the odd tracks.
247 | set DFILOADER_SINGLE_SIDE 0 # Skip side 1.
248 | set DFILOADER_BITRATE 0 # Force the bitrate. (no auto-detection)
249 | set DFILOADER_BMPEXPORT 0 # Export/plot the input stream tracks to BMP images.
250 | set DFILOADER_SAMPLE_FREQUENCY_MHZ 100 # Default sample frequency.
251 |
252 | #
253 | # AppleSauceFDC A2R Stream loader parameters
254 | #
255 |
256 | set A2RLOADER_SINGLE_SIDE 0 # Skip side 1.
257 | set A2RLOADER_BITRATE 0 # Force the bitrate. (no auto-detection)
258 | set A2RLOADER_BMPEXPORT 0 # Export/plot the input stream tracks to BMP images.
259 | set A2RLOADER_SKIP_INTER_TRACKS 1 # Skip the inter tracks. Just process the normal tracks.
260 |
261 | # ----------------------------------------------------------------------------
262 | # ----------------------------------------------------------------------------
263 |
264 | #
265 | # Tracks BMP writer parameters
266 | #
267 |
268 | set BMPEXPORT_DEFAULT_XSIZE 3840 # BMP X size
269 | set BMPEXPORT_DEFAULT_YSIZE 2160 # BMP Y size
270 | set BMPEXPORT_DEFAULT_XTOTALTIME 240000 # 240ms
271 | set BMPEXPORT_DEFAULT_YTOTALTIME 16 # 16us
272 |
273 | #
274 | # Disk BMP writer parameters
275 | #
276 |
277 | set BMPDISKEXPORT_DEFAULT_XSIZE 3840
278 | set BMPDISKEXPORT_DEFAULT_YSIZE 2160
279 |
280 | #
281 | # Sector/format decoders enabled during the picture export.
282 | # (Used by the BMP writers and the flux stream importer)
283 | #
284 |
285 | set BMPEXPORT_ENABLE_ISOIBM_MFM_ENCODING 1
286 | set BMPEXPORT_ENABLE_ISOIBM_FM_ENCODING 1
287 | set BMPEXPORT_ENABLE_AMIGA_MFM_ENCODING 1
288 | set BMPEXPORT_ENABLE_EMU_FM_ENCODING 1
289 | set BMPEXPORT_ENABLE_MEMBRAIN_MFM_ENCODING 1
290 | set BMPEXPORT_ENABLE_TYCOM_FM_ENCODING 1
291 | set BMPEXPORT_ENABLE_APPLEII_GCR1_ENCODING 1
292 | set BMPEXPORT_ENABLE_APPLEII_GCR2_ENCODING 1
293 | set BMPEXPORT_ENABLE_APPLEMAC_GCR_ENCODING 1
294 | set BMPEXPORT_ENABLE_ARBURGDAT_ENCODING 0
295 | set BMPEXPORT_ENABLE_ARBURGSYS_ENCODING 0
296 | set BMPEXPORT_ENABLE_NORTHSTAR_HS_MFM_ENCODING 0
297 | set BMPEXPORT_ENABLE_HEATHKIT_HS_FM_ENCODING 0
298 | set BMPEXPORT_ENABLE_DEC_RX02_M2FM_ENCODING 0
299 | set BMPEXPORT_ENABLE_QD_MO5_ENCODING 0
300 | set BMPEXPORT_ENABLE_C64_GCR_ENCODING 0
301 | set BMPEXPORT_ENABLE_VICTOR9000_GCR_ENCODING 0
302 |
303 | #
304 | # Stream writers options
305 | #
306 |
307 | set SCPEXPORT_NUMBER_OF_REVOLUTIONS 3 # Number of revolutions written to the exported SCP file
308 | set KFRAWEXPORT_NUMBER_OF_REVOLUTIONS 3 # Number of revolutions written to the exported KF raw files
309 | set KFRAWEXPORT_DOUBLE_STEP 0 # Generate a 40-track dump for 80-track drives.
310 |
311 | #set SCPEXPORT_DISK_TYPE 0x15 # Uncomment to force/change the SCP Disk Type field
312 |                               # Please refer to the SCP documentation for more details
313 |
314 | #set SCPEXPORT_DISK_96TPI 1 # Uncomment one of the following lines to force the
315 | #set SCPEXPORT_DISK_48TPI 1 # TPI flag
316 |
317 | #
318 | # IPF Loader flags/parameters
319 | #
320 |
321 | set LOADER_IPF_CAPS_DI_LOCK_INDEX 0
322 | #set SPSCAPS_LIB_NAME CAPSImg.dll
323 |
324 | #
325 | # Logic analyzer bin file loader parameters
326 | #
327 |
328 | set LOGICANALYZER_DATA_BIT 0
329 | set LOGICANALYZER_INDEX_BIT 3
330 | set LOGICANALYZER_SAMPLERATE 16000000
331 | set LOGICANALYZER_IMPORT_START_OFFSET -1
332 | set LOGICANALYZER_IMPORT_END_OFFSET -1
333 | set LOGICANALYZER_BMPEXPORT 0
334 | set LOGICANALYZER_BITRATE 0
335 |
336 | #
337 | # HFE Loader / Writer parameters
338 | #
339 |
340 | set HFE_WRITER_WRITENOTALLOWED 0 # If != 0 -> Image is write protected.
341 |
342 | #
343 | # CPC DSK Loader / Writer parameters
344 | #
345 |
346 | set CPCDSK_WRITER_LIMIT_SECTOR_SIZE 1 # Limit the sector data to 6144 bytes
347 | set CPCDSK_WRITER_DISCARD_UNFORMATTED_SIDE 1
348 |
349 | #
350 | # Oric DSK Loader / Writer parameters
351 | #
352 |
353 | set ORICDSK_LOADER_REGENERATE_TRACK 1 # Extract the sector data and regenerate
354 |                                       # the whole track.
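#
# Hypothetical usage sketch (not part of the stock defaults) : as described
# in the header above, a per-image "config.script" placed next to a stream
# dump (e.g. an .scp file) may override any variable defined in this file,
# using the same `set` syntax. For example, for a disk with an unstable
# index signal, one could widen the overlap search and enable the SCP BMP
# export :
#
#   set FLUXSTREAM_OVERLAPSEARCHDEPTH 0.03
#   set SCPLOADER_BMPEXPORT 1
#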
355 |
356 | #
357 | # 2008 USB HxC Floppy Emulator parameters
358 | # ( FTDI + CPLD based hardware )
359 | #
360 |
361 | set USB_HXCFLOPPYEMULATOR_DEFAULT_PACKET_SIZE 1664
362 |
363 | #
364 | # Pauline Parameters
365 | # (High-end high resolution floppy drive simulator / floppy dumper)
366 | #
367 |
368 | set PAULINE_DEFAULT_IP_ADDRESS "192.168.20.7"
369 |
370 | #
371 | # hxcfe_floppySectorBySectorCopy behavior setting in
372 | # case of a source image read error during the sector copy.
373 | #
374 | # >= 0 : Clear the destination sector with the SECTORBYSECTORCOPY_SECTOR_ERROR_TAG value
375 | # -1 : Write the destination sector with the error information
376 | # -2 : Don't write/change the destination sector.
377 | #
378 |
379 | set SECTORBYSECTORCOPY_SECTOR_ERROR_TAG -1
380 |
381 | #
382 | # Set this variable to 1 to keep the source file image
383 | # name extension when using the batch converter.
384 | #
385 | # adf to hfe conversion example : myfile.adf -> myfile_adf.hfe
386 | #
387 | # Set it to 0 to drop the source file extension :
388 | # adf to hfe conversion example : myfile.adf -> myfile.hfe
389 | #
390 |
391 | set BATCHCONVERT_KEEP_SOURCE_FILE_NAME_EXTENSION 1
392 |
393 | #
394 | # Default user interface "last state" variables
395 | #
396 |
397 | set UISTATE_SAVE_FILE "laststate.script"
398 |
399 | set LASTSTATE_BATCHCONVERTER_SRC_DIR ""
400 | set LASTSTATE_BATCHCONVERTER_DST_DIR ""
401 | set LASTSTATE_BATCHCONVERTER_TARGETFORMAT 0
402 |
403 | set LASTSTATE_EXPORT_TARGETFORMAT 0
404 |
--------------------------------------------------------------------------------
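A minimal standalone sketch of the serial status convention used by
ControllerDiscRobotGeneric.cmdSend above, assuming a pyserial connection and
the same single-character codes ("X" = command accepted, "E" = hardware
error, empty read / NAK = no valid response). The helper name
send_changer_cmd, the command byte and the port settings are illustrative
only, not part of the project:

    import sys
    import serial  # pyserial

    def send_changer_cmd(port, cmd_line):
        """Send one command to the disc changer and classify the reply."""
        try:
            ser = serial.Serial(port, 9600, timeout=5)  # assumed baud rate
            ser.write(cmd_line)
            response = ser.read().decode("ascii")  # single status byte
            ser.close()
        except Exception as e:
            print("Error sending command:", e)
            return "E"

        if "E" in response:
            # Hardware reported an error: stop before the robot damages media
            print("Error response returned, exiting to protect hardware")
            sys.exit(1)
        if response == "" or response == "\x15":
            # Timeout (empty read) or NAK: treat as failure
            print("Assuming error")
        return response

    # Example: request a load, then check for the generic "X" acknowledge
    # (the command byte b"L" is hypothetical).
    if "X" in send_changer_cmd("/dev/ttyUSB0", b"L"):
        print("Disc loaded")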