├── handler
│   ├── data
│   │   ├── __init__.py
│   │   ├── data_handler.py
│   │   ├── bincue_split.py
│   │   ├── iso9660.py
│   │   ├── manager.py
│   │   ├── hxc_image.py
│   │   ├── bincue.py
│   │   ├── flux.py
│   │   └── wav.py
│   ├── media
│   │   ├── __init__.py
│   │   ├── media_handler.py
│   │   ├── bd_redumper.py
│   │   ├── dvd_redumper.py
│   │   ├── dummy.py
│   │   ├── ddisk.py
│   │   ├── optical.py
│   │   ├── dvd.py
│   │   ├── manager.py
│   │   ├── cd_redumper.py
│   │   ├── floppy.py
│   │   └── cd.py
│   ├── controller
│   │   ├── manager.py
│   │   ├── gw.py
│   │   ├── RoboRacerLS.py
│   │   ├── controller_handler.py
│   │   └── DiscRobotGeneric.py
│   ├── util
│   │   └── bincon.py
│   └── handler.py
├── web
│   ├── http
│   │   ├── rip
│   │   │   └── index.html
│   │   ├── static
│   │   │   ├── status.html
│   │   │   ├── status-drives.html
│   │   │   ├── style.css
│   │   │   ├── status-view.css
│   │   │   ├── status-drives.css
│   │   │   ├── img
│   │   │   │   ├── good.svg
│   │   │   │   ├── warning.svg
│   │   │   │   ├── idle.svg
│   │   │   │   └── working.svg
│   │   │   ├── status-drives.js
│   │   │   ├── status-queue.js
│   │   │   ├── script.js
│   │   │   ├── jsonForm.js
│   │   │   └── status-view.js
│   │   └── home.html
│   └── web.py
├── TODO.MD
├── config
│   ├── mac.400.json
│   ├── hp.lif.1651b.json
│   ├── hp.lif.33dd.json
│   ├── hp.lif.77dd.json
│   ├── hp.lif.77hd.json
│   ├── ibm.3740.json
│   ├── jonos.35.json
│   ├── sord.m23.35.json
│   ├── ibm.1200.json
│   ├── ibm.1440.json
│   ├── ibm.360.json
│   ├── ibm.720.json
│   └── handler
│       ├── flux
│       │   └── diskdefs.cfg
│       └── hxc_image
│           └── config.script
├── LICENSE
├── .gitignore
├── README.md
└── discrip.py
/handler/data/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/handler/media/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/web/http/rip/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
20 | */
21 | select = document.createElement("select");
22 | select.name=name;
23 | select.id=id;
24 | drivegroup = document.createElement("optgroup");
25 | drivegroup.label="Groups";
26 | groupcheck=[];
27 | groups=false;
28 | for (const [key, value] of Object.entries(data))
29 | {
30 | console.log(key);
31 | optgroup = document.createElement("optgroup");
32 | optgroup.label=key;
33 | for (var i = 0; i < value.length; ++i) {
34 | option = document.createElement("option");
35 | option.value=value[i]["drive"];
36 | option.innerText=value[i]["name"];
37 | optgroup.appendChild(option);
38 | if ("group" in value[i])
39 | {
40 | if(!groupcheck.includes(value[i]["group"]))
41 | {
42 | groups=true;
43 | option = document.createElement("option");
44 | option.value=value[i]["group"];
45 | option.innerText=value[i]["group"];
46 | drivegroup.appendChild(option);
47 |
48 | groupcheck.push(value[i]["group"]);
49 | }
50 | }
51 | }
52 | if(groups) select.appendChild(drivegroup);
53 | select.appendChild(optgroup);
54 | }
55 | return select
56 | }
57 |
58 | function markerCustomAdd(event)
59 | {
60 | fetch('/settings.json').then((response) => response.json())
61 | .then((data) =>
62 | {
63 | settings = data; document.getElementById('media_drive').replaceWith(buildOptionGroupList(settings["drives"],"media_drive","media_drive"));
64 | }
65 | );
66 | }
67 | window.addEventListener("load", markerCustomAdd);
68 |
69 |
70 | jform = new jsonForm(
71 | '/config_data.json',
72 | document.getElementById('config_options'),
73 | 'Config Options',
74 | {"top_blank":true,"form_names":false},
75 | {"FLUX|diskdefs-direct":"textarea"}
76 | )
77 |
78 | function sendMediaForm()
79 | {
80 | jform.prepare();
81 | const data = new URLSearchParams();
82 | for (const pair of new FormData(document.getElementById('media_form'))) {
83 | data.append(pair[0], pair[1]);
84 | }
85 |
86 | fetch("/rip", {
87 | method: 'post',
88 | body: data,
89 | }).then(() => {
90 | document.getElementById('media_name').value = "";
91 | document.getElementById('media_description').value = "";
92 | });
93 | }
94 | document.getElementById('send_media_form').onclick = function() { sendMediaForm(); };
95 |
--------------------------------------------------------------------------------
/handler/data/manager.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | # Data conversion manager for pyDiscRip. Converts ripped data between supported data types
4 |
5 | from pprint import pprint
6 |
7 | # Internal Modules
8 | from handler.data.data_handler import DataHandler
9 | from handler.data.bincue import DataHandlerBINCUE
10 | from handler.data.bincue_split import DataHandlerBINCUESPLIT
11 | from handler.data.iso9660 import DataHandlerISO9660
12 | from handler.data.wav import DataHandlerWAV
13 | from handler.data.flux import DataHandlerFLUX
14 | from handler.data.hxc_image import DataHandlerHXCImage
15 |
16 |
17 | class DataHandlerManager(object):
18 | """Manager for data types
19 |
20 | Provides process control functions for converting different data types and
21 | setting configuration data.
22 | """
23 |
24 | def __init__(self):
25 | """Constructor to setup basic data and config defaults
26 |
27 | """
28 | # Call parent constructor
29 | super().__init__()
30 | # Add all supported data types
31 | self.data_types={}
32 | self.data_types["BINCUE"] = DataHandlerBINCUE()
33 | self.data_types["BINCUE_SPLIT"] = DataHandlerBINCUESPLIT()
34 | self.data_types["ISO9660"] = DataHandlerISO9660()
35 | self.data_types["WAV"] = DataHandlerWAV()
36 | self.data_types["FLUX"] = DataHandlerFLUX()
37 | self.data_types["HXC"] = DataHandlerHXCImage()
38 |
39 | def configVirtual(self,config):
40 | """Configure a new handler to use as a virtual data format
41 |
42 | """
43 | if "Virtual" in config:
44 | # Add all new virtual formats
45 | for data in config["Virtual"]["Data"]:
46 | # Create and configure new handler
47 | self.data_types[data["input_type_id"]] = DataHandler()
48 | self.data_types[data["input_type_id"]].prepareVirtualFormat(data)
49 |
50 | def findDataType(self,data):
51 | """Match data handler to type and return handler
52 |
53 | """
54 |
55 | # Iterate through all handlers
56 | for type_id, data_handler in self.data_types.items():
57 | if data_handler.dataMatch(data) and not data_handler.handle_id in data["processed_by"]:
58 | print(f"Found handler: {data_handler.handle_id}")
59 | return data_handler
60 |
61 | return None
62 |
63 |
64 | def configDump(self):
65 | """Get all config data for media handlers and dump it to json
66 |
67 | """
68 | config_options={}
69 | # Iterate through all handlers
70 | for type_id, data_handler in self.data_types.items():
71 | # Add all config options for handler
72 | config_options[type_id]=data_handler.configOptions()
73 |
74 | return config_options
75 |
76 |
--------------------------------------------------------------------------------
/handler/media/dummy.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | # Dummy media module for pyDiscRip. Used to test the rip pipeline without real hardware
4 |
5 | # Python System
6 | import os
7 | import json
8 | from pathlib import Path
9 | import random
10 | import time
11 |
12 | # Internal Modules
13 | from handler.media.media_handler import MediaHandler
14 |
15 |
16 | class MediaHandlerDummy(MediaHandler):
17 | """Handler for DVD media types
18 |
19 | rips using a subprocess command to run `ddrescue` to create an ISO file
20 | """
21 |
22 | def __init__(self):
23 | """Constructor to setup basic data and config defaults
24 |
25 | """
26 | # Call parent constructor
27 | super().__init__()
28 | # Set media type to handle
29 | self.type_id="DUMMY"
30 | # Data types output
31 | self.data_outputs=["BINARY"]
32 | # DVD info to be collected
33 | self.dvd_partition_filesystem=""
34 |
35 |
36 | def ripDummy(self, media_sample):
37 | """Use ddrescue to rip DVD with multiple passes and mapfile
38 |
39 | """
40 | data = {
41 | "type_id": "BINARY",
42 | "processed_by": [],
43 | "done": False,
44 | "data_dir": self.ensureDir(f"{self.getPath()}/BINARY/{media_sample["name"]}"),
45 | "data_files": {
46 | "BINARY": [f"{media_sample["name"]}.img"]
47 | }
48 | }
49 | #self.status(data)
50 |
51 | # Don't re-rip ISO
52 | # if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["BINARY"][0]}"):
53 | count=str(int(random.random()*100))
54 | # Generate a random-sized test image with dd
55 | cmd1 = [
56 | "dd",
57 | "bs=8M",
58 | "count={count}",
59 | "if=/dev/random",
60 | f"of={data["data_dir"]}/{data["data_files"]["BINARY"][0]}"
61 | ]
62 |
63 | # Run command
64 | result = self.osRun(cmd1)
65 | #self.log("dd_out",str(result.stdout))
66 | #self.log("dd_err",str(result.stderr))
67 |
68 | data["done"]=True
69 | #self.status(data)
70 | # Return all generated data
71 | return data
72 |
73 |
74 | def rip(self, media_sample):
75 | """Rip DVD with ddrescue
76 |
77 | """
78 | # Setup rip output path
79 | self.setProjectDir(self.project_timestamp+"_"+media_sample["name"])
80 |
81 | # Rip and return data
82 | return [self.ripDummy(media_sample)]
83 |
84 |
85 | def load(self,media_sample,bypass=False):
86 | print(f"Dummy [{media_sample["name"]}] Loading to [{media_sample["drive"]}]")
87 | delay=int(random.random()*20)
88 | time.sleep(delay)
89 |
90 |
91 | def eject(self,media_sample):
92 | print(f"Dummy [{media_sample["name"]}] Done [{media_sample["drive"]}]")
93 | time.sleep(1)
94 |
--------------------------------------------------------------------------------
/handler/data/hxc_image.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | # Flux visualization module for pyDiscRip. Uses the HxC Floppy Emulator software (hxcfe)
4 |
5 | # Python System
6 | import os
7 | import json
8 | from pathlib import Path
9 | import importlib
10 | from pprint import pprint
11 |
12 | # External Modules
13 | # Directly imports from greaseweazle module in code
14 |
15 | # Internal Modules
16 | from handler.data.data_handler import DataHandler
17 |
18 |
19 | class DataHandlerHXCImage(DataHandler):
20 | """Handler for FLUX data types
21 |
22 | renders a disk surface image by running the `hxcfe` command-line tool
23 | """
24 |
25 | def __init__(self):
26 | """Constructor to setup basic data and config defaults
27 |
28 | """
29 | # Call parent constructor
30 | super().__init__()
31 | # Set handle ID
32 | self.handle_id="DataHandlerHXCImage"
33 | # Set data type to handle
34 | self.type_id="FLUX"
35 | # Data types output
36 | self.data_outputs=["IMAGE"]
37 |
38 |
39 | def convertData(self, data_in):
40 | """Use gw python modules to convert FLUX to BINARY
41 |
42 | """
43 |
44 | data = {
45 | "type_id": "IMAGE",
46 | "processed_by": [],
47 | "data_dir": self.ensureDir(f"{self.getPath()}/status"),
48 | "data_files": {
49 | "PNG": f"flux_image.png" # Reusing project dir for name
50 | }
51 | }
52 |
53 | print("Make image")
54 |
55 | # Don't re-render image
56 | if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["PNG"]}"):
57 | script=os.path.realpath(__file__).replace(os.path.basename(__file__),"")+"/../../config/handler/hxc_image/config.script"
58 | # Build hxcfe command
59 | cmd = [
60 | "hxcfe",
61 | f"-script:{script}",
62 | f"-finput:{os.getcwd()}/{data_in["data_dir"]}/{data_in["data_files"]["flux"][0]}",
63 | f"-foutput:{os.getcwd()}/{data["data_dir"]}/{data["data_files"]["PNG"]}.bmp",
64 | "-conv:BMP_DISK_IMAGE"
65 | ]
66 |
67 | # Run command
68 | print("run Make image")
69 | self.log("hxcfe_cmd",str(cmd))
70 | result = self.osRun(cmd)
71 | self.log("hxcfe_stdout",str(result.stdout.decode("utf-8")))
72 | self.log("hxcfe_stderr",str(result.stderr.decode("utf-8")))
73 |
74 |
75 | from wand.image import Image
76 | img = Image(filename=f"{os.getcwd()}/{data["data_dir"]}/{data["data_files"]["PNG"]}.bmp")
77 | img.format = 'png'
78 | img.save(filename=f"{os.getcwd()}/{data["data_dir"]}/{data["data_files"]["PNG"]}")
79 | os.remove(f"{os.getcwd()}/{data["data_dir"]}/{data["data_files"]["PNG"]}.bmp")
80 |
81 | # Return all generated data
82 | return [data]
83 |
84 |
85 |
86 |
--------------------------------------------------------------------------------
/web/http/home.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
13 |
14 |
15 |
16 |
17 |
18 | pyDiscRip |
19 | |
20 |
21 | |
62 |
63 |
64 |
65 | |
66 | |
67 |
68 | |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
--------------------------------------------------------------------------------
/handler/controller/RoboRacerLS.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | # Python System
4 | import os
5 | import sys
6 | import json
7 | from pathlib import Path
8 | import time
9 | from pprint import pprint
10 |
11 | # External Modules
12 | try:
13 | import serial
14 | except Exception as e:
15 | print("Need to install Python module [pyserial]")
16 | sys.exit(1)
17 |
18 | # Internal Modules
19 | from handler.controller.controller_handler import ControllerHandler
20 |
21 |
22 | class ControllerRoboRacerLS(ControllerHandler):
23 | """Handler for CD media types
24 |
25 | rips using a subprocess command to run `cdrdao` to create a BIN/CUE
26 | """
27 |
28 | def __init__(self):
29 | """Constructor to setup basic data and config defaults
30 |
31 | """
32 | # Call parent constructor
33 | super().__init__()
34 | # Set controller type to handle
35 | self.type_id="RoboRacerLS"
36 | # Default config data
37 | self.config_data={"serial_port":None}
38 | # Device commands
39 | self.cmd = {
40 | "CLEAR":"\r\n",
41 | "ARM_UP":"!BNKPH94",
42 | "ARM_DOWN":"!BNKPG93",
43 | "DISC_DROP":"!BNKDP90"
44 | }
45 |
46 | # Initialized
47 |
48 | def initialize(self):
49 | try:
50 | # Cycle the arm down and back up to confirm the device responds
51 | with serial.Serial(self.config_data["serial_port"],9600,timeout=1) as ser:
52 | time.sleep(1)
53 | ser.write( bytes(self.cmd["ARM_DOWN"],'ascii',errors='ignore') )
54 | time.sleep(3)
55 | ser.write( bytes(self.cmd["ARM_UP"],'ascii',errors='ignore') )
56 |
57 | return False
58 |
59 | except Exception as e:
60 | print("EMERGENCY STOP - ERROR ROBO RACER INIT")
61 | sys.exit(1)
62 |
63 |
64 | def load(self, drive):
65 | try:
66 | # Arm up
67 | with serial.Serial(self.config_data["serial_port"],9600,timeout=1) as ser:
68 | ser.write( bytes(self.cmd["ARM_UP"],'ascii',errors='ignore') )
69 | time.sleep(0.5)
70 |
71 | # Tray should be ejected
72 | self.osRun(["eject", f"{drive}"])
73 | time.sleep(5)
74 |
75 | # Drop disc
76 | with serial.Serial(self.config_data["serial_port"],9600,timeout=1) as ser:
77 | ser.write( bytes(self.cmd["DISC_DROP"],'ascii',errors='ignore') )
78 | time.sleep(5)
79 |
80 | # Close tray
81 | self.osRun(["eject","-t", f"{drive}"])
82 | time.sleep(10)
83 |
84 | return False
85 |
86 | except Exception as e:
87 | print("EMERGENCY STOP - ERROR LOADING ROBO RACER")
88 | sys.exit(1)
89 |
90 |
91 | def eject(self, drive):
92 | try:
93 | # Arm down
94 | with serial.Serial(self.config_data["serial_port"],9600,timeout=1) as ser:
95 | ser.write( bytes(self.cmd["ARM_DOWN"],'ascii',errors='ignore') )
96 | time.sleep(5)
97 |
98 | # Tray should be ejected
99 | self.osRun(["eject", f"{drive}"])
100 | time.sleep(5)
101 |
102 | return True
103 |
104 | except Exception as e:
105 | print("EMERGENCY STOP - ERROR LOADING ROBO RACER")
106 | sys.exit(1)
107 |
108 |
109 |
110 |
--------------------------------------------------------------------------------
/config/handler/flux/diskdefs.cfg:
--------------------------------------------------------------------------------
1 | # Computer Devices Dot
2 | disk cdi.dot
3 | cyls = 70
4 | heads = 1
5 | tracks * ibm.mfm
6 | secs = 8
7 | bps = 512
8 | gap3 = 84
9 | rate = 250
10 | end
11 | end
12 |
13 | # Sord M23P Sony OA-D31V
14 | disk sord.m23.35
15 | cyls = 70
16 | heads = 1
17 | tracks * ibm.mfm
18 | secs = 16
19 | bps = 256
20 | interleave = 2
21 | end
22 | end
23 |
24 | # Jonos Escort CP/M
25 | disk jonos.35
26 | cyls = 70
27 | heads = 1
28 | tracks * ibm.mfm
29 | secs = 9
30 | bps = 512
31 | interleave = 2
32 | end
33 | end
34 |
35 | # HP LIF 3.5in Double Density/Double Sided for 1651b Logic Analyser
36 | disk hp.lif.1651b
37 | cyls = 79
38 | heads = 2
39 | tracks * ibm.mfm
40 | id = 1
41 | interleave = 1
42 | secs = 5
43 | bps = 1024
44 | end
45 | end
46 |
47 | # HP LIF 5.25in Double Density/Double Sided
48 | disk hp.lif.33dd
49 | cyls = 33
50 | heads = 2
51 | tracks * ibm.mfm
52 | id = 0
53 | interleave = 1
54 | secs = 16
55 | bps = 256
56 | end
57 | end
58 |
59 | # HP LIF 3.5in Double Density/Double Sided
60 | disk hp.lif.77dd
61 | cyls = 77
62 | heads = 2
63 | tracks * ibm.mfm
64 | interleave = 2
65 | secs = 16
66 | bps = 256
67 | end
68 | end
69 |
70 | # HP LIF 3.5in High Density/Double Sided
71 | disk hp.lif.77hd
72 | cyls = 77
73 | heads = 2
74 | tracks * ibm.mfm
75 | interleave = 2
76 | secs = 32
77 | bps = 256
78 | end
79 | end
80 |
81 | # IBM 3740 8in disk
82 | disk ibm.3740
83 | cyls = 77
84 | heads = 1
85 | tracks * ibm.fm
86 | secs = 26
87 | bps = 128
88 | rpm = 360
89 | end
90 | end
91 |
92 | # Kaypro IV
93 | disk kaypro.iv
94 | cyls = 40
95 | heads = 2
96 | tracks 0-39.0 ibm.mfm
97 | id=0
98 | secs = 10
99 | bps = 512
100 | interleave = 5
101 | end
102 | tracks 0-39.1 ibm.mfm
103 | id=10
104 | h = 0
105 | secs = 10
106 | bps = 512
107 | interleave = 5
108 | end
109 | end
110 |
111 | # CP/M-86 Boot
112 | disk cpm.86
113 | cyls = 70
114 | heads = 1
115 | tracks 0 ibm.fm
116 | secs = 26
117 | bps = 128
118 | rpm = 360
119 | end
120 |
121 | tracks 1-69 ibm.mfm
122 | secs = 8
123 | bps = 1024
124 | rpm = 360
125 | end
126 | end
127 |
128 |
129 | # CP/M-86 Boot
130 | disk cpm.86ds
131 | cyls = 70
132 | heads = 2
133 | tracks 0.0 ibm.fm
134 | secs = 26
135 | bps = 128
136 | rpm = 360
137 | end
138 |
139 | tracks * ibm.mfm
140 | hskew = 2
141 | secs = 8
142 | bps = 1024
143 | rpm = 360
144 | end
145 | end
146 |
147 | # Magnavox VideoWriter
148 | disk maganavox.videowriter
149 | cyls = 80
150 | heads = 1
151 | tracks * ibm.mfm
152 | id = 0
153 | secs = 18
154 | bps = 256
155 | end
156 | end
157 |
158 | # ZOBEX DD-FDC DSDD
159 | disk zobex-dsdd
160 | cyls = 77
161 | heads = 2
162 | tracks * ibm.mfm
163 | secs = 16
164 | bps = 512
165 | rpm = 360
166 | interleave = 3
167 | id = 0
168 | end
169 | end
170 |
171 | # ZOBEX DD-FDC SSDD
172 | disk zobex-ssdd
173 | cyls = 77
174 | heads = 1
175 | tracks * ibm.mfm
176 | secs = 16
177 | bps = 512
178 | rpm = 360
179 | interleave = 3
180 | id = 0
181 | end
182 | end
183 |
--------------------------------------------------------------------------------
/handler/media/ddisk.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | # Generic disk ripping module for pyDiscRip. Rips a block device to a raw image with ddrescue
4 |
5 | # Python System
6 | import os
7 | import json
8 | from pathlib import Path
9 |
10 | # Internal Modules
11 | from handler.media.media_handler import MediaHandler
12 |
13 |
14 | class MediaHandlerDDisk(MediaHandler):
15 | """Handler for DVD media types
16 |
17 | rips using a subprocess command to run `ddrescue` to create an ISO file
18 | """
19 |
20 | def __init__(self):
21 | """Constructor to setup basic data and config defaults
22 |
23 | """
24 | # Call parent constructor
25 | super().__init__()
26 | # Set media type to handle
27 | self.type_id="DDISK"
28 | # Data types output
29 | self.data_outputs=["BINARY"]
30 | # DVD info to be collected
31 | self.dvd_partition_filesystem=""
32 |
33 |
34 | def ripDD(self, media_sample):
35 | """Use ddrescue to rip DVD with multiple passes and mapfile
36 |
37 | """
38 | data = {
39 | "type_id": "BINARY",
40 | "processed_by": [],
41 | "done": False,
42 | "data_dir": self.ensureDir(f"{self.getPath()}/BINARY/{media_sample["name"]}"),
43 | "data_files": {
44 | "BINARY": [f"{media_sample["name"]}.img"]
45 | }
46 | }
47 | self.status(data)
48 |
49 | # Don't re-rip ISO
50 | # if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["BINARY"][0]}"):
51 |
52 | # ddrescue is a multi step process that is run three times
53 | cmd1 = [
54 | "ddrescue",
55 | "-b",
56 | "2048",
57 | "-n",
58 | "-v",
59 | f"{media_sample["drive"]}",
60 | f"{data["data_dir"]}/{data["data_files"]["BINARY"][0]}",
61 | f"{data["data_dir"]}/mapfile"
62 | ]
63 | cmd2 = [
64 | "ddrescue",
65 | "-b",
66 | "2048",
67 | "-d",
68 | "-r",
69 | "3",
70 | "-v",
71 | f"{media_sample["drive"]}",
72 | f"{data["data_dir"]}/{data["data_files"]["BINARY"][0]}",
73 | f"{data["data_dir"]}/mapfile"
74 | ]
75 | cmd3 = [
76 | "ddrescue",
77 | "-b",
78 | "2048",
79 | "-d",
80 | "-R",
81 | "-r",
82 | "3",
83 | "-v",
84 | f"{media_sample["drive"]}",
85 | f"{data["data_dir"]}/{data["data_files"]["BINARY"][0]}",
86 | f"{data["data_dir"]}/mapfile"
87 | ]
88 |
89 | # Run command
90 | result = self.osRun(cmd1)
91 | self.log("ddrescue_1-3_out",str(result.stdout))
92 | self.log("ddrescue_1-3_err",str(result.stderr))
93 | result = self.osRun(cmd2)
94 | self.log("ddrescue_2-3_out",str(result.stdout))
95 | self.log("ddrescue_2-3_err",str(result.stderr))
96 | result = self.osRun(cmd3)
97 | self.log("ddrescue_3-3_out",str(result.stdout))
98 | self.log("ddrescue_3-3_err",str(result.stderr))
99 |
100 | data["done"]=True
101 | self.status(data)
102 | # Return all generated data
103 | return data
104 |
105 |
106 | def rip(self, media_sample):
107 | """Rip DVD with ddrescue
108 |
109 | """
110 | print("Ripping as generic disk with ddrescue")
111 | # Setup rip output path
112 | self.setProjectDir(media_sample["name"])
113 |
114 | # Rip and return data
115 | return [self.ripDD(media_sample)]
116 |
117 |
--------------------------------------------------------------------------------
/handler/media/optical.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | # CD ripping module for pyDiscRip. Can be used to rip a CD and fetch metadata
4 |
5 | # Python System
6 | import os, sys
7 | import json
8 | from pathlib import Path
9 | import time
10 | from pprint import pprint
11 | from urllib import request, parse
12 |
13 | # External Modules
14 | try:
15 | import libdiscid
16 | except Exception as e:
17 | print("Need to install libdiscid system packages [libdiscid-dev build-essential python-dev-is-python3]")
18 | print("Need to install Python module [python-libdiscid]")
19 | sys.exit(1)
20 | try:
21 | import musicbrainzngs
22 | except Exception as e:
23 | print("Need to install Python module [musicbrainzngs]")
24 | sys.exit(1)
25 | try:
26 | import pycdio, cdio
27 | except Exception as e:
28 | print("Need to install pycdio system packages [libcdio-dev libiso9660-dev swig pkg-config build-essential python-dev-is-python3]")
29 | print("Need to install Python module [pycdio]")
30 | sys.exit(1)
31 |
32 | # Internal Modules
33 | from handler.media.media_handler import MediaHandler
34 |
35 |
36 | class MediaOptical(MediaHandler):
37 | """Handler for CD media types
38 |
39 | rips using a subprocess command to run `cdrdao` to create a BIN/CUE
40 | """
41 |
42 | def __init__(self):
43 | """Constructor to setup basic data and config defaults
44 |
45 | """
46 | # Call parent constructor
47 | super().__init__()
48 | # Set media type to handle
49 | self.type_id="OPTICAL"
50 | # Default config data
51 | self.config_data=None
52 | # Data types output
53 | self.data_outputs=[]
54 | self.cd_tracks=0
55 |
56 |
57 | def load(self,media_sample,bypass=False):
58 | """Load media before continuing.
59 |
60 | Default method call waits for user to press enter
61 |
62 | Overload with automatic methods where possible.
63 | """
64 | if self.controller is not None:
65 | self.controller.load(media_sample["drive"])
66 |
67 | error_count=0
68 | print(f"Please insert [{media_sample["name"]}] into [{media_sample["drive"]}]")
69 | wait_load=0
70 | while(True):
71 | try:
72 | time.sleep(wait_load)
73 | d=cdio.Device(media_sample["drive"])
74 | tracks = d.get_num_tracks()
75 | print(f"Found disc with {tracks} tracks")
76 | return True
77 | except cdio.TrackError:
78 | print(f"Please insert [{media_sample["name"]}] into [{media_sample["drive"]}]")
79 |
80 | if self.controller is None:
81 | self.eject(media_sample)
82 | self.web_update({"drive_status":{media_sample["drive"]:{"status":3,"title":f"Please insert [{media_sample["name"]}] into [{media_sample["drive"]}]"}}},media_sample["config_data"])
83 | wait_load=10
84 | error_count+=1
85 | if self.controller is not None and error_count > 10:
86 | return False
87 |
88 |
89 | def eject(self,media_sample, controller=None):
90 | """Eject drive tray
91 | """
92 | print("OPTICAL EJECT")
93 | if self.controller is not None:
94 | controller = self.controller
95 | if controller is not None:
96 | print("Controller EJECT")
97 | if controller.eject(media_sample["drive"]):
98 | return
99 | print("EJECTING...")
100 | d=cdio.Device(media_sample["drive"])
101 | d.eject_media()
102 | time.sleep(3)
103 |
--------------------------------------------------------------------------------
/handler/media/dvd.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | # DVD ripping module for pyDiscRip. Can be used to rip a DVD
4 |
5 | # Python System
6 | import os
7 | import json
8 | from pathlib import Path
9 |
10 | # Internal Modules
11 | from handler.media.media_handler import MediaHandler
12 | from handler.media.optical import MediaOptical
13 |
14 |
15 | class MediaHandlerDVD(MediaOptical):
16 | """Handler for DVD media types
17 |
18 | rips using a subprocess command to run `ddrescue` to create an ISO file
19 | """
20 |
21 | def __init__(self):
22 | """Constructor to setup basic data and config defaults
23 |
24 | """
25 | # Call parent constructor
26 | super().__init__()
27 | # Set handler ID
28 | self.handler_id="dvd_ddrescue"
29 | # Set media type to handle
30 | self.type_id="DVD"
31 | # Data types output
32 | self.data_outputs=["ISO9660"]
33 | # DVD info to be collected
34 | self.dvd_partition_filesystem=""
35 |
36 |
37 | def ripDVD(self, media_sample):
38 | """Use ddrescue to rip DVD with multiple passes and mapfile
39 |
40 | """
41 | # TODO - Data is not always ISO9660, support for UDF is needed still
42 | data = {
43 | "type_id": "ISO9660",
44 | "processed_by": [],
45 | "done": False,
46 | "data_dir": self.ensureDir(f"{self.getPath()}/ISO9660/{media_sample["name"]}"),
47 | "data_files": {
48 | "ISO": [f"{media_sample["name"]}.iso"]
49 | }
50 | }
51 | self.status(data)
52 |
53 | # Don't re-rip ISO
54 | if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["ISO"][0]}"):
55 |
56 | # ddrescue is a multi step process that is run three times
57 | cmd1 = [
58 | "ddrescue",
59 | "-b",
60 | "2048",
61 | "-n",
62 | "-v",
63 | f"{media_sample["drive"]}",
64 | f"{data["data_dir"]}/{data["data_files"]["ISO"][0]}",
65 | f"{data["data_dir"]}/mapfile"
66 | ]
67 | cmd2 = [
68 | "ddrescue",
69 | "-b",
70 | "2048",
71 | "-d",
72 | "-r",
73 | "3",
74 | "-v",
75 | f"{media_sample["drive"]}",
76 | f"{data["data_dir"]}/{data["data_files"]["ISO"][0]}",
77 | f"{data["data_dir"]}/mapfile"
78 | ]
79 | cmd3 = [
80 | "ddrescue",
81 | "-b",
82 | "2048",
83 | "-d",
84 | "-R",
85 | "-r",
86 | "3",
87 | "-v",
88 | f"{media_sample["drive"]}",
89 | f"{data["data_dir"]}/{data["data_files"]["ISO"][0]}",
90 | f"{data["data_dir"]}/mapfile"
91 | ]
92 |
93 | # Run command
94 | result = self.osRun(cmd1)
95 | self.log("ddrescue_stdout",str(result.stdout))
96 | self.log("ddrescue_stderr",str(result.stderr))
97 |
98 | self.osRun(cmd2)
99 | self.osRun(cmd3)
100 |
101 | data["done"]=True
102 | self.status(data)
103 | # Return all generated data
104 | return data
105 |
106 |
107 | def rip(self, media_sample):
108 | """Rip DVD with ddrescue
109 |
110 | """
111 | print("Ripping as DVD")
112 | print("WARNING: This software does not yet distinguish between ISO9660 and UDF filesystems")
113 | # Setup rip output path
114 | self.setProjectDir(media_sample["name"])
115 |
116 | # Rip and return data
117 | return [self.ripDVD(media_sample)]
118 |
119 |
--------------------------------------------------------------------------------
/web/http/static/img/working.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
57 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # UV
98 | # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more
100 | # commonly ignored for libraries.
101 | #uv.lock
102 |
103 | # poetry
104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
105 | # This is especially recommended for binary packages to ensure reproducibility, and is more
106 | # commonly ignored for libraries.
107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
108 | #poetry.lock
109 |
110 | # pdm
111 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
112 | #pdm.lock
113 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
114 | # in version control.
115 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
116 | .pdm.toml
117 | .pdm-python
118 | .pdm-build/
119 |
120 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
121 | __pypackages__/
122 |
123 | # Celery stuff
124 | celerybeat-schedule
125 | celerybeat.pid
126 |
127 | # SageMath parsed files
128 | *.sage.py
129 |
130 | # Environments
131 | .env
132 | .venv
133 | env/
134 | venv/
135 | ENV/
136 | env.bak/
137 | venv.bak/
138 |
139 | # Spyder project settings
140 | .spyderproject
141 | .spyproject
142 |
143 | # Rope project settings
144 | .ropeproject
145 |
146 | # mkdocs documentation
147 | /site
148 |
149 | # mypy
150 | .mypy_cache/
151 | .dmypy.json
152 | dmypy.json
153 |
154 | # Pyre type checker
155 | .pyre/
156 |
157 | # pytype static type analyzer
158 | .pytype/
159 |
160 | # Cython debug symbols
161 | cython_debug/
162 |
163 | # PyCharm
164 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
165 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
166 | # and can be added to the global gitignore or merged into this file. For a more nuclear
167 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
168 | #.idea/
169 |
170 | # PyPI configuration file
171 | .pypirc
172 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # pyDiscRip
2 | Automate ripping optical discs and extracting data
3 |
4 | ## Usage
5 |
6 | ```
7 | usage: pyDiscRip [-h] [-c CSV] [-f CONFIG] [-d [CONFIGDUMP]] [-o OUTPUT]
8 |
9 | Media ripping manager program
10 |
11 | options:
12 | -h, --help show this help message and exit
13 | -c, --csv CSV CSV file in `Drive,Name,Description` format
14 | -f, --config CONFIG Config file for ripping
15 | -d, --configdump [CONFIGDUMP]
16 | Dump all config options. Optional filename to output to.
17 | -o, --output OUTPUT Directory to save data in
18 |
19 | By Shelby Jueden
20 | ```
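For example, a typical invocation might look like the following sketch (assuming the repository's `discrip.py` is the entry point; the file names are placeholders):

```
python3 discrip.py -c riplist.csv -f config.json -o ./rips
```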
21 | ### Rip List CSV
22 | This program takes a CSV file as a parameter that describes each media sample: the drive it is in, a name, and a description. The CSV may optionally tell the software what format the media is if it is not an optical disc.
23 |
24 | The headers for the CSV are almost all required, but their order is not critical. The header row is:
25 | ```
26 | Drive,Name,Description,media_type
27 | ```
28 |
29 | The header `media_type` is not required for optical discs, but is required for other formats. Omitting the `media_type` header is the same as setting the `media_type` to `auto`.
30 |
31 | #### Valid Media Types
32 |
33 | - CD
34 | - DVD
35 | - Some Blu-ray
36 | - Floppy
37 |
38 | #### CSV Line Examples
39 |
40 | - **Ripping a CD with automatic format detection:** `/dev/sr0, StAnger, Metallica - St. Anger`
41 | - **Ripping a CD with manual format specification:** `CD, /dev/sr0, StAnger, Metallica - St. Anger`
42 | - **Ripping a Floppy in Drive A with a Greaseweazle:** `floppy, a, doomsharev1.1_1-2, Doom Shareware v1.1 Disk 1 of 2`
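Putting these together, a complete rip list with the header row might look like this sketch (the drives and names are placeholders; `auto` follows the note about an omitted `media_type` above):

```
Drive,Name,Description,media_type
/dev/sr0,StAnger,Metallica - St. Anger,auto
a,doomsharev1.1_1-2,Doom Shareware v1.1 Disk 1 of 2,floppy
```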
43 |
44 | ### Config File
45 |
46 | A JSON configuration file may be used to change some parameters of the rip, such as the `cdrdao` driver or the format the Greaseweazle `convert` function will use to decode flux. All possible configuration values can be dumped to a file with the `-d` parameter; a filename may be specified to write them to.
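As a rough sketch, the `FLUX` section of a dumped config might look like the fragment below. The values come from the defaults in `handler/data/flux.py`; the exact shape produced by `-d` may differ, and other handlers contribute their own sections.

```
"FLUX": {
    "convert_output": "img",
    "gw": {
        "tracks": null,
        "hard-sectors": null,
        "pll": null,
        "reverse": null,
        "diskdefs": null,
        "format": "ibm.1440"
    },
    "diskdefs-direct": null
}
```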
47 |
48 | ### Virtual Data Formats
49 | Virtual data formats may be specified in config files. This allows you to add additional conversion steps using only JSON.
50 |
51 | Here is an example of a virtual format that extracts the contents of a binary file containing a FAT12 filesystem using `mtools`:
52 |
53 | ```
54 | "Virtual": {
55 | "Data": [
56 | {
57 | "input_type_id":"BINARY",
58 | "output_type_id":"Z_FILES",
59 | "cmd":"mcopy -spi {input_file} ::*.* {data_dir}",
60 | "data_output": {
61 | "type_id": "Z_FILES",
62 | "processed_by": [],
63 | "data_dir": "FILES",
64 | "data_files": {
65 | "Z_FILES": ""
66 | }
67 | }
68 | }
69 | ]
70 | }
71 | ```
72 |
73 | The `{input_file}` and `{data_dir}` parts of the "cmd" get substituted before execution.
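A minimal sketch of that substitution step is shown below, assuming simple placeholder replacement (the real logic lives in the data handler and may differ; the paths are hypothetical):

```
# Hypothetical illustration of how the "cmd" template could be expanded
cmd_template = "mcopy -spi {input_file} ::*.* {data_dir}"
cmd = cmd_template.format(
    input_file="BINARY/sample/sample.img",  # placeholder input file path
    data_dir="FILES",                       # matches "data_dir" in the virtual format
)
# cmd is now: "mcopy -spi BINARY/sample/sample.img ::*.* FILES"
```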
74 |
75 | ## Installation
76 |
77 | ### System
78 | Some features of this software rely on executing system-level programs. In the future it would be preferable to replace these with native Python packages, but that is not currently possible.
79 |
80 | You will need the following system packages:
81 |
82 | cdrdao bchunk ddrescue 7z libdiscid-dev python-dev-is-python3 libcdio-dev libiso9660-dev swig pkg-config libcdio-utils
83 |
84 |
85 | ### pip
86 | Make sure to install the system packages first, as some pip packages use them to build their modules.
87 |
88 | flask pyudev python-libdiscid musicbrainzngs pycdio unidecode ffmpeg-python pyserial
89 |
90 | For floppy media reading you will also need the Greaseweazle software installed. You will most likely already have it if you have used a Greaseweazle before, but to install the latest version directly you can use the following command:
91 |
92 | pip install git+https://github.com/keirf/greaseweazle@latest --force
93 |
94 | ## Roadmap
95 |
96 |
97 | ### Format: CD
98 | - Pre-gap detection and ripping (would be audio only so can go direct to WAV)
99 |
100 | ### Data: FLAC + Musicbrainz
101 |
102 | - **Mixed Mode Discs:** A disc that has data tracks mixed with audio may return metadata that includes the data tracks. This currently causes off-by-one errors.
103 |   - A possible solution would be to look at a BINCUE data set, determine the index positions of the data tracks, and skip those in the tagging step. There is no clean way to associate an ISO with a BINCUE, though.
104 |
105 |
106 |
--------------------------------------------------------------------------------
/handler/controller/controller_handler.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | # Base media handler for pyDiscRip.
4 |
5 | # Python System
6 | import sys, os
7 | import json
8 | import time
9 | from enum import Enum
10 | from datetime import datetime
11 |
12 | # Internal Modules
13 | try:
14 | from handler.handler import Handler
15 | except Exception as e:
16 | # Probably running directly
17 | sys.path.append('../../handler')
18 | from handler import Handler
19 | try:
20 | from wand.image import Image
21 | except Exception as e:
22 | print("Need to install Python module [wand]")
23 | sys.exit(1)
24 |
25 | class ControllerHandler(Handler):
26 | """Base class for Media Types to handle identification and ripping
27 |
28 | """
29 |
30 | def __init__(self):
31 | """Constructor to setup basic data and config defaults
32 |
33 | """
34 | # Call parent constructor
35 | super().__init__()
36 | # Set media type id for later use
37 | self.type_id=None
38 | # Set id to match against
39 | self.controller_id=None
40 | # Set directory to work in
41 | self.project_dir=""
42 | # Get current datetime
43 | self.project_timestamp=str(datetime.now().isoformat()).replace(":","-")
44 | # Data types output for later use
45 | self.data_outputs=[]
46 | # Camera setting values
47 | self.camera_defaults={
48 | "video_id":-1, # The /dev/video# id for the camera to use
49 | "camera_x":1920,
50 | "camera_y":1080,
51 | "crop_x0":0,
52 | "crop_y0":0,
53 | "crop_x1":1920,
54 | "crop_y1":1080,
55 | "focus":0
56 | }
57 |
58 |
59 | def initialize(self):
60 | return
61 |
62 |
63 | def controllerMatch(self, media_sample=None):
64 | """Check if the media sample should be handled by this type"""
65 | return media_sample["controller_type"] == self.type_id
66 |
67 |
68 | def load_hold(self,callback=None,callback_arg=None):
69 | if callback is not None:
70 | callback(callback_arg)
71 |
72 |
73 | def photoDrive(self,driveName, focus=None):
74 | """ Take a photo of media related to drive """
75 |
76 | # Check if camera is configured
77 | if self.config_data["camera"]["video_id"] == -1:
78 | return False
79 |
80 | # Find focus value
81 | if focus is None:
82 | # Use default focus
83 | focus = self.config_data["camera"]["focus"]
84 | # Handle given drive name
85 | drivepath=driveName+"/"
86 |
87 | print("Taking photo of media")
88 | from linuxpy.video.device import Device, MenuControl, VideoCapture, BufferType
89 | # Init camera device
90 | cam = Device.from_id(self.config_data["camera"]["video_id"])
91 | cam.open()
92 | # set camera data format
93 | capture = VideoCapture(cam)
94 | capture.set_format(
95 | self.config_data["camera"]["camera_x"],
96 | self.config_data["camera"]["camera_y"],
97 | "YUYV"
98 | )
99 | cam.controls["focus_automatic_continuous"].value=False
100 | cam.controls["focus_absolute"].value=focus
101 | time.sleep(3)
102 |
103 | # get frame from camera
104 | img = None
105 | for i, frame in enumerate(cam):
106 | if i > 30:
107 | img = frame
108 | break
109 |
110 | # extract raw data from frame
111 | raw_yuv = list(img.data)
112 |
113 | # Byteswap for wand
114 | hold = None
115 | for i in range(0,len(raw_yuv),2):
116 | hold = raw_yuv[i]
117 | raw_yuv[i] = raw_yuv[i+1]
118 | raw_yuv[i+1] = hold
119 | data = bytes(raw_yuv)
120 | cam.close()
121 |
122 | with Image(blob=data, format='UYVY',width=self.config_data["camera"]["camera_x"],height=self.config_data["camera"]["camera_y"],depth=8,colorspace="yuv") as image:
123 | # Build path to save image
124 | tmp=self.ensureDir("/tmp/discrip/photo/"+drivepath)
125 | # Apply crop
126 | image.crop(
127 | self.config_data["camera"]["crop_x0"],
128 | self.config_data["camera"]["crop_y0"],
129 | self.config_data["camera"]["crop_x1"],
130 | self.config_data["camera"]["crop_y1"],
131 | )
132 |
133 | image.save(filename=tmp+"photo.jpg")
134 |
135 |
136 | def load(self, drive):
137 | return False
138 |
139 |
140 | def eject(self, drive):
141 | return False
142 |
--------------------------------------------------------------------------------
/handler/data/bincue.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | # BINCUE conversion module for pyDiscRip.
4 |
5 | # Python System
6 | import os
7 | import glob
8 | import sys
9 | import json
10 |
11 | # Internal Modules
12 | from handler.data.data_handler import DataHandler
13 |
14 |
15 | class DataHandlerBINCUE(DataHandler):
16 | """Handler for BINCUE data types
17 |
18 | Extracts files using bchunk
19 | """
20 |
21 | def __init__(self):
22 | """Constructor to setup basic data and config defaults
23 |
24 | """
25 | # Call parent constructor
26 | super().__init__()
27 | # Set handle ID
28 | self.handle_id="DataHandlerBINCUE"
29 | # Set data type to handle
30 | self.type_id="BINCUE"
31 | # Data types output
32 | self.data_outputs=["WAV","ISO9660"]
33 |
34 |
35 | def convertData(self,data_in):
36 | """Use bchunk to extract all WAVs and ISOs from BINCUE
37 |
38 | """
39 |
40 | if type(data_in["data_files"]["BIN"]) is list:
41 | bin_path = data_in["data_files"]["BIN"][0].replace(".bin","")
42 | else:
43 | bin_path = data_in["data_files"]["BIN"].replace(".bin","")
44 |
45 |
46 | # Build data output for WAV
47 | data_wav = {
48 | "type_id": "WAV",
49 | "processed_by": [],
50 | "data_dir": self.ensureDir(f"{self.getPath()}/WAV/{bin_path}"),
51 | "data_files": {
52 | "WAV": []
53 | }
54 | }
55 |
56 | # Build data output ISO
57 | data_iso = {
58 | "type_id": "ISO9660",
59 | "processed_by": [],
60 | "data_dir": self.ensureDir(f"{self.getPath()}/ISO9660/{bin_path}"),
61 | "data_files": {
62 | "ISO": []
63 | }
64 | }
65 |
66 | # Check for files in output directory
67 | wavs = glob.glob(f"{data_wav["data_dir"]}/*.wav")
68 | isos = glob.glob(f"{data_iso["data_dir"]}/*.iso")
69 |
70 | # Don't re-convert if files exist
71 | if len(wavs) == 0 and len(isos) == 0 :
72 |
73 | if type(data_in["data_files"]["BIN"]) is list:
74 | if len(data_in["data_files"]["BIN"]) > 1:
75 |
76 | with open(f"{data_in["data_dir"]}/{data_in["data_files"]["CUE"]}") as in_cue:
77 | with open(f"{data_in["data_dir"]}/{data_in["data_files"]["CUE"]}-s1.cue", 'w') as out_cue:
78 | for line in in_cue:
79 | if not "SESSION 02" in line:
80 | out_cue.write(line)  # line already ends with a newline
81 | else:
82 | break
83 | # Build bchunk command to generate CUE
84 | cmd = [
85 | "bchunk",
86 | "-w",
87 | f"{data_in["data_dir"]}/{data_in["data_files"]["BIN"][0]}",
88 | f"{data_in["data_dir"]}/{data_in["data_files"]["CUE"]}-s1.cue",
89 | f"{data_wav["data_dir"]}/track"
90 | ]
91 |
92 |
93 | else:
94 | # Build bchunk command to generate CUE
95 | cmd = [
96 | "bchunk",
97 | "-w",
98 | f"{data_in["data_dir"]}/{data_in["data_files"]["BIN"][0]}",
99 | f"{data_in["data_dir"]}/{data_in["data_files"]["CUE"]}",
100 | f"{data_wav["data_dir"]}/track"
101 | ]
102 |
103 |
104 | else:
105 | # Build bchunk command to generate CUE
106 | cmd = [
107 | "bchunk",
108 | "-w",
109 | f"{data_in["data_dir"]}/{data_in["data_files"]["BIN"]}",
110 | f"{data_in["data_dir"]}/{data_in["data_files"]["CUE"]}",
111 | f"{data_wav["data_dir"]}/track"
112 | ]
113 |
114 | # Run command
115 | result = self.osRun(cmd)
116 | self.log("bchunk_stdout",str(result.stdout))
117 | self.log("bchunk_stderr",str(result.stderr))
118 |
119 |
120 | # Get files in output directory
121 | wavs = glob.glob(f"{data_wav["data_dir"]}/*.wav")
122 | # Sort wavs to have file order make sense
123 | wavs.sort()
124 |
125 | # Build data output if WAVs were converted
126 | if len(wavs) > 0:
127 |
128 | # Add file paths to data output for all WAVs
129 | for wav in wavs:
130 | print(f"Working on: {wav}")
131 | data_wav["data_files"]["WAV"].append(f"{wav.replace(data_wav["data_dir"]+"/","")}")
132 |
133 | # Build data output if ISOs were converted
134 | isos = glob.glob(f"{data_wav["data_dir"]}/*.iso") + glob.glob(f"{data_iso["data_dir"]}/*.iso")
135 | if len(isos) > 0:
136 |
137 | # Add file paths to data output for all ISOs
138 | for iso in isos:
139 | print(f"Working on: {iso}")
140 | # The file paths get weird, this is a fix for it
141 | if "WAV" in iso:
142 | os.rename(
143 | iso,
144 | f"{data_iso["data_dir"]}/{iso.replace(data_wav["data_dir"]+"/","")}")
145 | iso = f"{data_iso["data_dir"]}/{iso.replace(data_wav["data_dir"]+"/","")}"
146 | data_iso["data_files"]["ISO"].append(f"{iso.replace(data_iso["data_dir"]+"/","")}")
147 |
148 | # Clear WAV data if no WAVs were created
149 | if len(wavs) == 0:
150 | data_wav = None
151 |
152 | # Clear ISO data if no ISOs were created
153 | if len(isos) == 0:
154 | data_iso = None
155 |
156 | # Return all generated data
157 | return [data_wav,data_iso]
158 |
159 |
--------------------------------------------------------------------------------
/handler/util/bincon.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | # Python System
4 | import argparse
5 | import sys
6 | import os
7 | import re
8 | from enum import Enum
9 |
10 | class CD_MODE_SECTORS(Enum):
11 | AUDIO = 2352
12 | CDG = 2448
13 | MODE1_RAW = 2352
14 | MODE1_2048 = 2048
15 | MODE1_2352 = 2352
16 | MODE2_RAW = 2352
17 | MODE2_2048 = 2048
18 | MODE2_2324 = 2324
19 | MODE2_2336 = 2336
20 | MODE2_2352 = 2352
21 | CDI_2336 = 2336
22 | CDI_2352 = 2352
23 |
24 |
25 | def msf2sector(msf):
26 | sector=0
27 | sector+=int(msf.split(":")[0])*60*75
28 | sector+=int(msf.split(":")[1])*75
29 | sector+=int(msf.split(":")[2])
30 |
31 | return sector
32 |
33 | def sector2msf(sector):
34 | msf=""
35 | m=sector // (60*75)
36 | msf+=str(int(m)).zfill(2)+":"
37 | s=(sector-(m*(60*75))) // (75)
38 | msf+=str(int(s)).zfill(2)+":"
39 | f=sector % 75
40 | msf+=str(int(f)).zfill(2)
41 |
42 | return msf
43 |
44 | def cue_by_line(cue_file, bin_out,path="./"):
45 |
46 | # Create output folder if it doesn't exist
47 | if not os.path.exists(path):
48 | os.makedirs(path)
49 |
50 | # Load CUE file
51 | cue_lines=None
52 | cue_dir=os.path.dirname(cue_file) if os.path.dirname(cue_file) != "" else "./"
53 | with open(cue_file) as file:
54 | cue_lines = [line.rstrip() for line in file]
55 |
56 | # Count sessions to know if is multisession disc image
57 | session_total=0
58 |
59 | # Check all BIN files exist
60 | for line in cue_lines:
61 | if "SESSION" in line:
62 | session_total+=1
63 | if "FILE" in line:
64 | # Exit if file not found
65 | if not os.path.exists(cue_dir+"/"+re.search(r'FILE "?(.*?)"? BINARY', line).group(1)):
66 | print(f'BIN file [{re.search(r'FILE "?(.*?)"? BINARY', line).group(1)}] from CUE not found.')
67 | sys.exit(1)
68 |
69 | # Setup runtime
70 | mode_size=2352
71 | session=1
72 | session_post="" if session_total == 0 or session_total == 1 else f'-s{session}'
73 | track=0
74 | # Track position in data with sector position relative to bin data
75 | sector=0
76 | file_size_full=0
77 | file_size_used=0
78 |
79 | # Prepare output files
80 | if bin_out:
81 | output = open(f'{path}/{bin_out+session_post}.bin', "w+b")
82 | cue = open(f'{path}/{bin_out}.cue', 'w')
83 |
84 | # Main CUE loop
85 | for line in cue_lines:
86 | # Reset on new session and start new file
87 | if "SESSION" in line:
88 | result=re.search(r'REM SESSION ([0-9]+)', line)
89 | sector=0
90 | session=int(result.group(1))
91 | file_size_full=0
92 | file_size_used=0
93 | session_post=f'-s{session}'
94 | if bin_out:
95 | output.close()
96 | output = open(f'{path}/{bin_out+session_post}.bin', "w+b")
97 |
98 | # Use track to get sector size for upcoming data
99 | if "TRACK" in line:
100 | result=re.search(r'TRACK ([0-9]+) (.*)', line)
101 | if result is not None:
102 | track=result.group(1)
103 | mode_size=CD_MODE_SECTORS[result.group(2).replace("/","_")].value
104 |
105 | # Get size of files to calculate length of tracks using sector size
106 | if "FILE" in line:
107 | if file_size_full == 0:
108 | if bin_out:
109 | cue.write(f'FILE "{bin_out+session_post}.bin" BINARY'+"\n")
110 |
111 | # Copy bin file into output
112 | if bin_out:
113 | with open(cue_dir+"/"+re.search(r'FILE "?(.*?)"? BINARY', line).group(1), "rb") as r:
114 | output.write(r.read())
115 |
116 | # Add any unaccounted for data to sector position
117 | sector+=file_size_full
118 |
119 | # Reset size
120 | file_size_used=0
121 | file_size_full=os.path.getsize(cue_dir+"/"+re.search(r'FILE "?(.*?)"? BINARY', line).group(1))/mode_size
122 |
123 | # Check for MSF times in INDEXes
124 | if "INDEX" in line:
125 | result = re.search(r'[0-9]+:[0-9]+:[0-9]+', line)
126 | if result is not None:
127 | # Consume current file data
128 | if file_size_full != 0:
129 | file_size_used=msf2sector(result.group(0))
130 |
131 | # Update MSF in line
132 | line=line.replace(result.group(0),sector2msf(sector+file_size_used))
133 |
134 | # Pass all lines to new CUE except old FILE lines
135 | if "FILE" not in line:
136 | print(line)
137 | if bin_out:
138 | cue.write(line+"\n")
139 |
140 | # Close new files
141 | if bin_out:
142 | cue.close()
143 | output.close()
144 |
145 |
146 | if __name__ == "__main__":
147 | """ Run directly
148 |
149 | """
150 | parser = argparse.ArgumentParser(
151 | prog='bincon',
152 | description='BIN/CUE bin concatenation tool to combine multiple BIN files into one.',
153 | epilog='By Shelby Jueden')
154 | parser.add_argument('-d', '--debug', help="Only print CUE, don't write files", action='store_true')
155 | parser.add_argument('-o', '--output-folder', help="Path to output files to", default="./")
156 | parser.add_argument('filenames', help="CUE file to combine, optionally followed by an output BIN name", default=None, nargs=argparse.REMAINDER)
157 | args = parser.parse_args()
158 |
159 |
160 | if len(args.filenames) < 1:
161 | print("Please provide a CUE file to work on. And optionally an output BIN name.")
162 | sys.exit(1)
163 |
164 | # Allow sloppy file name parameters based on file existing or not
165 | cue=None
166 | bin_out="data"
167 | for check in args.filenames:
168 | if not os.path.exists(check):
169 | bin_out=check
170 | else:
171 | cue=check
172 |
173 | if args.debug:
174 | bin_out = None
175 |
176 | # Check CUE was passed and begin parsing
177 | if cue is None:
178 | print("Make sure CUE file exists.")
179 | sys.exit(1)
180 | else:
181 | print(f'Working on {cue}')
182 | cue_by_line(cue,bin_out,args.output_folder)
183 |
--------------------------------------------------------------------------------
/handler/data/flux.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | # Flux conversion module for pyDiscRip. Uses greaseweazle software
4 |
5 | # Python System
6 | import os
7 | import json
8 | from pathlib import Path
9 | import importlib
10 | from pprint import pprint
11 |
12 | # External Modules
13 | # Directly imports from greaseweazle module in code
14 |
15 | # Internal Modules
16 | from handler.data.data_handler import DataHandler
17 |
18 |
19 | class DataHandlerFLUX(DataHandler):
20 | """Handler for FLUX data types
21 |
22 | converts using greaseweazle software by directly accessing python code
23 | """
24 |
25 | def __init__(self):
26 | """Constructor to setup basic data and config defaults
27 |
28 | """
29 | # Call parent constructor
30 | super().__init__()
31 | # Set handle ID
32 | self.handle_id="DataHandlerFLUX"
33 | # Set data type to handle
34 | self.type_id="FLUX"
35 | # Default config data
36 | self.config_data={
37 | "convert_output":"img",
38 | "gw":{
39 | "tracks": None,
40 | "hard-sectors": None,
41 | "pll": None,
42 | "reverse": None,
43 | "diskdefs": None,
44 | "format": "ibm.1440"
45 | },
46 | "diskdefs-direct": None
47 | }
48 | # Data types output
49 | self.data_outputs=["BINARY"]
50 |
51 | def buildArgs(self,data_in, data,default_diskdef=True):
52 | # gw modules individually parse arguments to control the conversion process.
53 | # This builds fake arguments to pass to the module.
54 | # For more information on gw parameters run `gw convert --help`
55 | args=[]
56 | args.append("pyDiscRip") # Not actually used but index position is needed
57 | args.append("convert") # Not actually used but index position is needed
58 |
59 | # Process all config options to build parameters for gw module
60 | if "diskdefs-direct" in self.config_data and self.config_data["diskdefs-direct"] is not None:
61 |
62 | with open(f"/tmp/discrip/{self.project_timestamp}_diskdefs.cfg", 'w', encoding="utf-8") as output:
63 | output.write(self.config_data["diskdefs-direct"])
64 | args.append("--diskdefs")
65 | args.append(f"/tmp/discrip/{self.project_timestamp}_diskdefs.cfg")
66 | else:
67 | if "diskdefs" in self.config_data["gw"] and self.config_data["gw"]["diskdefs"] is not None:
68 | args.append("--diskdefs")
69 | args.append(str(self.config_data["gw"]["diskdefs"]))
70 | else:
71 | if not default_diskdef:
72 | args.append("--diskdefs")
73 | args.append(os.path.realpath(__file__).replace(os.path.basename(__file__),"")+"/../../config/handler/flux/diskdefs.cfg")
74 | if "format" in self.config_data["gw"] and self.config_data["gw"]["format"] is not None:
75 | args.append("--format")
76 | args.append(str(self.config_data["gw"]["format"]))
77 | if "tracks" in self.config_data["gw"] and self.config_data["gw"]["tracks"] is not None:
78 | args.append("--tracks")
79 | args.append(str(self.config_data["gw"]["tracks"]))
80 | if "seek-retries" in self.config_data["gw"] and self.config_data["gw"]["seek-retries"] is not None:
81 | args.append("--seek-retries")
82 | args.append(str(self.config_data["gw"]["seek-retries"]))
83 | if "pll" in self.config_data["gw"] and self.config_data["gw"]["pll"] is not None:
84 | args.append("--pll")
85 | args.append(self.config_data["gw"]["pll"])
86 | if "hard-sectors" in self.config_data["gw"] and self.config_data["gw"]["hard-sectors"] is not None:
87 | args.append("--hard-sectors")
88 | if "reverse" in self.config_data["gw"] and self.config_data["gw"]["reverse"] is not None:
89 | args.append("--reverse")
90 |
91 | # Add the file input as parameter
92 | if isinstance(data_in["data_files"]["flux"], list):
93 | args.append(f"{data_in["data_dir"]}/{data_in["data_files"]["flux"][0]}")
94 | else:
95 | args.append(f"{data_in["data_dir"]}/{data_in["data_files"]["flux"]}")
96 |
97 | # Add the file output as final parameter
98 | args.append(f"{data["data_dir"]}/{data["data_files"]["BINARY"]}")
99 |
100 | # Log all parameters to be passed to gw read
101 | self.log("floppy_gw_args",args,json_output=True)
102 |
103 | return args
104 |
105 | def convertData(self, data_in):
106 | """Use gw python modules to convert FLUX to BINARY
107 |
108 | """
109 |
110 | if self.config_data["convert_output"] == "img":
111 | data = {
112 | "type_id": "BINARY",
113 | "processed_by": [],
114 | "data_dir": self.ensureDir(f"{self.getPath()}/BINARY"),
115 | "data_files": {
116 | "BINARY": f"{self.project_dir}.img" # Reusing project dir for name
117 | }
118 | }
119 | else:
120 | data = {
121 | "type_id": "BINARY",
122 | "processed_by": [],
123 | "data_dir": self.ensureDir(f"{self.getPath()}/BINARY"),
124 | "data_files": {
125 | "BINARY": f"{self.project_dir}.{self.config_data["convert_output"]}" # Reusing project dir for name
126 | }
127 | }
128 |
129 |
130 | # Import greaseweazle convert module to decode the flux data
131 | mod = importlib.import_module('greaseweazle.tools.convert')
132 | main = mod.__dict__['main']
133 |
134 |
135 |
136 | # Don't re-convert flux
137 | if not os.path.exists(f"{data["data_dir"]}/{data["data_files"]["BINARY"]}"):
138 | # Run the gw convert process using the built arguments
139 | try:
140 | # Use default diskdef
141 | args = self.buildArgs(data_in, data)
142 | res = main(args)
143 | except Exception as e:
144 | # Use repo diskdef
145 | args = self.buildArgs(data_in, data,default_diskdef=False)
146 | res = main(args)
147 |
148 | # Return all generated data
149 | return [data]
150 |
151 |
152 |
153 |
--------------------------------------------------------------------------------
/handler/media/manager.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | # Media ripping manager for pyDiscRip. Can be used to rip a CD and fetch metadata
4 |
5 | # External Modules
6 | import time, sys
7 | import json
8 | from pprint import pprint
9 | import pathlib
10 | try:
11 | import pyudev
12 | except Exception as e:
13 | print("Need to install Python module [pyudev]")
14 | sys.exit(1)
15 |
16 | # Internal Modules
17 | from handler.media.optical import MediaOptical
18 | from handler.media.cd import MediaHandlerCD
19 | from handler.media.cd_redumper import MediaHandlerCDRedumper
20 | from handler.media.dvd import MediaHandlerDVD
21 | from handler.media.dvd_redumper import MediaHandlerDVDRedumper
22 | from handler.media.bd_redumper import MediaHandlerBDRedumper
23 | from handler.media.ddisk import MediaHandlerDDisk
24 | from handler.media.floppy import MediaHandlerFloppy
25 | # Testing only
26 | from handler.media.dummy import MediaHandlerDummy
27 |
28 | class MediaHandlerManager(object):
29 | """Manager for media types
30 |
31 | Provides process control functions for ripping different media types and
32 | setting configuration data.
33 | """
34 |
35 | def __init__(self):
36 | """Constructor to setup basic data and config defaults
37 |
38 | """
39 | # Call parent constructor
40 | super().__init__()
41 |
42 | # Add all supported media types
43 | self.media_types={}
44 | self.media_types["OPTICAL"] = MediaOptical()
45 | self.media_types["CD_cdrdao"] = MediaHandlerCD()
46 | self.media_types["CD_redumper"] = MediaHandlerCDRedumper()
47 | self.media_types["DVD"] = MediaHandlerDVD()
48 | self.media_types["DVD_redumper"] = MediaHandlerDVDRedumper()
49 | self.media_types["BD_redumper"] = MediaHandlerBDRedumper()
50 | self.media_types["DDISK"] = MediaHandlerDDisk()
51 | self.media_types["FLOPPY"] = MediaHandlerFloppy()
52 | # Testing only
53 | self.media_types["DUMMY"] = MediaHandlerDummy()
54 |
55 | def loadMediaType(self,media_sample,bypass=False,controller=None):
56 | """Match media handler to type and return handler
57 |
58 | """
59 | # Iterate through all handlers
60 | for type_id, media_type in self.media_types.items():
61 | # If handler can process media return it
62 | if media_type.mediaMatch(media_sample):
63 | # Set controller
64 | media_type.controller = controller
65 | return media_type.load(media_sample,bypass)
66 |
67 | # No handlers found
68 |
69 | def ejectMediaType(self,media_sample,controller=None):
70 | """Match media handler to type and return handler
71 |
72 | """
73 | print("Ejecting through manager")
74 | # Iterate through all handlers
75 | for type_id, media_type in self.media_types.items():
76 | # If handler can process media return it
77 | if media_type.mediaMatch(media_sample):
78 | print(f"Matched: {type_id}")
79 | # Set controller
80 | media_type.controller = controller
81 | media_type.eject(media_sample)
82 | return
83 |
84 | # Generic optical
85 | print("No match found, attempting generic optical")
86 | if self.typeIsOptical(media_sample):
87 | self.media_types["OPTICAL"].eject(media_sample)
88 | # No handlers found
89 | return
90 |
91 |
92 | def findMediaType(self,media_sample,config_data):
93 | """Match media handler to type and return handler
94 |
95 | """
96 | # Check if a media type was provided
97 | if "media_type" not in media_sample or media_sample["media_type"].upper() == "OPTICAL":
98 | # Access the drive associated to the media to determine the type
99 | print("Finding media type")
100 | media_sample["media_type"] = self.guessMediaType(media_sample["drive"])
101 |
102 | # Iterate through all handlers
103 | for type_id, media_type in self.media_types.items():
104 | # If handler can process media return it
105 | if media_type.mediaMatch(media_sample):
106 | if media_type.handler_id == None:
107 | return media_type
108 | if config_data["settings"]["media_handlers"][media_sample["media_type"]] == media_type.handler_id:
109 | return media_type
110 |
111 | # No handlers found
112 | print(f"No handlers found for following media sample:")
113 | pprint(media_sample)
114 | return None
115 |
116 |
117 | def configDump(self):
118 | """Get all config data for media handlers and dump it to json
119 |
120 | """
121 | config_options={}
122 | # Iterate through all handlers
123 | for type_id, media_type in self.media_types.items():
124 | # Add all config options for handler
125 | config_options[type_id]=media_type.configOptions()
126 |
127 | return config_options
128 |
129 |
130 | def guessMediaType(self,drivepath=None):
131 | """ Guess media type in drive which will determine how it is ripped
132 |
133 | Only useful for optical discs.
134 | """
135 |
136 | # Init udev interface to access drive
137 | context = pyudev.Context()
138 |
139 | # Countdown to assume it's a weird CD
140 | countdown = 10
141 |
142 | # Get info from device
143 | output = True
144 | while(output):
145 | print("FIND A DISC TYPE")
146 | #print(f"Drive path: {drivepath}")
147 |
148 | # Resolve any symlinks to the standard drive path
149 | drivepath=str(pathlib.Path(drivepath).resolve())
150 | # NOTE: Returns as list but we are accessing a specific device
151 | devices = context.list_devices(sys_name=drivepath.replace("/dev/",""))
152 | dev = next(iter(devices))
153 |
154 | #print(json.dumps(dict(dev.properties),indent=4))
155 | # Determine media type by ID
156 | if dev.properties.get("ID_CDROM_MEDIA_CD", False) or dev.properties.get("ID_CDROM_MEDIA_CD_R", False):
157 | media_type="CD"
158 | output = False
159 | print("Is CD")
160 | elif dev.properties.get("ID_CDROM_MEDIA_DVD", False):
161 | media_type="DVD"
162 | output = False
163 | print("Is DVD")
164 | elif dev.properties.get("ID_CDROM_MEDIA_BD", False):
165 | media_type="BD"
166 | output = False
167 |
168 | if output:
169 | countdown-=1
170 | if not countdown:
171 | media_type="CD"
172 | output = False
173 | print("Is probably a weird CD")
174 |
175 | #print(json.dumps(dict(dev.properties),indent=4))
176 | time.sleep(3)
177 |
178 | return media_type
179 |
180 | def typeIsOptical(self, media_sample):
181 |
182 | # Generic optical
183 | match media_sample["media_type"]:
184 | case "CD" | "DVD"| "BD" | "CD_cdrdao" | "CD_redumper":
185 | return True
186 | case _:
187 | return False
188 |
189 |
190 |
--------------------------------------------------------------------------------
/web/http/static/jsonForm.js:
--------------------------------------------------------------------------------
1 | /* jsonForm
2 | *
3 | * Takes a json url or data object and creates a form out of named keys.
4 | *
5 | * call prepare() on your instance of this class before submitting the form to build the data.
6 | *
7 | * Example:
8 | *