151 |
--------------------------------------------------------------------------------
/static/css/style.css:
--------------------------------------------------------------------------------
1 | .page-shell {
2 | min-height: 100vh;
3 | display: flex;
4 | flex-direction: column;
5 | }
6 |
7 | main.container {
8 | flex: 1 0 auto;
9 | min-height: 60vh;
10 | }
11 |
12 | .spool-container {
13 | gap: 10px;
14 | }
15 |
16 | #spool-list .spool-container {
17 | padding: 0.75rem;
18 | }
19 |
20 | #spool-list {
21 | max-height: none;
22 | overflow: visible;
23 | }
24 |
25 | #spool-list h6 {
26 | font-size: 1rem;
27 | margin-bottom: 0.1rem;
28 | }
29 |
30 | #spool-list small {
31 | font-size: 0.9rem;
32 | }
33 |
34 | #spool-list .spool-actions {
35 | flex-shrink: 0;
36 | }
37 |
38 | #spool-list .spool-action-link {
39 | text-decoration: none;
40 | }
41 |
42 | .spool-icon {
43 | display: flex;
44 | width: 2.5em;
45 | height: 2em;
46 | gap: 2px;
47 | margin: 0 .5em
48 | }
49 |
50 | .spool-icon.vertical {
51 | flex-direction: column
52 | }
53 |
54 | .spool-icon.small {
55 | width: 20px;
56 | margin: 0;
57 | height: 50px;
58 | }
59 |
60 | .slicer-color-stack {
61 | display: flex;
62 | flex-direction: column;
63 | align-items: center;
64 | justify-content: flex-start;
65 | gap: 4px;
66 | flex-shrink: 0;
67 | }
68 |
69 | .slicer-color-badge {
70 | width: 20px;
71 | height: 20px;
72 | border-radius: 4px;
73 | border: #44444430 solid 1px;
74 | box-sizing: border-box;
75 | background-color: #000;
76 | }
77 |
78 | .spool-icon.large {
79 | width: 4em;
80 | height: 4em
81 | }
82 |
83 | .spool-icon * {
84 | flex: 1 1 0px;
85 | border-radius: 2px;
86 | border: #44444430 solid 2px
87 | }
88 |
89 | .tray-color-wrapper {
90 | position: relative;
91 | display: inline-block;
92 | }
93 |
94 | .tray-color-swatch {
95 | width: 24px;
96 | height: 24px;
97 | display: inline-block;
98 | }
99 |
100 | .tray-color-warning {
101 | position: absolute;
102 | top: -6px;
103 | right: -6px;
104 | font-size: 0.9rem;
105 | line-height: 1;
106 | }
107 |
108 | .spool-icon.vertical *:first-child {
109 | border-top-left-radius: 6px;
110 | border-top-right-radius: 6px
111 | }
112 |
113 | .spool-icon.vertical *:last-child {
114 | border-bottom-left-radius: 6px;
115 | border-bottom-right-radius: 6px
116 | }
117 |
118 | .spool-icon.horizontal *:first-child {
119 | border-top-left-radius: 6px;
120 | border-bottom-left-radius: 6px
121 | }
122 |
123 | .spool-icon.horizontal *:last-child {
124 | border-top-right-radius: 6px;
125 | border-bottom-right-radius: 6px
126 | }
127 |
128 | .ams-card .tray-card {
129 | height: 100%;
130 | }
131 |
132 | .ams-card .tray-card .tray-empty {
133 | height: 100%;
134 | align-items: center;
135 | display: flex;
136 | justify-content: center;
137 | }
138 |
139 |
.print-history {
  display: flex;
  flex-direction: column;
  gap: 15px;
}
.print-job {
  padding: 15px;
  border-radius: 8px;
}

/* Print information area */
.print-info {
  grid-area: info;
  padding: 10px;
  border-radius: 5px;
}

/* Filament container */
.filament-container {
  grid-area: filament;
  display: grid;
  grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
  gap: 10px;
  padding: 10px;
  border-radius: 5px;
}

.filament-info {
  display: flex;
  flex-direction: row;
  flex-wrap: nowrap;
  justify-content: flex-start;
  align-items: center;
}

.filament-info > div {
  margin-right: 10px !important;
}

.filament-badge {
  width: 20px;
  height: 50px;
}

/* Layout for the print grid structure */
.print-grid {
  display: grid;
  grid-template-columns: 2fr 1fr; /* Two columns: info and image */
  grid-template-rows: auto; /* Height driven by the content */
  grid-template-areas:
    "info image"; /* Layout: info on the left, image on the right */
  /* NOTE(review): .filament-container targets grid-area "filament", which is
     not declared in this desktop template — it only exists in the <=768px
     override below, so on wide screens it is auto-placed; confirm whether
     "filament" should be part of this template too. */
  gap: 10px;
  align-items: start; /* Align items to the top */
}

/* Print information area */
.printinfo {
  grid-area: info;
  border-radius: 5px;
}

/* Image area */
.print-image {
  grid-area: image;
  display: flex;
  justify-content: center;
  align-items: center;
  padding: 10px;
  border-radius: 5px;
  height: auto; /* Height follows the container */
}

.print-image div {
  height: 100%;
}

.print-image img {
  max-width: 100%; /* Keep the image no wider than its container */
  max-height: 100%; /* Keep the image no taller than its container */
  height: auto; /* Preserve the aspect ratio */
  width: auto; /* Preserve the aspect ratio */
  object-fit: contain; /* Keep the image contained within the box */
}

/* Responsive adjustments */
@media (max-width: 768px) {
  .print-grid {
    grid-template-columns: 1fr;
    grid-template-areas:
      "info"
      "filament"
      "image";
  }
  .filament-container {
    grid-template-columns: 1fr;
  }
}
237 |
238 | @media (max-width: 575.98px) {
239 | .label-print-value {
240 | display: flex;
241 | flex-direction: column;
242 | align-items: flex-start;
243 | }
244 | .label-print-inline {
245 | display: none;
246 | }
247 | .label-print-stacked {
248 | display: block;
249 | }
250 | }
251 |
252 | @media (min-width: 576px) {
253 | .label-print-value {
254 | display: ruby;
255 | }
256 | .label-print-inline {
257 | display: inline;
258 | }
259 | .label-print-stacked {
260 | display: none;
261 | }
262 | }
263 |
/* Compact spool-list layout on phones.
   Breakpoint changed from 576px to 575.98px so this block no longer
   overlaps the (min-width: 576px) rules at exactly 576px wide, and so it
   matches the 575.98px breakpoint already used for .label-print-* above
   (Bootstrap's sm boundary convention). */
@media (max-width: 575.98px) {
  #spool-list .spool-container {
    padding: 0.5rem;
  }

  #spool-list h6 {
    font-size: 0.95rem;
  }

  #spool-list small {
    font-size: 0.8rem;
  }

  #spool-list .badge {
    font-size: 0.8rem;
  }

  #spool-list .badge.d-inline-block {
    width: 16px !important;
    height: 40px !important;
  }

  .spool-icon.small {
    width: 16px;
    height: 40px;
  }

  #spool-list .spool-actions {
    align-items: stretch;
  }

  #spool-list .spool-action-link {
    width: 100%;
  }
}
299 |
300 | /* AMS humidity visualization */
301 | .ams-humidity-display {
302 | gap: 0.35rem !important;
303 | }
304 |
305 | .humidity-droplet-wrapper {
306 | position: relative;
307 | display: inline-flex;
308 | align-items: center;
309 | justify-content: center;
310 | width: 32px;
311 | height: 32px;
312 | }
313 |
314 | .humidity-droplet {
315 | width: 28px;
316 | height: 28px;
317 | }
318 |
319 | .droplet-base {
320 | fill: transparent;
321 | }
322 |
323 | .droplet-fill {
324 | fill: var(--bs-info, #0dcaf0);
325 | opacity: 0.8;
326 | }
327 |
328 | .droplet-outline {
329 | fill: none;
330 | stroke: #000;
331 | stroke-width: 0.8;
332 | }
333 |
334 | .humidity-level-number {
335 | position: absolute;
336 | font-weight: 700;
337 | font-size: 0.85rem;
338 | color: var(--bs-body-color);
339 | line-height: 1;
340 | left: 50%;
341 | top: 60%;
342 | transform: translate(-50%, -50%);
343 | }
344 |
--------------------------------------------------------------------------------
/templates/fragments/list_spools.html:
--------------------------------------------------------------------------------
1 | {% set materials = materials or [] %}
2 | {% set selected_materials = selected_materials or [] %}
3 | {% set action_bambu_link = action_bambu_link or False %}
4 |
5 |
6 | {% if spools|length == 0 or not spools %}
7 |
34 | {% for spool in spools %}
35 |
36 |
38 |
39 |
40 | {% if "multi_color_hexes" in spool.filament and spool.filament.multi_color_hexes is iterable and spool.filament.multi_color_hexes is not string%}
41 |
42 | {% if spool.filament.multi_color_direction == "coaxial" %}
43 |
44 | {% else %}
45 |
46 | {% endif %}
47 |
48 | {% for color in spool.filament.multi_color_hexes %}
49 |
50 | {% endfor %}
51 |
52 | {% else %}
53 |
55 |
56 | {% endif %}
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 | #{{ spool.id }}
65 |
66 | {{ spool.filament.material }} - {{ spool.filament.vendor.name }}
67 |
68 | {{ spool.filament.name }} - {{ spool.remaining_weight|round(0)|int }} g left
69 |
70 |
71 |
119 |
120 | {% endfor %}
121 |
122 | {% endif %}
123 |
124 |
129 |
130 |
209 |
--------------------------------------------------------------------------------
/tests/test_filament_mismatch.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | import pytest
4 |
5 | import spoolman_service as svc
6 |
7 |
8 | def _make_tray(tray_type, tray_sub_brands, tray_id="tray-1"):
9 | return {
10 | "tray_type": tray_type,
11 | "tray_sub_brands": tray_sub_brands,
12 | "tray_color": "FFFFFF",
13 | "id": tray_id,
14 | }
15 |
16 |
def _make_spool(material, extra_type, ams_id=0, tray_id="tray-1", spool_id=1, spool_extra_type=None):
    """Build a minimal SpoolMan spool dict assigned to the given AMS tray."""
    extra = {"active_tray": json.dumps(svc.trayUid(ams_id, tray_id))}
    # SpoolMan may carry a type in spool.extra; include it when provided.
    # An explicit spool_extra_type wins over extra_type; empty strings are
    # only included when passed via spool_extra_type.
    if spool_extra_type is not None:
        extra["type"] = spool_extra_type
    elif extra_type:
        extra["type"] = extra_type

    filament = {
        "name": "Test",
        "vendor": {"name": "Vendor"},
        "material": material,
        "extra": {} if extra_type is None else {"type": extra_type},
        "color_hex": "FFFFFF",
    }

    return {
        "id": spool_id,
        "remaining_weight": 1000,
        "extra": extra,
        "filament": filament,
    }
34 |
35 |
def _run_case(tray, spool, ams_id=0, tray_id="tray-1"):
    """Run augmentTrayDataWithSpoolMan on one spool/tray pair and return the tray.

    Temporarily stubs out svc._log_filament_mismatch so the tests never write
    mismatch log files. The original helper previously replaced the module
    attribute permanently, leaking the stub into every later test in the
    session; it is now restored in a finally block.
    """
    spool_list = [spool]
    original_logger = svc._log_filament_mismatch
    # avoid file writes during tests
    svc._log_filament_mismatch = lambda *args, **kwargs: None
    try:
        svc.augmentTrayDataWithSpoolMan(spool_list, tray, ams_id, tray_id)
    finally:
        svc._log_filament_mismatch = original_logger
    return tray
42 |
43 |
def test_match_with_extra_type():
    """A spool whose filament.extra type completes the tray sub-brand matches cleanly."""
    tray = _make_tray("PLA", "PLA CF")
    spool = _make_spool("PLA", "CF")
    result = _run_case(tray, spool)

    assert result["matched"] is True
    assert result["mismatch"] is False
    assert result["tray_sub_brand"] == "CF"
    assert result["spool_sub_brand"] == "CF"
53 |
54 |
def test_match_when_material_contains_subtype():
    """Subtype embedded in the spool material ("PLA CF") satisfies the tray sub-brand."""
    tray = _make_tray("PLA", "PLA CF")
    spool = _make_spool("PLA CF", "-")
    result = _run_case(tray, spool)

    assert result["matched"] is True
    assert result["mismatch"] is False
    assert result["tray_sub_brand"] == "CF"
    assert result["spool_sub_brand"] == "CF"
64 |
65 |
def test_mismatch_when_subtype_missing_on_spool():
    """Tray expects a CF subtype; a plain PLA spool still matches but flags a mismatch."""
    tray = _make_tray("PLA", "PLA CF")
    spool = _make_spool("PLA", "")
    result = _run_case(tray, spool)

    assert result["matched"] is True
    assert result["mismatch"] is True
    assert result["tray_sub_brand"] == "CF"
    assert result["spool_sub_brand"] == ""
75 |
76 |
def test_material_mismatch_even_if_subtype_matches():
    """A wrong main material (ABS vs PLA) is a mismatch even when subtypes agree."""
    tray = _make_tray("PLA", "PLA CF")
    spool = _make_spool("ABS CF", "CF")
    result = _run_case(tray, spool)

    assert result["matched"] is True
    assert result["mismatch"] is True
    assert result["tray_sub_brand"] == "CF"
    assert result["spool_sub_brand"] == "CF"
86 |
87 |
def test_variant_type_requires_exact_material_match():
    """Variant trays (PLA-S) are not satisfied by the base material (PLA)."""
    tray = _make_tray("PLA-S", "Support for PLA")
    spool = _make_spool("PLA", "Support for PLA")
    result = _run_case(tray, spool)

    assert result["matched"] is True
    assert result["mismatch"] is True  # main type differs because tray expects PLA-S
95 |
96 |
def test_variant_type_matches_when_spool_material_exact():
    """An exact PLA-S material on the spool satisfies a PLA-S tray."""
    tray = _make_tray("PLA-S", "Support for PLA")
    spool = _make_spool("PLA-S", "Support for PLA")
    result = _run_case(tray, spool)

    assert result["matched"] is True
    assert result["mismatch"] is False
104 |
105 |
def test_mismatch_warning_can_be_disabled(monkeypatch):
    """With DISABLE_MISMATCH_WARNING the mismatch is still detected but hidden in the UI."""
    monkeypatch.setattr(svc, "DISABLE_MISMATCH_WARNING", True)
    tray = _make_tray("PLA", "PLA CF")
    spool = _make_spool("PLA", "")
    result = _run_case(tray, spool)

    assert result["mismatch_detected"] is True
    assert result["mismatch"] is False  # hidden in UI
114 |
115 |
# Tabular fixture covering the stock BambuLab filament profiles: each row is
# replayed as a tray/spool pairing and checked against expected_match.
BAMBULAB_BASE_MAPPINGS = [
    # tray_type, tray_sub_brands, spool_material, spool_type, expected_match
    ("ABS", "", "ABS", "", True),
    ("ABS-GF", "", "ABS-GF", "", True),
    ("ASA", "", "ASA", "", True),
    ("ASA-AERO", "", "ASA-AERO", "", True),
    ("ASA-CF", "", "ASA-CF", "", True),
    ("PA-CF", "", "PA-CF", "", True),
    ("PA6-CF", "", "PA6-CF", "", True),
    ("PA-GF", "", "PA-GF", "", True),
    # NOTE(review): duplicate of the PA-CF row above — presumably kept to
    # document that PAHT-CF spools report as PA-CF; confirm and dedupe.
    ("PA-CF", "", "PA-CF", "", True),  # PAHT-CF uses PA-CF
    ("PC", "", "PC", "", True),
    ("PC", "PC FR", "PC FR", "", True),
    ("PET-CF", "", "PET-CF", "", True),
    ("PETG", "PETG Basic", "PETG Basic", "", True),
    ("PETG", "PETG HF", "PETG HF", "", True),
    ("PETG", "PETG Translucent", "PETG Translucent", "", True),
    ("PETG-CF", "", "PETG-CF", "", True),

    # tray value variants
    ("PLA", "PLA Basic", "PLA Basic", "", True),
    ("PLA", "PLA Basic", "PLA", "", True),
    ("PLA", "PLA Basic", "PLA", "Basic", True),
    ("PLA", "PLA Basic", "PLA", "-", True),
    ("PLA", "", "PLA", "Basic", True),
    ("PLA", "", "PLA", "-", True),
    ("PLA", "", "PLA", "", True),

    ("PLA", "PLA Aero", "PLA Aero", "", True),
    ("PLA", "PLA Dynamic", "PLA Dynamic", "", True),
    ("PLA", "PLA Galaxy", "PLA Galaxy", "", True),
    ("PLA", "PLA Glow", "PLA Glow", "", True),
    ("PLA", "PLA Impact", "PLA Impact", "", True),
    ("PLA", "PLA Lite", "PLA Lite", "", True),
    ("PLA", "PLA Marble", "PLA Marble", "", True),
    ("PLA", "PLA Matte", "PLA Matte", "", True),
    ("PLA", "PLA Metal", "PLA Metal", "", True),
    ("PLA", "PLA Silk", "PLA Silk", "", True),
    ("PLA", "PLA Silk+", "PLA Silk+", "", True),
    ("PLA", "PLA Sparkle", "PLA Sparkle", "", True),
    ("PLA", "PLA Tough", "PLA Tough", "", True),
    ("PLA", "PLA Tough+", "PLA Tough+", "", True),
    ("PLA", "PLA Translucent", "PLA Translucent", "", True),
    ("PLA", "PLA Wood", "PLA Wood", "", True),
    ("PLA-CF", "", "PLA-CF", "", True),
    ("PPA-CF", "", "PPA-CF", "", True),
    ("PPA-GF", "", "PPA-GF", "", True),
    ("PPS-CF", "", "PPS-CF", "", True),
    ("PVA", "", "PVA", "", True),

    # variant type matches (Support)
    ("PLA-S", "Support For PLA", "PLA-S", "Support For PLA", True),
    ("PLA-S", "Support For PLA", "PLA-S", "-", True),
    ("PLA-S", "Support For PLA", "PLA-S", "", True),

    ("ABS-S", "Support for ABS", "ABS-S", "Support for ABS", True),
    ("Support", "Support For PA PET", "Support For PA PET", "", True),
    ("Support", "Support For PLA-PETG", "Support For PLA-PETG", "", True),
    ("Support", "Support G", "Support G", "", True),
    ("Support", "Support W", "Support W", "", True),
    ("TPU", "TPU 85A", "TPU 85A", "", True),
    ("TPU", "TPU 90A", "TPU 90A", "", True),
    ("TPU", "TPU 95A", "TPU 95A", "", True),
    ("TPU", "TPU 95A HF", "TPU 95A HF", "", True),
    ("TPU-AMS", "", "TPU-AMS", "", True),
    # Example mismatch: wrong subtype despite matching main
    ("PLA", "PLA CF", "PLA CF", "Wood", False),
]
184 |
185 |
def test_bambu_base_profiles_match_tray_expectations():
    """Replay every BambuLab base-profile mapping and report all deviations at once."""
    problems = []
    for tray_type, tray_sub_brands, spool_material, spool_type, expected_match in BAMBULAB_BASE_MAPPINGS:
        spool = _make_spool(spool_material, spool_type, spool_extra_type=spool_type)
        result = _run_case(_make_tray(tray_type, tray_sub_brands), spool)

        ctx = (
            f"(tray_type={tray_type!r}, tray_sub_brands={tray_sub_brands!r}, "
            f"spool_material={spool_material!r}, spool_type={spool_type!r})"
        )
        if result["matched"] is not True:
            problems.append(f"{ctx} did not match tray/spool assignment")
            continue

        mismatch_seen = bool(result.get("mismatch_detected"))
        if expected_match and mismatch_seen:
            problems.append(f"{ctx} unexpectedly triggered mismatch")
        elif not expected_match and not mismatch_seen:
            problems.append(f"{ctx} should trigger mismatch but did not")

    if problems:
        pytest.fail("\n".join(problems))
206 |
--------------------------------------------------------------------------------
/templates/base.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
OpenSpoolMan
10 |
11 |
12 |
13 |
14 |
15 |
25 |
26 |
27 |
48 |
112 |
113 | {% if success_message %}
114 |
115 | Success! {{ success_message }}
116 |
117 |
118 | {% endif %}
119 | {% block content %}{% endblock %}
120 |
121 |
122 |
131 |
132 |
133 |
185 |
186 |
187 |
--------------------------------------------------------------------------------
/tests/test_mqtt_replay.py:
--------------------------------------------------------------------------------
1 | import copy
2 | import json
3 | import logging
4 | import os
5 | import shutil
6 | import tempfile
7 | from pathlib import Path
8 |
9 | import pytest
10 |
11 | import mqtt_bambulab
12 | from filament_usage_tracker import FilamentUsageTracker
13 | import tools_3mf
14 | import spoolman_client
15 | import spoolman_service
16 | from config import TRACK_LAYER_USAGE
17 |
18 |
19 | LOG_ROOT = Path(__file__).resolve().parent / "MQTT"
20 |
21 |
# One mocked SpoolMan spool, pre-assigned to AMS 0 / tray 3 via the
# "active_tray" extra field, so replayed MQTT logs can resolve tray -> spool.
MOCK_SPOOLS = [
    {
        "id": 3,
        "filament": {
            "name": "PLA Sample Filament",
            "vendor": {"name": "OpenSpoolMan"},
            "material": "PLA",
            "color_hex": "FF5733",
            "extra": {"type": "PLA Basic"},
        },
        "initial_weight": 1000,
        "price": 30,
        "remaining_weight": 995,
        "extra": {
            "active_tray": json.dumps(spoolman_service.trayUid(0, 3)),
        },
    },
]
40 |
41 |
def _iter_log_files():
    """Resolve which MQTT replay logs to run, honouring env-var filters."""
    env_file = os.getenv("MQTT_LOG_FILE")
    if env_file:
        # A single explicit log file; relative paths resolve against LOG_ROOT.
        candidate = Path(env_file)
        return [candidate if candidate.is_absolute() else LOG_ROOT / candidate]

    printer = os.getenv("MQTT_LOG_PRINTER")
    firmware = os.getenv("MQTT_LOG_FIRMWARE")
    if printer or firmware:
        # Narrow the search to a printer and/or firmware subdirectory.
        search_root = LOG_ROOT
        if printer:
            search_root = search_root / printer
        if firmware:
            search_root = search_root / firmware
        return sorted(search_root.glob("*.log"))

    # Default: every log under <printer>/<firmware>/.
    return sorted(LOG_ROOT.glob("*/*/*.log"))
61 |
62 |
def _load_expected(log_path: Path) -> dict:
    """Load the sibling .expected.json for a replay log; skip the test when absent."""
    expected_path = log_path.with_suffix(".expected.json")
    if expected_path.exists():
        return json.loads(expected_path.read_text())
    pytest.skip(f"Missing expected result file: {expected_path}")
68 |
69 |
def _base_model_from_log(log_path: Path) -> Path:
    """Map a replay log to its source .gcode.3mf model (strips _local/_cloud suffix)."""
    stem = log_path.stem
    for marker in ("_local", "_cloud"):
        position = stem.find(marker)
        if position != -1:
            stem = stem[:position]
            break
    return LOG_ROOT / f"{stem}.gcode.3mf"
79 |
80 |
def _stub_spoolman(monkeypatch):
    """Stub all SpoolMan-facing calls so replays never hit the network.

    fetchSpools/fetchSpoolList return deep copies of MOCK_SPOOLS so each test
    mutates its own data; billing and tray-update calls become no-ops.
    """
    # Disable any real billing/network calls.
    monkeypatch.setattr(spoolman_client, "consumeSpool", lambda *args, **kwargs: None)
    monkeypatch.setattr("filament_usage_tracker.consumeSpool", lambda *args, **kwargs: None)
    monkeypatch.setattr(spoolman_service, "setActiveTray", lambda *args, **kwargs: None)
    monkeypatch.setattr(spoolman_service, "spendFilaments", lambda *args, **kwargs: None)
    monkeypatch.setattr(mqtt_bambulab, "fetchSpools", lambda *args, **kwargs: copy.deepcopy(MOCK_SPOOLS))
    monkeypatch.setattr(mqtt_bambulab, "setActiveTray", lambda *args, **kwargs: None)
    monkeypatch.setattr(mqtt_bambulab, "spendFilaments", lambda *args, **kwargs: None)
    monkeypatch.setattr(spoolman_client, "fetchSpoolList", lambda *args, **kwargs: copy.deepcopy(MOCK_SPOOLS))
91 |
92 |
def _stub_history(monkeypatch):
    """Stub the print-history DB writers so replays leave the database untouched."""
    # Keep DB untouched.
    monkeypatch.setattr("print_history.insert_print", lambda *args, **kwargs: 1)
    monkeypatch.setattr("print_history.insert_filament_usage", lambda *args, **kwargs: None)
    monkeypatch.setattr("print_history.update_filament_spool", lambda *args, **kwargs: None)
    monkeypatch.setattr("print_history.update_filament_grams_used", lambda *args, **kwargs: None)
    monkeypatch.setattr("print_history.update_layer_tracking", lambda *args, **kwargs: None)
100 |
101 |
def _build_fake_get_meta(model_path: Path):
    """Return a getMetaDataFrom3mf stand-in that always parses `model_path`.

    The closure captures the real parser before patching so the replacement
    can delegate to it via the "local:" scheme, ignoring the URL that the
    MQTT payload supplied.
    """
    original_get_meta = tools_3mf.getMetaDataFrom3mf
    def _fake(_url: str):
        if not model_path.exists():
            raise FileNotFoundError(f"Test 3MF not found: {model_path}")
        return original_get_meta(f"local:{model_path}")
    return _fake
109 |
110 |
@pytest.mark.parametrize("log_path", _iter_log_files(), ids=lambda p: p.name)
def test_mqtt_log_tray_detection(log_path, monkeypatch, caplog):
    """Replay a recorded MQTT log and verify filament -> tray assignments.

    Each .log file has a sibling .expected.json describing the expected AMS
    mapping; the matching .gcode.3mf model is fed to the metadata parser in
    place of a real printer download. Assignments are captured from three
    places: map_filament calls, PENDING_PRINT_METADATA, and the raw
    project_file payload; the layer-usage tracker mapping is checked too.
    """
    expected = _load_expected(log_path)
    expected_assignments_raw = expected.get("expected_assignments") or {}
    # Keep both a str-keyed view (UI/metadata path) and an int-keyed view
    # (tracker list indices) of the same expected mapping.
    expected_assignments = {str(k): str(v) for k, v in expected_assignments_raw.items()}
    expected_assignments_by_index = {int(k): str(v) for k, v in expected_assignments_raw.items()}

    model_path = _base_model_from_log(log_path)
    if not model_path.exists():
        pytest.skip(f"Missing test model: {model_path}")

    # Work on a throwaway copy of the model so the fixture stays pristine.
    temp_file = tempfile.NamedTemporaryFile(suffix=".3mf", delete=False)
    temp_file.close()
    temp_model_path = Path(temp_file.name)
    shutil.copy2(model_path, temp_model_path)

    _stub_spoolman(monkeypatch)
    _stub_history(monkeypatch)

    monkeypatch.setattr("mqtt_bambulab.getMetaDataFrom3mf", _build_fake_get_meta(temp_model_path))

    # Reset MQTT state to get a clean replay.
    mqtt_bambulab.PRINTER_STATE = {}
    mqtt_bambulab.PRINTER_STATE_LAST = {}
    mqtt_bambulab.PENDING_PRINT_METADATA = {}

    assignments = {}
    last_tracker_mapping: list[int] | None = None

    mqtt_bambulab.FILAMENT_TRACKER = FilamentUsageTracker()

    original_map_filament = mqtt_bambulab.map_filament

    # Wrap map_filament to snapshot the pending ams_mapping each time it runs.
    def _record_map(tray_tar):
        result = original_map_filament(tray_tar)
        metadata = mqtt_bambulab.PENDING_PRINT_METADATA or {}
        for idx, tray in enumerate(metadata.get("ams_mapping", [])):
            assignments[str(idx)] = str(tray)
        return result

    monkeypatch.setattr(mqtt_bambulab, "map_filament", _record_map)
    original_resolve = FilamentUsageTracker._resolve_tray_mapping

    # Also record resolutions made by the layer-usage tracker.
    def _record_resolve(self, filament_index):
        result = original_resolve(self, filament_index)
        if result is not None:
            assignments[str(filament_index)] = str(result)
        return result

    monkeypatch.setattr(FilamentUsageTracker, "_resolve_tray_mapping", _record_resolve)
    monkeypatch.setattr(FilamentUsageTracker, "_retrieve_model", lambda self, _: str(temp_model_path))

    try:
        with log_path.open() as handle:
            for line in handle:
                # Log lines look like "<prefix>:: {json payload}".
                if "::" not in line:
                    continue
                try:
                    payload = json.loads(line.split("::", 1)[1].strip())
                except Exception:
                    continue

                mqtt_bambulab.processMessage(payload)
                mqtt_bambulab.FILAMENT_TRACKER.on_message(payload)
                metadata = mqtt_bambulab.PENDING_PRINT_METADATA
                if metadata:
                    for idx, tray in enumerate(metadata.get("ams_mapping", [])):
                        assignments[str(idx)] = str(tray)
                print_obj = payload.get("print", {})
                if print_obj.get("command") == "project_file":
                    # The raw project_file payload is the authoritative mapping.
                    ams_mapping = print_obj.get("ams_mapping") or []
                    for idx, tray in enumerate(ams_mapping):
                        assignments[str(idx)] = str(tray)
                tracker_mapping = mqtt_bambulab.FILAMENT_TRACKER.ams_mapping
                if tracker_mapping:
                    last_tracker_mapping = list(tracker_mapping)
    finally:
        temp_model_path.unlink(missing_ok=True)

    if expected_assignments:
        missing = {
            filament: expected_tray
            for filament, expected_tray in expected_assignments.items()
            if assignments.get(filament) != expected_tray
        }
        extra = {
            filament: tray
            for filament, tray in assignments.items()
            if filament not in expected_assignments
        }
        assert not missing and not extra, (
            f"{log_path.name}: assignment mismatch missing={missing} extra={extra} "
            f"vs expected={expected_assignments} actual={assignments}")

    if TRACK_LAYER_USAGE and expected_assignments_by_index:
        tracker_mapping = last_tracker_mapping or []
        tracker_mapping_str = [str(value) for value in tracker_mapping]
        missing_tracker = {
            str(idx): expected_tray
            for idx, expected_tray in expected_assignments_by_index.items()
            if idx >= len(tracker_mapping_str) or tracker_mapping_str[idx] != expected_tray
        }
        assert not missing_tracker, (
            f"{log_path.name}: filament tracker mapping mismatch missing={missing_tracker} "
            f"tracker_mapping={tracker_mapping_str} expected={expected_assignments}")

    caplog.set_level(logging.INFO)
    logging.getLogger(__name__).info(
        "Log %s -> assignments=%s expected_assignments=%s",
        log_path.name,
        assignments,
        expected_assignments,
    )
224 |
--------------------------------------------------------------------------------
/tools_3mf.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import zipfile
3 | import tempfile
4 | import xml.etree.ElementTree as ET
5 | import pycurl
6 | import urllib.parse
7 | import os
8 | import re
9 | import time
10 | from datetime import datetime
11 | from config import PRINTER_CODE, PRINTER_IP
12 | from urllib.parse import urlparse
13 |
def parse_ftp_listing(line):
    """Parse one line of Unix-style FTP LIST output into a dict, or None if malformed."""
    fields = line.split(maxsplit=8)
    if len(fields) < 9:
        return None
    permissions, links, owner, group, size, month, day, time_or_year, name = fields
    return {
        'permissions': permissions,
        'links': int(links),
        'owner': owner,
        'group': group,
        'size': int(size),
        'month': month,
        'day': int(day),
        'time_or_year': time_or_year,
        'name': name,
    }
30 |
def get_base_name(filename):
    """Strip the final extension from a filename (everything after the last dot)."""
    base, dot, _ext = filename.rpartition('.')
    return base if dot else filename
33 |
def parse_date(item):
    """Parse the date and time from an FTP listing item.

    Unix FTP LIST output shows "Mon DD HH:MM" for recent entries but
    "Mon DD YYYY" for older ones; the original code only handled the first
    form and silently returned None for the second, so old files could not
    be date-sorted. Both forms are now accepted.

    Returns a datetime (year 1900 for the HH:MM form, as before), or None
    when the fields cannot be parsed.
    """
    date_str = f"{item['month']} {item['day']} {item['time_or_year']}"
    for fmt in ("%b %d %H:%M", "%b %d %Y"):
        try:
            return datetime.strptime(date_str, fmt)
        except ValueError:
            continue
    return None
41 |
def get_filament_order(file):
    """Map filament slot -> switch order from M620 tool-change lines in a gcode stream.

    `file` yields raw byte lines. Falls back to {1: 0} when no M620 command
    is present (single-filament print).
    """
    order = {}
    for raw_line in file:
        text = raw_line.decode("utf-8").strip()
        found = re.match(r"^M620 S(\d+)[^;\r\n]*", text)
        if not found:
            continue
        slot = int(found.group(1))
        # 255 is the "no filament" sentinel; record each real slot once,
        # numbered by the order of its first appearance.
        if slot not in order and slot != 255:
            order[slot] = len(order)

    return order or {1: 0}
58 |
def download3mfFromCloud(url, destFile):
    """Download a 3MF file over HTTP(S) and write it into the open file object.

    Raises requests.HTTPError on non-2xx responses and requests.Timeout when
    the server stalls.
    """
    print("Downloading 3MF file from cloud...")
    # requests.get has no default timeout, so a hung connection previously
    # blocked this thread forever; bound connect and read phases explicitly.
    response = requests.get(url, timeout=(10, 60))
    response.raise_for_status()
    destFile.write(response.content)
65 |
def download3mfFromFTP(filename, destFile):
    """Download /cache/<filename> from the printer via explicit FTPS into destFile.

    Uses pycurl with certificate verification disabled (printers present
    self-signed certs). cURL errors are printed, not raised, so callers may
    receive an empty or partial file on failure — NOTE(review): confirm this
    best-effort behavior is intentional.
    """
    print("Downloading 3MF file from FTP...")
    ftp_host = PRINTER_IP
    ftp_user = "bblp"  # fixed LAN-mode user on Bambu printers
    ftp_pass = PRINTER_CODE
    remote_path = "/cache/" + filename
    local_path = destFile.name # 🔹 Download into the current directory
    # Percent-encode so spaces etc. in the filename survive the URL.
    encoded_remote_path = urllib.parse.quote(remote_path)
    with open(local_path, "wb") as f:
        c = pycurl.Curl()
        url = f"ftps://{ftp_host}{encoded_remote_path}"

        # 🔹 Setup explicit FTPS connection (like FileZilla)
        c.setopt(c.URL, url)
        c.setopt(c.USERPWD, f"{ftp_user}:{ftp_pass}")
        c.setopt(c.WRITEDATA, f)

        # 🔹 Enable SSL/TLS
        c.setopt(c.SSL_VERIFYPEER, 0) # Disable SSL verification
        c.setopt(c.SSL_VERIFYHOST, 0)

        # 🔹 Enable passive mode (like FileZilla)
        c.setopt(c.FTP_SSL, c.FTPSSL_ALL)

        # 🔹 Enable proper TLS authentication
        c.setopt(c.FTPSSLAUTH, c.FTPAUTH_TLS)

        print("[DEBUG] Starting file download...")

        try:
            c.perform()
            print("[DEBUG] File successfully downloaded!")
        except pycurl.error as e:
            print(f"[ERROR] cURL error: {e}")

        c.close()
102 |
def download3mfFromLocalFilesystem(path, destFile):
    """Copy the 3MF file at `path` into the open writable file object `destFile`."""
    with open(path, "rb") as source:
        destFile.write(source.read())
106 |
107 | def getMetaDataFrom3mf(url):
108 | """
109 | Download a 3MF file from a URL, unzip it, and parse filament usage.
110 |
111 | Args:
112 | url (str): URL to the 3MF file.
113 |
114 | Returns:
115 | list[dict]: List of dictionaries with `tray_info_idx` and `used_g`.
116 | """
117 | try:
118 | metadata = {}
119 |
120 | # Create a temporary file
121 | with tempfile.NamedTemporaryFile(delete_on_close=False,delete=True, suffix=".3mf") as temp_file:
122 | temp_file_name = temp_file.name
123 |
124 | if url.startswith("http"):
125 | download3mfFromCloud(url, temp_file)
126 | elif url.startswith("local:"):
127 | download3mfFromLocalFilesystem(url.replace("local:", ""), temp_file)
128 | else:
129 | download3mfFromFTP(url.replace("ftp://", "").replace(".gcode",""), temp_file)
130 |
131 | temp_file.close()
132 | metadata["model_path"] = url
133 |
134 | parsed_url = urlparse(url)
135 | metadata["file"] = os.path.basename(parsed_url.path)
136 |
137 | print(f"3MF file downloaded and saved as {temp_file_name}.")
138 |
139 | # Unzip the 3MF file
140 | with zipfile.ZipFile(temp_file_name, 'r') as z:
141 | # Check for the Metadata/slice_info.config file
142 | slice_info_path = "Metadata/slice_info.config"
143 | if slice_info_path in z.namelist():
144 | with z.open(slice_info_path) as slice_info_file:
145 | # Parse the XML content of the file
146 | tree = ET.parse(slice_info_file)
147 | root = tree.getroot()
148 |
149 | # Extract id and used_g from each filament
150 | """
151 |
152 |
153 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 | """
189 |
190 | for meta in root.findall(".//plate/metadata"):
191 | if meta.attrib.get("key") == "index":
192 | metadata["plateID"] = meta.attrib.get("value", "")
193 |
194 | usage = {}
195 | filaments= {}
196 | filamentId = 1
197 | for plate in root.findall(".//plate"):
198 | for filament in plate.findall(".//filament"):
199 | used_g = filament.attrib.get("used_g")
200 | #filamentId = int(filament.attrib.get("id"))
201 |
202 | usage[filamentId] = used_g
203 | filaments[filamentId] = {"id": filamentId,
204 | "tray_info_idx": filament.attrib.get("tray_info_idx"),
205 | "type":filament.attrib.get("type"),
206 | "color": filament.attrib.get("color"),
207 | "used_g": used_g,
208 | "used_m":filament.attrib.get("used_m")}
209 | filamentId += 1
210 |
211 | metadata["filaments"] = filaments
212 | metadata["usage"] = usage
213 | else:
214 | print(f"File '{slice_info_path}' not found in the archive.")
215 | return {}
216 |
217 | metadata["image"] = time.strftime('%Y%m%d%H%M%S') + ".png"
218 |
219 | with z.open("Metadata/plate_"+metadata["plateID"]+".png") as source_file:
220 | with open(os.path.join(os.getcwd(), 'static', 'prints', metadata["image"]), 'wb') as target_file:
221 | target_file.write(source_file.read())
222 |
223 | # Check for the Metadata/slice_info.config file
224 | gcode_path = "Metadata/plate_"+metadata["plateID"]+".gcode"
225 | metadata["gcode_path"] = gcode_path
226 | if gcode_path in z.namelist():
227 | with z.open(gcode_path) as gcode_file:
228 | metadata["filamentOrder"] = get_filament_order(gcode_file)
229 |
230 | print(metadata)
231 |
232 | return metadata
233 |
234 | except requests.exceptions.RequestException as e:
235 | print(f"Error downloading file: {e}")
236 | return {}
237 | except zipfile.BadZipFile:
238 | print("The downloaded file is not a valid 3MF archive.")
239 | return {}
240 | except ET.ParseError:
241 | print("Error parsing the XML file.")
242 | return {}
243 | except Exception as e:
244 | print(f"An unexpected error occurred: {e}")
245 | return {}
246 |
--------------------------------------------------------------------------------
/test_data.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import time
4 | from copy import deepcopy
5 | from contextlib import ExitStack, contextmanager
6 | import importlib
7 | from pathlib import Path
8 | from unittest.mock import patch
9 | import pytest
10 |
11 | from config import (
12 | EXTERNAL_SPOOL_AMS_ID,
13 | EXTERNAL_SPOOL_ID,
14 | )
15 | from spoolman_service import augmentTrayDataWithSpoolMan, trayUid
16 |
# Enables snapshot-backed test mode when OPENSPOOLMAN_TEST_DATA=1.
TEST_MODE_FLAG = os.getenv("OPENSPOOLMAN_TEST_DATA") == "1"
# Snapshot JSON location; overridable via OPENSPOOLMAN_TEST_SNAPSHOT.
SNAPSHOT_PATH = Path(os.getenv("OPENSPOOLMAN_TEST_SNAPSHOT") or Path("data") / "live_snapshot.json")

# Fallback printer name used when the snapshot lacks one.
_TEST_PRINTER_ID = os.getenv("PRINTER_ID", "TEST-PRINTER")
# True while patched_test_data()/apply_test_overrides() are active.
_PATCH_ACTIVE = False
# Lazily-loaded snapshot cache (see _ensure_dataset_loaded).
_DATASET: dict | None = None
23 |
24 |
25 | def _compute_cost_per_gram(spool: dict) -> dict:
26 | if "cost_per_gram" in spool:
27 | return spool
28 |
29 | initial_weight = spool.get("initial_weight") or spool.get("filament", {}).get("weight")
30 | price = spool.get("price") or spool.get("filament", {}).get("price")
31 |
32 | if initial_weight and price:
33 | try:
34 | spool["cost_per_gram"] = float(price) / float(initial_weight)
35 | except (TypeError, ValueError, ZeroDivisionError):
36 | spool["cost_per_gram"] = 0
37 | else:
38 | spool["cost_per_gram"] = 0
39 |
40 | return spool
41 |
42 |
def _load_snapshot(path: str | Path):
    """Load a snapshot JSON file and normalise it into the dataset shape.

    Returns None when the file is missing, unreadable, or invalid JSON.
    Each spool has its 'cost_per_gram' derived on the way in.
    """
    snapshot_path = Path(path)

    try:
        with snapshot_path.open("r", encoding="utf-8") as f:
            data = json.load(f)
    except (OSError, json.JSONDecodeError):
        # FileNotFoundError is a subclass of OSError, so one handler suffices.
        return None

    spools = [_compute_cost_per_gram(spool) for spool in data.get("spools", [])]

    # The `or` fallbacks guarantee every key is present and non-None even when
    # the JSON stores an explicit null, so no setdefault() pass is needed
    # afterwards (the previous setdefault calls were dead code: every key was
    # always set by the literal below).
    return {
        "spools": spools,
        "last_ams_config": data.get("last_ams_config") or {},
        "settings": data.get("settings") or {},
        "prints": data.get("prints") or [],
        "printer": data.get("printer") or {},
    }
70 |
71 |
def _ensure_dataset_loaded() -> dict:
    """Load the snapshot dataset once, cache it, and serve the cache afterwards.

    Raises FileNotFoundError when the snapshot file is absent and RuntimeError
    when it exists but cannot be parsed.
    """
    global _DATASET

    if _DATASET is None:
        snapshot_path = SNAPSHOT_PATH
        if not snapshot_path.exists():
            raise FileNotFoundError(
                f"Snapshot not found at {snapshot_path}. Create one with 'python scripts/export_live_snapshot.py --output {snapshot_path}'."
            )
        loaded = _load_snapshot(snapshot_path)
        if loaded is None:
            raise RuntimeError(
                f"Snapshot at {snapshot_path} could not be loaded. Recreate it with 'python scripts/export_live_snapshot.py --output {snapshot_path}'."
            )
        _DATASET = deepcopy(loaded)
    return _DATASET
92 |
93 |
# Fail fast at import time when test mode is enabled but no snapshot exists.
if TEST_MODE_FLAG:
    _ensure_dataset_loaded()
96 |
97 |
def current_dataset() -> dict:
    """Return a deep copy of the active snapshot-backed dataset."""

    dataset = _ensure_dataset_loaded()
    return deepcopy(dataset)
102 |
103 |
def isMqttClientConnected():
    # Test mode always reports a live MQTT connection.
    return True
106 |
107 |
def getPrinterModel():
    """Return the snapshot's printer record with default name/model filled in."""
    printer = deepcopy(_ensure_dataset_loaded().get("printer") or {})
    for key, fallback in (("devicename", _TEST_PRINTER_ID), ("model", "Snapshot printer")):
        printer.setdefault(key, fallback)
    return printer
113 |
114 |
def fetchSpools():
    """Return a deep copy of the seeded spool list."""
    spools = _ensure_dataset_loaded().get("spools", [])
    return deepcopy(spools)
117 |
118 |
def getLastAMSConfig():
    """Return the seeded AMS configuration with SpoolMan data merged into each tray."""
    config = deepcopy(_ensure_dataset_loaded().get("last_ams_config") or {})
    spool_list = fetchSpools()

    # External (virtual) spool tray, when the snapshot recorded one.
    vt_tray = config.get("vt_tray")
    if vt_tray:
        augmentTrayDataWithSpoolMan(spool_list, vt_tray, EXTERNAL_SPOOL_AMS_ID, EXTERNAL_SPOOL_ID)

    # Every tray of every AMS unit gets the same augmentation.
    for ams in config.get("ams", []):
        for tray in ams.get("tray", []):
            augmentTrayDataWithSpoolMan(spool_list, tray, ams.get("id"), tray.get("id"))
    return config
131 |
132 |
def getSettings():
    """Return a deep copy of the seeded settings mapping."""
    dataset = _ensure_dataset_loaded()
    return deepcopy(dataset.get("settings", {}))
135 |
136 |
def patchExtraTags(spool_id, _, new_tags):
    """Merge `new_tags` into the matching spool's 'extra' dict.

    Returns the mutated spool, or None when no spool has that id.
    """
    wanted = int(spool_id)
    for spool in _ensure_dataset_loaded().get("spools", []):
        if spool["id"] != wanted:
            continue
        spool.setdefault("extra", {}).update(new_tags)
        return spool
    return None
144 |
145 |
def getSpoolById(spool_id):
    """Return a deep copy of the spool with the given id, or None if absent."""
    wanted = int(spool_id)
    matches = (s for s in _ensure_dataset_loaded().get("spools", []) if s["id"] == wanted)
    found = next(matches, None)
    return deepcopy(found) if found is not None else None
151 |
152 |
def setActiveTray(spool_id, spool_extra, ams_id, tray_id):
    """Record which tray a spool is loaded in and return the tray UID string."""
    active_tray = json.dumps(trayUid(int(ams_id), int(tray_id)))
    wanted = int(spool_id)
    for spool in _ensure_dataset_loaded().get("spools", []):
        if spool["id"] == wanted:
            extra = spool.setdefault("extra", {})
            extra.update(spool_extra or {})
            extra["active_tray"] = active_tray
            break
    return active_tray
162 |
163 |
def consumeSpool(spool_id, grams):
    """Subtract `grams` from the matching spool's remaining weight, floored at 0."""
    wanted = int(spool_id)
    for spool in _ensure_dataset_loaded().get("spools", []):
        if spool["id"] == wanted:
            remaining = spool.get("remaining_weight", 0) - grams
            spool["remaining_weight"] = max(remaining, 0)
            break
170 |
171 |
def get_prints_with_filament(limit=50, offset=0):
    """Return a (page_of_prints, total_count) tuple from the seeded dataset."""
    dataset = _ensure_dataset_loaded()
    total = len(dataset.get("prints", []))
    page = deepcopy(dataset.get("prints", []))
    if offset:
        page = page[offset:]
    if limit is not None:
        page = page[:limit]
    return page, total
180 |
181 |
def get_filament_for_slot(print_id, ams_slot):
    """Return the filament entry matching (print_id, ams_slot), or None."""
    wanted_print, wanted_slot = int(print_id), int(ams_slot)
    for print_job in _ensure_dataset_loaded().get("prints", []):
        if int(print_job.get("id")) == wanted_print:
            for filament in json.loads(print_job.get("filament_info", "[]")):
                if int(filament.get("ams_slot")) == wanted_slot:
                    return filament
    return None
191 |
192 |
def update_filament_spool(print_id, ams_slot, spool_id):
    """Point the (print_id, ams_slot) filament entry at a different spool.

    Returns True after updating the first matching print; None when no print
    with that id exists.
    """
    wanted_print, wanted_slot = int(print_id), int(ams_slot)
    for print_job in _ensure_dataset_loaded().get("prints", []):
        if int(print_job.get("id")) != wanted_print:
            continue
        filaments = json.loads(print_job.get("filament_info", "[]"))
        for filament in filaments:
            if int(filament.get("ams_slot")) == wanted_slot:
                filament["spool_id"] = int(spool_id)
        print_job["filament_info"] = json.dumps(filaments)
        return True
204 |
205 |
def setActiveSpool(*_args, **_kwargs):
    """Accept and ignore any arguments; live-mode side effects are disabled here."""
    return None
209 |
210 |
def wait_for_seed_ready(timeout=10):
    """Report that the seeded dataset is ready.

    The snapshot is loaded synchronously at import time, so there is nothing
    to poll for. The previous implementation slept in 0.1 s steps for the
    entire timeout before returning True, blocking callers for `timeout`
    seconds with no benefit; returning immediately preserves the result.

    :param timeout: kept for interface compatibility; no longer used.
    """
    return True
216 |
217 |
# Map of "module.attribute" dotted paths to their seeded replacement callables.
# patched_test_data()/apply_test_overrides() monkeypatch these so production
# modules serve snapshot data instead of talking to live services.
_PATCH_TARGETS = {
    "spoolman_client.fetchSpoolList": fetchSpools,
    "spoolman_client.getSpoolById": getSpoolById,
    "spoolman_client.consumeSpool": consumeSpool,
    "spoolman_client.patchExtraTags": patchExtraTags,
    "print_history.get_prints_with_filament": get_prints_with_filament,
    "print_history.get_filament_for_slot": get_filament_for_slot,
    "print_history.update_filament_spool": update_filament_spool,
    "spoolman_service.fetchSpools": fetchSpools,
    "spoolman_service.setActiveTray": setActiveTray,
    "mqtt_bambulab.fetchSpools": fetchSpools,
    "mqtt_bambulab.getLastAMSConfig": getLastAMSConfig,
    "mqtt_bambulab.isMqttClientConnected": isMqttClientConnected,
    "mqtt_bambulab.getPrinterModel": getPrinterModel,
    "mqtt_bambulab.setActiveTray": setActiveTray,
}
234 |
235 |
def test_data_active():
    """Return True when the test-data patches or flag are enabled."""

    if TEST_MODE_FLAG or _PATCH_ACTIVE:
        return True
    # During pytest runs, skip to keep the test suite green when seeded data is off.
    if os.getenv("PYTEST_CURRENT_TEST"):
        pytest.skip("Seeded data is not enabled (set OPENSPOOLMAN_TEST_DATA=1 or apply test overrides).")
    # In production imports (e.g., app startup), just report False without raising.
    return False
246 |
247 |
@contextmanager
def patched_test_data():
    """
    Patch production modules with the in-memory test dataset for unit tests.

    Usage:
        with patched_test_data():
            # imports inside the block will use the seeded functions
            ...
    """

    global _PATCH_ACTIVE
    previous_state = _PATCH_ACTIVE
    _PATCH_ACTIVE = True

    try:
        with ExitStack() as stack:
            for target, replacement in _PATCH_TARGETS.items():
                stack.enter_context(patch(target, replacement))
            yield
    finally:
        # Restore even when applying a patch fails, so a broken setup does not
        # leave the module-level flag stuck at True (previously the flag was
        # only reset when all patches applied successfully).
        _PATCH_ACTIVE = previous_state
270 |
271 |
def apply_test_overrides(monkeypatch=None):
    """
    Apply the test-data mocks either via pytest's monkeypatch or as a context manager.

    If ``monkeypatch`` is provided, overrides are applied immediately for the
    duration of the test. Without it, a context manager is returned so tests can
    control the lifetime explicitly:

        with apply_test_overrides():
            ...
    """

    if monkeypatch is not None:
        global _PATCH_ACTIVE
        # NOTE(review): this path flips the flag on but never restores it;
        # monkeypatch only undoes the attribute patches - confirm intended.
        _PATCH_ACTIVE = True
        for target, replacement in _PATCH_TARGETS.items():
            # Resolve "module.attr" and patch the live module object.
            module_name, attr = target.rsplit(".", 1)
            module = importlib.import_module(module_name)
            monkeypatch.setattr(module, attr, replacement)
        return None

    return patched_test_data()
294 |
295 |
def activate_test_data_patches():
    """Enter the test-data patch context and leave it open for the process lifetime.

    Returns the context manager so a caller may exit it explicitly if desired.
    """
    patch_ctx = patched_test_data()
    patch_ctx.__enter__()
    return patch_ctx
302 |
--------------------------------------------------------------------------------
/print_history.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sqlite3
3 | from datetime import datetime
4 | from pathlib import Path
5 |
6 | DEFAULT_DB_NAME = "3d_printer_logs.db"
7 | DB_ENV_VAR = "OPENSPOOLMAN_PRINT_HISTORY_DB"
8 |
9 |
def _default_db_path() -> Path:
  """Resolve the print history database path, allowing an env override."""

  override = os.getenv(DB_ENV_VAR)
  if override:
    return Path(override).expanduser().resolve()
  # Default: data/<DEFAULT_DB_NAME> next to this module.
  return Path(__file__).resolve().parent / "data" / DEFAULT_DB_NAME
18 |
19 |
# Mutable holder for the DB location; tests may swap "db_path" for a scratch file.
db_config = {"db_path": str(_default_db_path())}  # Configuration for database location
21 |
22 |
23 | def _ensure_column(cursor: sqlite3.Cursor, table: str, column: str, definition: str) -> None:
24 | cursor.execute(f"PRAGMA table_info({table})")
25 | columns = {row[1] for row in cursor.fetchall()}
26 | if column not in columns:
27 | cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} {definition}")
28 |
29 |
def create_database() -> None:
  """
  Ensure the SQLite schema exists (used for both fresh and upgrading databases).
  """
  db_path = Path(db_config["db_path"])
  # Create the data directory on first run.
  db_path.parent.mkdir(parents=True, exist_ok=True)

  conn = sqlite3.connect(db_path)
  cursor = conn.cursor()

  # One row per print job.
  cursor.execute('''
    CREATE TABLE IF NOT EXISTS prints (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      print_date TEXT NOT NULL,
      file_name TEXT NOT NULL,
      print_type TEXT NOT NULL,
      image_file TEXT
    )
  ''')

  # One row per filament/slot used by a print; cascades on print deletion.
  cursor.execute('''
    CREATE TABLE IF NOT EXISTS filament_usage (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      print_id INTEGER NOT NULL,
      spool_id INTEGER,
      filament_type TEXT NOT NULL,
      color TEXT NOT NULL,
      grams_used REAL NOT NULL,
      ams_slot INTEGER NOT NULL,
      estimated_grams REAL,
      FOREIGN KEY (print_id) REFERENCES prints (id) ON DELETE CASCADE
    )
  ''')

  # Per-print layer/consumption progress tracking (one row per print).
  cursor.execute('''
    CREATE TABLE IF NOT EXISTS print_layer_tracking (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      print_id INTEGER NOT NULL UNIQUE,
      total_layers INTEGER,
      layers_printed INTEGER,
      filament_grams_billed REAL,
      filament_grams_total REAL,
      status TEXT NOT NULL DEFAULT 'RUNNING',
      predicted_end_time TEXT,
      actual_end_time TEXT,
      FOREIGN KEY (print_id) REFERENCES prints (id) ON DELETE CASCADE
    )
  ''')

  _ensure_column(
    cursor,
    "filament_usage",
    "estimated_grams",
    "REAL",
  )

  # Ensure column definitions exist for older databases
  _ensure_column(
    cursor,
    "print_layer_tracking",
    "predicted_end_time",
    "TEXT",
  )
  _ensure_column(
    cursor,
    "print_layer_tracking",
    "actual_end_time",
    "TEXT",
  )

  conn.commit()
  conn.close()
102 |
103 |
def insert_print(file_name: str, print_type: str, image_file: str = None, print_date: str = None) -> int:
  """
  Inserts a new print job into the database and returns the print ID.
  If no print_date is provided, the current timestamp is used.
  """
  if print_date is None:
    print_date = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

  conn = sqlite3.connect(db_config["db_path"])
  try:
    cursor = conn.cursor()
    cursor.execute('''
      INSERT INTO prints (print_date, file_name, print_type, image_file)
      VALUES (?, ?, ?, ?)
    ''', (print_date, file_name, print_type, image_file))
    print_id = cursor.lastrowid
    conn.commit()
  finally:
    # Close the connection even when the INSERT raises, avoiding a leak.
    conn.close()
  return print_id
122 |
def insert_filament_usage(
    print_id: int,
    filament_type: str,
    color: str,
    grams_used: float,
    ams_slot: int,
    estimated_grams: float | None = None,
) -> None:
  """
  Inserts a new filament usage entry for a specific print job.
  """
  connection = sqlite3.connect(db_config["db_path"])
  # Connection.execute creates a cursor implicitly.
  connection.execute('''
    INSERT INTO filament_usage (print_id, filament_type, color, grams_used, ams_slot, estimated_grams)
    VALUES (?, ?, ?, ?, ?, ?)
  ''', (print_id, filament_type, color, grams_used, ams_slot, estimated_grams))
  connection.commit()
  connection.close()
142 |
def update_filament_spool(print_id: int, filament_id: int, spool_id: int) -> None:
  """
  Updates the spool_id for a given filament usage entry, ensuring it belongs to the specified print job.

  NOTE(review): `filament_id` is bound to the ams_slot column below, so callers
  actually pass the AMS slot number here - confirm before renaming.
  """
  connection = sqlite3.connect(db_config["db_path"])
  connection.execute('''
    UPDATE filament_usage
    SET spool_id = ?
    WHERE ams_slot = ? AND print_id = ?
  ''', (spool_id, filament_id, print_id))
  connection.commit()
  connection.close()
156 |
def update_filament_grams_used(print_id: int, filament_id: int, grams_used: float) -> None:
  """
  Updates the grams_used for a given filament usage entry, ensuring it belongs to the specified print job.

  NOTE(review): as with update_filament_spool, `filament_id` is matched against
  the ams_slot column.
  """
  connection = sqlite3.connect(db_config["db_path"])
  connection.execute('''
    UPDATE filament_usage
    SET grams_used = ?
    WHERE ams_slot = ? AND print_id = ?
  ''', (grams_used, filament_id, print_id))
  connection.commit()
  connection.close()
170 |
171 |
def get_prints_with_filament(limit: int | None = None, offset: int | None = None):
  """
  Retrieves print jobs along with their associated filament usage, grouped by print job.

  A total count is returned to support pagination.

  :param limit: maximum number of rows to return (None = unlimited).
  :param offset: number of rows to skip (None = start at the beginning).
  :return: (list of print dicts with a JSON 'filament_info' field, total print count)
  """
  conn = sqlite3.connect(db_config["db_path"])
  conn.row_factory = sqlite3.Row  # Enable column name access

  count_cursor = conn.cursor()
  count_cursor.execute("SELECT COUNT(*) FROM prints")
  total_count = count_cursor.fetchone()[0]

  cursor = conn.cursor()
  query = '''
    SELECT p.id AS id, p.print_date AS print_date, p.file_name AS file_name,
           p.print_type AS print_type, p.image_file AS image_file,
           (
             SELECT json_group_array(json_object(
               'spool_id', f.spool_id,
               'filament_type', f.filament_type,
               'color', f.color,
               'grams_used', f.grams_used,
               'estimated_grams', f.estimated_grams,
               'ams_slot', f.ams_slot
             )) FROM filament_usage f WHERE f.print_id = p.id
           ) AS filament_info
    FROM prints p
    ORDER BY p.print_date DESC
  '''
  params: list[int] = []
  if limit is not None:
    query += " LIMIT ?"
    params.append(limit)
  if offset is not None:
    # SQLite only accepts OFFSET after a LIMIT clause; "LIMIT -1" means
    # unlimited, so an offset-only call no longer raises OperationalError.
    if limit is None:
      query += " LIMIT -1"
    query += " OFFSET ?"
    params.append(offset)

  cursor.execute(query, params)
  prints = [dict(row) for row in cursor.fetchall()]
  conn.close()
  return prints, total_count
214 |
def get_prints_by_spool(spool_id: int):
  """
  Retrieves all print jobs that used a specific spool.
  """
  connection = sqlite3.connect(db_config["db_path"])
  rows = connection.execute('''
    SELECT DISTINCT p.* FROM prints p
    JOIN filament_usage f ON p.id = f.print_id
    WHERE f.spool_id = ?
  ''', (spool_id,)).fetchall()
  connection.close()
  return rows
229 |
def get_filament_for_slot(print_id: int, ams_slot: int):
  """Return the filament_usage row for (print_id, ams_slot), or None if absent."""
  connection = sqlite3.connect(db_config["db_path"])
  connection.row_factory = sqlite3.Row  # Enable column name access
  row = connection.execute('''
    SELECT * FROM filament_usage
    WHERE print_id = ? AND ams_slot = ?
  ''', (print_id, ams_slot)).fetchone()
  connection.close()
  return row
243 |
def _ensure_layer_tracking_entry(print_id: int):
  """Create an empty layer-tracking row for the print if none exists yet."""
  connection = sqlite3.connect(db_config["db_path"])
  connection.execute('''
    INSERT OR IGNORE INTO print_layer_tracking (print_id)
    VALUES (?)
  ''', (print_id,))
  connection.commit()
  connection.close()
253 |
def update_layer_tracking(print_id: int, **fields):
  """Update whitelisted columns on the print's layer-tracking row.

  Unknown keyword arguments are silently dropped; the row is created first
  when missing. No-op when nothing valid is supplied.
  """
  allowed_columns = {
    "total_layers",
    "layers_printed",
    "filament_grams_billed",
    "filament_grams_total",
    "status",
    "predicted_end_time",
    "actual_end_time",
  }

  updates = {name: fields[name] for name in fields if name in allowed_columns}
  if not updates:
    return

  _ensure_layer_tracking_entry(print_id)

  set_clause = ", ".join(f"{name} = ?" for name in updates)
  bind_values = list(updates.values()) + [print_id]

  conn = sqlite3.connect(db_config["db_path"])
  conn.execute(f'''
    UPDATE print_layer_tracking
    SET {set_clause}
    WHERE print_id = ?
  ''', bind_values)
  conn.commit()
  conn.close()
286 |
def get_layer_tracking_for_prints(print_ids: list[int]):
  """Return a {print_id: tracking-row-dict} map for the requested prints."""
  if not print_ids:
    return {}

  connection = sqlite3.connect(db_config["db_path"])
  connection.row_factory = sqlite3.Row
  # One placeholder per id for the IN clause.
  placeholders = ",".join("?" for _ in print_ids)
  rows = connection.execute(f'''
    SELECT print_id, total_layers, layers_printed, filament_grams_billed, filament_grams_total, status, predicted_end_time, actual_end_time
    FROM print_layer_tracking
    WHERE print_id IN ({placeholders})
  ''', print_ids).fetchall()
  connection.close()
  return {row["print_id"]: dict(row) for row in rows}
303 |
def get_all_filament_usage_for_print(print_id: int):
  """
  Retrieves all filament usage entries for a specific print.
  Returns a dict mapping ams_slot to grams_used.
  """
  connection = sqlite3.connect(db_config["db_path"])
  connection.row_factory = sqlite3.Row
  rows = connection.execute('''
    SELECT ams_slot, grams_used FROM filament_usage
    WHERE print_id = ?
  ''', (print_id,)).fetchall()
  connection.close()
  return {row["ams_slot"]: row["grams_used"] for row in rows}
321 |
# Ensure the schema exists as soon as this module is imported.
create_database()
324 |
--------------------------------------------------------------------------------
/scripts/generate_screenshots.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import asyncio
3 | import json
4 | import os
5 | import subprocess
6 | import sys
7 | import time
8 | from dataclasses import dataclass
9 | from pathlib import Path
10 | from typing import Any
11 |
12 | import requests
13 |
# Ensure repository root is importable when executed from the scripts directory
REPO_ROOT = Path(__file__).resolve().parent.parent
if str(REPO_ROOT) not in sys.path:
    sys.path.insert(0, str(REPO_ROOT))
# Run with the repo root as CWD so relative paths (data/, docs/) resolve.
os.chdir(REPO_ROOT)

# Default seeded-snapshot location used when --snapshot is not given.
DEFAULT_SNAPSHOT_PATH = Path("data") / "live_snapshot.json"
21 |
22 |
@dataclass(frozen=True)
class ScreenshotJob:
    """One screenshot to capture: a route rendered on a device's viewport."""

    output: str                # destination PNG path
    route: str                 # URL path appended to the base URL
    viewport: tuple[int, int]  # (width, height) in CSS pixels
    max_height: int | None     # clip height when not capturing the full page
    device: str                # device name from the config
    name: str                  # target name (used in output templates)
    full_page: bool            # capture the whole scrollable page instead of clipping
32 |
33 |
34 | def parse_viewport(raw_viewport: str | tuple[int, int] | list[int]) -> tuple[int, int]:
35 | """Parse a viewport specification from CLI, pytest, or config options."""
36 |
37 | if isinstance(raw_viewport, (tuple, list)) and len(raw_viewport) == 2:
38 | return int(raw_viewport[0]), int(raw_viewport[1])
39 |
40 | if isinstance(raw_viewport, str) and "x" in raw_viewport:
41 | width, height = raw_viewport.lower().split("x", 1)
42 | return int(width), int(height)
43 |
44 | raise ValueError("Viewport must be WIDTHxHEIGHT or two integers")
45 |
46 |
47 | def load_config(config_path: str | os.PathLike | None = None) -> dict[str, Any]:
48 | """Load the screenshot configuration JSON (defaults to scripts/screenshot_config.json)."""
49 |
50 | if config_path is None:
51 | config_path = Path(__file__).with_name("screenshot_config.json")
52 |
53 | with open(config_path, "r", encoding="utf-8") as f:
54 | return json.load(f)
55 |
56 |
def _device_viewport(device: dict[str, Any]) -> tuple[int, int]:
    """Return the device's parsed (width, height) viewport, which is mandatory."""
    raw = device.get("viewport")
    if raw is None:
        raise ValueError("Each device in the screenshot config must define a viewport")
    return parse_viewport(raw)
62 |
63 |
64 | def _rewrite_output_path(output: str, device: str, target_devices: list[str], output_dir: str | None) -> str:
65 | """Rewrite the output path to include the device prefix and optional directory."""
66 |
67 | path = Path(output)
68 |
69 | if not path.name.startswith(f"{device}_"):
70 | path = path.with_name(f"{device}_{path.name}")
71 |
72 | if output_dir:
73 | path = Path(output_dir) / path.name
74 |
75 | return str(path)
76 |
77 |
def build_jobs(
    config: dict[str, Any],
    devices: list[str] | None = None,
    output_dir: str | None = None,
    default_max_height: int | None = None,
) -> list[ScreenshotJob]:
    """Build the set of screenshots to capture from the JSON configuration."""

    device_defs = config.get("devices") or {}
    if not device_defs:
        raise ValueError("Screenshot config must define at least one device")

    # Device selection precedence: explicit arg > config default_devices > all defined.
    selected_devices = devices or config.get("default_devices") or list(device_defs.keys())
    jobs: list[ScreenshotJob] = []

    for target in config.get("targets", []):
        target_devices = target.get("devices") or selected_devices
        route = target["route"]
        name = target.get("name") or route
        target_max_height = target.get("max_height")
        full_page = bool(target.get("full_page"))

        for device in target_devices:
            # A target's own device list is still filtered by the selection.
            if device not in selected_devices:
                continue
            if device not in device_defs:
                raise ValueError(f"Device '{device}' referenced by target '{name}' is not defined in the config")

            viewport = _device_viewport(device_defs[device])
            resolved_max_height = None

            # Height resolution order (non-full-page only):
            # per-device dict entry > target scalar > function default > viewport height.
            if not full_page:
                if isinstance(target_max_height, dict):
                    resolved_max_height = target_max_height.get(device)
                else:
                    resolved_max_height = target_max_height

                if resolved_max_height is None:
                    resolved_max_height = default_max_height

                if resolved_max_height is None:
                    resolved_max_height = viewport[1]
            output = target.get("output") or f"docs/img/{name}.png"

            # output_template, when present, wins over the plain output path.
            if template := target.get("output_template"):
                output = template.format(device=device, name=name)

            output = _rewrite_output_path(output, device, target_devices, output_dir)
            jobs.append(
                ScreenshotJob(
                    output=output,
                    route=route,
                    viewport=viewport,
                    max_height=resolved_max_height,
                    device=device,
                    name=name,
                    full_page=full_page,
                )
            )

    return jobs
139 |
140 |
async def capture_pages(base_url: str, jobs: list[ScreenshotJob], color_scheme: str | None = None) -> None:
    """Capture every job's screenshot with headless Chromium via Playwright.

    :param base_url: server root the job routes are appended to.
    :param jobs: screenshots to take; each gets a fresh browser context.
    :param color_scheme: 'light'/'dark' forces that scheme; 'auto' keeps the default.
    """
    # Imported lazily so the rest of the module works without Playwright installed.
    from playwright.async_api import async_playwright

    async with async_playwright() as p:
        browser = await p.chromium.launch(headless=True)

        for job in jobs:
            viewport_width, viewport_height = job.viewport
            # Make the context at least as tall as the clip so nothing is cut off.
            page_height = max(viewport_height, job.max_height or viewport_height)

            context = await browser.new_context(
                viewport={"width": viewport_width, "height": page_height},
                color_scheme=None if color_scheme == "auto" else color_scheme,
            )
            page = await context.new_page()

            url = f"{base_url}{job.route}"
            print(f"Capturing {url} -> {job.output} ({job.device})")
            await page.goto(url, wait_until="networkidle")
            # Give late-running JS (charts, lazy images) a moment to settle.
            await page.wait_for_timeout(1000)
            Path(job.output).parent.mkdir(parents=True, exist_ok=True)

            screenshot_kwargs: dict = {"path": job.output}
            if job.full_page:
                screenshot_kwargs["full_page"] = True
            else:
                # Clip to the viewport width and the resolved max height.
                screenshot_kwargs.update(
                    {
                        "full_page": False,
                        "clip": {"x": 0, "y": 0, "width": viewport_width, "height": job.max_height},
                    }
                )

            await page.screenshot(**screenshot_kwargs)
            await context.close()

        await browser.close()
178 |
179 |
def wait_for_server(url: str, timeout: int = 30) -> None:
    """Poll `url` until it answers with a non-5xx status, or raise after `timeout` s."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            if requests.get(url, timeout=5).status_code < 500:
                return
        except requests.RequestException:
            # Server not up yet - keep polling.
            pass
        time.sleep(0.5)
    raise RuntimeError(f"Server at {url} did not become ready in time")
191 |
192 |
def start_server(
    port: int,
    use_test_data: bool = True,
    snapshot_path: str | None = None,
    live_read_only: bool = True,
    print_history_db: str | None = None,
) -> subprocess.Popen:
    """Launch `flask run` in a subprocess, configured via environment variables.

    Raises FileNotFoundError when test data is requested but the snapshot file
    does not exist. Returns the running Popen handle.
    """
    env = os.environ.copy()
    env.setdefault("FLASK_APP", "app")
    env["FLASK_RUN_PORT"] = str(port)
    snapshot_for_env = snapshot_path or str(DEFAULT_SNAPSHOT_PATH)
    if use_test_data:
        # Fail fast before spawning Flask if the snapshot file is absent.
        resolved_snapshot = Path(snapshot_for_env)
        if not resolved_snapshot.exists():
            raise FileNotFoundError(
                f"Snapshot not found at {resolved_snapshot}. Create one with 'python scripts/export_live_snapshot.py --output {resolved_snapshot}'."
            )

        env["OPENSPOOLMAN_TEST_DATA"] = "1"
        env["OPENSPOOLMAN_TEST_SNAPSHOT"] = str(resolved_snapshot)
    if live_read_only:
        env["OPENSPOOLMAN_LIVE_READONLY"] = "1"
    if print_history_db:
        env["OPENSPOOLMAN_PRINT_HISTORY_DB"] = print_history_db
    env.setdefault("OPENSPOOLMAN_BASE_URL", f"http://127.0.0.1:{port}")

    # Inherit stdout/stderr so Flask logs show up in the console.
    process = subprocess.Popen(
        [sys.executable, "-m", "flask", "run", "--port", str(port), "--host", "0.0.0.0"],
        stdout=None,
        stderr=None,
        env=env,
    )
    return process
226 |
227 |
def stop_server(process: subprocess.Popen) -> None:
    """Terminate the server subprocess, escalating to kill after 10 seconds."""
    if process.poll() is not None:
        return  # already exited
    process.terminate()
    try:
        process.wait(timeout=10)
    except subprocess.TimeoutExpired:
        process.kill()
235 |
236 |
def main() -> int:
    """CLI entry point: build jobs, optionally boot a server, capture screenshots.

    Returns a process exit code (0 on success, 1 when the snapshot is missing).
    """
    parser = argparse.ArgumentParser(description="Generate UI screenshots using a seeded dataset or live server")
    parser.add_argument("--port", type=int, default=5001, help="Port to run the Flask app on")
    parser.add_argument(
        "--config",
        dest="config_path",
        default=None,
        help="Path to screenshot configuration JSON (defaults to scripts/screenshot_config.json)",
    )
    parser.add_argument(
        "--devices",
        help="Comma-separated list of device names from the config to capture (defaults to config default_devices)",
    )
    parser.add_argument(
        "--max-height",
        type=int,
        default=None,
        help=(
            "Default maximum screenshot height; per-target/device overrides in the config win."
            " If omitted, captures are clipped to the viewport height unless a target sets full_page=true."
        ),
    )
    parser.add_argument("--output-dir", dest="output_dir", help="Directory to write screenshots (defaults to config outputs)")
    parser.add_argument("--base-url", dest="base_url", help="Use an already-running server instead of starting one")
    parser.add_argument("--mode", choices=["seed", "live"], default="seed", help="Start Flask in seeded test mode or against live data")
    parser.add_argument(
        "--snapshot",
        dest="snapshot",
        default=str(DEFAULT_SNAPSHOT_PATH),
        help="Path to a snapshot JSON to load when using test data (defaults to data/live_snapshot.json)",
    )
    parser.add_argument(
        "--print-history-db",
        dest="print_history_db",
        default=str(Path("data") / "demo.db"),
        help="Path to a SQLite DB for print history (defaults to data/demo.db for screenshot runs)",
    )
    parser.add_argument(
        "--test-data",
        action="store_true",
        help="Explicitly set OPENSPOOLMAN_TEST_DATA=1 when starting the Flask server",
    )
    parser.add_argument(
        "--live-readonly",
        action="store_true",
        help="Explicitly set OPENSPOOLMAN_LIVE_READONLY=1 when starting the Flask server",
    )
    parser.add_argument("--allow-live-actions", action="store_true", help="Permit live mode to make state changes instead of running read-only")
    parser.add_argument(
        "--color-scheme",
        choices=["auto", "light", "dark"],
        default=None,
        help="Force Playwright to render pages in light or dark mode (defaults to config color_scheme or auto)",
    )
    args = parser.parse_args()

    config = load_config(args.config_path)
    # Precedence: CLI flag > config value > "auto".
    color_scheme = args.color_scheme or config.get("color_scheme") or "auto"
    selected_devices = args.devices.split(",") if args.devices else None
    jobs = build_jobs(
        config,
        devices=[device.strip() for device in selected_devices] if selected_devices else None,
        output_dir=args.output_dir,
        default_max_height=args.max_height,
    )

    server_process = None

    base_url = args.base_url or f"http://127.0.0.1:{args.port}"

    try:
        # Only start a local server when no external --base-url was supplied.
        if base_url == f"http://127.0.0.1:{args.port}":
            use_test_data = args.test_data or args.mode == "seed"
            live_read_only = args.live_readonly or (not args.allow_live_actions)
            server_process = start_server(
                args.port,
                use_test_data=use_test_data,
                snapshot_path=args.snapshot,
                live_read_only=live_read_only,
                print_history_db=args.print_history_db,
            )
            wait_for_server(f"{base_url}/health")
        elif args.mode == "live" and not args.allow_live_actions:
            print("Live mode reminder: set OPENSPOOLMAN_LIVE_READONLY=1 on the target server to avoid state changes.")

        asyncio.run(capture_pages(base_url, jobs, color_scheme=color_scheme))
        return 0
    except FileNotFoundError as exc:
        # Missing snapshot surfaced by start_server.
        print(exc)
        return 1
    finally:
        if server_process is not None:
            stop_server(server_process)
330 |
331 |
332 | if __name__ == "__main__":
333 | raise SystemExit(main())
334 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # OpenSpoolMan
2 | Use any filament like Bambu filaments while OpenSpoolMan automatically subtracts the estimated usage from the SpoolMan-managed spool records (see [AUTO SPEND](#auto-spend---automatic-filament-usage-based-on-slicer-estimate)). BambuLab filament is auto-tracked once it shows up in a tray; only third-party spools must be assigned manually through the UI.
3 |
4 | No need for cloud or additional hardware—NFC Tags are optional and you can rely solely on the web GUI. In SpoolMan you can generate QR-code stickers that link straight back to OpenSpoolMan so users can tap a label from their mobile device; change the base URL in SpoolMan settings to OpenSpoolMan before generating the sticker (see [SpoolMan stickers](#spoolman-stickers)).
5 |
6 | Similar functionality to https://github.com/spuder/OpenSpool using only your phone, server, and NFC tags integrated with SpoolMan.
7 |
8 | Everything works locally without cloud access; you can use `scripts/init_bambulab.py` to fetch your `PRINTER_ID`/`PRINTER_CODE` if the printer does not expose them.
9 |
10 | Docker: https://ghcr.io/drndos/openspoolman
11 |
12 | Helm: https://github.com/drndos/openspoolman/pkgs/container/openspoolman%2Fhelm%2Fopenspoolman
13 |
14 | ### News
15 | - [v0.3.0](https://github.com/drndos/openspoolman/releases/tag/v0.3.0) - 23.12.2025 — more accurate filament accounting and layer tracking, higher-fidelity print history, and better Bambu Lab / AMS integration
16 | - [v0.2.0](https://github.com/drndos/openspoolman/releases/tag/v0.2.0) - 07.12.2025 — Adds material-aware tray/spool mismatch detection, tray color cues, print reassign/pagination, spool material filters, and SpoolMan URL handling with refreshed responsive layouts.
17 | - [v0.1.9](https://github.com/drndos/openspoolman/releases/tag/v0.1.9) - 25.05.2025 — Ships post-print spool assignment, multi-platform Docker images, customizable spool sorting, timezone config, and compatibility/UI polish.
18 | - [v0.1.8](https://github.com/drndos/openspoolman/releases/tag/v0.1.8) - 20.04.2025 — Starts importing each filament’s SpoolMan `filament_id` for accurate matching (requires the `filament_id` custom field).
19 | - [v0.1.7](https://github.com/drndos/openspoolman/releases/tag/v0.1.7) - 17.04.2025 — Introduces print cost tracking, printer header info, SPA gating improvements, and fixes for drawer colors/local prints.
20 | - [0.1.6](https://github.com/drndos/openspoolman/releases/tag/0.1.6) - 09.04.2025 — Published container images (main service + Helm chart) and packaged artifacts for easier deployments.
21 |
22 | ### Main features
23 |
24 | #### Dashboard overview
25 | *Overview over the trays and the assigned spools and spool information*
26 |
27 |
28 |
29 | Desktop screenshots (expand to view)
30 |
31 | Dashboard overview
32 | Overview over the trays and the assigned spools and spool information
33 |
34 |
35 | Fill tray workflow
36 | Assign a spool to a tray with quick filters.
37 |
38 |
39 | Print history
40 | Track every print with filament usage, used spools and costs.
41 |
42 |
43 | Spool detail info
44 | Shows information about the spool and allows assigning it to a tray.
45 |
46 |
47 | NFC tag assignment
48 | Assign and refresh NFC tags so you can scan them with your mobile and get directly to the spool info.
49 |
50 |
51 | Spool change view from print history
52 | Change or remove the spool assignment after a print. Useful when the wrong spool was assigned or the print was canceled.
53 |
54 |
55 |
56 |
57 |
58 | Mobile screenshots (expand to view)
59 |
60 |
61 |
62 |
63 | Dashboard overview
64 | Overview over the trays and the assigned spools and spool information
65 |
66 |
67 |
68 | Fill tray workflow
69 | Assign a spool to a tray with quick filters.
70 |
71 |
72 |
73 |
74 |
75 | Print history
76 | View recent prints, AMS slots, and filament usage anytime.
77 |
78 |
79 |
80 | Spool detail info
81 | Spool metadata and NFC tags are accessible on the phone.
82 |
83 |
84 |
85 |
86 |
87 | NFC tag assignment
88 | Assign and refresh NFC tags so you can scan them with your mobile and get directly to the spool info.
89 |
90 |
91 |
92 | Spool change view from print history
93 | Change or remove the spool assignment after a print. Useful when the wrong spool was assigned or the print was canceled.
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 | ### What you need:
102 | - Android Phone with Chrome web browser or iPhone (manual process much more complicated if using NFC Tags)
103 | - Server to run OpenSpoolMan with https (optional when not using NFC Tags) that is reachable from your Phone and can reach both SpoolMan and Bambu Lab printer on the network
104 | - Active Bambu Lab Account or PRINTER_ID and PRINTER_CODE on your printer
105 | - Bambu Lab printer https://eu.store.bambulab.com/collections/3d-printer
106 | - SpoolMan installed https://github.com/Donkie/Spoolman
107 | - NFC Tags (optional) https://eu.store.bambulab.com/en-sk/collections/nfc/products/nfc-tag-with-adhesive https://www.aliexpress.com/item/1005006332360160.html
108 |
109 | ### SpoolMan stickers
110 | SpoolMan can print QR-code stickers for every spool; follow the SpoolMan label guide (https://github.com/Donkie/Spoolman/wiki/Printing-Labels) to generate them. Before printing, update the base URL in SpoolMan’s settings to point at OpenSpoolMan so every sticker redirects to OpenSpoolMan instead of SpoolMan.
111 |
112 | ### How to setup:
113 |
114 |
115 | Python / venv deployment (see Environment configuration below)
116 |
117 | 1. Clone the repository and switch to the desired branch:
118 | ```bash
119 | git clone https://github.com/drndos/openspoolman.git
120 | cd openspoolman
121 | git checkout
122 | ```
123 | 2. Create and activate a virtual environment, then install the dependencies:
124 | ```bash
125 | python3 -m venv .venv
126 | source .venv/bin/activate
127 | pip install -r requirements.txt
128 | ```
129 | 3. Configure the environment variables (see below).
130 | 4. Run the server with:
131 | ```bash
132 | python wsgi.py
133 | ```
134 | OpenSpoolMan listens on port `8001` by default so it does not clash with SpoolMan on the same host.
135 |
136 |
137 |
138 |
139 | Docker deployment (see Environment configuration below)
140 |
141 | 1. Make sure `docker` and `docker compose` are installed.
142 | 2. Configure the environment variables (see below).
143 | 3. Copy `docker-compose.yaml` to your deployment directory (or ensure `./docker-compose.yaml` matches your environment) and adjust any host volumes or ports as needed.
144 | 4. Build and start the containers:
145 | ```bash
146 | docker compose up -d
147 | ```
148 |
149 |
150 |
151 |
152 | Kubernetes (Helm) deployment (see Environment configuration below)
153 |
154 | 1. Use the bundled Helm chart under `./helm/openspoolman`:
155 | ```bash
156 | helm dependency update helm/openspoolman
157 | ```
158 | 2. Create a `values.yaml` (or use `helm/openspoolman/values.yaml`) that overrides the same `config.env` entries and configures an ingress with TLS for your cluster.
159 | 3. Install or upgrade the release:
160 | ```bash
161 | helm upgrade --install openspoolman helm/openspoolman -f values.yaml --namespace openspoolman --create-namespace
162 | ```
163 | 4. Verify the pods and ingress:
164 | ```bash
165 | kubectl get pods -n openspoolman
166 | kubectl describe ingress -n openspoolman
167 | ```
168 |
169 |
170 |
171 | #### Environment configuration
172 | - Rename `config.env.template` to `config.env` or set environment properties and:
173 | - set `OPENSPOOLMAN_BASE_URL` — the HTTPS URL where OpenSpoolMan will be available on your network (no trailing slash, required for NFC writes).
174 | - set `PRINTER_ID` — find it in the printer settings under Setting → Device → Printer SN.
175 | - set `PRINTER_ACCESS_CODE` — find it in Setting → LAN Only Mode → Access Code (the LAN Only Mode toggle may stay off).
176 | - set `PRINTER_IP` — found in Setting → LAN Only Mode → IP Address.
177 | - set `SPOOLMAN_BASE_URL` — the URL of your SpoolMan installation without trailing slash.
178 | - set `AUTO_SPEND` to `True` to enable legacy slicer-estimate tracking (no live layer tracking).
179 | - set `TRACK_LAYER_USAGE` to `True` to switch to per-layer tracking/consumption **while `AUTO_SPEND` is also `True`**. If `AUTO_SPEND` is `False`, all filament tracking remains disabled regardless of `TRACK_LAYER_USAGE`.
180 | - set `AUTO_SPEND` to `True` if you want automatic filament usage tracking (see the AUTO SPEND notes below).
181 | - set `DISABLE_MISMATCH_WARNING` to `True` to hide mismatch warnings in the UI (mismatches are still detected and logged to `data/filament_mismatch.json`).
182 | - set `CLEAR_ASSIGNMENT_WHEN_EMPTY` to `True` if you want OpenSpoolMan to clear any SpoolMan assignment and reset the AMS tray whenever the printer reports no spool in that slot.
183 | - By default, the app reads `data/3d_printer_logs.db` for print history; override it through `OPENSPOOLMAN_PRINT_HISTORY_DB` or via the screenshot helper (which targets `data/demo.db` by default).
184 |
185 | - Run SpoolMan.
186 | - Add these extra fields in SpoolMan:
187 | - **Filaments**
188 | - "type","Type","Choice", "AERO,CF,GF,FR,Basic,HF,Translucent,Aero,Dynamic,Galaxy,Glow,Impact,Lite,Marble,Matte,Metal,Silk,Silk+,Sparkle,Tough,Tough+,Wood,Support for ABS,Support for PA PET,Support for PLA,Support for PLA-PETG,G,W,85A,90A,95A,95A HF,for AMS"
189 | - "nozzle_temperature","Nozzle Temperature","Integer Range","°C","190 - 230"
190 | - "filament_id","Filament ID", "Text"
191 | - **Spools**
192 | - "tag","tag","Text"
193 | - "active_tray","Active Tray","Text"
194 | - Add your Manufacturers, Filaments and Spools to SpoolMan (consider 'Import from External' for faster workflow).
195 | - The filament id lives in `C:\Users\USERNAME\AppData\Roaming\BambuStudio\user\USERID\filament\base` (same for each printer/nozzle).
196 | - Open the server base URL in your mobile browser.
197 | - Optionally copy Bambu Lab RFIDs into the extra tag on spools so they match automatically; read the tag id from logs or the AMS info page.
198 |
199 | #### Filament matching rules
200 | - The spool's `material` must match the AMS tray's `tray_type` (main type).
201 | - For Bambu filaments, the AMS reports a sub-brand; this must match the spool's sub-brand. You can model this either as:
202 | - `material` = full Bambu material (e.g., `PLA Wood`) and leave `type` empty, **or**
203 | - `material` = base (e.g., `PLA`) and `type` = the add-on (e.g., `Wood`).
204 | Both must correspond to what the AMS reports for that tray.
205 | - You can wrap optional notes in parentheses inside `material` (e.g., `PLA CF (recycled)`); anything in parentheses is ignored during matching.
206 | - If matching still fails, please file a report using `.github/ISSUE_TEMPLATE/filament-mismatch.md` or temporarily hide the UI warning via `DISABLE_MISMATCH_WARNING=true` (mismatches are still logged to `data/filament_mismatch.json`).
207 |
208 | With NFC Tags:
209 | - For non-Bambu filament, select it in SpoolMan, click 'Write,' and tap an NFC tag near your phone (allow NFC).
210 | - Attach the NFC tag to the filament.
211 | - Load the filament into AMS, then bring the phone near the NFC tag so it opens OpenSpoolMan.
212 | - Assign the AMS slot you used in the UI.
213 |
214 | Without NFC Tags:
215 | - Click 'Fill' on a tray and select the desired spool.
216 | - Done.
217 |
218 | ### Accessing OpenSpoolMan
219 | Once the server is running (via `wsgi.py`, Gunicorn, Docker, or Helm), open `https://<server>:8443` if you used the built-in adhoc SSL mode, or `http://<server>:8001` when the service listens on the default port 8001. Replace `<server>` with your server's IP/DNS and ensure the port matches your chosen deployment (`PORT` env var or docker-compose mapping). For Docker deployments, you can also use `docker compose port openspoolman 8001` to see the mapped host port.
220 | ### AUTO SPEND - Automatic filament usage based on slicer estimate
221 | You can turn this feature on to automatically update the spool usage in SpoolMan.
222 | This feature is using slicer information about predicted filament weight usage (and in future correlating it with the progress of the printer to compute the estimate of filament used).
223 |
224 | This feature has currently following issues/drawbacks:
225 | - Spending on the start of the print
226 | - Not spending according to print process and spending full filament weight even if print fails
227 | - Don't know if it works with LAN mode, since it downloads the 3MF file from cloud
228 | - Not tested with multiple AMS systems
229 | - Not handling the mismatch between the SpoolMan and AMS (if you don't have the Active Tray information correct in spoolman it won't work properly)
230 |
231 | ### Notes:
232 | - If you change the BASE_URL of this app, you will need to reconfigure all NFC TAGS
233 |
234 | ### TBD:
235 | - Filament remaining in AMS (I have only AMS lite, if you have AMS we can test together)
236 | - Filament spending based on printing
237 | - TODO: handle situation when the print doesn't finish
238 | - TODO: test with multiple AMS
239 | - Code cleanup
240 | - Video showcase
241 | - Docker compose SSL
242 | - TODOs
243 | - Reduce the amount of files in docker container
244 | - Cloud service for controlled redirect so you don't have to reconfigure NFC tags
245 | - QR codes
246 | - Add search to list of spools
247 |
--------------------------------------------------------------------------------
/templates/fragments/list_prints.html:
--------------------------------------------------------------------------------
1 |
2 | {% for print in prints %}
3 |
4 |
5 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
Date:
18 |
Date
19 |
{{ print['print_date'] }}
20 |
21 |
22 |
23 |
24 |
25 |
Type:
26 |
Type
27 |
{{ print['print_type'] }}
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
File:
36 |
File
37 |
{{ print['file_name'] }}
38 |
39 |
40 |
41 |
42 | {% set total_cost = print.get('total_cost', 0) %}
43 | {% if total_cost > 0 %}
44 |
45 |
Cost:
46 |
Cost
47 |
{{ '%.2f' | format(total_cost) }} {{currencysymbol}}
48 |
49 | {% endif %}
50 | {% if print.get('display_filament_total') is not none %}
51 |
52 |
Filament:
53 |
Filament
54 |
{{ '%.2f' | format(print['display_filament_total']) }}g
55 |
56 | {% endif %}
57 |
58 |
59 |
60 |
61 | {% if print['layer_tracking'] %}
62 |
63 |
64 |
65 | Status:
66 |
67 | {{ print['layer_tracking']['status_label'] }}
68 |
69 | {% if print['layer_tracking']['total_layers'] %}
70 |
71 | Layer Progress: {{ print['layer_tracking']['layers_printed'] }}/{{ print['layer_tracking']['total_layers'] }}
72 |
73 | {% endif %}
74 | {% if print['layer_tracking']['progress_percent'] is not none %}
75 |
76 | ({{ print['layer_tracking']['progress_percent'] }}%)
77 |
78 | {% endif %}
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 | Layer Tracking
87 |
88 | {{ print['layer_tracking']['status_label'] }}
89 |
90 |
91 |
92 |
93 | Layers
94 | {{ print['layer_tracking']['layers_printed'] }}
95 | / {{ print['layer_tracking']['total_layers'] or '-' }}
96 |
97 |
98 | Billed
99 | {% if print['layer_tracking']['filament_grams_billed'] is not none %}
100 | {{ '%.2f' | format(print['layer_tracking']['filament_grams_billed']) }}g
101 | {% else %}
102 | - g
103 | {% endif %}
104 |
105 | {% if print['layer_tracking']['filament_grams_total'] is not none %}
106 |
107 | Total
108 | {{ '%.2f' | format(print['layer_tracking']['filament_grams_total']) }}g
109 |
110 | {% endif %}
111 | {% if print['layer_tracking']['progress_percent'] is not none %}
112 |
113 | Progress
114 | {{ print['layer_tracking']['progress_percent'] }}%
115 |
116 | {% endif %}
117 |
118 |
119 | {% if print['layer_tracking']['predicted_end_time'] %}
120 | Expected end: {{ print['layer_tracking']['predicted_end_time'] }}
121 | {% endif %}
122 | {% if print['layer_tracking']['actual_end_time'] %}
123 | {% if print['layer_tracking']['predicted_end_time'] %} · {% endif %}
124 | Finished: {{ print['layer_tracking']['actual_end_time'] }}
125 | {% endif %}
126 |
127 |
128 |
129 |
130 | {% endif %}
131 |
132 |
219 |
220 |
221 |
222 | {% if print['image_file'] %}
223 |
224 | {% else %}
225 |
No Image
226 | {% endif %}
227 |
228 |
229 |
230 |
231 | {% endfor %}
232 |
233 |
--------------------------------------------------------------------------------