├── .codespellrc ├── .dockerignore ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_suggestion.md ├── PULL_REQUEST_TEMPLATE.md └── workflows │ ├── ci.yml │ ├── nightly.yml │ ├── publish_docs.yml │ ├── release.yml │ └── verify_published.yml ├── .gitignore ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Makefile ├── README.md ├── _tooling ├── changelog_bump_version.py ├── changelog_for_version.sh ├── check_version.py ├── make_release.sh └── publish.sh ├── cluster_tools ├── .gitignore ├── Changelog.md ├── LICENSE ├── README.md ├── cluster_tools │ ├── __init__.py │ ├── _utils │ │ ├── call.py │ │ ├── file_wait_thread.py │ │ ├── multiprocessing_logging_handler.py │ │ ├── pickling.py │ │ ├── reflection.py │ │ ├── string_.py │ │ ├── tailf.py │ │ └── warning.py │ ├── executor_protocol.py │ ├── executors │ │ ├── dask.py │ │ ├── multiprocessing_.py │ │ ├── multiprocessing_pickle.py │ │ ├── sequential.py │ │ └── sequential_pickle.py │ ├── remote.py │ └── schedulers │ │ ├── __init__.py │ │ ├── cluster_executor.py │ │ ├── kube.py │ │ ├── pbs.py │ │ └── slurm.py ├── dockered-slurm │ ├── LICENSE │ ├── README.md │ ├── cgroup.conf │ ├── docker-compose.yml │ ├── register_cluster.sh │ ├── slurm.conf │ └── slurmdbd.conf ├── format.sh ├── lint.sh ├── pyproject.toml ├── slurm_example.py ├── test.sh ├── tests │ ├── Dockerfile │ ├── cluster-config.yaml │ ├── guardless_multiprocessing.py │ ├── test_all.py │ ├── test_dask.py │ ├── test_deref_main.py │ ├── test_kubernetes.py │ ├── test_multiprocessing.py │ └── test_slurm.py ├── typecheck.sh └── uv.lock ├── docs ├── .gitignore ├── README.md ├── generate.sh ├── generate_api_doc_pages.py ├── linkcheckerrc ├── mkdocs.yml ├── overrides │ ├── css │ │ └── lists.css │ ├── images │ │ ├── close.png │ │ ├── loading.gif │ │ ├── next.png │ │ └── prev.png │ ├── main.html │ └── partials │ │ └── page_content_footer.html ├── pyproject.toml ├── src │ ├── api │ │ └── webknossos.md │ ├── cli │ │ ├── compress.md │ │ ├── convert.md │ │ ├── copy.md │ │ ├── distribution_strategies.md │ │ ├── download.md │ │ ├── downsample.md │ │ ├── environment_variables.md │ │ ├── export-as-tiff.md │ │ ├── index.md │ │ ├── install.md │ │ ├── merge-fallback.md │ │ ├── upload.md │ │ └── upsample.md │ ├── javascripts │ │ └── runllm-widget.js │ ├── webknossos │ └── webknossos-py │ │ ├── CODE_OF_CONDUCT.md │ │ ├── CONTRIBUTING.md │ │ ├── changelog.md │ │ ├── examples │ │ ├── accessing_metadata.md │ │ ├── annotation_project_administration.md │ │ ├── announce_dataset_upload.md │ │ ├── apply_merger_mode.md │ │ ├── calculate_segment_sizes.md │ │ ├── convert_4d_tiff.md │ │ ├── create_dataset_from_images.md │ │ ├── dataset_usage.md │ │ ├── download_image_data.md │ │ ├── download_segments.md │ │ ├── download_tiff_stack.md │ │ ├── explore_and_add_remote.md │ │ ├── image_stack_to_dataset.md │ │ ├── learned_segmenter.md │ │ ├── learned_segmenter_annotation.png │ │ ├── learned_segmenter_result.png │ │ ├── load_annotation_from_file.md │ │ ├── merge_nmls.md │ │ ├── remote_datasets.md │ │ ├── skeleton_path_length.md │ │ ├── skeleton_synapse_candidates.md │ │ ├── teams_and_users.md │ │ ├── upload_dicom_stack.md │ │ ├── upload_image_data.md │ │ ├── upload_image_data_dataset.jpg │ │ ├── upload_tiff_stack.md │ │ ├── upsample_skeleton.md │ │ └── user_times.md │ │ ├── index.md │ │ ├── installation.md │ │ └── stability_policy.md └── uv.lock └── webknossos ├── .gitignore ├── .test_durations ├── Changelog.md ├── Dockerfile ├── LICENSE ├── README.md ├── examples ├── WIP │ ├── merge_trees_at_closest_nodes.py │ └── 
offline_merger_mode.py ├── accessing_metadata.py ├── add_existing_zarr_array.py ├── annotation_project_administration.py ├── announce_dataset_upload.py ├── apply_merger_mode.py ├── calculate_segment_sizes.py ├── convert_4d_tiff.py ├── create_dataset_from_images.py ├── dataset_usage.py ├── download_image_data.py ├── download_segments.py ├── download_tiff_stack.py ├── explore_and_add_remote.py ├── image_stack_to_dataset.py ├── learned_segmenter.py ├── load_annotation_from_file.py ├── merge_trees_of_nml_files.py ├── remote_datasets.py ├── skeleton_path_length.py ├── skeleton_synapse_candidates.py ├── teams_and_users.py ├── upload_dicom_stack.py ├── upload_image_data.py ├── upload_tiff_stack.py ├── upsample_skeleton.py └── user_times.py ├── format.sh ├── lint.sh ├── local_wk_setup.sh ├── pyproject.toml ├── script_collection ├── move_dataset_slices_by_one.py └── test_segmentation_heuristic.py ├── stubs └── boltons │ ├── __init__.py │ ├── cacheutils │ └── __init__.py │ ├── strutils │ └── __init__.py │ └── typeutils │ └── __init__.py ├── test.sh ├── testdata ├── 4D │ ├── 4D_series │ │ └── 4D-series.ome.tif │ ├── 4D_series_zarr3 │ │ ├── color │ │ │ ├── 1 │ │ │ │ ├── c │ │ │ │ │ └── 0 │ │ │ │ │ │ ├── 0 │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ ├── 1 │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ ├── 2 │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ ├── 3 │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ ├── 4 │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ ├── 5 │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ └── 6 │ │ │ │ │ │ └── 0 │ │ │ │ │ │ └── 0 │ │ │ │ │ │ └── 0 │ │ │ │ └── zarr.json │ │ │ ├── 2 │ │ │ │ ├── c │ │ │ │ │ └── 0 │ │ │ │ │ │ ├── 0 │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ ├── 1 │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ ├── 2 │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ ├── 3 │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ ├── 4 │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ ├── 5 │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ │ └── 0 │ │ │ │ │ │ └── 6 │ │ │ │ │ │ └── 0 │ │ │ │ │ │ └── 0 │ │ │ │ │ │ └── 0 │ │ │ │ └── zarr.json │ │ │ └── zarr.json │ │ ├── datasource-properties.json │ │ └── zarr.json │ ├── multi_channel_z_series │ │ └── multi-channel-z-series.ome.tif │ └── single_channel │ │ └── single-channel.ome.tiff ├── WT1_wkw.tar.gz ├── annotations │ ├── bounding-boxes-example.zip │ ├── empty_volume_annotation.zip │ ├── l4_sample__explorational__suser__94b271.zip │ ├── l4dense_motta_et_al_demo_v2__explorational__4a6356.zip │ ├── multi_volume_example_CREMI.zip │ ├── nml_with_volumes.nml │ └── nml_with_volumes.zip ├── complex_property_ds │ ├── color │ │ └── 1 │ │ │ └── header.wkw │ ├── datasource-properties.json │ └── segmentation │ │ └── 1 │ │ └── header.wkw ├── dicoms │ ├── N2D_0001.dcm │ ├── N2D_0002.dcm │ ├── N2D_0003.dcm │ ├── N2D_0004.dcm │ ├── N2D_0005.dcm │ ├── N2D_0006.dcm │ ├── N2D_0007.dcm │ ├── N2D_0008.dcm │ ├── N2D_0009.dcm │ └── N2D_0010.dcm ├── nmls │ ├── generate_nml_snapshot.nml │ ├── generated_annotation_snapshot.nml │ ├── generated_skeleton_snapshot.nml │ ├── nml_with_small_distance_nodes.nml │ └── test_a.nml ├── old_wkw_dataset │ ├── color │ │ ├── 1 │ │ │ └── header.wkw │ │ └── 2 │ │ │ └── header.wkw │ └── datasource-properties.json ├── rgb_tiff │ └── test_rgb.tif ├── simple_wkw_dataset │ ├── color 
│ │ └── 1 │ │ │ ├── header.wkw │ │ │ └── z0 │ │ │ └── y0 │ │ │ └── x0.wkw │ └── datasource-properties.json ├── simple_zarr3_dataset │ ├── color │ │ ├── 1 │ │ │ ├── c │ │ │ │ └── 0 │ │ │ │ │ └── 0 │ │ │ │ │ └── 0 │ │ │ │ │ └── 0 │ │ │ └── zarr.json │ │ └── zarr.json │ ├── datasource-properties.json │ └── zarr.json ├── simple_zarr_dataset │ ├── .zgroup │ ├── color │ │ ├── .zattrs │ │ ├── .zgroup │ │ └── 1-1-1 │ │ │ ├── .zarray │ │ │ └── 0.0.0.0 │ └── datasource-properties.json ├── single_multipage_tiff_folder │ └── test_C.tif ├── temca2 │ ├── 1 │ │ ├── 60 │ │ │ ├── 140.jpg │ │ │ └── 141.jpg │ │ └── 61 │ │ │ ├── 140.jpg │ │ │ └── 141.jpg │ ├── 2 │ │ ├── 60 │ │ │ ├── 140.jpg │ │ │ └── 141.jpg │ │ └── 61 │ │ │ ├── 140.jpg │ │ │ └── 141.jpg │ ├── 40 │ │ ├── 60 │ │ │ ├── 144.jpg │ │ │ └── 145.jpg │ │ └── 61 │ │ │ ├── 144.jpg │ │ │ └── 145.jpg │ └── black.jpg ├── tiff │ ├── datasource-properties.zarr-fixture.json │ ├── test.0000.tiff │ ├── test.0001.tiff │ ├── test.0002.tiff │ ├── test.0003.tiff │ ├── test.0004.tiff │ ├── test.0005.tiff │ ├── test.0006.tiff │ ├── test.0007.tiff │ ├── test.0008.tiff │ ├── test.0009.tiff │ ├── test.0010.tiff │ ├── test.0011.tiff │ ├── test.0012.tiff │ ├── test.0013.tiff │ ├── test.0014.tiff │ ├── test.0015.tiff │ ├── test.0016.tiff │ ├── test.0017.tiff │ ├── test.0018.tiff │ ├── test.0019.tiff │ ├── test.0020.tiff │ ├── test.0021.tiff │ ├── test.0022.tiff │ ├── test.0023.tiff │ ├── test.0024.tiff │ ├── test.0025.tiff │ ├── test.0026.tiff │ ├── test.0027.tiff │ ├── test.0028.tiff │ ├── test.0029.tiff │ ├── test.0030.tiff │ ├── test.0031.tiff │ ├── test.0032.tiff │ ├── test.0033.tiff │ ├── test.0034.tiff │ ├── test.0035.tiff │ ├── test.0036.tiff │ ├── test.0037.tiff │ ├── test.0038.tiff │ ├── test.0039.tiff │ ├── test.0040.tiff │ ├── test.0041.tiff │ ├── test.0042.tiff │ ├── test.0043.tiff │ ├── test.0044.tiff │ ├── test.0045.tiff │ ├── test.0046.tiff │ ├── test.0047.tiff │ ├── test.0048.tiff │ ├── test.0049.tiff │ ├── test.0050.tiff │ ├── test.0051.tiff │ ├── test.0052.tiff │ ├── test.0053.tiff │ ├── test.0054.tiff │ ├── test.0055.tiff │ ├── test.0056.tiff │ ├── test.0057.tiff │ ├── test.0058.tiff │ ├── test.0059.tiff │ ├── test.0060.tiff │ ├── test.0061.tiff │ ├── test.0062.tiff │ ├── test.0063.tiff │ ├── test.0064.tiff │ ├── test.0065.tiff │ ├── test.0066.tiff │ ├── test.0067.tiff │ ├── test.0068.tiff │ ├── test.0069.tiff │ ├── test.0070.tiff │ ├── test.0071.tiff │ ├── test.0072.tiff │ ├── test.0073.tiff │ ├── test.0074.tiff │ ├── test.0075.tiff │ ├── test.0076.tiff │ ├── test.0077.tiff │ ├── test.0078.tiff │ ├── test.0079.tiff │ ├── test.0080.tiff │ ├── test.0081.tiff │ ├── test.0082.tiff │ ├── test.0083.tiff │ ├── test.0084.tiff │ ├── test.0085.tiff │ ├── test.0086.tiff │ ├── test.0087.tiff │ ├── test.0088.tiff │ ├── test.0089.tiff │ ├── test.0090.tiff │ ├── test.0091.tiff │ ├── test.0092.tiff │ ├── test.0093.tiff │ ├── test.0094.tiff │ ├── test.0095.tiff │ ├── test.0096.tiff │ ├── test.0097.tiff │ ├── test.0098.tiff │ ├── test.0099.tiff │ ├── test.0100.tiff │ ├── test.0101.tiff │ ├── test.0102.tiff │ ├── test.0103.tiff │ ├── test.0104.tiff │ ├── test.0105.tiff │ ├── test.0106.tiff │ ├── test.0107.tiff │ ├── test.0108.tiff │ ├── test.0109.tiff │ ├── test.0110.tiff │ ├── test.0111.tiff │ ├── test.0112.tiff │ ├── test.0113.tiff │ ├── test.0114.tiff │ ├── test.0115.tiff │ ├── test.0116.tiff │ ├── test.0117.tiff │ ├── test.0118.tiff │ ├── test.0119.tiff │ ├── test.0120.tiff │ ├── test.0121.tiff │ ├── test.0122.tiff │ ├── test.0123.tiff │ ├── test.0124.tiff │ ├── 
test.0125.tiff │ ├── test.0126.tiff │ ├── test.0127.tiff │ ├── test.0128.tiff │ ├── test.0129.tiff │ ├── test.0130.tiff │ ├── test.0131.tiff │ ├── test.0132.tiff │ ├── test.0133.tiff │ ├── test.0134.tiff │ ├── test.0135.tiff │ ├── test.0136.tiff │ ├── test.0137.tiff │ ├── test.0138.tiff │ ├── test.0139.tiff │ ├── test.0140.tiff │ ├── test.0141.tiff │ ├── test.0142.tiff │ ├── test.0143.tiff │ ├── test.0144.tiff │ ├── test.0145.tiff │ ├── test.0146.tiff │ ├── test.0147.tiff │ ├── test.0148.tiff │ ├── test.0149.tiff │ ├── test.0150.tiff │ ├── test.0151.tiff │ ├── test.0152.tiff │ ├── test.0153.tiff │ ├── test.0154.tiff │ ├── test.0155.tiff │ ├── test.0156.tiff │ ├── test.0157.tiff │ ├── test.0158.tiff │ ├── test.0159.tiff │ ├── test.0160.tiff │ ├── test.0161.tiff │ ├── test.0162.tiff │ ├── test.0163.tiff │ ├── test.0164.tiff │ ├── test.0165.tiff │ ├── test.0166.tiff │ ├── test.0167.tiff │ ├── test.0168.tiff │ ├── test.0169.tiff │ ├── test.0170.tiff │ ├── test.0171.tiff │ ├── test.0172.tiff │ ├── test.0173.tiff │ ├── test.0174.tiff │ ├── test.0175.tiff │ ├── test.0176.tiff │ ├── test.0177.tiff │ ├── test.0178.tiff │ ├── test.0179.tiff │ ├── test.0180.tiff │ ├── test.0181.tiff │ ├── test.0182.tiff │ ├── test.0183.tiff │ ├── test.0184.tiff │ ├── test.0185.tiff │ ├── test.0186.tiff │ ├── test.0187.tiff │ ├── test.0188.tiff │ ├── test.0189.tiff │ ├── test.0190.tiff │ ├── test.0191.tiff │ ├── test.0192.tiff │ ├── test.0193.tiff │ ├── test.0194.tiff │ ├── test.0195.tiff │ ├── test.0196.tiff │ ├── test.0197.tiff │ ├── test.0198.tiff │ ├── test.0199.tiff │ ├── test.0200.tiff │ ├── test.0201.tiff │ ├── test.0202.tiff │ ├── test.0203.tiff │ ├── test.0204.tiff │ ├── test.0205.tiff │ ├── test.0206.tiff │ ├── test.0207.tiff │ ├── test.0208.tiff │ ├── test.0209.tiff │ ├── test.0210.tiff │ ├── test.0211.tiff │ ├── test.0212.tiff │ ├── test.0213.tiff │ ├── test.0214.tiff │ ├── test.0215.tiff │ ├── test.0216.tiff │ ├── test.0217.tiff │ ├── test.0218.tiff │ ├── test.0219.tiff │ ├── test.0220.tiff │ ├── test.0221.tiff │ ├── test.0222.tiff │ ├── test.0223.tiff │ ├── test.0224.tiff │ ├── test.0225.tiff │ ├── test.0226.tiff │ ├── test.0227.tiff │ ├── test.0228.tiff │ ├── test.0229.tiff │ ├── test.0230.tiff │ ├── test.0231.tiff │ ├── test.0232.tiff │ ├── test.0233.tiff │ ├── test.0234.tiff │ ├── test.0235.tiff │ ├── test.0236.tiff │ ├── test.0237.tiff │ ├── test.0238.tiff │ ├── test.0239.tiff │ ├── test.0240.tiff │ ├── test.0241.tiff │ ├── test.0242.tiff │ ├── test.0243.tiff │ ├── test.0244.tiff │ ├── test.0245.tiff │ ├── test.0246.tiff │ ├── test.0247.tiff │ ├── test.0248.tiff │ ├── test.0249.tiff │ ├── test.0250.tiff │ ├── test.0251.tiff │ ├── test.0252.tiff │ ├── test.0253.tiff │ ├── test.0254.tiff │ ├── test.0255.tiff │ └── test.0256.tiff ├── tiff_with_different_shapes │ ├── 1.tif │ ├── 3.tif │ ├── 4.tif │ └── 5.tif └── various_tiff_formats │ ├── test_C.tif │ ├── test_CS.tif │ ├── test_I.tif │ └── test_S.tif ├── tests ├── __init__.py ├── binaryData │ ├── .gitignore │ └── Organization_X │ │ └── e2006_knossos │ │ ├── color │ │ └── 1 │ │ │ ├── header.wkw │ │ │ └── z0 │ │ │ └── y0 │ │ │ └── x0.wkw │ │ └── datasource-properties.json ├── cassettes │ ├── default.yml │ ├── test_annotation │ │ ├── test_annotation_upload_download_roundtrip.yml │ │ ├── test_bounding_box_roundtrip.yml │ │ ├── test_dataset_access_via_annotation.yml │ │ └── test_remote_annotation_list.yml │ ├── test_api_client │ │ ├── test_annotation_info.yml │ │ ├── test_build_info.yml │ │ ├── test_current_user_info_and_user_logged_time.yml │ │ ├── 
test_dataset_info.yml │ │ ├── test_datastore_list.yml │ │ ├── test_generate_token_for_data_store.yml │ │ ├── test_health.yml │ │ └── test_user_list.yml │ ├── test_cli │ │ ├── test_download_dataset[l4_sample].yml │ │ └── test_download_dataset[l4_sample__view].yml │ ├── test_context │ │ ├── test_trailing_slash_in_url.yml │ │ └── test_user_organization.yml │ ├── test_dataset │ │ ├── test_explore_and_add_remote.yml │ │ ├── test_remote_dataset_access_metadata.yml │ │ └── test_remote_dataset_urls.yml │ ├── test_dataset_add_remote_mag_and_layer │ │ ├── test_add_remote_layer_from_object.yml │ │ ├── test_add_remote_layer_from_path.yml │ │ ├── test_add_remote_layer_non_public.yml │ │ ├── test_add_remote_mags_from_mag_view.yml │ │ ├── test_add_remote_mags_from_path.yml │ │ └── test_shallow_copy_remote_layers.yml │ ├── test_dataset_download_upload_remote │ │ ├── test_get_remote_datasets.yml │ │ ├── test_remote_dataset.yml │ │ ├── test_upload_download_roundtrip.yml │ │ ├── test_upload_twice.yml │ │ ├── test_url_download[l4_sample].yml │ │ ├── test_url_download[l4_sample__view].yml │ │ ├── test_url_open_remote[l4_sample].yml │ │ └── test_url_open_remote[l4_sample__view].yml │ └── test_user │ │ ├── test_get_all_managed_users.yml │ │ ├── test_get_current_user.yml │ │ └── test_get_logged_time.yml ├── client │ ├── test_context.py │ └── test_user.py ├── conftest.py ├── constants.py ├── dataset │ ├── test_add_layer_from_images.py │ ├── test_attachments.py │ ├── test_buffered_slice_utils.py │ ├── test_dataset.py │ ├── test_dataset_add_remote_mag_and_layer.py │ ├── test_dataset_download_upload_remote.py │ ├── test_downsampling.py │ ├── test_from_images.py │ ├── test_layer.py │ ├── test_remote_dataset.py │ └── test_upsampling.py ├── docker-compose.yml ├── example_files │ └── l4dense_motta_et_al_demo_v2__explorational.zip ├── geometry │ ├── test_bounding_box.py │ ├── test_mag.py │ ├── test_nd_bounding_box.py │ ├── test_vec3_int.py │ └── test_vec_int.py ├── test_annotation.py ├── test_api_client.py ├── test_cli.py ├── test_skeleton.py └── utils.py ├── typecheck.sh ├── uv.lock └── webknossos ├── __init__.py ├── _nml ├── __init__.py ├── branchpoint.py ├── comment.py ├── edge.py ├── group.py ├── meta.py ├── metadata_entry.py ├── nml.py ├── node.py ├── parameters.py ├── segment.py ├── tree.py ├── utils.py └── volume.py ├── administration ├── __init__.py ├── project.py ├── task.py └── user.py ├── annotation ├── __init__.py ├── _nml_conversion.py ├── annotation.py └── annotation_info.py ├── cli ├── __init__.py ├── _utils.py ├── check_equality.py ├── compress.py ├── convert.py ├── convert_knossos.py ├── convert_raw.py ├── convert_zarr.py ├── copy_dataset.py ├── download.py ├── downsample.py ├── export_as_tiff.py ├── main.py ├── merge_fallback.py ├── upload.py └── upsample.py ├── client ├── README.md ├── __init__.py ├── _defaults.py ├── _download_dataset.py ├── _resolve_short_link.py ├── _resumable │ ├── __init__.py │ ├── chunk.py │ ├── core.py │ ├── file.py │ └── util.py ├── _upload_dataset.py ├── api_client │ ├── __init__.py │ ├── _abstract_api_client.py │ ├── _serialization.py │ ├── datastore_api_client.py │ ├── errors.py │ ├── models.py │ └── wk_api_client.py └── context.py ├── dataset ├── __init__.py ├── _array.py ├── _downsampling_utils.py ├── _metadata.py ├── _upsampling_utils.py ├── _utils │ ├── __init__.py │ ├── buffered_slice_reader.py │ ├── buffered_slice_writer.py │ ├── infer_bounding_box_existing_files.py │ ├── pims_czi_reader.py │ ├── pims_dm_readers.py │ ├── pims_images.py │ ├── pims_tiff_reader.py │ ├── 
segmentation_recognition.py │ └── vendor │ │ ├── dm3.py │ │ └── dm4.py ├── attachments.py ├── data_format.py ├── dataset.py ├── defaults.py ├── layer.py ├── layer_categories.py ├── length_unit.py ├── mag_view.py ├── ome_metadata.py ├── properties.py ├── remote_dataset_registry.py ├── remote_folder.py ├── sampling_modes.py └── view.py ├── datastore ├── __init__.py └── datastore.py ├── geometry ├── __init__.py ├── bounding_box.py ├── mag.py ├── nd_bounding_box.py ├── vec3_int.py └── vec_int.py ├── py.typed ├── skeleton ├── __init__.py ├── group.py ├── node.py ├── skeleton.py └── tree.py ├── utils.py └── version.py
--------------------------------------------------------------------------------
/.codespellrc:
--------------------------------------------------------------------------------
1 | [codespell]
2 | # Ref: https://github.com/codespell-project/codespell#using-a-config-file
3 | skip = *.lock,./webknossos/tests/cassettes,./webknossos/tests/dataset/cassettes,./webknossos/webknossos/dataset/_utils/vendor
4 | # some names and camelCased variables etc
5 | ignore-regex = \b([a-z]+[A-Z][a-zA-Z]*)\b
6 | ignore-words-list = nd,ND
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | webknossos/testdata
2 | webknossos/testoutput
3 | .venv
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | ## Context
11 | - Affected library: e.g. webknossos
12 |
13 |
14 |
15 | ## Expected Behavior
16 |
17 |
18 | ## Current Behavior
19 |
20 |
21 | ## Steps to Reproduce
22 |
23 | - [ ] Cannot reproduce the bug anymore / needs deeper investigation.
24 |
25 |
26 | 1.
27 | 2.
28 | 3.
29 | 4.
30 |
31 | ## Your Environment
32 |
33 | - Operating System and version: e.g. Windows 10
34 | - Version of webknossos-libs (Release or Commit):
35 | (can be found e.g. with `pip show webknossos`)
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_suggestion.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature suggestion
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: enhancement
6 | assignees: ''
7 |
8 | ---
9 |
10 | ## Detailed Description
11 | - Affected library: e.g. webknossos
12 |
13 |
14 | ## Use Cases & Context
15 |
16 |
17 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ### Description:
2 | - abc
3 |
4 | ### Issues:
5 | - fixes #...
6 |
7 | ### Todos:
8 | Make sure to delete unnecessary points or to check all before merging:
9 | - [ ] Updated Changelog
10 | - [ ] Updated Documentation
11 | - [ ] Added / Updated Tests
12 | - [ ] Considered adding this to the Examples
13 |
--------------------------------------------------------------------------------
/.github/workflows/nightly.yml:
--------------------------------------------------------------------------------
1 | name: nightly
2 |
3 | on:
4 |   schedule:
5 |     - cron: '00 06 * * *'
6 |   workflow_dispatch: ~
7 |
8 | jobs:
9 |   webknossos_nightly:
10 |     runs-on: ubuntu-latest
11 |     strategy:
12 |       max-parallel: 4
13 |       matrix:
14 |         python-version: ["3.10", "3.11", "3.12", "3.13"]
15 |         group: [1, 2, 3]
16 |       fail-fast: false
17 |     defaults:
18 |       run:
19 |         working-directory: webknossos
20 |
21 |     steps:
22 |       - uses: actions/checkout@v3
23 |
24 |       - name: Install uv
25 |         uses: astral-sh/setup-uv@v3
26 |         with:
27 |           version: "0.5.26"
28 |
29 |       - name: Install proxay
30 |         run: npm install -g proxay
31 |
32 |       - name: Set up Python ${{ matrix.python-version }}
33 |         run: uv python install ${{ matrix.python-version }}
34 |
35 |       - name: Check if git is dirty
36 |         run: |
37 |           git diff --no-ext-diff --quiet --exit-code
38 |           [[ -z $(git status -s) ]]
39 |
40 |       - name: Python tests, refreshing the network snapshots
41 |         env:
42 |           WK_TOKEN: ${{ secrets.WK_TOKEN }}
43 |         run: ./test.sh --refresh-snapshots --splits 3 --group ${{ matrix.group }}
44 |
45 |       - name: Python tests, using the new snapshots
46 |         env:
47 |           WK_TOKEN: ${{ secrets.WK_TOKEN }}
48 |         run: ./test.sh --splits 3 --group ${{ matrix.group }}
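49 | # For reference, a single test group can be reproduced locally with (sketch;
50 | # assumes a valid WK_TOKEN in the environment):
51 | #   WK_TOKEN=... ./test.sh --refresh-snapshots --splits 3 --group 1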
--------------------------------------------------------------------------------
/.github/workflows/publish_docs.yml:
--------------------------------------------------------------------------------
1 | name: Publish docs
2 |
3 | on:
4 |   workflow_dispatch: ~
5 |
6 | jobs:
7 |   docs:
8 |     if: ${{ github.ref == 'refs/heads/master' }}
9 |     runs-on: ubuntu-latest
10 |     steps:
11 |       - uses: actions/checkout@v3
12 |         with:
13 |           fetch-depth: 0
14 |       - uses: actions/checkout@v3
15 |         with:
16 |           repository: scalableminds/webknossos
17 |           path: docs/wk-repo
18 |       - name: Install uv
19 |         uses: astral-sh/setup-uv@v3
20 |         with:
21 |           # Install a specific version of uv.
22 |           version: "0.6.3"
23 |
24 |       - name: Build Docs
25 |         run: |
26 |           cd docs
27 |           ./generate.sh --persist
28 |
29 |       - name: Push docs
30 |         env:
31 |           AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
32 |           AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
33 |           AWS_DEFAULT_REGION: "eu-west-1"
34 |         run: |
35 |           CI_BRANCH=${GITHUB_HEAD_REF:-$GITHUB_REF_NAME}
36 |           NORMALIZED_CI_BRANCH=${CI_BRANCH//[\/-]/_}
37 |           aws s3 sync --acl public-read docs/out s3://static.webknossos.org/docs/${NORMALIZED_CI_BRANCH}
38 |
39 |       - name: Check links (on master)
40 |         env:
41 |           SLACK_HOOK: ${{ secrets.LINK_CHECKER_SLACK_HOOK }}
42 |         run: |
43 |           cd docs
44 |           uv run --frozen linkchecker --config linkcheckerrc https://docs.webknossos.org > link_status || \
45 |           curl -X POST --data-urlencode "payload={\"text\": \":warning: Broken Links on docs.webknossos.org :warning:\n"'```'"\n$(cat link_status)\n"'```"}' \
46 |           "$SLACK_HOOK"
47 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Automatic release
2 |
3 | on:
4 |   workflow_dispatch:
5 |     inputs:
6 |       version:
7 |         description: "Version of the new release (e.g. `2.0.1`)"
8 |         required: true
9 |
10 | jobs:
11 |   release:
12 |     runs-on: ubuntu-latest
13 |     env:
14 |       VERSION: ${{ github.event.inputs.version }}
15 |     steps:
16 |       - uses: actions/checkout@v3
17 |         with:
18 |           fetch-depth: 0
19 |           token: ${{ secrets.NORMANRZ_PAT }}
20 |
21 |       - uses: actions/setup-python@v4
22 |         with:
23 |           python-version: "3.12"
24 |           architecture: 'x64'
25 |
26 |       - name: Setup git config
27 |         run: |
28 |           git config user.name "Automatic release"
29 |           git config user.email "<>"
30 |
31 |       - name: Check whether tag already exists
32 |         run: |
33 |           if git show-ref --tags "v${VERSION}" --quiet; then
34 |             echo "Version $VERSION already exists. Stopping."
35 |             exit 1
36 |           fi
37 |
38 |       - name: Make and push release
39 |         run: |
40 |           _tooling/make_release.sh ${VERSION}
41 |           git add */Changelog.md
42 |
43 |           git commit -m "Release for v${VERSION}"
44 |           git tag v${VERSION}
45 |
46 |           git push origin master
47 |           git push --tags
48 |
--------------------------------------------------------------------------------
/.github/workflows/verify_published.yml:
--------------------------------------------------------------------------------
1 | name: Verify Published Package
2 |
3 | # Verifies that the published webknossos package on PyPI can be installed and imported
4 | # across different Python versions. Tests both basic installation and "all" optional
5 | # dependencies. Runs nightly to ensure consistent package availability.
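6 | # A rough local equivalent of this check (sketch; assumes network access to PyPI):
7 | #   python -m venv .venv && . .venv/bin/activate
8 | #   pip install "webknossos[all]"
9 | #   python -c "from webknossos import version; print(version.__version__)"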
10 |
11 | on:
12 |   schedule:
13 |     - cron: '0 0 * * *' # Run every night at midnight
14 |   workflow_dispatch: # Allow manual trigger
15 |
16 | jobs:
17 |   verify-published:
18 |     runs-on: ubuntu-latest
19 |     strategy:
20 |       matrix:
21 |         python-version: ["3.13", "3.12", "3.11", "3.10"]
22 |         extras: ["", "[all]"]
23 |     steps:
24 |       - name: Set up Python ${{ matrix.python-version }}
25 |         uses: actions/setup-python@v4
26 |         with:
27 |           python-version: ${{ matrix.python-version }}
28 |
29 |       - name: Create virtual environment
30 |         run: |
31 |           python -m venv .venv
32 |
33 |       # Each `run` step starts a fresh shell, so the virtual environment has to
34 |       # be activated again in every step that should use it.
35 |       - name: Install webknossos
36 |         run: |
37 |           source .venv/bin/activate
38 |           python -m pip install --upgrade pip
39 |           python -m pip install webknossos${{ matrix.extras }}
40 |
41 |       - name: Verify installation
42 |         run: |
43 |           source .venv/bin/activate
44 |           python -c "import webknossos; from webknossos import version; print(f'webknossos version: {version.__version__}')"
45 |
46 |           if [ "${{ matrix.extras }}" = "[all]" ]; then
47 |             # Verify some of the optional dependencies are available
48 |             python -c "import tifffile; import imagecodecs; import pandas;"
49 |           fi
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | packages_by_priority := webknossos cluster_tools docs
2 | packages_by_dependency := cluster_tools webknossos docs
3 | code_packages := cluster_tools webknossos
4 |
5 | define in_each_pkg_by_dependency
6 | for PKG in $(packages_by_dependency); do echo $$PKG; cd $$PKG; $1; cd ..; done
7 | endef
8 |
9 | define in_each_code_pkg
10 | for PKG in $(code_packages); do echo $$PKG; cd $$PKG; $1; cd ..; done
11 | endef
12 |
13 | .PHONY: list_packages_by_priority install format lint typecheck flt test
14 |
15 | list_packages_by_priority:
16 | 	@echo $(packages_by_priority)
17 |
18 | install:
19 | 	$(call in_each_pkg_by_dependency, uv sync --all-extras)
20 |
21 | format:
22 | 	$(call in_each_code_pkg, ./format.sh)
23 |
24 | lint:
25 | 	$(call in_each_code_pkg, ./lint.sh)
26 |
27 | typecheck:
28 | 	$(call in_each_code_pkg, ./typecheck.sh)
29 |
30 | flt:
31 | 	$(call in_each_code_pkg, ./format.sh && ./lint.sh && ./typecheck.sh)
32 |
33 | test:
34 | 	$(call in_each_code_pkg, ./test.sh)
35 |
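36 | # For illustration, `$(call in_each_code_pkg, ./test.sh)` expands to this
37 | # single shell loop (one directory per code package):
38 | #   for PKG in cluster_tools webknossos; do echo $PKG; cd $PKG; ./test.sh; cd ..; done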
--------------------------------------------------------------------------------
/_tooling/changelog_for_version.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -eEuo pipefail
3 | set +x
4 |
5 | if [ $# -eq 0 ]; then
6 |     VERSION_START="^## Unreleased"
7 | else
8 |     VERSION_START="^## \[$1\]"
9 | fi
10 | VERSION_END="^## \["
11 |
12 | for PKG in $(make list_packages_by_priority); do
13 |     if [ ! -f "$PKG/Changelog.md" ]; then
14 |         continue
15 |     fi
16 |
17 |     CHANGES="$(awk "/$VERSION_START/{flag=1;next} /$VERSION_END/{flag=0} flag" "$PKG/Changelog.md" | tail -n +2)"
18 |
19 |     WORDS_IN_CHANGES="$(echo "${CHANGES%x}" | grep --invert-match "###" | wc -w)"
20 |
21 |     if [ "$WORDS_IN_CHANGES" != "0" ]; then
22 |         echo "## $PKG"
23 |         echo "${CHANGES%x}"
24 |         echo
25 |         echo
26 |     fi
27 | done
28 |
--------------------------------------------------------------------------------
/_tooling/check_version.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from re import match
3 | from subprocess import run
4 |
5 | this_version = tuple(int(a) for a in sys.argv[1].split("."))
6 |
7 | max_git_tag = max(
8 |     [
9 |         tuple(int(a) for a in line[1:].split("."))
10 |         for line in run(["git", "tag"], capture_output=True)
11 |         .stdout.decode("utf-8")
12 |         .split("\n")
13 |         if match(r"^v\d+\.\d+\.\d+$", line)
14 |     ],
15 |     default=(0, 0, 0),  # no release tags yet: any version is allowed
16 | )
17 |
18 | if this_version > max_git_tag:
19 |     sys.exit(0)
20 | else:
21 |     sys.exit(1)
22 |
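23 | # Example (with hypothetical tags v1.2.2 and v1.2.3 in the repository):
24 | #   python _tooling/check_version.py 1.2.4  # exits 0: the release may proceed
25 | #   python _tooling/check_version.py 1.2.3  # exits 1: this version already exists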
--------------------------------------------------------------------------------
/_tooling/make_release.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -eEuo pipefail
3 | set +x
4 |
5 | if [[ $# -eq 0 ]] ; then
6 |     echo "Please supply a 'version' as argument."
7 |     exit 1
8 | fi
9 |
10 | PKG_VERSION="$1"
11 |
12 | if ! python _tooling/check_version.py ${PKG_VERSION}; then
13 |     echo "A higher version is already present."
14 |     exit 1
15 | fi
16 |
17 | for PKG in {cluster_tools,webknossos}/pyproject.toml; do
18 |     PKG="$(dirname "$PKG")"
19 |     echo "Creating release for $PKG"
20 |
21 |     pushd "$PKG" > /dev/null
22 |
23 |     python ../_tooling/changelog_bump_version.py "$PKG_VERSION"
24 |
25 |     popd > /dev/null
26 | done
27 |
--------------------------------------------------------------------------------
/_tooling/publish.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -eEuo pipefail
3 | set +x
4 |
5 | for PKG in {cluster_tools,webknossos}/pyproject.toml; do
6 |     PKG="$(dirname "$PKG")"
7 |     echo Publishing "$PKG"
8 |
9 |     pushd "$PKG" > /dev/null
10 |
11 |     cp pyproject.toml pyproject.toml.bak
12 |     PKG_VERSION="$(uvx dunamai from git)"
13 |
14 |     echo "__version__ = '$PKG_VERSION'" > ./"$PKG"/version.py
15 |
16 |     # Update version number in pyproject.toml
17 |     sed -i 's/version = "0.0.0"/version = "'"${PKG_VERSION}"'"/g' pyproject.toml
18 |
19 |     # Replace relative path dependencies (i.e. cluster-tools) with the current version:
20 |     sed -i 's/"cluster-tools"/"cluster-tools=='"${PKG_VERSION}"'"/g' pyproject.toml
21 |
22 |     uv build
23 |     uv publish
24 |
25 |     # Restore files
26 |     mv pyproject.toml.bak pyproject.toml
27 |
28 |     popd > /dev/null
29 | done
30 |
--------------------------------------------------------------------------------
/cluster_tools/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 scalable minds
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/cluster_tools/cluster_tools/_utils/call.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 |
3 |
4 | def call(command: str, stdin: str | None = None) -> tuple[str, str, int]:
5 |     """Invokes a shell command as a subprocess, optionally with some
6 |     data sent to the standard input. Returns the standard output data,
7 |     the standard error, and the return code.
8 |     """
9 |     # Pass the data via `input=`: `subprocess.run` then wires up the pipe and
10 |     # writes the data itself. (Passing `stdin=subprocess.PIPE` to `run()` would
11 |     # create a pipe that is never written to, silently dropping the data.)
12 |     p = subprocess.run(
13 |         command,
14 |         input=stdin,
15 |         check=False,
16 |         shell=True,
17 |         capture_output=True,
18 |         text=True,
19 |     )
20 |     return p.stdout, p.stderr, p.returncode
21 |
22 |
23 | class CommandError(Exception):
24 |     """Raised when a shell command exits abnormally."""
25 |
26 |     def __init__(self, command: str, code: int, stderr: str):
27 |         self.command = command
28 |         self.code = code
29 |         self.stderr = stderr
30 |
31 |     def __str__(self) -> str:
32 |         return f"{self.command!r} exited with status {self.code}: {self.stderr!r}"
33 |
34 |
35 | def chcall(command: str, stdin: str | None = None) -> tuple[str, str]:
36 |     """Like ``call`` but raises an exception when the return code is
37 |     nonzero. Only returns the stdout and stderr data.
38 |     """
39 |     stdout, stderr, code = call(command, stdin)
40 |     if code != 0:
41 |         raise CommandError(command, code, stderr)
42 |     return stdout, stderr
43 |
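44 | # Usage sketch:
45 | #   out, err, code = call("grep foo", stdin="foo\nbar\n")  # -> ("foo\n", "", 0)
46 | #   out, err = chcall("echo hello")                        # -> ("hello\n", "")
47 | #   chcall("false")                                        # raises CommandError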
--------------------------------------------------------------------------------
/cluster_tools/cluster_tools/_utils/reflection.py:
--------------------------------------------------------------------------------
1 | import os
2 | from collections.abc import Callable
3 |
4 | WARNING_TIMEOUT = 10 * 60  # seconds
5 |
6 |
7 | def file_path_to_absolute_module(file_path: str) -> str:
8 |     """
9 |     Given a file path, return an import path.
10 |     :param file_path: A file path.
11 |     :return: The dotted module path corresponding to the file path.
12 |     """
13 |     assert os.path.exists(file_path)
14 |     file_loc, _ = os.path.splitext(file_path)
15 |     directory, module = os.path.split(file_loc)
16 |     module_path = [module]
17 |     while True:
18 |         if os.path.exists(os.path.join(directory, "__init__.py")):
19 |             directory, package = os.path.split(directory)
20 |             module_path.append(package)
21 |         else:
22 |             break
23 |     path = ".".join(module_path[::-1])
24 |     return path
25 |
26 |
27 | def get_function_name(fun: Callable) -> str:
28 |     # When using functools.partial, __name__ does not exist
29 |     try:
30 |         return fun.__name__ if hasattr(fun, "__name__") else get_function_name(fun.func)  # type: ignore[attr-defined]
31 |     except Exception:
32 |         return ""
33 |
--------------------------------------------------------------------------------
/cluster_tools/cluster_tools/_utils/string_.py:
--------------------------------------------------------------------------------
1 | import os
2 | import random
3 | import string
4 |
5 | # The module name includes a _-suffix to avoid name clashes with the standard library string module.
6 |
7 |
8 | def local_filename(filename: str = "") -> str:
9 |     return os.path.join(os.getenv("CFUT_DIR", ".cfut"), filename)
10 |
11 |
12 | # Instantiate a dedicated generator to avoid being dependent on
13 | # the global seed which some external code might have set.
14 | random_generator = random.Random()
15 |
16 |
17 | def random_string(
18 |     length: int = 32, chars: str = (string.ascii_letters + string.digits)
19 | ) -> str:
20 |     return "".join(random_generator.choice(chars) for i in range(length))
21 |
22 |
23 | def with_preliminary_postfix(name: str) -> str:
24 |     return f"{name}.preliminary"
25 |
--------------------------------------------------------------------------------
/cluster_tools/cluster_tools/executor_protocol.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable, Iterable, Iterator
2 | from concurrent.futures import Future
3 | from contextlib import AbstractContextManager
4 | from os import PathLike
5 | from typing import (
6 |     Protocol,
7 |     TypeVar,
8 | )
9 |
10 | from typing_extensions import ParamSpec
11 |
12 | _T = TypeVar("_T")
13 | _P = ParamSpec("_P")
14 | _S = TypeVar("_S")
15 |
16 |
17 | class Executor(Protocol, AbstractContextManager["Executor"]):
18 |     @classmethod
19 |     def as_completed(cls, futures: list[Future[_T]]) -> Iterator[Future[_T]]: ...
20 |
21 |     def submit(
22 |         self,
23 |         __fn: Callable[_P, _T],
24 |         /,
25 |         *args: _P.args,
26 |         **kwargs: _P.kwargs,
27 |     ) -> Future[_T]: ...
28 |
29 |     def map_to_futures(
30 |         self,
31 |         fn: Callable[[_S], _T],
32 |         args: Iterable[_S],
33 |         output_pickle_path_getter: Callable[[_S], PathLike] | None = None,
34 |     ) -> list[Future[_T]]: ...
35 |
36 |     def map(
37 |         self,
38 |         fn: Callable[[_S], _T],
39 |         iterables: Iterable[_S],
40 |         timeout: float | None = None,
41 |         chunksize: int | None = None,
42 |     ) -> Iterator[_T]: ...
43 |
44 |     def forward_log(self, fut: Future[_T]) -> _T: ...
45 |
46 |     def shutdown(self, wait: bool = True, *, cancel_futures: bool = False) -> None: ...
47 |
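48 | # Usage sketch: any implementation of this protocol is used the same way,
49 | # e.g. (assuming cluster_tools.get_executor("sequential") is available):
50 | #   with cluster_tools.get_executor("sequential") as executor:
51 | #       assert executor.submit(pow, 2, 10).result() == 1024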
--------------------------------------------------------------------------------
/cluster_tools/cluster_tools/executors/multiprocessing_pickle.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable
2 | from concurrent.futures import Future
3 | from functools import partial
4 | from typing import TypeVar
5 |
6 | from typing_extensions import ParamSpec
7 |
8 | from cluster_tools._utils import pickling
9 | from cluster_tools.executors.multiprocessing_ import MultiprocessingExecutor
10 |
11 | _T = TypeVar("_T")
12 | _P = ParamSpec("_P")
13 | _S = TypeVar("_S")
14 |
15 |
16 | def _pickle_identity(obj: _S) -> _S:
17 |     return pickling.loads(pickling.dumps(obj))
18 |
19 |
20 | def _pickle_identity_executor(
21 |     fn: Callable[_P, _T],
22 |     *args: _P.args,
23 |     **kwargs: _P.kwargs,
24 | ) -> _T:
25 |     result = fn(*args, **kwargs)
26 |     return _pickle_identity(result)
27 |
28 |
29 | class MultiprocessingPickleExecutor(MultiprocessingExecutor):
30 |     """
31 |     The same as MultiprocessingExecutor, but always pickles input and output of the jobs.
32 |     Using this executor in automated tests ensures that cluster executors in
33 |     production won't run into pickling-related problems.
34 |     """
35 |
36 |     def submit(  # type: ignore[override]
37 |         self,
38 |         fn: Callable[_P, _T],
39 |         /,
40 |         *args: _P.args,
41 |         **kwargs: _P.kwargs,
42 |     ) -> Future[_T]:
43 |         (fn_pickled, args_pickled, kwargs_pickled) = _pickle_identity(
44 |             (fn, args, kwargs)
45 |         )
46 |         return super().submit(
47 |             partial(_pickle_identity_executor, fn_pickled),
48 |             *args_pickled,
49 |             **kwargs_pickled,
50 |         )
51 |
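52 | # Sketch: _pickle_identity round-trips a value through pickle, e.g.
53 | #   _pickle_identity({"a": 1}) == {"a": 1}
54 | # so pickling errors surface in local tests instead of on a cluster node.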
--------------------------------------------------------------------------------
/cluster_tools/cluster_tools/executors/sequential_pickle.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Callable
2 | from concurrent.futures import Future
3 | from functools import partial
4 | from typing import TypeVar
5 |
6 | from typing_extensions import ParamSpec
7 |
8 | from cluster_tools.executors.multiprocessing_pickle import (
9 |     _pickle_identity,
10 |     _pickle_identity_executor,
11 | )
12 | from cluster_tools.executors.sequential import SequentialExecutor
13 |
14 | _T = TypeVar("_T")
15 | _P = ParamSpec("_P")
16 | _S = TypeVar("_S")
17 |
18 |
19 | class SequentialPickleExecutor(SequentialExecutor):
20 |     """
21 |     The same as SequentialExecutor, but always pickles input and output of the jobs.
22 |     Using this executor in automated tests ensures that cluster executors in
23 |     production won't run into pickling-related problems. In contrast to the
24 |     MultiprocessingPickleExecutor, this executor has no multiprocessing overhead.
25 |     """
26 |
27 |     def submit(
28 |         self,
29 |         __fn: Callable[_P, _T],
30 |         *args: _P.args,
31 |         **kwargs: _P.kwargs,
32 |     ) -> Future[_T]:
33 |         (fn_pickled, args_pickled, kwargs_pickled) = _pickle_identity(
34 |             (__fn, args, kwargs)
35 |         )
36 |         return super().submit(
37 |             partial(_pickle_identity_executor, fn_pickled),
38 |             *args_pickled,
39 |             **kwargs_pickled,
40 |         )
41 |
--------------------------------------------------------------------------------
/cluster_tools/cluster_tools/schedulers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scalableminds/webknossos-libs/1202254197dc19b8c2a15f478915e74e63240db5/cluster_tools/cluster_tools/schedulers/__init__.py
--------------------------------------------------------------------------------
/cluster_tools/dockered-slurm/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 Giovanni Torres
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/cluster_tools/dockered-slurm/cgroup.conf:
--------------------------------------------------------------------------------
1 | # `autodetect`, the default, detects cgroup/v2 in the GitHub CI,
2 | # which fails during the initialization of the c1 and c2 nodes.
3 | CgroupPlugin=cgroup/v2
4 | ConstrainRAMSpace=yes
5 | ConstrainSwapSpace=yes
6 | IgnoreSystemd=yes
--------------------------------------------------------------------------------
/cluster_tools/dockered-slurm/register_cluster.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | docker exec slurmctld bash -c "/usr/bin/sacctmgr --immediate add cluster name=linux" && \
5 | docker compose restart slurmdbd slurmctld
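6 | # Usage sketch (assumes the dockered SLURM cluster is already running,
7 | # e.g. via `docker compose up -d` in this directory):  ./register_cluster.sh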
--------------------------------------------------------------------------------
/cluster_tools/dockered-slurm/slurmdbd.conf:
--------------------------------------------------------------------------------
1 | #
2 | # Example slurmdbd.conf file.
3 | #
4 | # See the slurmdbd.conf man page for more information.
5 | #
6 | # Archive info
7 | #ArchiveJobs=yes
8 | #ArchiveDir="/tmp"
9 | #ArchiveSteps=yes
10 | #ArchiveScript=
11 | #JobPurge=12
12 | #StepPurge=1
13 | #
14 | # Authentication info
15 | AuthType=auth/munge
16 | #AuthInfo=/var/run/munge/munge.socket.2
17 | #
18 | # slurmDBD info
19 | DbdAddr=slurmdbd
20 | DbdHost=slurmdbd
21 | #DbdPort=6819
22 | SlurmUser=slurm
23 | #MessageTimeout=300
24 | DebugLevel=4
25 | #DefaultQOS=normal,standby
26 | LogFile=/var/log/slurm/slurmdbd.log
27 | PidFile=/var/run/slurmdbd/slurmdbd.pid
28 | #PluginDir=/usr/lib/slurm
29 | #PrivateData=accounts,users,usage,jobs
30 | #TrackWCKey=yes
31 | #
32 | # Database info
33 | StorageType=accounting_storage/mysql
34 | StorageHost=mysql
35 | StorageUser=slurm
36 | StoragePass=password
37 | StorageLoc=slurm_acct_db
38 |
--------------------------------------------------------------------------------
/cluster_tools/format.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -eEuo pipefail
3 |
4 | if [ $# -eq 1 ] && [ "$1" = "check" ]; then
5 |     uv run ruff format --check .
6 | else
7 |     uv run ruff check --select I --fix . # format the imports
8 |     uv run ruff format .
9 | fi
10 |
--------------------------------------------------------------------------------
/cluster_tools/lint.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -eEuo pipefail
3 |
4 | uv run ruff check --fix .
--------------------------------------------------------------------------------
/cluster_tools/test.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -eEuo pipefail
3 |
4 | cd tests
5 | PYTEST_EXECUTORS=multiprocessing,multiprocessing_with_pickling,sequential uv run --frozen python -m pytest -sv test_all.py test_multiprocessing.py
6 |
7 | echo "Tests for the kubernetes, dask and SLURM executors are only run in the CI"
8 |
--------------------------------------------------------------------------------
/cluster_tools/tests/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG PYTHON_VERSION="3.11"
2 |
3 | FROM python:${PYTHON_VERSION}
4 |
5 | COPY --from=ghcr.io/astral-sh/uv:0.6.3 /uv /bin/uv
6 |
7 | RUN mkdir /cluster_tools
8 | COPY . /cluster_tools
9 |
10 | WORKDIR /cluster_tools
11 |
12 | RUN uv export --all-extras --output-file requirements.txt && \
13 |     uv pip sync --system requirements.txt && \
14 |     uv cache clean
15 |
16 |
--------------------------------------------------------------------------------
/cluster_tools/tests/cluster-config.yaml:
--------------------------------------------------------------------------------
1 | kind: Cluster
2 | apiVersion: kind.x-k8s.io/v1alpha4
3 | nodes:
4 | - role: control-plane
5 |   image: kindest/node:v1.23.3
6 |   extraMounts:
7 |   - hostPath: __PATH__
8 |     containerPath: __PATH__
9 |
--------------------------------------------------------------------------------
/cluster_tools/tests/guardless_multiprocessing.py:
--------------------------------------------------------------------------------
1 | import multiprocessing
2 |
3 | from cluster_tools import get_executor
4 |
5 | """
6 | This file is an example of an incorrect script setup.
7 | The module does not use the
8 |     if __name__ == "__main__":
9 |         main()
10 | pattern, which can lead to bugs when using multiprocessing.
11 | cluster_tools detects this incorrect usage and emits a warning;
12 | this file is used to test that warning mechanism.
13 | """
14 |
15 |
16 | multiprocessing.set_start_method("spawn", force=True)
17 |
18 |
19 | def worker_fn() -> bool:
20 |     return True
21 |
22 |
23 | def main() -> None:
24 |     res_fut = get_executor("multiprocessing").submit(worker_fn)
25 |     assert res_fut.result() is True, "Function should return True"
26 |     print("success")
27 |
28 |
29 | main()
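30 | # For reference, the correct structure would guard the entry point:
31 | #   if __name__ == "__main__":
32 | #       main()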
--------------------------------------------------------------------------------
/cluster_tools/tests/test_dask.py:
--------------------------------------------------------------------------------
1 | import os
2 | from typing import TYPE_CHECKING, Optional
3 |
4 | if TYPE_CHECKING:
5 |     from distributed import LocalCluster
6 |
7 | import cluster_tools
8 |
9 | _dask_cluster: Optional["LocalCluster"] = None
10 |
11 |
12 | def job(_arg: None) -> str:
13 |     return os.getcwd()
14 |
15 |
16 | def test_pass_cwd() -> None:
17 |     global _dask_cluster
18 |     if not _dask_cluster:
19 |         from distributed import LocalCluster, Worker
20 |
21 |         _dask_cluster = LocalCluster(
22 |             worker_class=Worker, resources={"mem": 20e9, "cpus": 4}, nthreads=6
23 |         )
24 |     with cluster_tools.get_executor(
25 |         "dask", job_resources={"address": _dask_cluster}
26 |     ) as executor:
27 |         tmp_path = os.path.realpath("/tmp")  # macOS redirects `/tmp` to `/private/tmp`
28 |         os.chdir(tmp_path)
29 |         assert list(executor.map(job, [None])) == [tmp_path]
30 |
--------------------------------------------------------------------------------
/cluster_tools/tests/test_deref_main.py:
--------------------------------------------------------------------------------
1 | import cluster_tools
2 |
3 |
4 | class TestClass:
5 |     pass
6 |
7 |
8 | def deref_fun_helper(obj: tuple[type[TestClass], TestClass, int, int]) -> None:
9 |     clss, inst, one, two = obj
10 |     assert one == 1
11 |     assert two == 2
12 |     assert isinstance(inst, clss)
13 |
14 |
15 | def test_dereferencing_main() -> None:
16 |     with cluster_tools.get_executor(
17 |         "slurm", debug=True, job_resources={"mem": "10M"}
18 |     ) as executor:
19 |         fut = executor.submit(deref_fun_helper, (TestClass, TestClass(), 1, 2))
20 |         fut.result()
21 |         futs = executor.map_to_futures(
22 |             deref_fun_helper, [(TestClass, TestClass(), 1, 2)]
23 |         )
24 |         futs[0].result()
25 |
26 |
27 | if __name__ == "__main__":
28 |     # Validate that slurm_executor.submit also works when being called from a __main__ module
29 |     test_dereferencing_main()
30 |
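31 | # Usage sketch: run this module directly to exercise submission from __main__
32 | # (assumes a reachable SLURM setup, e.g. dockered-slurm):
33 | #   python tests/test_deref_main.py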
--------------------------------------------------------------------------------
/cluster_tools/tests/test_kubernetes.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import cluster_tools
4 |
5 |
6 | def square(n: float) -> float:
7 |     return n * n
8 |
9 |
10 | def list_dir(path: str) -> list[str]:
11 |     return os.listdir(path)
12 |
13 |
14 | def test_simple() -> None:
15 |     with cluster_tools.get_executor(
16 |         "kubernetes",
17 |         job_resources={
18 |             "memory": "100M",
19 |             "python_executable": "python",
20 |             "image": "scalableminds/cluster-tools:latest",
21 |             "node_selector": {},
22 |             "namespace": "cluster-tools",
23 |         },
24 |         debug=True,
25 |     ) as executor:
26 |         assert list(executor.map(square, [n + 2 for n in range(2)])) == [4, 9]
27 |
28 |
29 | def test_mounts() -> None:
30 |     parent_dir = os.path.abspath(os.path.join(os.pardir, os.curdir))
31 |     with cluster_tools.get_executor(
32 |         "kubernetes",
33 |         job_resources={
34 |             "memory": "100M",
35 |             "python_executable": "python",
36 |             "image": "scalableminds/cluster-tools:latest",
37 |             "node_selector": {},
38 |             "namespace": "cluster-tools",
39 |             "mounts": [parent_dir],
40 |         },
41 |         debug=True,
42 |     ) as executor:
43 |         assert "cluster_tools" in list(executor.map(list_dir, [parent_dir]))[0]
44 |
45 |     with cluster_tools.get_executor(
46 |         "kubernetes",
47 |         job_resources={
48 |             "memory": "100M",
49 |             "python_executable": "python",
50 |             "image": "scalableminds/cluster-tools:latest",
51 |             "node_selector": {},
52 |             "namespace": "cluster-tools",
53 |         },
54 |         debug=True,
55 |     ) as executor:
56 |         assert "cluster_tools" not in list(executor.map(list_dir, [parent_dir]))[0]
57 |
--------------------------------------------------------------------------------
/cluster_tools/typecheck.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | set -eEuo pipefail
3 |
4 | echo "Typecheck cluster_tools..."
5 | uv run python -m mypy -p cluster_tools
6 |
7 | echo "Typecheck tests..."
8 | uv run python -m mypy -p tests
9 |
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
1 | # Documentation
2 |
3 | ## Development
4 | Run `./generate.sh` to open a live-reloading server rendering the documentation.
5 |
6 | ## Production
7 |
8 | Run `./generate.sh --persist` to build the production website/HTML into `out`. Building and deploying the website is handled by GitHub Actions.
9 |
10 |
11 | ## Further links
12 |
13 | * https://www.mkdocs.org
14 | * https://squidfunk.github.io/mkdocs-material
15 | * https://facelessuser.github.io/pymdown-extensions
16 | * https://python-markdown.github.io/extensions/#officially-supported-extensions
17 | * https://mkdocstrings.github.io/
18 |
--------------------------------------------------------------------------------
/docs/generate.sh:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env bash
2 | set -Eeo pipefail
3 |
4 | if [ ! -d "wk-repo" ]; then
5 |     echo
6 |     echo ERROR!
7 |     echo 'Either link or clone the webknossos repository to "docs/wk-repo", e.g. with'
8 |     echo 'git clone --depth 1 git@github.com:scalableminds/webknossos.git docs/wk-repo'
9 |     exit 1
10 | fi
11 | rm -rf src/api/webknossos
12 | uv run --frozen generate_api_doc_pages.py
13 |
14 | if [ $# -eq 1 ] && [ "$1" = "--persist" ]; then
15 |     uv run --with black --frozen mkdocs build
16 | else
17 |     uv run --with black --frozen mkdocs serve -a localhost:8197 --watch-theme
18 | fi
19 |
--------------------------------------------------------------------------------
/docs/linkcheckerrc:
--------------------------------------------------------------------------------
1 | [filtering]
2 | ignorewarnings=http-redirected
--------------------------------------------------------------------------------
/docs/overrides/css/lists.css:
--------------------------------------------------------------------------------
1 | /* Unordered list