├── .devcontainer ├── devcontainer.json └── setup.sh ├── .editorconfig ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE ├── dependabot.yml ├── pull_request_template.md └── workflows │ ├── EVENT_merge_to_master.yml │ ├── EVENT_pull_request.yml │ ├── EVENT_release.yml │ ├── EVENT_update-linear-labels.yml │ ├── JOB_e2e.yml │ ├── JOB_format.yml │ ├── JOB_generate_documentation.yml │ ├── JOB_get_changed_files.yml │ ├── JOB_lint.yml │ ├── JOB_python_checks.yml │ ├── JOB_slack_message.yml │ ├── JOB_tests.yml │ ├── JOB_typecheck.yml │ ├── codeql.yml │ ├── dependency-review.yml │ ├── scorecards.yml │ └── version_bump.yml ├── .gitignore ├── .hooks └── typechecking_installer.sh ├── .vscode └── settings.json ├── Dockerfile ├── LICENSE ├── README.md ├── darwin ├── __init__.py ├── backend_v2.py ├── cli.py ├── cli_functions.py ├── client.py ├── config.py ├── dataset │ ├── __init__.py │ ├── download_manager.py │ ├── identifier.py │ ├── local_dataset.py │ ├── release.py │ ├── remote_dataset.py │ ├── remote_dataset_v2.py │ ├── split_manager.py │ ├── upload_manager.py │ └── utils.py ├── datatypes.py ├── doc_enum.py ├── exceptions.py ├── exporter │ ├── __init__.py │ ├── exporter.py │ └── formats │ │ ├── __init__.py │ │ ├── coco.py │ │ ├── cvat.py │ │ ├── darwin.py │ │ ├── dataloop.py │ │ ├── helpers │ │ ├── __init__.py │ │ └── yolo_class_builder.py │ │ ├── instance_mask.py │ │ ├── mask.py │ │ ├── nifti.py │ │ ├── numpy_encoder.py │ │ ├── pascalvoc.py │ │ ├── semantic_mask.py │ │ ├── semantic_mask_grey.py │ │ ├── semantic_mask_index.py │ │ ├── yolo.py │ │ └── yolo_segmented.py ├── extractor │ ├── __init__.py │ └── video.py ├── future │ ├── __init__.py │ ├── core │ │ ├── __init__.py │ │ ├── client.py │ │ ├── datasets │ │ │ ├── __init__.py │ │ │ ├── create_dataset.py │ │ │ ├── get_dataset.py │ │ │ ├── list_datasets.py │ │ │ └── remove_dataset.py │ │ ├── items │ │ │ ├── __init__.py │ │ │ ├── archive_items.py │ │ │ ├── assign_items.py │ │ │ ├── delete_items.py │ │ │ ├── get.py │ │ │ ├── 
move_items.py │ │ │ ├── move_items_to_folder.py │ │ │ ├── restore_items.py │ │ │ ├── set_item_layout.py │ │ │ ├── set_item_priority.py │ │ │ ├── set_stage_to_items.py │ │ │ ├── tag_items.py │ │ │ ├── untag_items.py │ │ │ └── uploads.py │ │ ├── properties │ │ │ ├── __init__.py │ │ │ ├── create.py │ │ │ ├── get.py │ │ │ └── update.py │ │ ├── team │ │ │ ├── __init__.py │ │ │ ├── get_raw.py │ │ │ └── get_team.py │ │ ├── types │ │ │ ├── __init__.py │ │ │ ├── common.py │ │ │ ├── item.py │ │ │ └── query.py │ │ ├── utils │ │ │ └── pathutils.py │ │ └── workflows │ │ │ ├── __init__.py │ │ │ ├── get_workflow.py │ │ │ ├── get_workflows.py │ │ │ └── list_workflows.py │ ├── data_objects │ │ ├── __init__.py │ │ ├── advanced_filters.py │ │ ├── darwinV2.py │ │ ├── dataset.py │ │ ├── item.py │ │ ├── page.py │ │ ├── properties.py │ │ ├── pydantic_base.py │ │ ├── release.py │ │ ├── sorting.py │ │ ├── team.py │ │ ├── team_member_role.py │ │ ├── typing.py │ │ ├── validators.py │ │ └── workflow.py │ ├── exceptions.py │ ├── helpers │ │ ├── __init__.py │ │ ├── assertion.py │ │ └── exception_handler.py │ ├── meta │ │ ├── __init__.py │ │ ├── client.py │ │ ├── objects │ │ │ ├── __init__.py │ │ │ ├── base.py │ │ │ ├── dataset.py │ │ │ ├── item.py │ │ │ ├── stage.py │ │ │ ├── team.py │ │ │ ├── team_member.py │ │ │ ├── v7_id.py │ │ │ └── workflow.py │ │ ├── queries │ │ │ ├── __init__.py │ │ │ ├── dataset.py │ │ │ ├── item.py │ │ │ ├── item_id.py │ │ │ ├── stage.py │ │ │ ├── team_member.py │ │ │ └── workflow.py │ │ └── types │ │ │ └── __init__.py │ ├── pydantic_base.py │ └── tests │ │ ├── __init__.py │ │ ├── core │ │ ├── __init__.py │ │ ├── datasets │ │ │ ├── __init__.py │ │ │ ├── fixtures.py │ │ │ ├── test_create_dataset.py │ │ │ ├── test_delete_dataset.py │ │ │ ├── test_get_dataset.py │ │ │ └── test_list_datasets.py │ │ ├── fixtures.py │ │ ├── items │ │ │ ├── __init__.py │ │ │ ├── fixtures.py │ │ │ ├── test_archive_items.py │ │ │ ├── test_assign_items.py │ │ │ ├── test_delete_items.py │ │ │ ├── 
test_get_items.py │ │ │ ├── test_item_data_object.py │ │ │ ├── test_move_items.py │ │ │ ├── test_move_items_to_folder.py │ │ │ ├── test_restore_items.py │ │ │ ├── test_set_item_layout.py │ │ │ ├── test_set_priority.py │ │ │ ├── test_set_stage_to_items.py │ │ │ ├── test_tag_items.py │ │ │ ├── test_untag_items.py │ │ │ └── test_upload_items.py │ │ ├── properties │ │ │ ├── __init__.py │ │ │ ├── test_create.py │ │ │ ├── test_get.py │ │ │ └── test_update.py │ │ ├── test_client.py │ │ ├── test_query.py │ │ ├── types │ │ │ ├── __init__.py │ │ │ └── test_querystring.py │ │ └── workflows │ │ │ ├── __init__.py │ │ │ ├── test_get_workflow.py │ │ │ ├── test_get_workflows.py │ │ │ └── test_list_workflows.py │ │ ├── data │ │ ├── .v7 │ │ │ ├── metadata.json │ │ │ ├── metadata_no_item_level_properties.json │ │ │ └── metadata_with_item_level_properties.json │ │ └── base_annotation.json │ │ ├── data_objects │ │ ├── __init__.py │ │ ├── fixtures.py │ │ ├── test_advanced_filters.py │ │ ├── test_darwin.py │ │ ├── test_general_darwin_objects.py │ │ ├── test_page.py │ │ ├── test_properties.py │ │ ├── test_sorting.py │ │ ├── test_team.py │ │ ├── test_validators.py │ │ └── workflow │ │ │ ├── __init__.py │ │ │ ├── data │ │ │ ├── dataset.json │ │ │ ├── edge.json │ │ │ ├── stage.json │ │ │ ├── stage_config.json │ │ │ ├── user.json │ │ │ └── workflow.json │ │ │ ├── invalidvaluefortest.py │ │ │ ├── test_wfdataset.py │ │ │ ├── test_wfedge.py │ │ │ ├── test_wfstage.py │ │ │ ├── test_wfstage_config.py │ │ │ ├── test_wfuser.py │ │ │ └── test_workflow.py │ │ ├── fixtures.py │ │ └── meta │ │ ├── __init__.py │ │ ├── fixtures.py │ │ ├── objects │ │ ├── fixtures.py │ │ ├── test_datasetmeta.py │ │ ├── test_itemmeta.py │ │ ├── test_stagemeta.py │ │ ├── test_teammeta.py │ │ ├── test_v7_id.py │ │ └── test_workflowmeta.py │ │ ├── queries │ │ ├── test_dataset.py │ │ ├── test_item.py │ │ ├── test_item_id.py │ │ ├── test_stage.py │ │ ├── test_team_member.py │ │ └── test_workflow.py │ │ └── test_client.py ├── 
importer │ ├── __init__.py │ ├── formats │ │ ├── __init__.py │ │ ├── coco.py │ │ ├── csv_tags.py │ │ ├── csv_tags_video.py │ │ ├── darwin.py │ │ ├── dataloop.py │ │ ├── labelbox.py │ │ ├── labelbox_schemas.py │ │ ├── nifti.py │ │ ├── nifti_schemas.py │ │ ├── pascal_voc.py │ │ ├── superannotate.py │ │ └── superannotate_schemas.py │ └── importer.py ├── item.py ├── item_sorter.py ├── options.py ├── path_utils.py ├── torch │ ├── __init__.py │ ├── dataset.py │ ├── transforms.py │ └── utils.py ├── utils │ ├── __init__.py │ ├── flatten_list.py │ ├── get_item_count.py │ └── utils.py ├── validators.py └── version │ └── __init__.py ├── darwin_demo.py ├── deploy ├── __init__.py ├── _filter_files.py ├── _move_tickets_to_done.py ├── check_python.sh ├── confirm_main_branch_deployability.py ├── create_release.sh ├── format_lint.sh ├── increase_version.py ├── nightly_package_setup.py └── revert_nightly_setup.py ├── docs ├── DEV.md ├── README.md └── release_process.md ├── e2e_tests ├── .env.example ├── __init__.py ├── cli │ ├── __init__.py │ ├── convert │ │ └── __init__.py │ ├── test_convert.py │ ├── test_full_cycle.py │ ├── test_import.py │ ├── test_pull.py │ └── test_push.py ├── conftest.py ├── data │ ├── base_annotation.json │ ├── convert │ │ ├── coco │ │ │ ├── from │ │ │ │ └── base_annotation.json │ │ │ └── to │ │ │ │ └── output.json │ │ ├── cvat │ │ │ ├── from │ │ │ │ └── 000000021295.json │ │ │ └── to │ │ │ │ └── output.xml │ │ ├── instance_mask │ │ │ ├── from │ │ │ │ └── 000000021295.json │ │ │ └── to │ │ │ │ └── masks │ │ │ │ ├── 000000021295_00000.png │ │ │ │ ├── 000000021295_00001.png │ │ │ │ └── 000000021295_00002.png │ │ ├── nifti-legacy-scaling │ │ │ ├── from │ │ │ │ └── 2044737.fat.nii.json │ │ │ └── to │ │ │ │ └── 2044737.fat.nii │ │ │ │ └── 0 │ │ │ │ ├── 2044737.fat_Reference_sBAT.nii.gz │ │ │ │ └── 2044737.fat_test_mask_basic_m.nii.gz │ │ ├── nifti-multislot │ │ │ ├── from │ │ │ │ └── 2044737.fat.nii.json │ │ │ └── to │ │ │ │ └── 2044737.fat.nii │ │ │ │ ├── 0 │ │ │ 
│ ├── 2044737.fat_Reference_sBAT.nii.gz │ │ │ │ └── 2044737.fat_test_mask_basic_m.nii.gz │ │ │ │ └── 1 │ │ │ │ ├── 2044737.fat_Reference_sBAT.nii.gz │ │ │ │ └── 2044737.fat_test_mask_basic_m.nii.gz │ │ ├── nifti-no-legacy-scaling │ │ │ ├── from │ │ │ │ └── 2044737.fat.nii.json │ │ │ └── to │ │ │ │ └── 2044737.fat.nii │ │ │ │ └── 0 │ │ │ │ ├── 2044737.fat_Reference_sBAT.nii.gz │ │ │ │ └── 2044737.fat_test_mask_basic_m.nii.gz │ │ ├── nifti │ │ │ ├── from │ │ │ │ └── hippocampus_001_mpr_1_test_hippo.nii.json │ │ │ └── to │ │ │ │ ├── hippocampus_001_mpr_1_test_hippo_test_mask_basic_m.nii.gz │ │ │ │ └── hippocampus_001_mpr_1_test_hippo_test_polygon_basic.nii.gz │ │ ├── pascalvoc │ │ │ ├── from │ │ │ │ └── 000000021295.json │ │ │ └── to │ │ │ │ └── 000000021295.xml │ │ ├── semantic_mask │ │ │ ├── from │ │ │ │ └── 221b-2.json │ │ │ └── to │ │ │ │ └── class_mapping.csv │ │ ├── yolo │ │ │ ├── from │ │ │ │ └── test_input_with_bboxes_and_polys.json │ │ │ └── to │ │ │ │ └── darknet.labels │ │ └── yolov8 │ │ │ ├── from │ │ │ └── test_input_with_bboxes_and_polys.json │ │ │ └── to │ │ │ └── darknet.labels │ ├── import │ │ ├── coco_annotations │ │ │ └── output.json │ │ ├── csv_tag_annotations │ │ │ └── csv_tags.csv │ │ ├── csv_tag_video_annotations │ │ │ └── csv_tags_video.csv │ │ ├── image_annotations_item_level_properties_no_annotations │ │ │ ├── image_1.json │ │ │ ├── image_2.json │ │ │ ├── image_3.json │ │ │ ├── image_4.json │ │ │ ├── image_5.json │ │ │ ├── image_6.json │ │ │ ├── image_7.json │ │ │ └── image_8.json │ │ ├── image_annotations_split_in_two_files │ │ │ ├── annotations-keypoint_ellipse_polygon_bbox │ │ │ │ ├── image_1-keypoint_ellipse_polygon_bbox.json │ │ │ │ ├── image_2-keypoint_ellipse_polygon_bbox.json │ │ │ │ ├── image_3-keypoint_ellipse_polygon_bbox.json │ │ │ │ ├── image_4-keypoint_ellipse_polygon_bbox.json │ │ │ │ ├── image_5-keypoint_ellipse_polygon_bbox.json │ │ │ │ ├── image_6-keypoint_ellipse_polygon_bbox.json │ │ │ │ ├── 
image_7-keypoint_ellipse_polygon_bbox.json │ │ │ │ └── image_8-keypoint_ellipse_polygon_bbox.json │ │ │ └── annotations-tag_skeleton_mask_line │ │ │ │ ├── image_1-tag_skeleton_mask_line.json │ │ │ │ ├── image_2-tag_skeleton_mask_line.json │ │ │ │ ├── image_3-tag_skeleton_mask_line.json │ │ │ │ ├── image_4-tag_skeleton_mask_line.json │ │ │ │ ├── image_5-tag_skeleton_mask_line.json │ │ │ │ ├── image_6-tag_skeleton_mask_line.json │ │ │ │ ├── image_7-tag_skeleton_mask_line.json │ │ │ │ └── image_8-tag_skeleton_mask_line.json │ │ ├── image_annotations_with_item_level_properties │ │ │ ├── image_1.json │ │ │ ├── image_2.json │ │ │ ├── image_3.json │ │ │ ├── image_4.json │ │ │ ├── image_5.json │ │ │ ├── image_6.json │ │ │ ├── image_7.json │ │ │ └── image_8.json │ │ ├── image_annotations_with_subtypes │ │ │ ├── image_1.json │ │ │ ├── image_2.json │ │ │ ├── image_3.json │ │ │ ├── image_4.json │ │ │ ├── image_5.json │ │ │ ├── image_6.json │ │ │ ├── image_7.json │ │ │ └── image_8.json │ │ ├── image_annotations_without_subtypes │ │ │ ├── image_1.json │ │ │ ├── image_2.json │ │ │ ├── image_3.json │ │ │ ├── image_4.json │ │ │ ├── image_5.json │ │ │ ├── image_6.json │ │ │ ├── image_7.json │ │ │ └── image_8.json │ │ ├── image_new_annotations_with_item_level_properties │ │ │ ├── .v7 │ │ │ │ └── metadata.json │ │ │ ├── image_1.json │ │ │ ├── image_2.json │ │ │ ├── image_3.json │ │ │ ├── image_4.json │ │ │ ├── image_5.json │ │ │ ├── image_6.json │ │ │ ├── image_7.json │ │ │ └── image_8.json │ │ ├── image_new_annotations_with_properties │ │ │ ├── .v7 │ │ │ │ └── metadata.json │ │ │ ├── image_1.json │ │ │ ├── image_2.json │ │ │ ├── image_3.json │ │ │ ├── image_4.json │ │ │ ├── image_5.json │ │ │ ├── image_6.json │ │ │ ├── image_7.json │ │ │ └── image_8.json │ │ ├── image_new_basic_annotations │ │ │ ├── image_1.json │ │ │ ├── image_2.json │ │ │ ├── image_3.json │ │ │ ├── image_4.json │ │ │ ├── image_5.json │ │ │ ├── image_6.json │ │ │ ├── image_7.json │ │ │ └── image_8.json │ │ ├── 
multi_channel_annotations_aligned_with_non_base_slot │ │ │ └── multi_channel_item.json │ │ ├── multi_channel_annotations_with_slots_defined │ │ │ └── multi_channel_item.json │ │ ├── multi_channel_annotations_without_slots_defined │ │ │ └── multi_channel_item.json │ │ ├── multi_slotted_annotations_with_dicom_slots │ │ │ └── multi_slotted_dicom_item.json │ │ ├── multi_slotted_annotations_with_slots_defined │ │ │ └── multi_slotted_item.json │ │ ├── multi_slotted_annotations_without_slots_defined │ │ │ └── multi_slotted_item.json │ │ ├── pascal_voc_annotations │ │ │ ├── image_1.xml │ │ │ ├── image_2.xml │ │ │ ├── image_3.xml │ │ │ ├── image_4.xml │ │ │ ├── image_5.xml │ │ │ ├── image_6.xml │ │ │ ├── image_7.xml │ │ │ └── image_8.xml │ │ ├── video_annotations_small_video │ │ │ └── mini_uct.json │ │ ├── video_annotations_with_subtypes │ │ │ └── mini_uct.json │ │ └── video_annotations_without_subtypes │ │ │ └── mini_uct.json │ └── push │ │ ├── 25_frame_video.zip │ │ ├── flat_directory_of_2_dicom_files.zip │ │ ├── flat_directory_of_6_images.zip │ │ ├── mixed_filetypes.zip │ │ └── nested_directory_of_images.zip ├── exceptions.py ├── helpers.py ├── objects.py ├── pytest.ini ├── sdk │ ├── __init__.py │ └── future │ │ └── core │ │ ├── __init__.py │ │ └── test_properties.py ├── setup_tests.py ├── test_darwin.py └── test_example.py ├── mkdocs.yml ├── poetry.lock ├── poetry.toml ├── pyproject.toml ├── pytest.ini ├── source ├── _static │ └── js │ │ └── custom.js ├── conf.py ├── index.rst └── modules.rst ├── test.dcm ├── tests ├── __init__.py ├── darwin │ ├── __init__.py │ ├── cli_functions_test.py │ ├── client_test.py │ ├── data │ │ ├── annotation_raster_layer_data.json │ │ ├── annotation_with_properties.json │ │ ├── annotation_without_properties.json │ │ ├── dataloop.example.json │ │ ├── expected_classes_grey.csv │ │ ├── expected_classes_index.csv │ │ ├── expected_classes_rgb.csv │ │ ├── expected_image_grey.png │ │ ├── expected_image_index.png │ │ ├── expected_image_rgb.png │ │ 
├── expected_mask.bin │ │ ├── expected_mask.png │ │ ├── expected_polygons_image_grey.png │ │ ├── expected_polygons_image_index.png │ │ ├── expected_polygons_image_rgb.png │ │ ├── metadata.json │ │ ├── metadata_empty_properties.json │ │ ├── metadata_identical_properties_different_classes.json │ │ ├── metadata_missing_annotation_property_values.json │ │ ├── metadata_missing_section_property_values.json │ │ ├── metadata_nested_properties.json │ │ ├── nifti │ │ │ ├── BRAINIX_NIFTI_ROI.nii.gz │ │ │ ├── legacy │ │ │ │ ├── .v7 │ │ │ │ │ └── metadata.json │ │ │ │ ├── BRAINIX_NIFTI_ROI.nii.json │ │ │ │ └── sample_nifti.nii.json │ │ │ ├── nifti.json │ │ │ ├── no-legacy │ │ │ │ ├── .v7 │ │ │ │ │ └── metadata.json │ │ │ │ └── BRAINIX_NIFTI_ROI.nii.json │ │ │ └── sample_nifti.nii │ │ ├── push_test_dir.zip │ │ ├── test_video.mp4 │ │ ├── test_video_corrupted.mp4 │ │ ├── test_video_with_audio.mp4 │ │ └── video_annotation_raster_layer_data.json │ ├── dataset │ │ ├── data │ │ │ └── manifest_examples │ │ │ │ ├── manifest_1.txt.test │ │ │ │ └── manifest_2.txt.test │ │ ├── dataset_utils_test.py │ │ ├── download_manager_test.py │ │ ├── identifier_test.py │ │ ├── item_test.py │ │ ├── local_dataset_test.py │ │ ├── release_test.py │ │ ├── remote_dataset_test.py │ │ ├── resources │ │ │ ├── random_train │ │ │ └── stratified_polygon_train │ │ ├── split_manager_test.py │ │ └── upload_manager_test.py │ ├── datatypes_test.py │ ├── exporter │ │ └── formats │ │ │ ├── export_coco_test.py │ │ │ ├── export_darwin_test.py │ │ │ ├── export_mask_test.py │ │ │ ├── export_nifti_test.py │ │ │ ├── export_pascalvoc_test.py │ │ │ ├── export_yolo_segmented_test.py │ │ │ ├── export_yolo_test.py │ │ │ └── helpers │ │ │ └── __init__.py │ ├── extractor │ │ └── video_test.py │ ├── importer │ │ ├── formats │ │ │ ├── import_coco_test.py │ │ │ ├── import_csv_tags_test.py │ │ │ ├── import_csv_tags_video_test.py │ │ │ ├── import_darwin_test.py │ │ │ ├── import_dataloop_test.py │ │ │ ├── import_labelbox_test.py │ │ │ ├── 
import_nifti_test.py │ │ │ ├── import_pascalvoc_test.py │ │ │ └── import_superannotate_test.py │ │ ├── importer_mcpu_test.py │ │ └── importer_test.py │ ├── item_sorter_test.py │ ├── path_utils_test.py │ ├── torch │ │ ├── __init__.py │ │ ├── dataset_test.py │ │ ├── transform_test.py │ │ └── utils_test.py │ ├── utils │ │ ├── find_files_test.py │ │ ├── flatten_list_test.py │ │ ├── get_image_path_from_stream_test.py │ │ └── get_items_count_test.py │ └── utils_test.py ├── data.zip ├── dataset.zip ├── dataset_with_properties.zip ├── e2e_test_internals │ ├── __init__.py │ └── test_run_cli_command.py ├── fixtures.py ├── model_training │ ├── test_image_model_training.py │ └── test_video_model_training.py ├── model_training_data.zip ├── server_example_returns.py ├── test_setup_helpers.py └── version_test.py └── video_annotations_with_subtypes └── mini_uct.json /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "My Dev Container", 3 | "image": "mcr.microsoft.com/devcontainers/base:ubuntu", 4 | "features": {}, 5 | "customizations": { 6 | "vscode": { 7 | "extensions": [ 8 | "github.copilot", 9 | "github.copilot-chat", 10 | "github.github-vscode-theme", 11 | "github.vscode-pull-request-github", 12 | "ms-ceintl.vscode-language-pack-it", 13 | "ms-python.python", 14 | "ms-vscode.vscode-typescript-next" 15 | ] 16 | } 17 | }, 18 | "postCreateCommand": "bash .devcontainer/setup.sh", 19 | "mounts": [ 20 | "source=${localWorkspaceFolder},target=/workspace,type=bind" 21 | ] 22 | } -------------------------------------------------------------------------------- /.devcontainer/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e # Exit on error 4 | 5 | # Update and install dependencies 6 | sudo apt update -y 7 | sudo apt upgrade -y 8 | sudo apt-get install -y python3-openssl 9 | sudo apt install -y make build-essential libssl-dev zlib1g-dev 
libbz2-dev libreadline-dev libsqlite3-dev \ 10 | wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev git 11 | 12 | # Install pyenv 13 | curl https://pyenv.run | bash 14 | 15 | # Add pyenv to shell profile 16 | echo 'export PYENV_ROOT="$HOME/.pyenv"' >> ~/.profile 17 | echo '[[ -d $PYENV_ROOT/bin ]] && export PATH="$PYENV_ROOT/bin:$PATH"' >> ~/.profile 18 | echo 'eval "$(pyenv init --path)"' >> ~/.profile 19 | echo 'eval "$(pyenv init -)"' >> ~/.profile 20 | 21 | # Apply changes 22 | source ~/.profile 23 | 24 | # Install Python 3.10 using pyenv 25 | pyenv install 3.10.0 26 | pyenv global 3.10.0 27 | pyenv local 3.10 28 | 29 | # Install Poetry 30 | curl -sSL https://install.python-poetry.org | python3 - 31 | export PATH="$HOME/.local/bin:$PATH" 32 | 33 | # Initialize Poetry environment 34 | PROJECT_DIR="$(pwd)" 35 | export VENV_PATH="$PROJECT_DIR/.venv" 36 | poetry config virtualenvs.in-project true 37 | 38 | poetry install --all-extras 39 | source "$VENV_PATH/bin/activate" #poetry shell 40 | 41 | # FFmpeg 42 | FFMPEG_VERSION="6.0" # Ensure this is version 5 or higher 43 | mkdir -p $HOME/.local/bin 44 | echo "Downloading FFmpeg $FFMPEG_VERSION..." 45 | cd $HOME/.local/bin 46 | wget -O ffmpeg.tar.xz "https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz" 47 | tar -xf ffmpeg.tar.xz --strip-components=1 48 | rm ffmpeg.tar.xz 49 | echo 'export PATH="$HOME/.local/bin:$PATH"' >> ~/.bashrc 50 | export PATH="$HOME/.local/bin:$PATH" 51 | cd - 52 | 53 | #Run tests to verify all is good 54 | pytest 55 | 56 | echo "Setup complete!" 
-------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # http://editorconfig.org 2 | root = true 3 | 4 | [*] 5 | indent_style = space 6 | indent_size = 4 7 | end_of_line = lf 8 | charset = utf-8 9 | trim_trailing_whitespace = true 10 | insert_final_newline = true 11 | 12 | # Use 4 spaces for the Python files 13 | [*.py] 14 | indent_size = 4 15 | max_line_length = 160 16 | 17 | # The JSON files contain newlines inconsistently 18 | [*.json] 19 | insert_final_newline = ignore 20 | 21 | # Makefiles always use tabs for indentation 22 | [Makefile] 23 | indent_style = tab 24 | 25 | [*.md] 26 | trim_trailing_whitespace = false 27 | 28 | [*.yml] 29 | indent_size = 2 30 | insert_final_newline = true 31 | 32 | [*.yaml] 33 | indent_size = 2 34 | insert_final_newline = true 35 | 36 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Notify code owners about changes to GitHub actions 2 | .github/ @umbertoDifa @saurbhc @aleksandar-ivic @vvihorev 3 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "monthly" 7 | groups: 8 | github-actions: 9 | patterns: 10 | - '*' 11 | reviewers: 12 | - "saurbhc" 13 | - "umbertoDifa" 14 | - "aleksandar-ivic" 15 | open-pull-requests-limit: 1 16 | labels: 17 | - "github-actions-updates" 18 | 19 | - package-ecosystem: "pip" 20 | directory: "/" 21 | 
schedule: 22 | interval: "monthly" 23 | ignore: 24 | # For all packages, ignore all major updates 25 | - dependency-name: "*" 26 | update-types: 27 | - "version-update:semver-major" 28 | groups: 29 | python-requirements: 30 | patterns: 31 | - '*' 32 | reviewers: 33 | - "saurbhc" 34 | - "umbertoDifa" 35 | - "aleksandar-ivic" 36 | open-pull-requests-limit: 1 37 | labels: 38 | - "dependencies" 39 | 40 | - package-ecosystem: docker 41 | directory: / 42 | schedule: 43 | interval: daily 44 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | # Problem 2 | (describe the problem here) 3 | 4 | # Solution 5 | (Overview of the solution logic) 6 | 7 | # Changelog 8 | (Will appear in release docs) 9 | -------------------------------------------------------------------------------- /.github/workflows/EVENT_merge_to_master.yml: -------------------------------------------------------------------------------- 1 | name: merge_to_master 2 | run-name: Merge to master 3 | 4 | on: 5 | push: 6 | branches: 7 | - master 8 | workflow_call: 9 | 10 | permissions: 11 | contents: read 12 | id-token: write # Necessary for the generate documentation job 13 | 14 | jobs: 15 | run_tests: 16 | name: Run tests 17 | uses: ./.github/workflows/JOB_tests.yml 18 | 19 | e2e_tests: 20 | name: E2E Tests 21 | uses: ./.github/workflows/JOB_e2e.yml 22 | secrets: inherit 23 | 24 | documentation: 25 | name: Documentation 26 | uses: ./.github/workflows/JOB_generate_documentation.yml 27 | secrets: inherit 28 | permissions: 29 | id-token: write 30 | contents: read 31 | 32 | warn_on_fail: 33 | needs: [run_tests, e2e_tests, documentation] 34 | if: ${{ failure() }} 35 | name: Slack message us on fail 36 | uses: ./.github/workflows/JOB_slack_message.yml 37 | secrets: inherit 38 | with: 39 | at_team: true 40 | icon: ":warning:" 41 | message: "Master is failing after a push 
event, please review at ${{ github.event.repository.html_url }}/actions/runs/${{ github.run_id }}" 42 | 43 | success: 44 | needs: [run_tests, e2e_tests, documentation] 45 | if: ${{ success() }} 46 | name: Success 47 | runs-on: ubuntu-latest 48 | permissions: 49 | contents: write 50 | statuses: write 51 | steps: 52 | - name: Harden Runner 53 | uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 54 | with: 55 | egress-policy: audit 56 | 57 | - name: Set branch status to success 58 | uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 59 | with: 60 | github-token: ${{ secrets.GITHUB_TOKEN }} 61 | script: | 62 | await github.rest.repos.createCommitStatus({ 63 | owner: context.repo.owner, 64 | repo: context.repo.repo, 65 | sha: context.sha, 66 | state: 'success' 67 | }) 68 | -------------------------------------------------------------------------------- /.github/workflows/EVENT_pull_request.yml: -------------------------------------------------------------------------------- 1 | name: pull_request 2 | run-name: Pull Request 3 | 4 | on: 5 | pull_request: 6 | types: [opened, synchronize, reopened] 7 | 8 | permissions: 9 | contents: read 10 | 11 | # Prevent running concurrently 12 | concurrency: 13 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} 14 | cancel-in-progress: true 15 | 16 | jobs: 17 | get_changed_files: 18 | name: Get changed files 19 | uses: ./.github/workflows/JOB_get_changed_files.yml 20 | permissions: 21 | contents: read 22 | 23 | python_checks: 24 | name: Python Checks 25 | needs: get_changed_files 26 | if: needs.get_changed_files.outputs.python_changed_files != '' 27 | 28 | uses: ./.github/workflows/JOB_python_checks.yml 29 | 30 | with: 31 | files: ${{ needs.get_changed_files.outputs.python_changed_files }} 32 | permissions: 33 | contents: read 34 | 35 | -------------------------------------------------------------------------------- 
/.github/workflows/EVENT_update-linear-labels.yml: -------------------------------------------------------------------------------- 1 | name: "Update Linear Labels" 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | pull_request: 8 | branches: [master] 9 | workflow_dispatch: 10 | 11 | jobs: 12 | update-linear: 13 | if: github.ref_name != 'master' && !github.event.pull_request.head.repo.fork 14 | name: Update Linear 15 | runs-on: ubuntu-latest 16 | permissions: 17 | contents: write 18 | steps: 19 | - name: Harden Runner 20 | uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 21 | with: 22 | egress-policy: audit 23 | 24 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 25 | 26 | - uses: v7labs/update-linear-labels-action@0d527ddba6f2f0e0c28c3ec5c8a26767caf13ee3 # v1 27 | with: 28 | label_name: "DarwinPy" 29 | branch_name: "${{ github.head_ref }}" 30 | linear_token: "${{ secrets.LINEAR_TOKEN }}" 31 | error_exit_code: 0 32 | -------------------------------------------------------------------------------- /.github/workflows/JOB_format.yml: -------------------------------------------------------------------------------- 1 | name: format 2 | run-name: Format 3 | 4 | on: 5 | workflow_call: 6 | inputs: 7 | files: 8 | description: "Files to check formatting of" 9 | required: true 10 | type: string 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | format: 17 | if: ${{ inputs.files != '' }} 18 | runs-on: ubuntu-latest 19 | steps: 20 | - name: Harden Runner 21 | uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 22 | with: 23 | egress-policy: audit 24 | 25 | - name: Check out source repository 26 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 27 | 28 | - name: Set up Python environment 29 | uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 30 | with: 31 | python-version: "3.12" 32 | 33 | - name: Show filenames 
34 | run: echo ${{ inputs.files }} 35 | 36 | - name: Black check 37 | shell: bash 38 | run: bash ${{ github.workspace }}/deploy/format_lint.sh format ${{ inputs.files }} 39 | 40 | -------------------------------------------------------------------------------- /.github/workflows/JOB_lint.yml: -------------------------------------------------------------------------------- 1 | name: lint 2 | run-name: Check linting 3 | 4 | on: 5 | workflow_call: 6 | inputs: 7 | files: 8 | type: string 9 | description: "Files to lint" 10 | required: true 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | lint: 17 | if: ${{ inputs.files != '' }} 18 | runs-on: ubuntu-latest 19 | permissions: 20 | contents: read 21 | steps: 22 | - name: Harden Runner 23 | uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 24 | with: 25 | egress-policy: audit 26 | 27 | - name: Check out source repository 28 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 29 | 30 | - name: Set up Python environment 31 | uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 32 | with: 33 | python-version: "3.12" 34 | 35 | - name: Show filenames 36 | run: echo ${{ inputs.files }} 37 | 38 | - name: Ruff Lint 39 | shell: bash 40 | run: bash ${{ github.workspace }}/deploy/format_lint.sh lint ${{ inputs.files }} 41 | -------------------------------------------------------------------------------- /.github/workflows/JOB_python_checks.yml: -------------------------------------------------------------------------------- 1 | name: Python Checks 2 | on: 3 | workflow_call: 4 | inputs: 5 | files: 6 | required: true 7 | type: string 8 | 9 | jobs: 10 | format: 11 | name: Check format 12 | uses: ./.github/workflows/JOB_format.yml 13 | with: 14 | files: ${{ inputs.files }} 15 | 16 | lint: 17 | name: Lint 18 | uses: ./.github/workflows/JOB_lint.yml 19 | with: 20 | files: ${{ inputs.files }} 21 | 22 | run_tests: 23 | name: Run tests 24 | uses: 
./.github/workflows/JOB_tests.yml 25 | -------------------------------------------------------------------------------- /.github/workflows/JOB_typecheck.yml: -------------------------------------------------------------------------------- 1 | name: typecheck 2 | run-name: Static analysis and typecheck 3 | 4 | on: 5 | workflow_call: 6 | inputs: 7 | files: 8 | type: string 9 | description: "Files to lint" 10 | required: true 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | typecheck: 17 | if: ${{ inputs.files != '' }} 18 | runs-on: ubuntu-latest 19 | steps: 20 | - name: Harden Runner 21 | uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 22 | with: 23 | egress-policy: audit 24 | 25 | - name: Check out source repository 26 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 27 | 28 | - name: Set up Python environment 29 | uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 30 | with: 31 | python-version: "3.12" 32 | 33 | - name: Show filenames 34 | run: echo ${{ inputs.files }} 35 | 36 | - name: MyPy typecheck 37 | shell: bash 38 | run: | 39 | pip install pydantic 40 | bash ${{ github.workspace }}/deploy/format_lint.sh typecheck ${{ inputs.files }} 41 | -------------------------------------------------------------------------------- /.github/workflows/dependency-review.yml: -------------------------------------------------------------------------------- 1 | # Dependency Review Action 2 | # 3 | # This Action will scan dependency manifest files that change as part of a Pull Request, 4 | # surfacing known-vulnerable versions of the packages declared or updated in the PR. 5 | # Once installed, if the workflow run is marked as required, 6 | # PRs introducing known-vulnerable packages will be blocked from merging. 
7 | # 8 | # Source repository: https://github.com/actions/dependency-review-action 9 | name: 'Dependency Review' 10 | on: [pull_request] 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | dependency-review: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - name: Harden Runner 20 | uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 21 | with: 22 | egress-policy: audit 23 | 24 | - name: 'Checkout Repository' 25 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 26 | - name: 'Dependency Review' 27 | uses: actions/dependency-review-action@3b139cfc5fae8b618d3eae3675e383bb1769c019 # v4.5.0 28 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "editor.formatOnSave": true, 3 | "[python]": { 4 | "editor.formatOnSave": true, 5 | "editor.tabSize": 4, 6 | "editor.codeActionsOnSave": { 7 | "source.organizeImports": "explicit" 8 | }, 9 | "editor.defaultFormatter": "ms-python.black-formatter" 10 | }, 11 | "[javascript]": { 12 | "editor.formatOnSave": true, 13 | "editor.insertSpaces": true, 14 | "editor.tabSize": 2 15 | }, 16 | "isort.args": [ 17 | "--profile", 18 | "black" 19 | ], 20 | "python.analysis.autoImportCompletions": true, 21 | "python.testing.pytestEnabled": true, 22 | "python.analysis.typeCheckingMode": "basic", 23 | } -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Use an official Python runtime as a parent image 2 | FROM python:3.13-slim@sha256:21e39cf1815802d4c6f89a0d3a166cc67ce58f95b6d1639e68a394c99310d2e5 3 | 4 | # Environment variables 5 | ARG YOUR_ENV 6 | 7 | ENV YOUR_ENV=${YOUR_ENV:-development} \ 8 | PYTHONFAULTHANDLER=1 \ 9 | PYTHONUNBUFFERED=1 \ 10 | PYTHONHASHSEED=random \ 11 | PIP_NO_CACHE_DIR=off \ 12 | 
PIP_DISABLE_PIP_VERSION_CHECK=on \ 13 | PIP_DEFAULT_TIMEOUT=100 \ 14 | POETRY_VIRTUALENVS_CREATE=false \ 15 | POETRY_NO_INTERACTION=1 16 | 17 | # Install necessary build tools and dependencies 18 | RUN apt-get update && apt-get install -y --no-install-recommends \ 19 | gcc \ 20 | make \ 21 | build-essential \ 22 | libffi-dev \ 23 | libssl-dev \ 24 | python3-dev \ 25 | curl \ 26 | ffmpeg \ 27 | && rm -rf /var/lib/apt/lists/* 28 | 29 | # Install Poetry in a known location and add to PATH 30 | RUN curl -sSL https://install.python-poetry.org | python3 - \ 31 | && ln -s /root/.local/bin/poetry /usr/local/bin/poetry 32 | 33 | # Set the working directory in the container 34 | WORKDIR /app 35 | 36 | # Copy only pyproject.toml and poetry.lock to cache them in the Docker layer 37 | COPY pyproject.toml poetry.lock /app/ 38 | 39 | # Install the dependencies from pyproject.toml using Poetry 40 | RUN poetry install $(test "$YOUR_ENV" = production && echo "--only=main") --no-interaction --no-ansi 41 | 42 | # Install the darwin-py package and CLI executable using pip 43 | RUN pip install darwin-py 44 | 45 | # The following steps are commented out to allow users to customize the Dockerfile: 46 | 47 | # Copy the rest of the application code (uncomment and modify as needed) 48 | # COPY . /app 49 | 50 | # Expose any necessary ports (uncomment and modify as needed) 51 | # EXPOSE 80 52 | 53 | # Set an entry point or command (uncomment and modify as needed) 54 | # CMD ["python", "/app/your_main_script.py"] 55 | 56 | # End of Dockerfile 57 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 V7 Ltd. 
4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /darwin/__init__.py: -------------------------------------------------------------------------------- 1 | import darwin.dataset # noqa 2 | import darwin.exceptions # noqa 3 | 4 | from .client import Client # noqa 5 | from .datatypes import Team # noqa 6 | from .version import __version__ 7 | -------------------------------------------------------------------------------- /darwin/dataset/__init__.py: -------------------------------------------------------------------------------- 1 | from darwin.dataset.local_dataset import LocalDataset # noqa 2 | from darwin.dataset.remote_dataset import RemoteDataset # noqa 3 | -------------------------------------------------------------------------------- /darwin/doc_enum.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class DocEnum(Enum): 5 | """ 6 | Documenting Enums in Python is not supported by many tools. Therefore this class was created to 7 | support just that. It is basically a hack to allow Enum documentation. 
class ExporterNotFoundError(ModuleNotFoundError):
    """Raised when no exporter module exists for the requested format."""

    pass


def get_exporter(format: str) -> ExportParser:
    """
    Return the ``export`` function for the given export format.

    Parameters
    ----------
    format : str
        Name of the export format (e.g. ``"coco"``). Dots are translated to
        underscores so dotted format names map onto module filenames.

    Returns
    -------
    ExportParser
        The ``export`` callable defined by the matching
        ``darwin.exporter.formats`` module.

    Raises
    ------
    ExporterNotFoundError
        If no module exists for the requested format. Subclasses
        ``ModuleNotFoundError``, so existing callers catching either still work.
    """
    module_name = format.replace(".", "_")
    try:
        module = import_module(f"darwin.exporter.formats.{module_name}")
    except ModuleNotFoundError as e:
        # Name the offending format in the exception instead of printing it to
        # stdout, and chain the original error for debuggability.
        raise ExporterNotFoundError(
            f"Exporter not found for format: {format}"
        ) from e
    return getattr(module, "export")
def build_class_index(
    annotation_files: Iterable[AnnotationFile],
    include_types: List[str] = ["bounding_box", "polygon"],
) -> ClassIndex:
    """
    Map every class name used by the given files to a zero-based index.

    Only annotations whose type appears in ``include_types`` contribute;
    class names are sorted alphabetically before being numbered, so the
    resulting index is stable across runs for the same input.
    """
    # Collect the distinct class names of the annotation types we care about.
    names = {
        annotation.annotation_class.name
        for annotation_file in annotation_files
        for annotation in annotation_file.annotations
        if annotation.annotation_class.annotation_type in include_types
    }
    return {name: index for index, name in enumerate(sorted(names))}
class NumpyEncoder(json.JSONEncoder):
    """
    JSON encoder that understands numpy scalar and array types.

    Numpy integers/floats become Python ``int``/``float`` and ``ndarray``s
    become (nested) lists, so they can be serialised by the ``json`` module.
    """

    def default(self, obj: Any) -> Any:
        """
        Convert the given numpy object into a JSON-friendly primitive.

        Parameters
        ----------
        obj : Any
            The object to convert.

        Returns
        -------
        Any
            The converted object.
        """
        # The three numpy cases are disjoint, so the branch order is free.
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        if isinstance(obj, np.integer):
            return int(obj)
        if isinstance(obj, np.floating):
            return float(obj)
        # Defer to the base class, which raises TypeError for unknown types.
        return super().default(obj)
18 | """ 19 | return export_mask( 20 | annotation_files=annotation_files, output_dir=output_dir, mode="rgb" 21 | ) 22 | -------------------------------------------------------------------------------- /darwin/exporter/formats/semantic_mask_grey.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import Iterable 3 | 4 | import darwin.datatypes as dt 5 | from darwin.exporter.formats.mask import export as export_mask 6 | 7 | 8 | def export(annotation_files: Iterable[dt.AnnotationFile], output_dir: Path) -> None: 9 | return export_mask( 10 | annotation_files=annotation_files, output_dir=output_dir, mode="grey" 11 | ) 12 | -------------------------------------------------------------------------------- /darwin/exporter/formats/semantic_mask_index.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import Iterable 3 | 4 | import darwin.datatypes as dt 5 | from darwin.exporter.formats.mask import export as export_mask 6 | 7 | 8 | def export(annotation_files: Iterable[dt.AnnotationFile], output_dir: Path) -> None: 9 | return export_mask( 10 | annotation_files=annotation_files, output_dir=output_dir, mode="index" 11 | ) 12 | -------------------------------------------------------------------------------- /darwin/extractor/__init__.py: -------------------------------------------------------------------------------- 1 | """Video extraction functionality for Darwin.""" 2 | -------------------------------------------------------------------------------- /darwin/future/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/darwin/future/__init__.py -------------------------------------------------------------------------------- /darwin/future/core/__init__.py: 
def create_dataset(api_client: ClientCore, name: str) -> DatasetCore:
    """
    Create a dataset with the given name for the client's team.

    Parameters
    ----------
    api_client: Client
        The client to use to make the request
    name: str
        The name of the dataset to create

    Returns
    -------
    Dataset
        The created dataset

    Raises
    ------
    HTTPError
        Any HTTP errors returned by the API
    """
    payload = {"name": name}
    response = api_client.post("/datasets", payload)
    # The API is expected to return the created dataset as a JSON object.
    assert isinstance(response, dict)
    return DatasetCore.model_validate(response)
def list_datasets(
    api_client: ClientCore,
) -> Tuple[List[DatasetCore], List[ValidationError]]:
    """
    Returns a list of datasets for the client's team.

    Parameters
    ----------
    api_client : Client
        The client to use to make the request

    Returns
    -------
    List[DatasetCore]:
        The datasets that parsed successfully
    List[ValidationError]
        A list of Validation errors on failed objects

    Raises
    ------
    HTTPError
        Any errors that occurred while making the request
    """
    datasets: List[DatasetCore] = []
    errors: List[ValidationError] = []

    response = api_client.get("/datasets")
    for item in response:
        assert isinstance(item, dict)
        # Validate per item: previously a single try wrapped the whole loop,
        # so the first malformed entry silently discarded every remaining
        # dataset and the errors list could never hold more than one error.
        try:
            datasets.append(DatasetCore.model_validate(item))
        except ValidationError as e:
            errors.append(e)

    return datasets, errors
def archive_list_of_items(
    client: ClientCore,
    team_slug: str,
    dataset_ids: int | List[int],
    filters: Dict[str, UnknownType] = {},
) -> JSONType:
    """
    Archive every item matched by the given filters.

    Parameters
    ----------
    client: Client
        The client to use for the request.
    team_slug: str
        The slug of the team containing the items.
    dataset_ids: int | List[int]
        The ID(s) of the dataset(s) containing the items.
    filters: Dict[str, UnknownType]
        Filter parameters.

    Returns
    -------
    JSONType
        The response data.
    """
    assert (
        filters
    ), "No parameters provided, please provide at least one non-dataset id filter"
    # Normalise a single id into a list so the payload shape is uniform.
    id_list = dataset_ids if isinstance(dataset_ids, list) else [dataset_ids]
    payload = {"filters": {"dataset_ids": id_list, **filters}}
    return client.post(f"/v2/teams/{team_slug}/items/archive", data=payload)
def delete_list_of_items(
    client: ClientCore,
    team_slug: str,
    dataset_ids: int | List[int],
    filters: Dict[str, UnknownType] = {},
) -> JSONType:
    """
    Delete every item matched by the given filters.

    Parameters
    ----------
    client: Client
        The client to use for the request.
    team_slug: str
        The slug of the team containing the items.
    dataset_ids: int | List[int]
        The ID(s) of the dataset(s) containing the items.
    filters: Dict[str, UnknownType]
        Filter parameters

    Returns
    -------
    JSONType
        The response data.
    """
    assert (
        filters
    ), "No parameters provided, please provide at least one non-dataset id filter"
    # Normalise a single id into a list so the payload shape is uniform.
    id_list = dataset_ids if isinstance(dataset_ids, list) else [dataset_ids]
    payload = {"filters": {"dataset_ids": id_list, **filters}}
    return client.delete(f"/v2/teams/{team_slug}/items", data=payload)
41 | """ 42 | assert ( 43 | filters 44 | ), "No parameters provided, please provide at least one non-dataset id filter" 45 | payload = { 46 | "filters": { 47 | "dataset_ids": ( 48 | dataset_ids if isinstance(dataset_ids, list) else [dataset_ids] 49 | ), 50 | **filters, 51 | }, 52 | "stage_id": str(stage_id), 53 | "workflow_id": str(workflow_id), 54 | } 55 | 56 | return client.post(f"/v2/teams/{team_slug}/items/stage", data=payload) 57 | -------------------------------------------------------------------------------- /darwin/future/core/items/move_items_to_folder.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Dict, List 4 | 5 | from darwin.future.core.client import ClientCore 6 | from darwin.future.core.types.common import JSONType 7 | from darwin.future.data_objects.typing import UnknownType 8 | 9 | 10 | def move_list_of_items_to_folder( 11 | client: ClientCore, 12 | team_slug: str, 13 | dataset_ids: int | List[int], 14 | path: str, 15 | filters: Dict[str, UnknownType] = {}, 16 | ) -> JSONType: 17 | """ 18 | Move specified items to a folder 19 | 20 | Parameters 21 | ---------- 22 | client: Client 23 | The client to use for the request. 24 | team_slug: str 25 | The slug of the team containing the items. 26 | dataset_ids: int | List[int] 27 | The ID(s) of the dataset(s) containing the items. 28 | path: str 29 | The path to the folder to move the items to. 30 | filters: Dict[str, UnknownType] 31 | Filter parameters. 32 | 33 | Returns 34 | ------- 35 | JSONType 36 | The response data. 
37 | """ 38 | assert ( 39 | filters 40 | ), "No parameters provided, please provide at least one non-dataset id filter" 41 | payload = { 42 | "filters": { 43 | "dataset_ids": ( 44 | dataset_ids if isinstance(dataset_ids, list) else [dataset_ids] 45 | ), 46 | **filters, 47 | }, 48 | "path": path, 49 | } 50 | 51 | return client.post(f"/v2/teams/{team_slug}/items/path", data=payload) 52 | -------------------------------------------------------------------------------- /darwin/future/core/items/restore_items.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Dict, List 4 | 5 | from darwin.future.core.client import ClientCore 6 | from darwin.future.core.types.common import JSONType 7 | from darwin.future.data_objects.typing import UnknownType 8 | 9 | 10 | def restore_list_of_items( 11 | client: ClientCore, 12 | team_slug: str, 13 | dataset_ids: int | List[int], 14 | filters: Dict[str, UnknownType] = {}, 15 | ) -> JSONType: 16 | """ 17 | Restore specified items 18 | 19 | Parameters 20 | ---------- 21 | client: Client 22 | The client to use for the request. 23 | team_slug: str 24 | The slug of the team containing the items. 25 | dataset_ids: int | List[int] 26 | The ID(s) of the dataset(s) containing the items. 27 | filters: Dict[str, UnknownType] 28 | Filter parameters. 29 | 30 | Returns 31 | ------- 32 | JSONType 33 | The response data. 
34 | """ 35 | assert ( 36 | filters 37 | ), "No parameters provided, please provide at least one non-dataset id filter" 38 | payload = { 39 | "filters": { 40 | "dataset_ids": ( 41 | dataset_ids if isinstance(dataset_ids, list) else [dataset_ids] 42 | ), 43 | **filters, 44 | } 45 | } 46 | 47 | return client.post(f"/v2/teams/{team_slug}/items/restore", data=payload) 48 | -------------------------------------------------------------------------------- /darwin/future/core/items/set_item_layout.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Dict 4 | 5 | from pydantic import ValidationError 6 | 7 | from darwin.future.core.client import ClientCore 8 | from darwin.future.core.types.common import JSONType 9 | from darwin.future.data_objects.item import ItemLayout 10 | from darwin.future.data_objects.typing import UnknownType 11 | from darwin.future.exceptions import BadRequest 12 | 13 | 14 | def set_item_layout( 15 | client: ClientCore, 16 | team_slug: str, 17 | dataset_ids: int | list[int], 18 | layout: ItemLayout, 19 | filters: Dict[str, UnknownType], 20 | ) -> JSONType: 21 | """ 22 | Set the layout of a dataset and filtered items via filters. 23 | 24 | Args: 25 | client (ClientCore): The Darwin Core client. 26 | team_slug (str): The team slug. 27 | dataset_ids (int | list[int]): The dataset ids. 28 | layout (ItemLayout): The layout. 29 | filters Dict[str, UnknownType]: The parameters of the filter. 30 | 31 | Returns: 32 | JSONType: The response data. 
33 | """ 34 | if not isinstance(layout, ItemLayout): 35 | try: 36 | layout = ItemLayout.model_validate(layout) 37 | except (ValueError, ValidationError): 38 | raise BadRequest("Invalid layout provided") 39 | 40 | assert ( 41 | filters 42 | ), "No parameters provided, please provide at least one non-dataset id filter" 43 | payload = { 44 | "filters": { 45 | "dataset_ids": ( 46 | dataset_ids if isinstance(dataset_ids, list) else [dataset_ids] 47 | ), 48 | **filters, 49 | }, 50 | "layout": dict(layout), 51 | } 52 | 53 | return client.post(f"/v2/teams/{team_slug}/items/layout", data=payload) 54 | -------------------------------------------------------------------------------- /darwin/future/core/items/set_item_priority.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Dict, List 4 | 5 | from darwin.future.core.client import ClientCore 6 | from darwin.future.core.types.common import JSONType 7 | from darwin.future.data_objects.typing import UnknownType 8 | 9 | 10 | def set_item_priority( 11 | client: ClientCore, 12 | team_slug: str, 13 | dataset_ids: int | List[int], 14 | priority: int, 15 | filters: Dict[str, UnknownType] = {}, 16 | ) -> JSONType: 17 | """ 18 | Sets the priority of a list of items 19 | 20 | Parameters 21 | ---------- 22 | client: Client 23 | The client to use for the request. 24 | team_slug: str 25 | The slug of the team containing the items. 26 | dataset_id: int | List[int] 27 | The ID(s) of the dataset(s) containing the items. 28 | priority: int 29 | The priority to set. 30 | 31 | Returns 32 | ------- 33 | JSONType 34 | The response data. 
35 | """ 36 | assert ( 37 | filters 38 | ), "No parameters provided, please provide at least one non-dataset id filter" 39 | payload = { 40 | "filters": { 41 | "dataset_ids": ( 42 | dataset_ids if isinstance(dataset_ids, list) else [dataset_ids] 43 | ), 44 | **filters, 45 | }, 46 | "priority": priority, 47 | } 48 | 49 | return client.post( 50 | f"/v2/teams/{team_slug}/items/priority", 51 | data=payload, 52 | ) 53 | -------------------------------------------------------------------------------- /darwin/future/core/items/set_stage_to_items.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from darwin.future.core.client import ClientCore 4 | from darwin.future.core.types.common import JSONType 5 | from darwin.future.data_objects.typing import UnknownType 6 | 7 | 8 | def set_stage_to_items( 9 | client: ClientCore, 10 | team_slug: str, 11 | dataset_ids: int | list[int], 12 | stage_id: str, 13 | workflow_id: str, 14 | filters: dict[str, UnknownType], 15 | ) -> JSONType: 16 | """ 17 | Sets stage to multiple items matched by filters. 18 | 19 | Args: 20 | client (ClientCore): The Darwin Core client. 21 | team_slug (str): The team slug. 22 | stage_id (str): The stage id. 23 | workflow_id (str): The workflow id. 24 | filters Dict[str, UnknownType]: The parameters of the filter. 25 | 26 | Returns: 27 | JSONType: The response data. 
28 | """ 29 | assert ( 30 | filters 31 | ), "No parameters provided, please provide at least one non-dataset id filter" 32 | payload = { 33 | "filters": { 34 | "dataset_ids": ( 35 | dataset_ids if isinstance(dataset_ids, list) else [dataset_ids] 36 | ), 37 | **filters, 38 | }, 39 | "stage_id": stage_id, 40 | "workflow_id": workflow_id, 41 | } 42 | 43 | return client.post(f"/v2/teams/{team_slug}/items/stage", data=payload) 44 | -------------------------------------------------------------------------------- /darwin/future/core/items/tag_items.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from darwin.future.core.client import ClientCore 4 | from darwin.future.core.types.common import JSONType 5 | from darwin.future.data_objects.typing import UnknownType 6 | 7 | 8 | def tag_items( 9 | client: ClientCore, 10 | team_slug: str, 11 | dataset_ids: int | list[int], 12 | tag_id: int, 13 | filters: dict[str, UnknownType], 14 | ) -> JSONType: 15 | """ 16 | Adds tag annotation to all items slots matched by filters. 17 | 18 | Args: 19 | client (ClientCore): The Darwin Core client. 20 | team_slug (str): The team slug. 21 | dataset_ids (int | list[int]): The dataset ids. 22 | tag_id (int): The tag id. 23 | filters Dict[str, UnknownType]: The parameters of the filter. 24 | 25 | Returns: 26 | JSONType: The response data. 
27 | """ 28 | assert ( 29 | filters 30 | ), "No parameters provided, please provide at least one non-dataset id filter" 31 | payload = { 32 | "filters": { 33 | "dataset_ids": ( 34 | dataset_ids if isinstance(dataset_ids, list) else [dataset_ids] 35 | ), 36 | **filters, 37 | }, 38 | "annotation_class_id": tag_id, 39 | } 40 | 41 | return client.post(f"/v2/teams/{team_slug}/items/slots/tags", data=payload) 42 | -------------------------------------------------------------------------------- /darwin/future/core/items/untag_items.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from darwin.future.core.client import ClientCore 4 | from darwin.future.core.types.common import JSONType 5 | from darwin.future.data_objects.typing import UnknownType 6 | 7 | 8 | def untag_items( 9 | client: ClientCore, 10 | team_slug: str, 11 | dataset_ids: int | list[int], 12 | tag_id: int, 13 | filters: dict[str, UnknownType], 14 | ) -> JSONType: 15 | """ 16 | Untags items slots matched by filters. 17 | 18 | Args: 19 | client (ClientCore): The Darwin Core client. 20 | team_slug (str): The team slug. 21 | dataset_ids (int | list[int]): The dataset ids. 22 | tag_id (int): The tag id. 23 | 24 | filters Dict[str, UnknownType]: The parameters of the filter. 25 | 26 | Returns: 27 | JSONType: The response data. 
28 | """ 29 | assert ( 30 | filters 31 | ), "No parameters provided, please provide at least one non-dataset id filter" 32 | payload = { 33 | "filters": { 34 | "dataset_ids": ( 35 | dataset_ids if isinstance(dataset_ids, list) else [dataset_ids] 36 | ), 37 | **filters, 38 | }, 39 | "annotation_class_id": tag_id, 40 | } 41 | 42 | return client.delete(f"/v2/teams/{team_slug}/items/slots/tags", data=payload) 43 | -------------------------------------------------------------------------------- /darwin/future/core/properties/__init__.py: -------------------------------------------------------------------------------- 1 | from darwin.future.core.properties.create import create_property 2 | from darwin.future.core.properties.get import ( 3 | get_property_by_id, 4 | get_team_full_properties, 5 | get_team_properties, 6 | ) 7 | from darwin.future.core.properties.update import update_property, update_property_value 8 | -------------------------------------------------------------------------------- /darwin/future/core/properties/create.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Union 2 | 3 | from darwin.future.core.client import ClientCore 4 | from darwin.future.core.types.common import JSONDict 5 | from darwin.future.data_objects.properties import FullProperty 6 | 7 | 8 | def create_property( 9 | client: ClientCore, 10 | params: Union[FullProperty, JSONDict], 11 | team_slug: Optional[str] = None, 12 | ) -> FullProperty: 13 | """ 14 | Creates a property for the specified team slug. 15 | 16 | Parameters: 17 | client (ClientCore): The client to use for the request. 18 | team_slug (Optional[str]): The slug of the team to get. If not specified, the 19 | default team from the client's config will be used. 20 | params (Optional[JSONType]): The JSON data to use for the request. 21 | 22 | Returns: 23 | FullProperty: FullProperty object for the created property. 
24 | 25 | Raises: 26 | HTTPError: If the response status code is not in the 200-299 range. 27 | """ 28 | if not team_slug: 29 | team_slug = client.config.default_team 30 | if isinstance(params, FullProperty): 31 | params = params.to_create_endpoint() 32 | response = client.post(f"/v2/teams/{team_slug}/properties", data=params) 33 | assert isinstance(response, dict) 34 | return FullProperty.model_validate(response) 35 | -------------------------------------------------------------------------------- /darwin/future/core/team/__init__.py: -------------------------------------------------------------------------------- 1 | # Can't import * in this module because of a circular import problem specific to teams 2 | # The TeamCore module can instantiate from a client, but the client needs to use the 3 | # team backend module to request the object for team. To circumvent this there's a 4 | # get_raw method in this module that returns the raw team object, which is then passed 5 | # to the TeamCore module, but if we import * here it introduces the 6 | # circular import problem. 
def get_team_raw(session: Session, url: str) -> JSONType:
    """Fetch *url* with *session* and return the decoded JSON body.

    Parameters:
        session (Session): Requests session to use.
        url (str): URL to get.

    Returns:
        JSONType: JSON response from the endpoint.

    Raises:
        HTTPError: If the response status code indicates failure
            (via raise_for_status).
    """
    reply = session.get(url)
    reply.raise_for_status()
    return reply.json()
23 | """ 24 | if not team_slug: 25 | team_slug = client.config.default_team 26 | response = client.get(f"/teams/{team_slug}/") 27 | return TeamCore.model_validate(response) 28 | 29 | 30 | def get_team_members( 31 | client: ClientCore, 32 | ) -> Tuple[List[TeamMemberCore], List[ValidationError]]: 33 | """ 34 | Returns a tuple containing a list of TeamMemberCore objects and a list of exceptions 35 | that occurred while parsing the response. 36 | 37 | Parameters: 38 | client (ClientCore): The client to use for the request. 39 | 40 | Returns: 41 | List[TeamMemberCore]: 42 | List of TeamMembers 43 | List[ValidationError]: 44 | List of ValidationError on failed objects 45 | 46 | Raises: 47 | HTTPError: If the response status code is not in the 200-299 range. 48 | """ 49 | response = client.get("/memberships") 50 | members = [] 51 | errors = [] 52 | for item in response: 53 | try: 54 | members.append(TeamMemberCore.model_validate(item)) 55 | except ValidationError as e: 56 | errors.append(e) 57 | return members, errors 58 | -------------------------------------------------------------------------------- /darwin/future/core/types/__init__.py: -------------------------------------------------------------------------------- 1 | from .common import JSONType, QueryString, TeamSlug 2 | -------------------------------------------------------------------------------- /darwin/future/core/types/item.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/darwin/future/core/types/item.py -------------------------------------------------------------------------------- /darwin/future/core/utils/pathutils.py: -------------------------------------------------------------------------------- 1 | import json 2 | from pathlib import Path 3 | from typing import Optional 4 | 5 | import yaml 6 | 7 | from darwin.future.exceptions import UnrecognizableFileEncoding 8 | 9 | 
def open_json(path: Path, encoding: Optional[str] = None) -> dict:
    """Load and return the JSON document stored at *path*.

    Args:
        path: Location of the JSON file.
        encoding: Text encoding to open the file with; when falsy, the
            platform default encoding is used (matching ``Path.open()``).

    Returns:
        dict: The parsed JSON content.
    """
    open_kwargs = {"encoding": encoding} if encoding else {}
    with path.open(**open_kwargs) as handle:
        return json.load(handle)
def get_workflows(
    client: ClientCore, team_slug: Optional[str] = None
) -> List[WorkflowCore]:
    """Return every non-worker workflow for the given (or default) team.

    Parameters:
        client (ClientCore): The Darwin API client to use for the request.
        team_slug (Optional[str]): Slug of the owning team; falls back to the
            default team in the client's config.

    Returns:
        List[WorkflowCore]: The team's workflows.

    Raises:
        AssertionError: If the response is not a non-empty list of dicts.
    """
    slug = team_slug or client.config.default_team
    response = client.get(f"/v2/teams/{slug}/workflows?worker=false")
    assert isinstance(response, list)
    assert all(isinstance(entry, dict) for entry in response)
    assert len(response) > 0, "No workflows found"
    return [WorkflowCore.model_validate(entry) for entry in response]
def list_workflows(
    client: ClientCore, team_slug: Optional[str] = None
) -> Tuple[List[WorkflowCore], List[Exception]]:
    """
    Returns a list of workflows for the given team.

    Parameters
    ----------
    client : Client
        The client to use to make the request
    team_slug : Optional[str]
        The slug of the team to retrieve workflows for

    Returns
    -------
    Tuple[List[Workflow], List[Exception]]
        The parsed workflows, plus any exception raised while fetching or
        validating (errors are collected rather than raised).
    """
    failures: List[Exception] = []
    collected: List[WorkflowCore] = []

    try:
        slug = team_slug or client.config.default_team
        response = client.get(f"/v2/teams/{slug}/workflows?worker=false")
        validated = WorkflowListValidator(list=response)  # type: ignore
        collected = [WorkflowCore.model_validate(w) for w in validated.list]
    except Exception as err:
        failures.append(err)

    return collected, failures
class DatasetCore(DefaultDarwin):
    """
    A class to manage all the information around a dataset on the darwin platform,
    including validation.

    Attributes
    ----------
    name : str
        Dataset name; normalised (lower-cased, stripped) by ``parse_name``.
    slug : str
        Identifier of the dataset used in URLs.
    id : Optional[PositiveInt]
        Server-side numeric id; ``None`` when not assigned.
    releases : Optional[ReleaseList]
        Export releases linked to the dataset, when present.

    Methods
    ----------
    _name_validator: validates and auto formats the name variable
    """

    name: str
    slug: str
    id: Optional[PositiveInt] = None
    releases: Optional[ReleaseList] = None

    # Data Validation: parse_name lower-cases and strips the name; it re-runs
    # on assignment because DefaultDarwin enables validate_assignment.
    _name_validator = field_validator("name")(parse_name)


# Convenience alias for collections of datasets returned by list endpoints.
DatasetList = List[DatasetCore]
if they are None, 38 | and certain fields are renamed. Outgoing and incoming query strings are different and require 39 | dropping certain fields 40 | 41 | Returns: 42 | QueryString: Outgoing query string 43 | """ 44 | qs_dict = {"page[offset]": str(self.offset), "page[size]": str(self.size)} 45 | return QueryString(qs_dict) 46 | 47 | def increment(self) -> None: 48 | """ 49 | Increment the page offset by the page size 50 | """ 51 | self.offset += self.size 52 | -------------------------------------------------------------------------------- /darwin/future/data_objects/pydantic_base.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel, ConfigDict 2 | 3 | 4 | class DefaultDarwin(BaseModel): 5 | """ 6 | Default Darwin-Py pydantic settings for meta information. 7 | Default settings include: 8 | - auto validating variables on setting/assignment 9 | - underscore attributes are private 10 | - objects are passed by reference to prevent unnecesary data copying 11 | """ 12 | 13 | # TODO[pydantic]: The following keys were removed: `underscore_attrs_are_private`, `copy_on_model_validation`. 14 | # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information. 
class ReleaseCore(DefaultDarwin):
    """
    A class to manage all the information around a release on the darwin platform,
    including validation.

    Attributes
    ----------
    name : str
        Release name; normalised (lower-cased, stripped) by
        ``darwin_validators.parse_name``.

    Methods
    ----------
    _name_validator: validates and auto formats the name variable
    """

    name: str

    def __str__(self) -> str:
        """Return the release name."""
        return self.name

    # Data Validation: re-runs on assignment because DefaultDarwin enables
    # validate_assignment.
    _name_validator = field_validator("name")(darwin_validators.parse_name)


# Convenience alias for collections of releases attached to a dataset.
ReleaseList = List[ReleaseCore]
def parse_name(name: str) -> str:
    """
    Validate that *name* is a string and normalise it.

    Parameters
    ----------
    name : str
        The name to be parsed and validated

    Returns
    -------
    str
        The name, lower-cased and stripped of surrounding whitespace
    """
    assert isinstance(name, str)
    normalised = name.strip().lower()
    return normalised
def handle_exception(exception: Optional[Union[Exception, List[Exception]]]) -> None:
    """
    Handles an exception or list of exceptions by printing them to the terminal
    (interactive sessions) or logging them as errors otherwise.

    Parameters
    ----------
    exception : Optional[Union[Exception, List[Exception]]]
        The exception(s) to handle. When falsy, the exception currently being
        handled (from ``sys.exc_info()``) is used instead.

    Raises
    ------
    ValueError
        If no exception is provided and none is currently being handled.
    """
    IS_INTERACTIVE_SESSION = sys.stdout and sys.stdout.isatty()

    if not exception:
        # BUG FIX: the previous code used getattr(exc_info, "[1]", None), which
        # always returns None because "[1]" is not a valid attribute name on a
        # tuple — so the active exception was never picked up and a spurious
        # ValueError was raised even inside an `except` block. sys.exc_info()
        # is the tuple (type, value, traceback); index it for the value.
        exception = sys.exc_info()[1]
        if exception is None:
            raise ValueError("No exception provided and no exception in sys.exc_info")

    if IS_INTERACTIVE_SESSION:
        console = Console()
        handler = console.print
    else:
        logger = logging.getLogger(__name__)
        handler = logger.error  # type: ignore

    if isinstance(exception, list):
        for e in exception:
            handler(e)
    else:
        handler(exception)
R = TypeVar("R")
Param = Dict[str, object]


class MetaBase(Generic[R]):
    """
    Base class for metadata objects; inherit from it only in meta objects.

    Stores the api-access metadata parameters that relate to a meta object but
    are potentially not required by the wrapped core object. For example, a
    dataset object needs the team slug to access the api, which gets passed
    down from the team object.

    Attributes:
        _element (R): The element R to which the object is related.
        client (ClientCore): The client used to execute the query.
        meta_params (Dict[str, object]): A dictionary of metadata parameters,
            used in conjunction with the Query object to execute related api
            calls.

    Examples:
        # Create a MetaBase type that manages a TeamCore object from the API
        class Team(MetaBase[TeamCore]):
            ...
    """

    _element: R
    client: ClientCore

    def __init__(
        self,
        element: R,
        client: ClientCore,
        meta_params: Optional[Param] = None,
    ) -> None:
        self.client = client
        self._element = element
        self.meta_params = meta_params or {}

    def __str__(self) -> str:
        return f"{self.__class__.__name__}({self._element})"

    def __repr__(self) -> str:
        return str(self)
class V7ID(MetaBase[UUID]):
    """Thin meta wrapper around a raw UUID returned by the API."""

    @property
    def id(self) -> UUID:
        """The wrapped UUID."""
        return self._element

    def __repr__(self) -> str:
        return str(self)

    def __str__(self) -> str:
        return str(self._element)
class DatasetQuery(Query[Dataset]):
    """
    DatasetQuery object with methods to manage filters, retrieve data, and
    execute filters.

    Methods
    -------

    collect: Executes the query and returns the filtered data
    """

    def _collect(self) -> Dict[int, Dataset]:
        datasets, exceptions = list_datasets(self.client)
        if exceptions:
            # TODO: print and or raise exceptions, tbd how we want to handle this
            pass
        results = [
            Dataset(client=self.client, element=core, meta_params=self.meta_params)
            for core in datasets
        ]
        if not self.filters:
            self.filters = []

        for query_filter in self.filters:
            results = self._execute_filters(results, query_filter)

        return dict(enumerate(results))

    def _execute_filters(
        self, datasets: List[Dataset], filter: QueryFilter
    ) -> List[Dataset]:
        """
        Executes filtering on the local list of datasets, applying special
        logic for release filtering, otherwise delegating to the parent's
        generic filter on the values of the datasets.

        Parameters
        ----------
        datasets : List[Dataset]
        filter : QueryFilter

        Returns
        -------
        List[Dataset]: Filtered subset of datasets
        """

        if filter.name == "releases":
            matching: List[Dataset] = []
            for candidate in datasets:
                core = candidate._element
                if core is None or not core.releases:
                    continue
                # Releases are matched by their string form (the release name).
                if filter.param in [str(r) for r in core.releases]:
                    matching.append(candidate)
            return matching

        return super()._generic_execute_filter(datasets, filter)
class StageQuery(Query[Stage]):
    """Query over the stages of one workflow; requires ``workflow_id`` in meta_params."""

    def _collect(self) -> Dict[int, Stage]:
        if "workflow_id" not in self.meta_params:
            raise ValueError("Must specify workflow_id to query stages")
        workflow_id: UUID = self.meta_params["workflow_id"]
        meta_params = self.meta_params
        workflow = get_workflow(self.client, str(workflow_id))
        assert workflow is not None
        stages = [
            Stage(client=self.client, element=stage, meta_params=meta_params)
            for stage in workflow.stages
        ]
        if not self.filters:
            self.filters = []
        for query_filter in self.filters:
            stages = self._execute_filter(stages, query_filter)
        return dict(enumerate(stages))

    def _execute_filter(self, stages: List[Stage], filter: QueryFilter) -> List[Stage]:
        """Executes filtering on the local list of stages.

        Role filters compare against the stage's type value; everything else
        falls through to the generic attribute filter on the parent class.

        Parameters
        ----------
        stages : List[Stage]
        filter : QueryFilter

        Returns
        -------
        List[Stage]: Filtered subset of stages
        """
        if filter.name != "role":
            return super()._generic_execute_filter(stages, filter)
        return [
            stage
            for stage in stages
            if stage._element is not None
            and filter.filter_attr(stage._element.type.value)
        ]
class TeamMemberQuery(Query[TeamMember]):
    """
    TeamMemberQuery object with methods to manage filters, retrieve data,
    and execute filters.
    Methods:
        collect: Executes the query and returns the filtered data
        _execute_filter: Executes a filter on a list of objects
    """

    def _collect(self) -> Dict[int, TeamMember]:
        raw_members, exceptions = get_team_members(self.client)
        if exceptions:
            # TODO: print and or raise exceptions, tbd how we want to handle this
            pass
        wrapped = [
            TeamMember(client=self.client, element=member) for member in raw_members
        ]
        if not self.filters:
            self.filters = []
        for query_filter in self.filters:
            wrapped = self._execute_filter(wrapped, query_filter)

        return dict(enumerate(wrapped))

    def _execute_filter(
        self, members: List[TeamMember], filter: QueryFilter
    ) -> List[TeamMember]:
        """
        Executes filtering on the local list of members. Role filters compare
        against the member's role value; everything else falls through to the
        generic attribute filter on the parent class.

        Parameters
        ----------
        members : List[TeamMember]
        filter : QueryFilter

        Returns
        -------
        List[TeamMember]: Filtered subset of members
        """
        if filter.name != "role":
            return super()._generic_execute_filter(members, filter)
        return [
            member
            for member in members
            if member._element is not None
            and filter.filter_attr(member._element.role.value)
        ]
class DefaultDarwin(BaseModel):
    """Base pydantic model carrying the default Darwin-Py configuration.

    Configured behaviour:
    - fields are re-validated whenever they are assigned to
    - protected namespaces are disabled, so ``model_*`` field names are allowed
    """

    model_config = ConfigDict(validate_assignment=True, protected_namespaces=())
@fixture
def basic_dataset() -> dict:
    """A single raw dataset payload as returned by the API."""
    return {"name": "test-dataset", "slug": "1337", "id": 1, "releases": []}


@fixture
def basic_list_of_datasets() -> list:
    """Three raw dataset payloads with distinct names, slugs, and ids."""
    datasets = []
    for offset, suffix in enumerate(["", "-2", "-3"]):
        datasets.append(
            {
                "name": f"test-dataset{suffix}",
                "slug": str(1337 + offset),
                "id": offset + 1,
                "releases": [],
            }
        )
    return datasets


@fixture
def sad_http_client() -> ClientCore:
    """A mock client whose post/get/delete all raise HTTPError."""
    mock = MagicMock(ClientCore)
    for verb in ("post", "get", "delete"):
        getattr(mock, verb).side_effect = HTTPError("error")
    return mock


@fixture
def happy_post_client() -> ClientCore:
    """A mock client whose ``post`` returns a single dataset payload."""
    mock_client = MagicMock(ClientCore)
    mock_client.post.return_value = {
        "name": "test-dataset",
        "slug": "1337",
        "id": 1,
        "releases": [],
    }
    return mock_client


@fixture
def happy_get_client() -> ClientCore:
    """A mock client whose ``get`` returns a one-element dataset list."""
    mock_client = MagicMock(ClientCore)
    mock_client.get.return_value = [
        {"name": "test-dataset", "slug": "1337", "id": 1, "releases": []}
    ]
    return mock_client
def test_it_raises_an_error_on_http_error(
    basic_dataset: dict, base_client: ClientCore
) -> None:
    """A 400 from the datasets create endpoint surfaces as BadRequest."""
    # Combined context managers: the mock must be active when the call raises.
    with raises(BadRequest), responses.RequestsMock() as rsps:
        rsps.add(
            rsps.POST,
            base_client.config.api_endpoint + "datasets",
            json=basic_dataset,
            status=400,
        )

        create_dataset(base_client, "test-dataset")
}, 37 | status=400, 38 | ) 39 | 40 | remove_dataset(base_client, "test-dataset") # type: ignore 41 | -------------------------------------------------------------------------------- /darwin/future/tests/core/datasets/test_get_dataset.py: -------------------------------------------------------------------------------- 1 | import responses 2 | from pytest import raises 3 | 4 | from darwin.future.core.client import ClientCore 5 | from darwin.future.core.datasets import get_dataset 6 | from darwin.future.data_objects.dataset import DatasetCore 7 | from darwin.future.exceptions import BadRequest 8 | from darwin.future.tests.core.fixtures import * 9 | 10 | from .fixtures import * 11 | 12 | 13 | def test_it_gets_a_dataset(base_client: ClientCore, basic_dataset: DatasetCore) -> None: 14 | with responses.RequestsMock() as rsps: 15 | rsps.add( 16 | rsps.GET, 17 | base_client.config.api_endpoint + "datasets", 18 | json=basic_dataset, 19 | status=200, 20 | ) 21 | 22 | dataset = get_dataset(base_client, "test-dataset") 23 | 24 | assert dataset.name == "test-dataset" 25 | assert dataset.slug == "1337" 26 | 27 | 28 | def test_it_raises_an_error_on_http_error(base_client: ClientCore) -> None: 29 | with responses.RequestsMock() as rsps: 30 | rsps.add( 31 | rsps.GET, 32 | base_client.config.api_endpoint + "datasets", 33 | json={}, 34 | status=400, 35 | ) 36 | with raises(BadRequest): 37 | get_dataset(base_client, "test-dataset") 38 | get_dataset(base_client, "test-dataset") 39 | -------------------------------------------------------------------------------- /darwin/future/tests/core/datasets/test_list_datasets.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | import pytest 4 | import responses 5 | 6 | from darwin.future.core.client import ClientCore 7 | from darwin.future.core.datasets import list_datasets 8 | from darwin.future.data_objects.dataset import DatasetCore 9 | from darwin.future.exceptions import 
def test_it_returns_an_error_if_the_client_returns_an_http_error(
    base_client: ClientCore,
) -> None:
    """A 400 from the datasets list endpoint surfaces as BadRequest
    carrying the response's status code."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            rsps.GET,
            base_client.config.api_endpoint + "datasets",
            json={},
            status=400,
        )
        with pytest.raises(BadRequest) as excinfo:
            list_datasets(base_client)

        assert excinfo.value.args[0].status_code == 400
@pytest.fixture
def base_items_json(base_items: List[ItemCore]) -> List[dict]:
    """The base items serialised to JSON-safe dicts."""
    serialised = []
    for item in base_items:
        payload = item.model_dump()
        # json library doesn't support UUIDs so need to be str'd
        payload["id"] = str(payload["id"])
        serialised.append(payload)
    return serialised
@responses.activate
def test_create_property_from_json(
    base_client: ClientCore, base_property_object: FullProperty
) -> None:
    """create_property also accepts the raw create-endpoint payload as params."""
    team = base_client.config.default_team
    payload = base_property_object.to_create_endpoint()
    # Mock the create endpoint to echo back the full property.
    responses.add(
        responses.POST,
        f"{base_client.config.base_url}api/v2/teams/{team}/properties",
        json=base_property_object.model_dump(mode="json"),
        status=200,
    )

    created = create_property(base_client, params=payload, team_slug=team)

    assert isinstance(created, FullProperty)
    assert created == base_property_object
@responses.activate
def test_update_property_value(
    base_client: ClientCore, base_property_object: FullProperty
) -> None:
    """update_property_value round-trips a single PropertyValue."""
    team = base_client.config.default_team
    item_id = base_property_object.id
    assert item_id
    assert base_property_object.property_values
    value = base_property_object.property_values[0]
    # Mock the property-value update endpoint to echo the value back.
    responses.add(
        responses.PUT,
        f"{base_client.config.base_url}api/v2/teams/{team}/properties/{item_id}/property_values/{value.id}",
        json=value.model_dump(),
        status=200,
    )

    updated = update_property_value(
        base_client,
        params=value,
        item_id=item_id,
        team_slug=team,
    )

    assert isinstance(updated, PropertyValue)
    assert updated == value
def test_querystring_happy_path() -> None:
    """QueryString renders dicts of strings as '?k=v' and keeps .value intact."""
    single = QueryString({"foo": "bar"})
    double = QueryString({"foo": "bar", "baz": "qux"})
    empty = QueryString({})

    assert str(single) == "?foo=bar"
    assert str(double) == "?foo=bar&baz=qux"
    # An empty mapping renders as the empty string, not a bare '?'.
    assert str(empty) == ""

    assert single.value == {"foo": "bar"}
    assert double.value == {"foo": "bar", "baz": "qux"}
-------------------------------------------------------------------------------- /darwin/future/tests/data/base_annotation.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0", 3 | "schema_ref": "https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/2.0/schema.json", 4 | "item": { 5 | "name": "", 6 | "path": "/" 7 | }, 8 | "annotations": [ 9 | { 10 | "bounding_box": { 11 | "h": 1.0, 12 | "w": 1.0, 13 | "x": 0.0, 14 | "y": 0.0 15 | }, 16 | "id": "007882ff-99c4-4c6f-b71b-79cfc147fef6", 17 | "name": "test_bb" 18 | }, 19 | { 20 | "ellipse": { 21 | "angle": 0.0, 22 | "center": { 23 | "x": 1.0, 24 | "y": 1.0 25 | }, 26 | "radius": { 27 | "x": 1.0, 28 | "y": 1.0 29 | } 30 | }, 31 | "id": "320a60f2-643b-4d74-a117-0ea2fdfe7a61", 32 | "name": "test_ellipse" 33 | }, 34 | { 35 | "bounding_box": { 36 | "h": 1.0, 37 | "w": 1.0, 38 | "x": 0.0, 39 | "y": 0.0 40 | }, 41 | "id": "012dcc6c-5b77-406b-8cd7-d9567c8b00b7", 42 | "name": "test_poly", 43 | "polygon": { 44 | "paths": [ 45 | [ 46 | { 47 | "x": 0.0, 48 | "y": 0.0 49 | }, 50 | { 51 | "x": 1.0, 52 | "y": 0.0 53 | }, 54 | { 55 | "x": 1.0, 56 | "y": 1.0 57 | }, 58 | { 59 | "x": 0.0, 60 | "y": 1.0 61 | } 62 | ] 63 | ] 64 | } 65 | } 66 | ] 67 | } -------------------------------------------------------------------------------- /darwin/future/tests/data_objects/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/darwin/future/tests/data_objects/__init__.py -------------------------------------------------------------------------------- /darwin/future/tests/data_objects/fixtures.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | 5 | test_data_path: Path = Path(__file__).parent / "workflow" / "data" 6 | valid_stage_json = test_data_path / "stage.json" 
@pytest.fixture
def basic_combined(basic_team: dict, basic_dataset: dict, basic_release: dict) -> dict:
    """The team payload with the dataset (and its release) nested inside.

    Note: mutates ``basic_team`` in place; fixtures are function-scoped, so
    nothing leaks between tests.
    """
    basic_dataset["releases"] = [basic_release]
    basic_team["datasets"] = [basic_dataset]
    return basic_team
def test_polygon_bbx_validator(raw_json: dict) -> None:
    """A polygon missing its bounding box gets one filled in by validation."""
    polygon_annotation = raw_json["annotations"][2]
    stripped = polygon_annotation.copy()
    del stripped["bounding_box"]

    from_stripped = PolygonAnnotation.model_validate(stripped)
    from_full = PolygonAnnotation.model_validate(polygon_annotation)

    assert from_stripped.bounding_box is not None
    assert from_full.bounding_box is not None
    # The computed bounding box must equal the explicit one.
    assert from_stripped == from_full

    # Sanity check: the sibling bounding-box annotation still validates.
    bounds_annotation = raw_json["annotations"][0]
    BoundingBoxAnnotation.model_validate(bounds_annotation)
@pytest.mark.parametrize("test_object", [TeamCore, DatasetCore, ReleaseCore])
def test_empty_obj_raises(test_object: type) -> None:
    """Validating an empty dict against any core model raises ValidationError.

    The parametrised argument is the model *class* itself, so it is
    annotated as ``type`` (the original ``BaseModel`` annotation described
    an instance, which is not what pytest passes here).
    """
    with pytest.raises(ValidationError):
        test_object.model_validate({})
def test_properties_metadata_loads_folder(path_to_metadata_folder: Path) -> None:
    """MetaDataClass.from_path resolves .v7/metadata.json inside a folder.

    The original passed the metadata *file* path, making this test an exact
    duplicate of ``test_properties_metadata_loads_file``; it now exercises
    the folder-based lookup its name promises (the otherwise-unused
    ``path_to_metadata_folder`` fixture exists for this purpose).
    """
    metadata = MetaDataClass.from_path(path_to_metadata_folder)
    assert metadata is not None
    assert len(metadata) == 2
@pytest.mark.parametrize(
    "input,expected",
    [
        ("UPPERCASE", "uppercase"),
        ("lowercase", "lowercase"),
        (" whitespace ", "whitespace"),
        ("middle white space", "middle white space"),
        (" Inte grated Test ", "inte grated test"),
    ],
)
def test_parse_name_parses_correctly(input: str, expected: str) -> None:
    """parse_name lower-cases input and strips surrounding whitespace,
    preserving internal spaces."""
    assert parse_name(input) == expected
/darwin/future/tests/data_objects/workflow/data/edge.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "e69d3ebe-6ab9-4159-b44f-2bf84d29bb20", 3 | "name": "default", 4 | "source_stage_id": "e69d3ebe-6ab9-4159-b44f-2bf84d29bb20", 5 | "target_stage_id": "5e0bc6aa-22fa-4845-a67c-0f1adfe46b5c" 6 | } -------------------------------------------------------------------------------- /darwin/future/tests/data_objects/workflow/data/stage.json: -------------------------------------------------------------------------------- 1 | { 2 | "assignable_users": [ 3 | { 4 | "stage_id": "0fa1ae43-fb46-44d7-bf85-b78e81d0d02f", 5 | "user_id": 100 6 | } 7 | ], 8 | "config": { 9 | "allowed_class_ids": null, 10 | "annotation_group_id": null, 11 | "assignable_to": "manual", 12 | "authorization_header": null, 13 | "auto_instantiate": false, 14 | "champion_stage_id": null, 15 | "class_mapping": [], 16 | "dataset_id": null, 17 | "from_non_default_v1_template": null, 18 | "include_annotations": false, 19 | "initial": false, 20 | "iou_thresholds": null, 21 | "model_id": null, 22 | "model_type": "gust", 23 | "parallel_stage_ids": null, 24 | "readonly": false, 25 | "retry_if_fails": false, 26 | "rules": [], 27 | "skippable": false, 28 | "test_stage_id": null, 29 | "threshold": null, 30 | "url": null, 31 | "x": 20, 32 | "y": 20 33 | }, 34 | "edges": [ 35 | { 36 | "id": "e69d3ebe-6ab9-4159-b44f-2bf84d29bb20", 37 | "name": "default", 38 | "source_stage_id": "e69d3ebe-6ab9-4159-b44f-2bf84d29bb20", 39 | "target_stage_id": "5e0bc6aa-22fa-4845-a67c-0f1adfe46b5c" 40 | } 41 | ], 42 | "id": "e69d3ebe-6ab9-4159-b44f-2bf84d29bb20", 43 | "name": "Annotate", 44 | "type": "annotate" 45 | } -------------------------------------------------------------------------------- /darwin/future/tests/data_objects/workflow/data/stage_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "allowed_class_ids": null, 3 | 
class InvalidValueForTest:
    """Sentinel type used to provoke pydantic validation errors in tests.

    Deliberately empty: an instance matches none of the field types the
    workflow models accept, so assigning it to any field must fail
    validation.
    """
2 | -------------------------------------------------------------------------------- /darwin/future/tests/data_objects/workflow/test_wfdataset.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | from pydantic import ValidationError 5 | 6 | from darwin.future.data_objects.workflow import WFDatasetCore 7 | from darwin.future.tests.data_objects.workflow.invalidvaluefortest import ( 8 | InvalidValueForTest, 9 | ) 10 | 11 | test_data_path: Path = Path(__file__).parent / "data" 12 | validate_dataset_json = test_data_path / "dataset.json" 13 | 14 | 15 | def test_file_exists() -> None: 16 | # This is a test sanity check to make sure the file exists 17 | # Helps avoids headaches when debugging tests 18 | assert validate_dataset_json.exists() 19 | 20 | 21 | def test_WFDataset_validates_from_valid_json() -> None: 22 | WFDatasetCore.parse_file(validate_dataset_json) 23 | assert True 24 | 25 | 26 | def test_cast_to_int_returns_dataset_id() -> None: 27 | dataset = WFDatasetCore.parse_file(validate_dataset_json) 28 | assert dataset.id == 101 29 | 30 | 31 | def test_cast_to_str_returns_dataset_name() -> None: 32 | dataset = WFDatasetCore.parse_file(validate_dataset_json) 33 | assert dataset.name == "Test Dataset" 34 | 35 | 36 | def test_sad_paths() -> None: 37 | dataset = WFDatasetCore.parse_file(validate_dataset_json) 38 | fields = ["id", "name", "instructions"] 39 | 40 | # Test missing fields 41 | for key in fields: 42 | with pytest.raises(ValidationError) as excinfo: 43 | working_dataset = dataset.model_copy().model_dump() 44 | del working_dataset[key] 45 | WFDatasetCore.model_validate(working_dataset) 46 | 47 | assert str(excinfo.value).startswith( 48 | f"1 validation error for WFDatasetCore\n{key}" 49 | ) 50 | 51 | # Test invalid types 52 | for key in fields: 53 | with pytest.raises(ValidationError) as excinfo: 54 | working_dataset = dataset.model_copy().model_dump() 55 | 
working_dataset[key] = InvalidValueForTest() 56 | WFDatasetCore.model_validate(working_dataset) 57 | 58 | assert str(excinfo.value).startswith( 59 | f"1 validation error for WFDatasetCore\n{key}" 60 | ) 61 | -------------------------------------------------------------------------------- /darwin/future/tests/data_objects/workflow/test_wfedge.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from darwin.future.data_objects.workflow import WFEdgeCore 4 | 5 | test_data_path: Path = Path(__file__).parent / "data" 6 | validate_json = test_data_path / "edge.json" 7 | 8 | 9 | def test_file_exists() -> None: 10 | # This is a test sanity check to make sure the file exists 11 | # Helps avoids headaches when debugging tests 12 | assert validate_json.exists() 13 | 14 | 15 | def test_WFEdge_validates_from_valid_json() -> None: 16 | parsed_edge = WFEdgeCore.parse_file(validate_json) 17 | 18 | assert isinstance(parsed_edge, WFEdgeCore) 19 | -------------------------------------------------------------------------------- /darwin/future/tests/data_objects/workflow/test_wfstage.py: -------------------------------------------------------------------------------- 1 | from json import loads 2 | from uuid import UUID 3 | 4 | import pytest 5 | from pydantic import ValidationError 6 | 7 | from darwin.future.data_objects.workflow import WFStageCore 8 | from darwin.future.tests.data_objects.fixtures import test_data_path 9 | 10 | validate_json = test_data_path / "stage.json" 11 | 12 | 13 | def test_file_exists() -> None: 14 | # This is a test sanity check to make sure the file exists 15 | # Helps avoids headaches when debugging tests 16 | assert validate_json.exists() 17 | 18 | 19 | def test_WFStage_validates_from_valid_json() -> None: 20 | WFStageCore.parse_file(validate_json) 21 | assert True 22 | 23 | 24 | def test_casts_strings_to_uuids_as_needed() -> None: 25 | parsed_stage = WFStageCore.parse_file(validate_json) 
26 | assert isinstance(parsed_stage.id, UUID) 27 | assert str(parsed_stage.id) == "e69d3ebe-6ab9-4159-b44f-2bf84d29bb20" 28 | 29 | 30 | def test_raises_with_invalid_uuid() -> None: 31 | dict_from_json = loads(validate_json.read_text()) 32 | dict_from_json["id"] = "not-a-uuid" 33 | 34 | with pytest.raises(ValidationError) as excinfo: 35 | WFStageCore.model_validate(dict_from_json) 36 | 37 | assert str(excinfo.value).startswith("1 validation error for WFStageCore\nid") 38 | -------------------------------------------------------------------------------- /darwin/future/tests/data_objects/workflow/test_wfstage_config.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from darwin.future.data_objects.workflow import WFStageConfigCore 4 | 5 | test_data_path: Path = Path(__file__).parent / "data" 6 | validate_json = test_data_path / "stage_config.json" 7 | 8 | 9 | def test_file_exists() -> None: 10 | # This is a test sanity check to make sure the file exists 11 | # Helps avoids headaches when debugging tests 12 | assert validate_json.exists() 13 | 14 | 15 | def test_WFStageConfig_validates_from_valid_json() -> None: 16 | parsed_stage_config = WFStageConfigCore.parse_file(validate_json) 17 | 18 | assert isinstance(parsed_stage_config, WFStageConfigCore) 19 | -------------------------------------------------------------------------------- /darwin/future/tests/data_objects/workflow/test_wfuser.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from darwin.future.data_objects.workflow import WFUserCore 4 | 5 | test_data_path: Path = Path(__file__).parent / "data" 6 | validate_json = test_data_path / "user.json" 7 | 8 | 9 | def test_file_exists() -> None: 10 | # This is a test sanity check to make sure the file exists 11 | # Helps avoids headaches when debugging tests 12 | assert validate_json.exists() 13 | 14 | 15 | def 
test_WFUser_validates_from_valid_json() -> None: 16 | parsed_user = WFUserCore.parse_file(validate_json) 17 | 18 | assert isinstance(parsed_user, WFUserCore) 19 | assert parsed_user.user_id == 100 20 | assert str(parsed_user.stage_id) == "0fa1ae43-fb46-44d7-bf85-b78e81d0d02f" 21 | -------------------------------------------------------------------------------- /darwin/future/tests/data_objects/workflow/test_workflow.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from pathlib import Path 3 | from uuid import UUID 4 | 5 | from darwin.future.data_objects.workflow import WFDatasetCore, WFStageCore, WorkflowCore 6 | 7 | test_data_path: Path = Path(__file__).parent / "data" 8 | validate_json = test_data_path / "workflow.json" 9 | 10 | 11 | def test_file_exists() -> None: 12 | # This is a test sanity check to make sure the file exists 13 | # Helps avoids headaches when debugging tests 14 | assert validate_json.exists() 15 | 16 | 17 | def test_Workflow_validates_from_valid_json() -> None: 18 | parsed_set = WorkflowCore.parse_file(validate_json) 19 | 20 | assert isinstance(parsed_set, WorkflowCore) 21 | assert isinstance(parsed_set.id, UUID) 22 | assert isinstance(parsed_set.name, str) 23 | assert isinstance(parsed_set.team_id, int) 24 | 25 | assert isinstance(parsed_set.stages, list) 26 | assert all(isinstance(i, WFStageCore) for i in parsed_set.stages) 27 | assert isinstance(parsed_set.dataset, WFDatasetCore) 28 | 29 | assert isinstance(parsed_set.inserted_at, datetime) 30 | assert isinstance(parsed_set.updated_at, datetime) 31 | 32 | assert isinstance(parsed_set.thumbnails, list) 33 | assert all(isinstance(i, str) for i in parsed_set.thumbnails) 34 | -------------------------------------------------------------------------------- /darwin/future/tests/fixtures.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | from pathlib 
import Path 4 | from typing import Generator 5 | 6 | import pytest 7 | 8 | 9 | @pytest.fixture 10 | def test_directory() -> Generator[Path, None, None]: 11 | path = Path.home() / "darwin-tests" 12 | if not path.exists(): 13 | os.makedirs(path) 14 | yield path 15 | shutil.rmtree(path) 16 | -------------------------------------------------------------------------------- /darwin/future/tests/meta/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/darwin/future/tests/meta/__init__.py -------------------------------------------------------------------------------- /darwin/future/tests/meta/fixtures.py: -------------------------------------------------------------------------------- 1 | from pytest import fixture 2 | 3 | from darwin.future.core.client import DarwinConfig 4 | from darwin.future.meta.client import Client 5 | from darwin.future.tests.core.fixtures import * 6 | 7 | 8 | @fixture 9 | def base_meta_client(base_config: DarwinConfig) -> Client: 10 | return Client(base_config) 11 | -------------------------------------------------------------------------------- /darwin/future/tests/meta/objects/test_v7_id.py: -------------------------------------------------------------------------------- 1 | from uuid import UUID 2 | 3 | from darwin.future.meta.client import Client 4 | from darwin.future.meta.objects.v7_id import V7ID 5 | from darwin.future.tests.meta.fixtures import * 6 | 7 | 8 | def test_v7_id(base_meta_client: Client) -> None: 9 | # Test creating a V7ID object 10 | uuid = UUID("123e4567-e89b-12d3-a456-426655440000") 11 | v7_id = V7ID(uuid, base_meta_client) 12 | assert v7_id.id == uuid 13 | 14 | # Test __str__ method 15 | assert str(v7_id) == str(uuid) 16 | 17 | # Test __repr__ method 18 | assert repr(v7_id) == str(uuid) 19 | -------------------------------------------------------------------------------- 
/darwin/future/tests/meta/test_client.py: -------------------------------------------------------------------------------- 1 | import responses 2 | 3 | from darwin.future.core.client import DarwinConfig 4 | from darwin.future.data_objects.team import TeamCore 5 | from darwin.future.meta.client import Client 6 | from darwin.future.meta.objects.team import Team 7 | from darwin.future.tests.core.fixtures import * 8 | from darwin.future.tests.meta.fixtures import * 9 | 10 | 11 | def test_creates_from_api_key() -> None: 12 | with responses.RequestsMock() as rsps: 13 | base_api_endpoint = DarwinConfig._default_api_endpoint() 14 | rsps.add( 15 | responses.GET, 16 | base_api_endpoint + "users/token_info", 17 | json={"selected_team": {"slug": "test-team"}}, 18 | ) 19 | client = Client.from_api_key(api_key="test") 20 | assert client.config.default_team == "test-team" 21 | 22 | 23 | def test_team_property( 24 | base_meta_client: Client, base_team: TeamCore, base_team_json: dict 25 | ) -> None: 26 | client = base_meta_client 27 | endpoint = client.config.api_endpoint + f"teams/{client.config.default_team}" 28 | with responses.RequestsMock() as rsps: 29 | rsps.add(responses.GET, endpoint, json=base_team_json) 30 | team = client.team 31 | assert isinstance(team, Team) 32 | assert team._element == base_team 33 | -------------------------------------------------------------------------------- /darwin/importer/__init__.py: -------------------------------------------------------------------------------- 1 | from importlib import import_module 2 | 3 | from darwin.datatypes import ImportParser 4 | 5 | from .importer import import_annotations # noqa 6 | 7 | 8 | class ImporterNotFoundError(ModuleNotFoundError): 9 | pass 10 | 11 | 12 | def get_importer(format: str) -> ImportParser: 13 | try: 14 | module = import_module(f"darwin.importer.formats.{format}") 15 | return getattr(module, "parse_path") 16 | except ModuleNotFoundError: 17 | raise ImporterNotFoundError 18 | 
-------------------------------------------------------------------------------- /darwin/importer/formats/__init__.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | # When updating this file, please also update the docs: 4 | # https://docs.v7labs.com/docs/import-1 5 | supported_formats: List[str] = [ 6 | "coco", 7 | "dataloop", 8 | "csv_tags", 9 | "csv_tags_video", 10 | "darwin", 11 | "labelbox", 12 | "pascal_voc", 13 | "superannotate", 14 | "nifti", 15 | ] 16 | -------------------------------------------------------------------------------- /darwin/importer/formats/csv_tags.py: -------------------------------------------------------------------------------- 1 | import csv 2 | from pathlib import Path 3 | from typing import List, Optional 4 | 5 | import darwin.datatypes as dt 6 | from darwin.path_utils import deconstruct_full_path 7 | 8 | 9 | def parse_path(path: Path) -> Optional[List[dt.AnnotationFile]]: 10 | """ 11 | Parses the given file and returns a ``List[dt.AnnotationFile]`` with the parsed files, or 12 | ``None`` if the given file's extension is not ``.csv``. 13 | 14 | Parameters 15 | ---------- 16 | path : Path 17 | The ``Path`` of the file to parse. 18 | 19 | Returns 20 | ------- 21 | Optional[List[dt.AnnotationFile]] 22 | A ``List[dt.AnnotationFile]`` or ``None`` if the function was not able to parse the file. 
23 | """ 24 | if path.suffix != ".csv": 25 | return None 26 | 27 | files = [] 28 | tags_and_files = {} 29 | with path.open() as f: 30 | reader = csv.reader(f) 31 | for row in reader: 32 | filename, *tags = (s.strip() for s in row) 33 | if filename == "": 34 | continue 35 | annotations = [dt.make_tag(tag) for tag in tags if len(tag) > 0] 36 | if filename not in tags_and_files: 37 | tags_and_files[filename] = list(annotations) 38 | else: 39 | tags_and_files[filename].extend(annotations) 40 | 41 | for filename, annotations in tags_and_files.items(): 42 | annotation_classes = { 43 | annotation.annotation_class for annotation in annotations 44 | } 45 | remote_path, filename = deconstruct_full_path(filename) 46 | files.append( 47 | dt.AnnotationFile( 48 | path, 49 | filename, 50 | annotation_classes, 51 | annotations, 52 | remote_path=remote_path, 53 | ) 54 | ) 55 | return files 56 | -------------------------------------------------------------------------------- /darwin/importer/formats/darwin.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import Optional 3 | 4 | import darwin.datatypes as dt 5 | from darwin.utils import parse_darwin_json 6 | 7 | 8 | def parse_path(path: Path) -> Optional[dt.AnnotationFile]: 9 | """ 10 | Parses the given file into a darwin ``AnnotationFile`` or returns ``None`` if the file does not 11 | have a ``.json`` extension. 12 | 13 | Parameters 14 | ---------- 15 | path : Path 16 | The ``Path`` of the file to parse. 17 | 18 | Returns 19 | ------- 20 | Optional[dt.AnnotationFile] 21 | The ``AnnotationFile`` file or ``None`` if the file was not parseable. 
22 | """ 23 | if path.suffix != ".json": 24 | return None 25 | return parse_darwin_json(path, 0) 26 | -------------------------------------------------------------------------------- /darwin/importer/formats/nifti_schemas.py: -------------------------------------------------------------------------------- 1 | class_map = { 2 | "type": "object", 3 | "patternProperties": {"^([0-9]+)+$": {"type": "string"}}, 4 | "additionalProperties": False, 5 | } 6 | 7 | nifti_image_label_pair = { 8 | "type": "object", 9 | "properties": { 10 | "image": {"type": "string"}, 11 | "label": {"type": "string"}, 12 | "class_map": class_map, 13 | "mode": {"type": "string", "enum": ["video", "instances", "mask"]}, 14 | "is_mpr": {"type": "boolean"}, 15 | "slot_names": {"type": "array", "items": {"type": "string"}}, 16 | }, 17 | "required": ["image", "label", "class_map"], 18 | "additionalProperties": False, 19 | } 20 | 21 | nifti_import_schema = { 22 | "type": "object", 23 | "properties": {"data": {"type": "array", "items": nifti_image_label_pair}}, 24 | "required": ["data"], 25 | "additionalProperties": False, 26 | } 27 | -------------------------------------------------------------------------------- /darwin/torch/__init__.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from pathlib import Path 3 | 4 | try: 5 | import torchvision # noqa 6 | 7 | # Here we remove `darwin` directory from `sys.path` to force the importer 8 | # to import the library `torch`, rather than the internal package. 9 | # This hack resolves this naming conflict for Sphinx. 10 | for path in sys.path: 11 | path_str = str(Path("darwin-py") / "darwin") 12 | if path.endswith(path_str): 13 | sys.path.remove(path) 14 | 15 | import torch # noqa 16 | except ImportError: 17 | raise ImportError( 18 | "darwin.torch requires pytorch and torchvision. 
Install it using: pip install torch torchvision" 19 | ) from None 20 | 21 | from .dataset import get_dataset # noqa 22 | -------------------------------------------------------------------------------- /darwin/utils/__init__.py: -------------------------------------------------------------------------------- 1 | from .flatten_list import flatten_list 2 | from .utils import * # noqa F403 3 | -------------------------------------------------------------------------------- /darwin/utils/flatten_list.py: -------------------------------------------------------------------------------- 1 | from typing import Generator, List 2 | 3 | from darwin.datatypes import UnknownType 4 | 5 | 6 | def flatten_list(list_of_lists: List[UnknownType]) -> List[UnknownType]: 7 | """ 8 | Flattens a list of lists into a single list. 9 | 10 | Parameters 11 | ---------- 12 | list_of_lists : List[List[Any]] 13 | The list of lists to flatten. 14 | 15 | Returns 16 | ------- 17 | List[Any] 18 | The flattened list. 19 | """ 20 | 21 | if not isinstance(list_of_lists, list): 22 | raise TypeError("Expected a list") 23 | 24 | def flatten(lists: List[UnknownType]) -> Generator[list, UnknownType, UnknownType]: 25 | if isinstance(lists, list) and len(lists) == 0: 26 | return lists 27 | for item in lists: 28 | if isinstance(item, list): 29 | for i in flatten(item): 30 | yield i 31 | else: 32 | yield item 33 | 34 | return list(flatten(list_of_lists)) 35 | -------------------------------------------------------------------------------- /darwin/utils/get_item_count.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Optional 2 | 3 | 4 | def get_item_count(dataset_dict: Dict) -> int: 5 | """ 6 | Returns the number of items in the dataset. 7 | 8 | Parameters 9 | ---------- 10 | dataset_dict: Dict 11 | The dataset dictionary. 12 | 13 | Returns 14 | ------- 15 | int 16 | The number of items in the dataset. 
17 | """ 18 | num_items: Optional[int] = dataset_dict.get("num_items") 19 | num_videos: Optional[int] = dataset_dict.get("num_videos") 20 | num_images: Optional[int] = dataset_dict.get("num_images") 21 | 22 | if num_items is not None: 23 | return num_items 24 | 25 | return (num_images or 0) + (num_videos or 0) 26 | -------------------------------------------------------------------------------- /darwin/validators.py: -------------------------------------------------------------------------------- 1 | """ 2 | Holds functions that convert backend errors into a pythonic format the application can understand. 3 | """ 4 | 5 | from typing import Any, Dict 6 | 7 | from darwin.exceptions import NameTaken, ValidationError 8 | 9 | 10 | def name_taken(code: int, body: Dict[str, Any]) -> None: 11 | """ 12 | Validates if a request to the backend errored out with a NameTaken error. 13 | 14 | Parameters 15 | ---------- 16 | code : int 17 | The response code. 18 | body : Dict[str, Any] 19 | The response body. 20 | 21 | Raises 22 | ------ 23 | NameTaken 24 | If both ``code`` and ``body`` indicate that the server request errored due to a name being 25 | already taken. 26 | """ 27 | if code != 422: 28 | return 29 | if body.get("errors", {}).get("name") == ["has already been taken"]: 30 | raise NameTaken 31 | 32 | 33 | def validation_error(code: int, body: Dict[str, Any]) -> None: 34 | """ 35 | Validates if a request to the backend errored out with a Validation error. 36 | 37 | Parameters 38 | ---------- 39 | code : int 40 | The response code. 41 | body : Dict[str, Any] 42 | The response body. 43 | 44 | Raises 45 | ------ 46 | ValidationError 47 | If both ``code`` and ``body`` indicate that the server request errored because it failed 48 | validation. 
49 | """ 50 | if code == 422: 51 | raise ValidationError(body) 52 | -------------------------------------------------------------------------------- /darwin/version/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "3.0.1" 2 | -------------------------------------------------------------------------------- /deploy/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/deploy/__init__.py -------------------------------------------------------------------------------- /deploy/_filter_files.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import sys 4 | from typing import List 5 | 6 | 7 | def main(argv: List[str]) -> None: 8 | file_extension: str = argv[0] 9 | files_in: List[str] = argv[1:] 10 | 11 | if file_extension.startswith("."): 12 | file_extension = file_extension[1:] 13 | 14 | files_out = [file for file in files_in if file.endswith(f".{file_extension}")] 15 | 16 | print(" ".join(files_out)) 17 | 18 | 19 | if __name__ == "__main__": 20 | main(sys.argv[1:]) 21 | -------------------------------------------------------------------------------- /deploy/check_python.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Confirm that python 3.9 or higher is installed and pip installed 3 | 4 | # Check python is installed 5 | if ! command -v python3 &> /dev/null 6 | then 7 | echo "Python3 could not be found" 8 | exit 1 9 | fi 10 | 11 | # Check python version is 3.9 or higher 12 | if [[ $(python3 -c 'import sys; print(sys.version_info >= (3, 9))') != "True" ]] 13 | then 14 | echo "Python version 3.9 or higher is required" 15 | exit 2 16 | fi 17 | 18 | # Check pip is installed 19 | if ! 
command -v pip3 &> /dev/null 20 | then 21 | echo "pip3 could not be found" 22 | exit 3 23 | fi 24 | 25 | echo "Confirmed Python and pip are installed" 26 | exit 0 -------------------------------------------------------------------------------- /deploy/nightly_package_setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Require a single string argument, the new package name 5 | # Example: python change_package_name.py com.example.newname 6 | 7 | from datetime import datetime 8 | from os import linesep 9 | from pathlib import Path 10 | 11 | 12 | def main() -> None: 13 | epoch_timestring = datetime.now().strftime("%s") 14 | 15 | this_file_path = Path(__file__).parent.resolve() 16 | path_to_pyproject = this_file_path / ".." / "pyproject.toml" 17 | path_to_version = this_file_path / ".." / "version.txt" 18 | 19 | try: 20 | assert path_to_pyproject.exists() 21 | except AssertionError: 22 | print("No pyproject.toml found.") 23 | exit(1) 24 | 25 | lines = path_to_pyproject.read_text().splitlines() 26 | lines_to_write = [] 27 | 28 | for line in lines: 29 | if line.startswith("name ="): 30 | lines_to_write.append('name = "darwin-nightly"\n') 31 | elif line.startswith("version ="): 32 | version = line.split("=")[1].strip() 33 | path_to_version.write_text(version) 34 | lines_to_write.append(f'version = "{epoch_timestring}"\n') 35 | else: 36 | lines_to_write.append(line) 37 | 38 | path_to_pyproject.write_text(linesep.join(lines_to_write)) 39 | 40 | print( 41 | f"Set build to a nightly in pyproject.toml - darwin-nightly@{epoch_timestring}" 42 | ) 43 | 44 | 45 | if __name__ == "__main__": 46 | main() 47 | -------------------------------------------------------------------------------- /deploy/revert_nightly_setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from pathlib import 
Path 5 | 6 | 7 | def main() -> None: 8 | new_package_name = "darwin-py" 9 | 10 | this_file_path = Path(__file__).parent.resolve() 11 | path_to_pyproject = this_file_path / ".." / "pyproject.toml" 12 | path_to_version = this_file_path / ".." / "version.txt" 13 | 14 | try: 15 | assert path_to_pyproject.exists() 16 | assert path_to_version.exists() 17 | except AssertionError: 18 | print("No nightly build in place to revert") 19 | exit(1) 20 | 21 | lines = path_to_pyproject.read_text().splitlines() 22 | new_version = path_to_version.read_text().strip() 23 | 24 | lines_to_write = [] 25 | 26 | for line in lines: 27 | if line.startswith("name ="): 28 | line = f'name = "{new_package_name}"\n' 29 | if line.startswith("version ="): 30 | line = f"version = {new_version}\n" 31 | lines_to_write.append(line) 32 | 33 | path_to_pyproject.write_text("\n".join(lines_to_write)) 34 | 35 | print(f"Changed package name to {new_package_name}@{new_version} in pyproject.toml") 36 | 37 | 38 | if __name__ == "__main__": 39 | main() 40 | -------------------------------------------------------------------------------- /e2e_tests/.env.example: -------------------------------------------------------------------------------- 1 | # These must be populated before running E2E tests 2 | E2E_ENVIRONMENT= 3 | E2E_API_KEY= 4 | E2E_TEAM= 5 | -------------------------------------------------------------------------------- /e2e_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/__init__.py -------------------------------------------------------------------------------- /e2e_tests/cli/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/cli/__init__.py 
-------------------------------------------------------------------------------- /e2e_tests/cli/convert/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/cli/convert/__init__.py -------------------------------------------------------------------------------- /e2e_tests/cli/test_pull.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from e2e_tests.helpers import assert_cli, run_cli_command, export_release 4 | from e2e_tests.objects import E2EDataset, ConfigValues 5 | 6 | 7 | def test_pull_with_remote_folder_structure( 8 | local_dataset: E2EDataset, config_values: ConfigValues 9 | ): 10 | """ 11 | Test pulling a dataset release with default arguments. 12 | 13 | The remote directory structure should be recreated locally. 14 | """ 15 | pull_dir = Path( 16 | f"{Path.home()}/.darwin/datasets/{config_values.team_slug}/{local_dataset.slug}/images" 17 | ) 18 | expected_filepaths = [ 19 | f"{pull_dir}/image_1.jpg", 20 | f"{pull_dir}/image_2.jpg", 21 | f"{pull_dir}/dir1/image_3.jpg", 22 | f"{pull_dir}/dir1/image_4.jpg", 23 | f"{pull_dir}/dir2/image_5.jpg", 24 | f"{pull_dir}/dir2/image_6.jpg", 25 | f"{pull_dir}/dir1/dir3/image_7.jpg", 26 | f"{pull_dir}/dir1/dir3/image_8.jpg", 27 | ] 28 | item_type = "single_slotted" 29 | annotation_format = "darwin" 30 | local_dataset.register_read_only_items(config_values, item_type) 31 | release = export_release(annotation_format, local_dataset, config_values) 32 | result = run_cli_command(f"darwin dataset pull {local_dataset.name}:{release.name}") 33 | assert_cli(result, 0) 34 | all_filepaths = list(pull_dir.rglob("*")) 35 | for expected_file in expected_filepaths: 36 | assert Path(expected_file) in all_filepaths 37 | -------------------------------------------------------------------------------- 
/e2e_tests/data/base_annotation.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0", 3 | "schema_ref": "https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/2.0/schema.json", 4 | "item": { 5 | "name": "", 6 | "path": "/" 7 | }, 8 | "annotations": [ 9 | { 10 | "bounding_box": { 11 | "h": 1.0, 12 | "w": 1.0, 13 | "x": 0.0, 14 | "y": 0.0 15 | }, 16 | "id": "007882ff-99c4-4c6f-b71b-79cfc147fef6", 17 | "name": "test_bb" 18 | }, 19 | { 20 | "ellipse": { 21 | "angle": 0.0, 22 | "center": { 23 | "x": 1.0, 24 | "y": 1.0 25 | }, 26 | "radius": { 27 | "x": 1.0, 28 | "y": 1.0 29 | } 30 | }, 31 | "id": "320a60f2-643b-4d74-a117-0ea2fdfe7a61", 32 | "name": "test_ellipse" 33 | }, 34 | { 35 | "bounding_box": { 36 | "h": 1.0, 37 | "w": 1.0, 38 | "x": 0.0, 39 | "y": 0.0 40 | }, 41 | "id": "012dcc6c-5b77-406b-8cd7-d9567c8b00b7", 42 | "name": "test_poly", 43 | "polygon": { 44 | "paths": [ 45 | [ 46 | { 47 | "x": 0.0, 48 | "y": 0.0 49 | }, 50 | { 51 | "x": 1.0, 52 | "y": 0.0 53 | }, 54 | { 55 | "x": 1.0, 56 | "y": 1.0 57 | }, 58 | { 59 | "x": 0.0, 60 | "y": 1.0 61 | } 62 | ] 63 | ] 64 | } 65 | } 66 | ] 67 | } -------------------------------------------------------------------------------- /e2e_tests/data/convert/coco/from/base_annotation.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0", 3 | "schema_ref": "https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/2.0/schema.json", 4 | "item": { 5 | "name": "", 6 | "path": "/", 7 | "slots": [ 8 | { 9 | "type": "image", 10 | "slot_name": "0", 11 | "width": 1080, 12 | "height": 1920 13 | } 14 | ] 15 | }, 16 | "annotations": [ 17 | { 18 | "bounding_box": { 19 | "h": 1.0, 20 | "w": 1.0, 21 | "x": 0.0, 22 | "y": 0.0 23 | }, 24 | "id": "007882ff-99c4-4c6f-b71b-79cfc147fef6", 25 | "name": "test_bb" 26 | }, 27 | { 28 | "ellipse": { 29 | "angle": 0.0, 30 | "center": { 31 | "x": 1.0, 32 | "y": 1.0 33 | }, 34 | 
"radius": { 35 | "x": 1.0, 36 | "y": 1.0 37 | } 38 | }, 39 | "id": "320a60f2-643b-4d74-a117-0ea2fdfe7a61", 40 | "name": "test_ellipse" 41 | }, 42 | { 43 | "bounding_box": { 44 | "h": 1.0, 45 | "w": 1.0, 46 | "x": 0.0, 47 | "y": 0.0 48 | }, 49 | "id": "012dcc6c-5b77-406b-8cd7-d9567c8b00b7", 50 | "name": "test_poly", 51 | "polygon": { 52 | "paths": [ 53 | [ 54 | { 55 | "x": 0.0, 56 | "y": 0.0 57 | }, 58 | { 59 | "x": 1.0, 60 | "y": 0.0 61 | }, 62 | { 63 | "x": 1.0, 64 | "y": 1.0 65 | }, 66 | { 67 | "x": 0.0, 68 | "y": 1.0 69 | } 70 | ], 71 | [ 72 | { 73 | "x": 1.0, 74 | "y": 0.0 75 | }, 76 | { 77 | "x": 1.0, 78 | "y": 1.0 79 | }, 80 | { 81 | "x": 0.0, 82 | "y": 1.0 83 | }, 84 | { 85 | "x": 0.0, 86 | "y": 0.0 87 | } 88 | ] 89 | ] 90 | } 91 | } 92 | ] 93 | } -------------------------------------------------------------------------------- /e2e_tests/data/convert/coco/to/output.json: -------------------------------------------------------------------------------- 1 | { 2 | "info": { 3 | "description": "Exported from Darwin", 4 | "url": "n/a", 5 | "version": "n/a", 6 | "year": 2023, 7 | "contributor": "n/a", 8 | "date_created": "2023/12/05" 9 | }, 10 | "licenses": [ 11 | { 12 | "url": "n/a", 13 | "id": 0, 14 | "name": "placeholder license" 15 | } 16 | ], 17 | "images": [ 18 | { 19 | "license": 0, 20 | "file_name": "", 21 | "coco_url": "n/a", 22 | "height": 1920, 23 | "width": 1080, 24 | "date_captured": "", 25 | "flickr_url": "n/a", 26 | "darwin_url": null, 27 | "darwin_workview_url": null, 28 | "id": 2043925204, 29 | "tag_ids": [] 30 | } 31 | ], 32 | "annotations": [ 33 | { 34 | "id": 1, 35 | "image_id": 2043925204, 36 | "category_id": 348813479, 37 | "segmentation": [ 38 | [ 39 | 0.0, 40 | 0.0, 41 | 1.0, 42 | 0.0, 43 | 1.0, 44 | 1.0, 45 | 0.0, 46 | 1.0 47 | ] 48 | ], 49 | "area": 1.0, 50 | "bbox": [ 51 | 0.0, 52 | 0.0, 53 | 1.0, 54 | 1.0 55 | ], 56 | "iscrowd": 0, 57 | "extra": {} 58 | }, 59 | { 60 | "id": 3, 61 | "image_id": 2043925204, 62 | "category_id": 3961009249, 
63 | "segmentation": { 64 | "counts": [ 65 | 0, 66 | 2, 67 | 1918, 68 | 2, 69 | 2071678 70 | ], 71 | "size": [ 72 | 1920, 73 | 1080 74 | ] 75 | }, 76 | "area": 0, 77 | "bbox": [ 78 | 0, 79 | 0, 80 | 2, 81 | 2 82 | ], 83 | "iscrowd": 1, 84 | "extra": {} 85 | } 86 | ], 87 | "categories": [ 88 | { 89 | "id": 348813479, 90 | "name": "test_bb", 91 | "supercategory": "root" 92 | }, 93 | { 94 | "id": 3961009249, 95 | "name": "test_poly", 96 | "supercategory": "root" 97 | } 98 | ], 99 | "tag_categories": [] 100 | } -------------------------------------------------------------------------------- /e2e_tests/data/convert/cvat/from/000000021295.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0", 3 | "schema_ref": "https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/2.0/schema.json", 4 | "item": { 5 | "name": "000000021295.jpg", 6 | "path": "/", 7 | "source_info": { 8 | "item_id": "0192c338-2dc8-bb9f-8dde-9c4a8714c957", 9 | "dataset": { 10 | "name": "tmp", 11 | "slug": "tmp", 12 | "dataset_management_url": "https://staging.v7labs.com/datasets/426200/dataset-management" 13 | }, 14 | "team": { 15 | "name": "E2E Testing", 16 | "slug": "e2e-testing" 17 | }, 18 | "workview_url": "https://staging.v7labs.com/workview?dataset=426200&item=0192c338-2dc8-bb9f-8dde-9c4a8714c957" 19 | }, 20 | "slots": [ 21 | { 22 | "type": "image", 23 | "slot_name": "0", 24 | "width": 640, 25 | "height": 427, 26 | "thumbnail_url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/files/6c554c31-8bf6-4fc7-9f31-394fd775b2a6/thumbnail", 27 | "source_files": [ 28 | { 29 | "file_name": "000000021295.jpg", 30 | "url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/uploads/ade5f1e3-a165-4a07-989f-86ee342499c9" 31 | } 32 | ] 33 | } 34 | ] 35 | }, 36 | "annotations": [ 37 | { 38 | "bounding_box": { 39 | "h": 43.7181, 40 | "w": 71.2665, 41 | "x": 166.0884, 42 | "y": 113.1879 43 | }, 44 | "id": "46b33c9c-7453-4722-8b43-91bbb3fc247f", 45 | 
"name": "test_bounding_box_basic", 46 | "properties": [], 47 | "slot_names": [ 48 | "0" 49 | ] 50 | }, 51 | { 52 | "bounding_box": { 53 | "h": 31.7405, 54 | "w": 58.0912, 55 | "x": 360.1248, 56 | "y": 259.913 57 | }, 58 | "id": "2b7c85ed-74b3-4111-a5d6-073da56d0072", 59 | "name": "test_bounding_box_basic", 60 | "properties": [], 61 | "slot_names": [ 62 | "0" 63 | ] 64 | } 65 | ], 66 | "properties": [] 67 | } -------------------------------------------------------------------------------- /e2e_tests/data/convert/cvat/to/output.xml: -------------------------------------------------------------------------------- 1 | 1.12024-10-25 10:33:01.789498+00:001exported_task_from_darwin1annotation0NoneFalse2024-10-25 10:33:01.789603+00:002024-10-25 10:33:01.789608+00:00111not applicableexample_usernameuser@example.com -------------------------------------------------------------------------------- /e2e_tests/data/convert/instance_mask/to/masks/000000021295_00000.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/convert/instance_mask/to/masks/000000021295_00000.png -------------------------------------------------------------------------------- /e2e_tests/data/convert/instance_mask/to/masks/000000021295_00001.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/convert/instance_mask/to/masks/000000021295_00001.png -------------------------------------------------------------------------------- /e2e_tests/data/convert/instance_mask/to/masks/000000021295_00002.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/convert/instance_mask/to/masks/000000021295_00002.png 
-------------------------------------------------------------------------------- /e2e_tests/data/convert/nifti-legacy-scaling/to/2044737.fat.nii/0/2044737.fat_Reference_sBAT.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/convert/nifti-legacy-scaling/to/2044737.fat.nii/0/2044737.fat_Reference_sBAT.nii.gz -------------------------------------------------------------------------------- /e2e_tests/data/convert/nifti-legacy-scaling/to/2044737.fat.nii/0/2044737.fat_test_mask_basic_m.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/convert/nifti-legacy-scaling/to/2044737.fat.nii/0/2044737.fat_test_mask_basic_m.nii.gz -------------------------------------------------------------------------------- /e2e_tests/data/convert/nifti-multislot/to/2044737.fat.nii/0/2044737.fat_Reference_sBAT.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/convert/nifti-multislot/to/2044737.fat.nii/0/2044737.fat_Reference_sBAT.nii.gz -------------------------------------------------------------------------------- /e2e_tests/data/convert/nifti-multislot/to/2044737.fat.nii/0/2044737.fat_test_mask_basic_m.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/convert/nifti-multislot/to/2044737.fat.nii/0/2044737.fat_test_mask_basic_m.nii.gz -------------------------------------------------------------------------------- /e2e_tests/data/convert/nifti-multislot/to/2044737.fat.nii/1/2044737.fat_Reference_sBAT.nii.gz: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/convert/nifti-multislot/to/2044737.fat.nii/1/2044737.fat_Reference_sBAT.nii.gz -------------------------------------------------------------------------------- /e2e_tests/data/convert/nifti-multislot/to/2044737.fat.nii/1/2044737.fat_test_mask_basic_m.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/convert/nifti-multislot/to/2044737.fat.nii/1/2044737.fat_test_mask_basic_m.nii.gz -------------------------------------------------------------------------------- /e2e_tests/data/convert/nifti-no-legacy-scaling/to/2044737.fat.nii/0/2044737.fat_Reference_sBAT.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/convert/nifti-no-legacy-scaling/to/2044737.fat.nii/0/2044737.fat_Reference_sBAT.nii.gz -------------------------------------------------------------------------------- /e2e_tests/data/convert/nifti-no-legacy-scaling/to/2044737.fat.nii/0/2044737.fat_test_mask_basic_m.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/convert/nifti-no-legacy-scaling/to/2044737.fat.nii/0/2044737.fat_test_mask_basic_m.nii.gz -------------------------------------------------------------------------------- /e2e_tests/data/convert/nifti/to/hippocampus_001_mpr_1_test_hippo_test_mask_basic_m.nii.gz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/convert/nifti/to/hippocampus_001_mpr_1_test_hippo_test_mask_basic_m.nii.gz -------------------------------------------------------------------------------- /e2e_tests/data/convert/nifti/to/hippocampus_001_mpr_1_test_hippo_test_polygon_basic.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/convert/nifti/to/hippocampus_001_mpr_1_test_hippo_test_polygon_basic.nii.gz -------------------------------------------------------------------------------- /e2e_tests/data/convert/pascalvoc/to/000000021295.xml: -------------------------------------------------------------------------------- 1 | images000000021295.jpgimages/000000021295.jpgdarwin64042730test_bounding_box_basicUnspecified001035619179test_bounding_box_basicUnspecified00401129474205test_bounding_box_basicUnspecified00187263305284 -------------------------------------------------------------------------------- /e2e_tests/data/convert/semantic_mask/to/class_mapping.csv: -------------------------------------------------------------------------------- 1 | class_name,class_color 2 | __background__,0 0 0 3 | __raster_layer__,255 50 50 4 | box,50 255 50 5 | -------------------------------------------------------------------------------- /e2e_tests/data/convert/yolo/to/darknet.labels: -------------------------------------------------------------------------------- 1 | bbox 2 | test_poly 3 | -------------------------------------------------------------------------------- /e2e_tests/data/convert/yolov8/to/darknet.labels: -------------------------------------------------------------------------------- 1 | bbox 2 | test_poly 3 | -------------------------------------------------------------------------------- /e2e_tests/data/import/csv_tag_annotations/csv_tags.csv: 
-------------------------------------------------------------------------------- 1 | image_1.jpg, test_tag_basic 2 | image_2.jpg, test_tag_basic 3 | dir1/image_3.jpg, test_tag_basic 4 | dir1/image_4.jpg, test_tag_basic 5 | dir2/image_5.jpg, test_tag_basic 6 | dir2/image_6.jpg, test_tag_basic 7 | dir1/dir3/image_7.jpg, test_tag_basic 8 | dir1/dir3/image_8.jpg, test_tag_basic -------------------------------------------------------------------------------- /e2e_tests/data/import/csv_tag_video_annotations/csv_tags_video.csv: -------------------------------------------------------------------------------- 1 | mini_uct.mp4, test_tag_basic, 0, 3 2 | mini_uct.mp4, test_tag_basic, 4, 8 3 | -------------------------------------------------------------------------------- /e2e_tests/data/import/image_annotations_item_level_properties_no_annotations/image_1.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0", 3 | "schema_ref": "https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/2.0/schema.json", 4 | "item": { 5 | "name": "image_1.jpg", 6 | "path": "/", 7 | "source_info": { 8 | "item_id": "01920b92-1d5d-94a4-6fbe-8a4d7f9fa15d", 9 | "dataset": { 10 | "name": "test_dataset_2edf4430-1a35-45a2-8c45-b0325968bee2", 11 | "slug": "test_dataset_2edf4430-1a35-45a2-8c45-b0325968bee2", 12 | "dataset_management_url": "https://staging.v7labs.com/datasets/339501/dataset-management" 13 | }, 14 | "team": { 15 | "name": "E2E Testing", 16 | "slug": "e2e-testing" 17 | }, 18 | "workview_url": "https://staging.v7labs.com/workview?dataset=339501&item=01920b92-1d5d-94a4-6fbe-8a4d7f9fa15d" 19 | }, 20 | "slots": [ 21 | { 22 | "type": "image", 23 | "slot_name": "0", 24 | "width": 1920, 25 | "height": 1080, 26 | "thumbnail_url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/files/2ec69e41-91b2-4155-9b05-6ed995677b1e/thumbnail", 27 | "source_files": [ 28 | { 29 | "file_name": "image_1.jpg", 30 | "storage_key": 
"darwin-py/images/image_1.jpg", 31 | "url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/uploads/9dfc5eac-bf16-4380-a148-9fff6e63b9f0" 32 | } 33 | ] 34 | } 35 | ] 36 | }, 37 | "annotations": [], 38 | "properties": [ 39 | { 40 | "name": "test_item_level_property_multi_select", 41 | "value": "1" 42 | }, 43 | { 44 | "name": "test_item_level_property_multi_select", 45 | "value": "2" 46 | }, 47 | { 48 | "name": "test_item_level_property_single_select", 49 | "value": "1" 50 | } 51 | ] 52 | } -------------------------------------------------------------------------------- /e2e_tests/data/import/image_annotations_item_level_properties_no_annotations/image_2.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0", 3 | "schema_ref": "https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/2.0/schema.json", 4 | "item": { 5 | "name": "image_2.jpg", 6 | "path": "/", 7 | "source_info": { 8 | "item_id": "01920b92-1d5d-ea77-8fa4-16378bafedb3", 9 | "dataset": { 10 | "name": "test_dataset_2edf4430-1a35-45a2-8c45-b0325968bee2", 11 | "slug": "test_dataset_2edf4430-1a35-45a2-8c45-b0325968bee2", 12 | "dataset_management_url": "https://staging.v7labs.com/datasets/339501/dataset-management" 13 | }, 14 | "team": { 15 | "name": "E2E Testing", 16 | "slug": "e2e-testing" 17 | }, 18 | "workview_url": "https://staging.v7labs.com/workview?dataset=339501&item=01920b92-1d5d-ea77-8fa4-16378bafedb3" 19 | }, 20 | "slots": [ 21 | { 22 | "type": "image", 23 | "slot_name": "0", 24 | "width": 1920, 25 | "height": 1080, 26 | "thumbnail_url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/files/5e0b3d9d-9bf8-4166-8949-6ab7392161ad/thumbnail", 27 | "source_files": [ 28 | { 29 | "file_name": "image_2.jpg", 30 | "storage_key": "darwin-py/images/image_2.jpg", 31 | "url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/uploads/4920b12a-1706-47f1-b084-2d2234ed1151" 32 | } 33 | ] 34 | } 35 | ] 36 | }, 37 | "annotations": [], 38 | 
"properties": [ 39 | { 40 | "name": "test_item_level_property_multi_select", 41 | "value": "1" 42 | }, 43 | { 44 | "name": "test_item_level_property_multi_select", 45 | "value": "2" 46 | }, 47 | { 48 | "name": "test_item_level_property_single_select", 49 | "value": "1" 50 | } 51 | ] 52 | } -------------------------------------------------------------------------------- /e2e_tests/data/import/image_annotations_item_level_properties_no_annotations/image_3.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0", 3 | "schema_ref": "https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/2.0/schema.json", 4 | "item": { 5 | "name": "image_3.jpg", 6 | "path": "/dir1", 7 | "source_info": { 8 | "item_id": "01920b92-1d5d-e8ad-986f-ad4942f1bbfc", 9 | "dataset": { 10 | "name": "test_dataset_2edf4430-1a35-45a2-8c45-b0325968bee2", 11 | "slug": "test_dataset_2edf4430-1a35-45a2-8c45-b0325968bee2", 12 | "dataset_management_url": "https://staging.v7labs.com/datasets/339501/dataset-management" 13 | }, 14 | "team": { 15 | "name": "E2E Testing", 16 | "slug": "e2e-testing" 17 | }, 18 | "workview_url": "https://staging.v7labs.com/workview?dataset=339501&item=01920b92-1d5d-e8ad-986f-ad4942f1bbfc" 19 | }, 20 | "slots": [ 21 | { 22 | "type": "image", 23 | "slot_name": "0", 24 | "width": 1920, 25 | "height": 1080, 26 | "thumbnail_url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/files/ddd13905-9bbb-4fab-9642-bf4604686fda/thumbnail", 27 | "source_files": [ 28 | { 29 | "file_name": "image_3.jpg", 30 | "storage_key": "darwin-py/images/image_3.jpg", 31 | "url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/uploads/30ec0f13-caaa-4374-be5a-e90b3493fb73" 32 | } 33 | ] 34 | } 35 | ] 36 | }, 37 | "annotations": [], 38 | "properties": [ 39 | { 40 | "name": "test_item_level_property_multi_select", 41 | "value": "1" 42 | }, 43 | { 44 | "name": "test_item_level_property_multi_select", 45 | "value": "2" 46 | }, 47 | { 48 | 
"name": "test_item_level_property_single_select", 49 | "value": "1" 50 | } 51 | ] 52 | } -------------------------------------------------------------------------------- /e2e_tests/data/import/image_annotations_item_level_properties_no_annotations/image_4.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0", 3 | "schema_ref": "https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/2.0/schema.json", 4 | "item": { 5 | "name": "image_4.jpg", 6 | "path": "/dir1", 7 | "source_info": { 8 | "item_id": "01920b92-1d5d-8b50-17e9-c0f178e6eee6", 9 | "dataset": { 10 | "name": "test_dataset_2edf4430-1a35-45a2-8c45-b0325968bee2", 11 | "slug": "test_dataset_2edf4430-1a35-45a2-8c45-b0325968bee2", 12 | "dataset_management_url": "https://staging.v7labs.com/datasets/339501/dataset-management" 13 | }, 14 | "team": { 15 | "name": "E2E Testing", 16 | "slug": "e2e-testing" 17 | }, 18 | "workview_url": "https://staging.v7labs.com/workview?dataset=339501&item=01920b92-1d5d-8b50-17e9-c0f178e6eee6" 19 | }, 20 | "slots": [ 21 | { 22 | "type": "image", 23 | "slot_name": "0", 24 | "width": 1920, 25 | "height": 1080, 26 | "thumbnail_url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/files/3c731d84-7d7f-4ac8-bbd9-0d53f1d47195/thumbnail", 27 | "source_files": [ 28 | { 29 | "file_name": "image_4.jpg", 30 | "storage_key": "darwin-py/images/image_4.jpg", 31 | "url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/uploads/609ba1a4-79da-4743-b331-e57ccd9ee518" 32 | } 33 | ] 34 | } 35 | ] 36 | }, 37 | "annotations": [], 38 | "properties": [ 39 | { 40 | "name": "test_item_level_property_multi_select", 41 | "value": "1" 42 | }, 43 | { 44 | "name": "test_item_level_property_multi_select", 45 | "value": "2" 46 | }, 47 | { 48 | "name": "test_item_level_property_single_select", 49 | "value": "1" 50 | } 51 | ] 52 | } -------------------------------------------------------------------------------- 
/e2e_tests/data/import/image_annotations_item_level_properties_no_annotations/image_5.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0", 3 | "schema_ref": "https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/2.0/schema.json", 4 | "item": { 5 | "name": "image_5.jpg", 6 | "path": "/dir2", 7 | "source_info": { 8 | "item_id": "01920b92-1d5d-55bf-d705-8b39dea7fde6", 9 | "dataset": { 10 | "name": "test_dataset_2edf4430-1a35-45a2-8c45-b0325968bee2", 11 | "slug": "test_dataset_2edf4430-1a35-45a2-8c45-b0325968bee2", 12 | "dataset_management_url": "https://staging.v7labs.com/datasets/339501/dataset-management" 13 | }, 14 | "team": { 15 | "name": "E2E Testing", 16 | "slug": "e2e-testing" 17 | }, 18 | "workview_url": "https://staging.v7labs.com/workview?dataset=339501&item=01920b92-1d5d-55bf-d705-8b39dea7fde6" 19 | }, 20 | "slots": [ 21 | { 22 | "type": "image", 23 | "slot_name": "0", 24 | "width": 1920, 25 | "height": 1080, 26 | "thumbnail_url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/files/8f95e81c-def7-4973-9152-6d0fc39e1473/thumbnail", 27 | "source_files": [ 28 | { 29 | "file_name": "image_5.jpg", 30 | "storage_key": "darwin-py/images/image_5.jpg", 31 | "url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/uploads/08448a07-4e23-41f9-abbd-0dc149ef2be4" 32 | } 33 | ] 34 | } 35 | ] 36 | }, 37 | "annotations": [], 38 | "properties": [ 39 | { 40 | "name": "test_item_level_property_multi_select", 41 | "value": "1" 42 | }, 43 | { 44 | "name": "test_item_level_property_multi_select", 45 | "value": "2" 46 | }, 47 | { 48 | "name": "test_item_level_property_single_select", 49 | "value": "1" 50 | } 51 | ] 52 | } -------------------------------------------------------------------------------- /e2e_tests/data/import/image_annotations_item_level_properties_no_annotations/image_6.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0", 3 | 
"schema_ref": "https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/2.0/schema.json", 4 | "item": { 5 | "name": "image_6.jpg", 6 | "path": "/dir2", 7 | "source_info": { 8 | "item_id": "01920b92-1d5d-1832-3a09-1f38557c57b4", 9 | "dataset": { 10 | "name": "test_dataset_2edf4430-1a35-45a2-8c45-b0325968bee2", 11 | "slug": "test_dataset_2edf4430-1a35-45a2-8c45-b0325968bee2", 12 | "dataset_management_url": "https://staging.v7labs.com/datasets/339501/dataset-management" 13 | }, 14 | "team": { 15 | "name": "E2E Testing", 16 | "slug": "e2e-testing" 17 | }, 18 | "workview_url": "https://staging.v7labs.com/workview?dataset=339501&item=01920b92-1d5d-1832-3a09-1f38557c57b4" 19 | }, 20 | "slots": [ 21 | { 22 | "type": "image", 23 | "slot_name": "0", 24 | "width": 1920, 25 | "height": 1080, 26 | "thumbnail_url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/files/4950b608-00a1-4e73-b746-bfe1ea0a1ab6/thumbnail", 27 | "source_files": [ 28 | { 29 | "file_name": "image_6.jpg", 30 | "storage_key": "darwin-py/images/image_6.jpg", 31 | "url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/uploads/9e070e8c-03b3-40b7-a3cb-6da6bcc8d4ed" 32 | } 33 | ] 34 | } 35 | ] 36 | }, 37 | "annotations": [], 38 | "properties": [ 39 | { 40 | "name": "test_item_level_property_multi_select", 41 | "value": "1" 42 | }, 43 | { 44 | "name": "test_item_level_property_multi_select", 45 | "value": "2" 46 | }, 47 | { 48 | "name": "test_item_level_property_single_select", 49 | "value": "1" 50 | } 51 | ] 52 | } -------------------------------------------------------------------------------- /e2e_tests/data/import/image_annotations_item_level_properties_no_annotations/image_7.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0", 3 | "schema_ref": "https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/2.0/schema.json", 4 | "item": { 5 | "name": "image_7.jpg", 6 | "path": "/dir1/dir3", 7 | "source_info": { 8 | "item_id": 
"01920b92-1d5d-46ee-5117-53ba0d29d1b0", 9 | "dataset": { 10 | "name": "test_dataset_2edf4430-1a35-45a2-8c45-b0325968bee2", 11 | "slug": "test_dataset_2edf4430-1a35-45a2-8c45-b0325968bee2", 12 | "dataset_management_url": "https://staging.v7labs.com/datasets/339501/dataset-management" 13 | }, 14 | "team": { 15 | "name": "E2E Testing", 16 | "slug": "e2e-testing" 17 | }, 18 | "workview_url": "https://staging.v7labs.com/workview?dataset=339501&item=01920b92-1d5d-46ee-5117-53ba0d29d1b0" 19 | }, 20 | "slots": [ 21 | { 22 | "type": "image", 23 | "slot_name": "0", 24 | "width": 1920, 25 | "height": 1080, 26 | "thumbnail_url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/files/1e2f63eb-b7fc-482f-91f3-8caa242e63cb/thumbnail", 27 | "source_files": [ 28 | { 29 | "file_name": "image_7.jpg", 30 | "storage_key": "darwin-py/images/image_7.jpg", 31 | "url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/uploads/20de7c08-20dc-4f16-b559-bbcce2f7b319" 32 | } 33 | ] 34 | } 35 | ] 36 | }, 37 | "annotations": [], 38 | "properties": [ 39 | { 40 | "name": "test_item_level_property_multi_select", 41 | "value": "1" 42 | }, 43 | { 44 | "name": "test_item_level_property_multi_select", 45 | "value": "2" 46 | }, 47 | { 48 | "name": "test_item_level_property_single_select", 49 | "value": "1" 50 | } 51 | ] 52 | } -------------------------------------------------------------------------------- /e2e_tests/data/import/image_annotations_item_level_properties_no_annotations/image_8.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0", 3 | "schema_ref": "https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/2.0/schema.json", 4 | "item": { 5 | "name": "image_8.jpg", 6 | "path": "/dir1/dir3", 7 | "source_info": { 8 | "item_id": "01920b92-1d5e-908e-7b24-3d339ea72237", 9 | "dataset": { 10 | "name": "test_dataset_2edf4430-1a35-45a2-8c45-b0325968bee2", 11 | "slug": "test_dataset_2edf4430-1a35-45a2-8c45-b0325968bee2", 12 | 
"dataset_management_url": "https://staging.v7labs.com/datasets/339501/dataset-management" 13 | }, 14 | "team": { 15 | "name": "E2E Testing", 16 | "slug": "e2e-testing" 17 | }, 18 | "workview_url": "https://staging.v7labs.com/workview?dataset=339501&item=01920b92-1d5e-908e-7b24-3d339ea72237" 19 | }, 20 | "slots": [ 21 | { 22 | "type": "image", 23 | "slot_name": "0", 24 | "width": 1920, 25 | "height": 1080, 26 | "thumbnail_url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/files/ace6c9a2-d39a-43df-9fd2-9f124176810a/thumbnail", 27 | "source_files": [ 28 | { 29 | "file_name": "image_8.jpg", 30 | "storage_key": "darwin-py/images/image_8.jpg", 31 | "url": "https://staging.v7labs.com/api/v2/teams/e2e-testing/uploads/141cdb56-2494-4052-bce2-b22673e6ad68" 32 | } 33 | ] 34 | } 35 | ] 36 | }, 37 | "annotations": [], 38 | "properties": [ 39 | { 40 | "name": "test_item_level_property_multi_select", 41 | "value": "1" 42 | }, 43 | { 44 | "name": "test_item_level_property_multi_select", 45 | "value": "2" 46 | }, 47 | { 48 | "name": "test_item_level_property_single_select", 49 | "value": "1" 50 | } 51 | ] 52 | } -------------------------------------------------------------------------------- /e2e_tests/data/import/pascal_voc_annotations/image_1.xml: -------------------------------------------------------------------------------- 1 | imagesimage_1.jpgimages/image_1.jpgdarwin1920108030test_bounding_box_basicUnspecified00681512test_bounding_box_basicUnspecified0016182320 -------------------------------------------------------------------------------- /e2e_tests/data/import/pascal_voc_annotations/image_2.xml: -------------------------------------------------------------------------------- 1 | imagesimage_2.jpgimages/image_2.jpgdarwin1920108030test_bounding_box_basicUnspecified00681512test_bounding_box_basicUnspecified0016182320 -------------------------------------------------------------------------------- /e2e_tests/data/import/pascal_voc_annotations/image_3.xml: 
-------------------------------------------------------------------------------- 1 | imagesdir1/image_3.jpgimages/image_3.jpgdarwin1920108030test_bounding_box_basicUnspecified00681512test_bounding_box_basicUnspecified0016182320 -------------------------------------------------------------------------------- /e2e_tests/data/import/pascal_voc_annotations/image_4.xml: -------------------------------------------------------------------------------- 1 | imagesdir1/image_4.jpgimages/image_4.jpgdarwin1920108030test_bounding_box_basicUnspecified00681512test_bounding_box_basicUnspecified0016182320 -------------------------------------------------------------------------------- /e2e_tests/data/import/pascal_voc_annotations/image_5.xml: -------------------------------------------------------------------------------- 1 | imagesdir2/image_5.jpgimages/image_5.jpgdarwin1920108030test_bounding_box_basicUnspecified00681512test_bounding_box_basicUnspecified0016182320 -------------------------------------------------------------------------------- /e2e_tests/data/import/pascal_voc_annotations/image_6.xml: -------------------------------------------------------------------------------- 1 | imagesdir2/image_6.jpgimages/image_6.jpgdarwin1920108030test_bounding_box_basicUnspecified00681512test_bounding_box_basicUnspecified0016182320 -------------------------------------------------------------------------------- /e2e_tests/data/import/pascal_voc_annotations/image_7.xml: -------------------------------------------------------------------------------- 1 | imagesdir1/dir3/image_7.jpgimages/image_7.jpgdarwin1920108030test_bounding_box_basicUnspecified00681512test_bounding_box_basicUnspecified0016182320 -------------------------------------------------------------------------------- /e2e_tests/data/import/pascal_voc_annotations/image_8.xml: -------------------------------------------------------------------------------- 1 | 
imagesdir1/dir3/image_8.jpgimages/image_8.jpgdarwin1920108030test_bounding_box_basicUnspecified00681512test_bounding_box_basicUnspecified0016182320 -------------------------------------------------------------------------------- /e2e_tests/data/push/25_frame_video.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/push/25_frame_video.zip -------------------------------------------------------------------------------- /e2e_tests/data/push/flat_directory_of_2_dicom_files.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/push/flat_directory_of_2_dicom_files.zip -------------------------------------------------------------------------------- /e2e_tests/data/push/flat_directory_of_6_images.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/push/flat_directory_of_6_images.zip -------------------------------------------------------------------------------- /e2e_tests/data/push/mixed_filetypes.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/push/mixed_filetypes.zip -------------------------------------------------------------------------------- /e2e_tests/data/push/nested_directory_of_images.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/data/push/nested_directory_of_images.zip -------------------------------------------------------------------------------- 
/e2e_tests/exceptions.py: -------------------------------------------------------------------------------- 1 | """Custom exceptions for the e2e_tests module.""" 2 | 3 | from typing import Dict, List 4 | 5 | from pytest import PytestWarning 6 | 7 | 8 | class E2EException(PytestWarning): 9 | """Base class for all exceptions in this module.""" 10 | 11 | ... 12 | 13 | 14 | class E2EEnvironmentVariableNotSet(E2EException): 15 | """Raised when an environment variable is not set.""" 16 | 17 | def __init__(self, name: str, *args: List, **kwargs: Dict) -> None: 18 | super().__init__(*args, **kwargs) 19 | self.name = name 20 | 21 | 22 | class DataAlreadyExists(E2EException): 23 | """Raised when the teardown process fails and has left legacy data""" 24 | 25 | def __init__(self, name: str, *args: List, **kwargs: Dict) -> None: 26 | super().__init__(*args, **kwargs) 27 | self.name = name 28 | -------------------------------------------------------------------------------- /e2e_tests/pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | cache_dir = /tmp/pytest_cache 3 | addopts = --ignore=../tests,../future -------------------------------------------------------------------------------- /e2e_tests/sdk/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/sdk/__init__.py -------------------------------------------------------------------------------- /e2e_tests/sdk/future/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/e2e_tests/sdk/future/core/__init__.py -------------------------------------------------------------------------------- /e2e_tests/test_example.py: -------------------------------------------------------------------------------- 1 
| import pytest 2 | 3 | from e2e_tests.objects import ConfigValues 4 | 5 | 6 | @pytest.mark.xfail(reason="Fails unless you set the server and key as below") 7 | def test_example_test_does_nothing(config_values: ConfigValues) -> None: 8 | assert config_values.server == "https://api.example.com" 9 | assert config_values.api_key == "1234567890" 10 | 11 | assert True 12 | 13 | 14 | if __name__ == "__main__": 15 | pytest.main(["-vv", "-s", __file__]) 16 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Darwin-py docs 2 | 3 | theme: 4 | name: readthedocs 5 | highlightjs: true 6 | hljs_languages: 7 | - sh 8 | - bash 9 | - python 10 | -------------------------------------------------------------------------------- /poetry.toml: -------------------------------------------------------------------------------- 1 | [virtualenvs] 2 | in-project = true 3 | prefer-active-python = true 4 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | addopts = --ignore=e2e_tests -------------------------------------------------------------------------------- /source/_static/js/custom.js: -------------------------------------------------------------------------------- 1 | window.addEventListener("load", (_event) => { 2 | const menu = document.querySelector(".wy-menu ul li:first-child") 3 | recurse(menu) 4 | }); 5 | 6 | /** 7 | * Given a Node, it recursively goes through every child and checks if the child is expandable, it 8 | * expands it unless it is already expanded. 9 | * 10 | * @param {Node} node The node to evaluate. 
11 | */ 12 | const recurse = (node) => { 13 | if (isExpandable(node) && !isExpanded(node)) { 14 | node.classList.add("current") 15 | } 16 | 17 | // By default, children are not arrays, so we need to convert them 18 | children = Array.prototype.slice.call(node.children) 19 | 20 | children.forEach(recurse) 21 | } 22 | 23 | /** 24 | * Returns whether or not the given node is an expandable list. 25 | * 26 | * @param {Node} node The node to evaluate. 27 | * 28 | * @returns {boolean} true if the node is a toctree that can be expanded, false otherwise. 29 | */ 30 | const isExpandable = (node) => node.className.includes("toctree-l") 31 | 32 | /** 33 | * Returns whether or not the given expandable node is already expanded. 34 | * Nodes are considered expandaded if they are 'current'ly selected, so we take advantage of this. 35 | * 36 | * @param {Node} node The node to evaluate. 37 | * 38 | * @returns {boolean} true if the node is already expanded, false otherwise. 39 | */ 40 | const isExpanded = (node) => node.classList.contains("current") 41 | -------------------------------------------------------------------------------- /source/index.rst: -------------------------------------------------------------------------------- 1 | .. darwin-py documentation master file, created by 2 | sphinx-quickstart on Thu Aug 5 15:43:37 2021. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | .. toctree:: 7 | :hidden: 8 | :maxdepth: 3 9 | 10 | Packages 11 | README 12 | 13 | .. include:: README.rst -------------------------------------------------------------------------------- /source/modules.rst: -------------------------------------------------------------------------------- 1 | darwin 2 | ====== 3 | 4 | .. 
toctree:: 5 | :maxdepth: 4 6 | 7 | darwin 8 | -------------------------------------------------------------------------------- /test.dcm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/test.dcm -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/tests/__init__.py -------------------------------------------------------------------------------- /tests/darwin/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/tests/darwin/__init__.py -------------------------------------------------------------------------------- /tests/darwin/data/expected_classes_grey.csv: -------------------------------------------------------------------------------- 1 | class_name,class_color 2 | class1,85 3 | class2,170 4 | class3,255 5 | -------------------------------------------------------------------------------- /tests/darwin/data/expected_classes_index.csv: -------------------------------------------------------------------------------- 1 | class_name,class_color 2 | class1,1 3 | class2,2 4 | class3,3 5 | -------------------------------------------------------------------------------- /tests/darwin/data/expected_classes_rgb.csv: -------------------------------------------------------------------------------- 1 | class_name,class_color 2 | class1,255 0 0 3 | class2,0 255 0 4 | class3,0 0 255 5 | -------------------------------------------------------------------------------- /tests/darwin/data/expected_image_grey.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/tests/darwin/data/expected_image_grey.png -------------------------------------------------------------------------------- /tests/darwin/data/expected_image_index.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/tests/darwin/data/expected_image_index.png -------------------------------------------------------------------------------- /tests/darwin/data/expected_image_rgb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/tests/darwin/data/expected_image_rgb.png -------------------------------------------------------------------------------- /tests/darwin/data/expected_mask.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/tests/darwin/data/expected_mask.png -------------------------------------------------------------------------------- /tests/darwin/data/expected_polygons_image_grey.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/tests/darwin/data/expected_polygons_image_grey.png -------------------------------------------------------------------------------- /tests/darwin/data/expected_polygons_image_index.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/tests/darwin/data/expected_polygons_image_index.png -------------------------------------------------------------------------------- /tests/darwin/data/expected_polygons_image_rgb.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/tests/darwin/data/expected_polygons_image_rgb.png -------------------------------------------------------------------------------- /tests/darwin/data/metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "classes": [ 3 | { 4 | "name": "Bottle", 5 | "description": "Some additional text", 6 | "type": "polygon", 7 | "sub_types": [ 8 | "attributes", 9 | "instance_id" 10 | ], 11 | "color": "rgba(255,0,85,1.0)", 12 | "properties": [ 13 | { 14 | "name": "Colors", 15 | "type": "multi-select", 16 | "description": "Some additional description", 17 | "property_values": [ 18 | { 19 | "value": "red", 20 | "color": "rgba(255, 0, 0, 0)" 21 | }, 22 | { 23 | "value": "green", 24 | "color": "rgba(0, 255, 0, 0)" 25 | }, 26 | { 27 | "value": "blue", 28 | "color": "rgba(0, 0, 255, 0)" 29 | } 30 | ], 31 | "required": true, 32 | "granularity": "section" 33 | }, 34 | { 35 | "name": "Shape (expanded format)", 36 | "description": "Some additional description", 37 | "type": "single-select", 38 | "property_values": [ 39 | { 40 | "value": "Star", 41 | "color": "rgba(0, 0, 0, 0)" 42 | }, 43 | { 44 | "value": "Circle", 45 | "color": "rgba(150, 150, 150, 0)" 46 | } 47 | ], 48 | "required": false, 49 | "granularity": "section" 50 | } 51 | ] 52 | } 53 | ] 54 | } -------------------------------------------------------------------------------- /tests/darwin/data/metadata_empty_properties.json: -------------------------------------------------------------------------------- 1 | { 2 | "classes": [ 3 | { 4 | "name": "Bottle", 5 | "description": "Some additional text", 6 | "type": "polygon", 7 | "sub_types": [ 8 | "attributes", 9 | "instance_id" 10 | ], 11 | "color": "rgba(255,0,85,1.0)", 12 | "properties": [] 13 | }, 14 | { 15 | "name": "Bottle1", 16 | "description": "Some 
additional text", 17 | "type": "polygon", 18 | "sub_types": [ 19 | "attributes", 20 | "instance_id" 21 | ], 22 | "color": "rgba(255,0,85,1.0)", 23 | "properties": [] 24 | }, 25 | { 26 | "name": "Bottle3", 27 | "description": "Some additional text", 28 | "type": "polygon", 29 | "sub_types": [ 30 | "attributes", 31 | "instance_id" 32 | ], 33 | "color": "rgba(255,0,85,1.0)", 34 | "properties": [] 35 | } 36 | ] 37 | } -------------------------------------------------------------------------------- /tests/darwin/data/metadata_identical_properties_different_classes.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "1.0", 3 | "schema_ref": "https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/metadata/1.0/schema.json", 4 | "classes": [ 5 | { 6 | "name": "test_class_1", 7 | "type": "bounding_box", 8 | "description": null, 9 | "color": "rgba(255,46,0,1.0)", 10 | "sub_types": [ 11 | "inference" 12 | ], 13 | "properties": [ 14 | { 15 | "name": "existing_property_single_select", 16 | "type": "single_select", 17 | "description": "", 18 | "required": false, 19 | "property_values": [ 20 | { 21 | "value": "1", 22 | "color": "rgba(255,46,0,1.0)" 23 | } 24 | ] 25 | } 26 | ] 27 | }, 28 | { 29 | "name": "test_class_2", 30 | "type": "bounding_box", 31 | "description": null, 32 | "color": "rgba(255,46,0,1.0)", 33 | "sub_types": [ 34 | "inference" 35 | ], 36 | "properties": [ 37 | { 38 | "name": "existing_property_single_select", 39 | "type": "single_select", 40 | "description": "", 41 | "required": false, 42 | "property_values": [ 43 | { 44 | "value": "1", 45 | "color": "rgba(255,46,0,1.0)" 46 | } 47 | ] 48 | } 49 | ] 50 | } 51 | ], 52 | "properties": [] 53 | } -------------------------------------------------------------------------------- /tests/darwin/data/metadata_missing_annotation_property_values.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "1.0", 3 | 
"schema_ref": "https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/metadata/1.0/schema.json", 4 | "classes": [ 5 | { 6 | "name": "test_class", 7 | "type": "bounding_box", 8 | "description": null, 9 | "color": "rgba(255,46,0,1.0)", 10 | "sub_types": [ 11 | "inference" 12 | ], 13 | "properties": [ 14 | { 15 | "name": "existing_property_single_select", 16 | "type": "single_select", 17 | "description": "", 18 | "required": false, 19 | "property_values": [ 20 | { 21 | "value": "1", 22 | "color": "rgba(255,46,0,1.0)" 23 | } 24 | ], 25 | "granularity": "annotation" 26 | }, 27 | { 28 | "name": "existing_property_multi_select", 29 | "type": "multi_select", 30 | "description": "", 31 | "required": false, 32 | "property_values": [ 33 | { 34 | "value": "1", 35 | "color": "rgba(173,255,0,1.0)" 36 | }, 37 | { 38 | "value": "2", 39 | "color": "rgba(255,199,0,1.0)" 40 | } 41 | ], 42 | "granularity": "annotation" 43 | } 44 | ], 45 | "sub_types_settings": { 46 | "inference": {} 47 | } 48 | } 49 | ], 50 | "properties": [] 51 | } -------------------------------------------------------------------------------- /tests/darwin/data/metadata_missing_section_property_values.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "1.0", 3 | "schema_ref": "https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/metadata/1.0/schema.json", 4 | "classes": [ 5 | { 6 | "name": "test_class", 7 | "type": "bounding_box", 8 | "description": null, 9 | "color": "rgba(255,46,0,1.0)", 10 | "sub_types": [ 11 | "inference" 12 | ], 13 | "properties": [ 14 | { 15 | "name": "existing_property_single_select", 16 | "type": "single_select", 17 | "description": "", 18 | "required": false, 19 | "property_values": [ 20 | { 21 | "value": "1", 22 | "color": "rgba(255,46,0,1.0)" 23 | } 24 | ] 25 | }, 26 | { 27 | "name": "existing_property_multi_select", 28 | "type": "multi_select", 29 | "description": "", 30 | "required": false, 31 | "property_values": [ 
32 | { 33 | "value": "1", 34 | "color": "rgba(173,255,0,1.0)" 35 | }, 36 | { 37 | "value": "2", 38 | "color": "rgba(255,199,0,1.0)" 39 | } 40 | ] 41 | } 42 | ], 43 | "sub_types_settings": { 44 | "inference": {} 45 | } 46 | } 47 | ], 48 | "properties": [] 49 | } -------------------------------------------------------------------------------- /tests/darwin/data/metadata_nested_properties.json: -------------------------------------------------------------------------------- 1 | { 2 | "classes": [ 3 | { 4 | "name": "Bottle", 5 | "description": "Some additional text", 6 | "type": "polygon", 7 | "sub_types": [ 8 | "attributes", 9 | "instance_id" 10 | ], 11 | "color": "rgba(255,0,85,1.0)", 12 | "properties": [] 13 | }, 14 | { 15 | "name": "Bottle1", 16 | "description": "Some additional text", 17 | "type": "polygon", 18 | "sub_types": [ 19 | "attributes", 20 | "instance_id" 21 | ], 22 | "color": "rgba(255,0,85,1.0)", 23 | "properties": [] 24 | }, 25 | { 26 | "name": "Bottle3", 27 | "description": "Some additional text", 28 | "type": "polygon", 29 | "sub_types": [ 30 | "attributes", 31 | "instance_id" 32 | ], 33 | "color": "rgba(255,0,85,1.0)", 34 | "properties": [ 35 | { 36 | "name": "Colors", 37 | "type": "multi-select", 38 | "description": "Some additional description", 39 | "property_values": [ 40 | { 41 | "value": "red", 42 | "color": "rgba(255, 0, 0, 0)", 43 | "type": "string" 44 | }, 45 | { 46 | "value": "green", 47 | "color": "rgba(0, 255, 0, 0)", 48 | "type": "string" 49 | }, 50 | { 51 | "value": "blue", 52 | "color": "rgba(0, 0, 255, 0)", 53 | "type": "string" 54 | } 55 | ], 56 | "granularity": "section", 57 | "required": true 58 | } 59 | ] 60 | } 61 | ] 62 | } -------------------------------------------------------------------------------- /tests/darwin/data/nifti/BRAINIX_NIFTI_ROI.nii.gz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/tests/darwin/data/nifti/BRAINIX_NIFTI_ROI.nii.gz -------------------------------------------------------------------------------- /tests/darwin/data/nifti/legacy/.v7/metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "1.0", 3 | "schema_ref": "https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/metadata/1.0/schema.json", 4 | "classes": [ 5 | { 6 | "name": "Reference_sBAT", 7 | "type": "polygon", 8 | "description": null, 9 | "color": "rgba(0,255,170,1.0)", 10 | "sub_types": [ 11 | "text", 12 | "inference" 13 | ], 14 | "properties": [], 15 | "sub_types_settings": { 16 | "inference": {}, 17 | "text": {} 18 | } 19 | } 20 | ], 21 | "properties": [ 22 | { 23 | "name": "item-level-ss", 24 | "type": "single_select", 25 | "description": "What is this?", 26 | "required": false, 27 | "property_values": [ 28 | { 29 | "value": "1", 30 | "color": "rgba(238,240,241,1.0)" 31 | }, 32 | { 33 | "value": "2", 34 | "color": "rgba(255,0,214,1.0)" 35 | }, 36 | { 37 | "value": "3", 38 | "color": "rgba(173,255,0,1.0)" 39 | } 40 | ], 41 | "granularity": "item" 42 | } 43 | ] 44 | } -------------------------------------------------------------------------------- /tests/darwin/data/nifti/nifti.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": [ 3 | { 4 | "image": "2044737.fat.nii.gz", 5 | "label": "/Users/john/Documents/code/development/darwin-py/tests/darwin/data/nifti/BRAINIX_NIFTI_ROI.nii.gz", 6 | "class_map": { 7 | "1": "Reference_sBAT" 8 | }, 9 | "mode": "video" 10 | } 11 | ] 12 | } -------------------------------------------------------------------------------- /tests/darwin/data/nifti/no-legacy/.v7/metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "1.0", 3 | "schema_ref": 
"https://darwin-public.s3.eu-west-1.amazonaws.com/darwin_json/metadata/1.0/schema.json", 4 | "classes": [ 5 | { 6 | "name": "Reference_sBAT", 7 | "type": "polygon", 8 | "description": null, 9 | "color": "rgba(0,255,170,1.0)", 10 | "sub_types": [ 11 | "text", 12 | "inference" 13 | ], 14 | "properties": [], 15 | "sub_types_settings": { 16 | "inference": {}, 17 | "text": {} 18 | } 19 | } 20 | ], 21 | "properties": [ 22 | { 23 | "name": "item-level-ss", 24 | "type": "single_select", 25 | "description": "What is this?", 26 | "required": false, 27 | "property_values": [ 28 | { 29 | "value": "1", 30 | "color": "rgba(238,240,241,1.0)" 31 | }, 32 | { 33 | "value": "2", 34 | "color": "rgba(255,0,214,1.0)" 35 | }, 36 | { 37 | "value": "3", 38 | "color": "rgba(173,255,0,1.0)" 39 | } 40 | ], 41 | "granularity": "item" 42 | } 43 | ] 44 | } -------------------------------------------------------------------------------- /tests/darwin/data/nifti/sample_nifti.nii: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/tests/darwin/data/nifti/sample_nifti.nii -------------------------------------------------------------------------------- /tests/darwin/data/push_test_dir.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/tests/darwin/data/push_test_dir.zip -------------------------------------------------------------------------------- /tests/darwin/data/test_video.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/tests/darwin/data/test_video.mp4 -------------------------------------------------------------------------------- /tests/darwin/data/test_video_corrupted.mp4: 
import pytest

from darwin.item import DatasetItem


@pytest.fixture
def response_json_slots() -> dict:
    """Minimal API payload for a dataset item that carries slots."""
    return {
        "id": "test_id",
        "name": "test_filename",
        "path": "test_path",
        "status": "test_status",
        "archived": "test_archived",
        "dataset_id": "test_dataset_id",
        "dataset_slug": "test_dataset_slug",
        "seq": None,
        "workflow_data": {"workflow_id": "test_workflow_id"},
        "workflow_status": "test_workflow_status",
        "slots": [{"size_bytes": 1, "path": "test_path"}],
        "layout": {"type": "grid", "version": 3, "slots": ["0", "1"]},
    }


def test_item_parse_w_slots(response_json_slots: dict) -> None:
    """``DatasetItem.parse`` maps every relevant payload field onto the item."""
    payload = response_json_slots
    item = DatasetItem.parse(payload, "test_dataset_slug")

    # Fields copied straight from the payload (note: payload "name" lands on
    # the item's "filename" attribute).
    assert item.id == payload["id"]
    assert item.filename == payload["name"]
    assert item.path == payload["path"]
    assert item.status == payload["status"]
    assert item.archived == payload["archived"]
    assert item.dataset_id == payload["dataset_id"]

    # The slug comes from the second argument to parse(), not the payload.
    assert item.dataset_slug == "test_dataset_slug"
    assert item.seq == payload["seq"]

    # Workflow information is lifted out of the nested "workflow_data" dict.
    workflow_data = payload["workflow_data"]
    assert item.current_workflow_id == workflow_data["workflow_id"]
    assert item.current_workflow == workflow_data
    assert item.slots == payload["slots"]
import shutil
from pathlib import Path

import pytest

from darwin.datatypes import Annotation, AnnotationClass, AnnotationFile
from darwin.exporter.formats.yolo import export


class TestExport:
    @pytest.fixture
    def folder_path(self, tmp_path: Path):
        """Yield a not-yet-existing output directory, removing it afterwards."""
        output_dir: Path = tmp_path / "yolo_export_output_files"
        yield output_dir
        shutil.rmtree(output_dir)

    def test_it_creates_missing_folders(self, folder_path: Path):
        """Exporting creates the output folder and writes label + class files."""
        car_class: AnnotationClass = AnnotationClass(
            name="car", annotation_type="polygon", annotation_internal_type=None
        )
        bbox = {"x": 94.0, "y": 438.0, "w": 1709.0, "h": 545.0}
        annotation = Annotation(
            annotation_class=car_class,
            data={"path": [{...}], "bounding_box": bbox},
            subs=[],
        )
        annotation_file = AnnotationFile(
            path=Path("/annotation_test.json"),
            filename="annotation_test.jpg",
            annotation_classes={car_class},
            annotations=[annotation],
            frame_urls=None,
            image_height=1080,
            image_width=1920,
            is_video=False,
        )

        export([annotation_file], folder_path)

        assert folder_path.exists()
        generated = list(folder_path.glob("*"))
        assert (folder_path / "annotation_test.txt") in generated
        assert (folder_path / "darknet.labels") in generated

        # YOLO row: class-index, x-centre, y-centre, width, height — all
        # normalised by the 1920x1080 image dimensions.
        expected_line = "0 {} {} {} {}".format(
            (bbox["x"] + bbox["w"] / 2) / 1920,
            (bbox["y"] + bbox["h"] / 2) / 1080,
            bbox["w"] / 1920,
            bbox["h"] / 1080,
        )
        yolo_lines = (folder_path / "annotation_test.txt").read_text().split("\n")
        assert yolo_lines[0] == expected_line

        yolo_classes = (folder_path / "darknet.labels").read_text().split("\n")
        assert yolo_classes[0] == "car"
from pathlib import Path

from darwin.utils.utils import get_image_path_from_stream


class TestGetImagePathFromStream:
    """Tests for resolving an image path from a streamed Darwin JSON item.

    The original tests each duplicated the full ``darwin_json`` payload;
    it is now built once by a shared helper so the two cases cannot drift.
    """

    @staticmethod
    def _darwin_json() -> dict:
        """Minimal streamed-annotation payload shared by every test."""
        return {
            "item": {
                "name": "item_name",
                "path": "/folder",
                "slots": [
                    {
                        "slot_name": "0",
                        "source_files": [{"file_name": "source_name.jpg"}],
                    }
                ],
            }
        }

    def test_with_folders_true(self):
        # with_folders=True: the item's remote path ("/folder") is preserved
        # under images_dir.
        result = get_image_path_from_stream(
            self._darwin_json(),
            Path("/images"),
            Path("/annotations/annotation.json"),
            True,
        )
        assert result == Path("/images/folder/item_name.jpg")

    def test_with_folders_false(self):
        # with_folders=False: the remote path is ignored; flat layout.
        result = get_image_path_from_stream(
            self._darwin_json(),
            Path("/images"),
            Path("/annotations/annotation.json"),
            False,
        )
        assert result == Path("/images/item_name.jpg")
# Tests for _get_item_count
from darwin.utils.get_item_count import get_item_count


def test__get_item_count_defaults_to_num_items_if_present() -> None:
    """When ``num_items`` exists it wins over the per-type counters."""
    dataset_return = {
        "num_images": 2,  # Should be ignored
        "num_videos": 3,  # Should be ignored
        "num_items": 5,  # Should get this one
    }
    assert get_item_count(dataset_return) == 5


def test__get_item_count_returns_sum_of_others_if_num_items_not_present() -> None:
    """Without ``num_items``, image and video counts are summed."""
    assert get_item_count({"num_images": 7, "num_videos": 3}) == 10


def test__get_item_count_should_tolerate_missing_members() -> None:
    """A missing counter is treated as zero rather than raising."""
    assert get_item_count({"num_videos": 3}) == 3
    assert get_item_count({"num_images": 2}) == 2
-------------------------------------------------------------------------------- /tests/e2e_test_internals/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/tests/e2e_test_internals/__init__.py -------------------------------------------------------------------------------- /tests/model_training_data.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/v7labs/darwin-py/028bb20f841b788ee717c15a83a336457a5477cf/tests/model_training_data.zip -------------------------------------------------------------------------------- /tests/server_example_returns.py: -------------------------------------------------------------------------------- 1 | from darwin.future.core.client import JSONType 2 | 3 | CREATE_DATASET_RETURN_RAW: JSONType = { 4 | "active": True, 5 | "annotation_hotkeys": {}, 6 | "annotators_can_create_tags": True, 7 | "annotators_can_instantiate_workflows": True, 8 | "anyone_can_double_assign": False, 9 | "archived": False, 10 | "archived_at": None, 11 | "default_workflow_template_id": 1337, 12 | "id": 13371337, 13 | "instructions": "", 14 | "name": "test_dataset", 15 | "num_classes": 0, 16 | "num_images": 0, 17 | "owner_id": 101, 18 | "parent_id": None, 19 | "pdf_fit_page": True, 20 | "progress": 0.0, 21 | "public": None, 22 | "reviewers_can_annotate": False, 23 | "slug": "test_dataset", 24 | "team_id": 123, 25 | "team_slug": "test-team", 26 | "thumbnails": [], 27 | "version": 1, 28 | "work_prioritization": "inserted_at:desc", 29 | "work_size": 30, 30 | "workflow_ids": [], 31 | } 32 | 33 | CREATE_ITEM_RETURN_RAW: JSONType = { 34 | # fmt: off 35 | "items": [ 36 | { 37 | "id": "test_id", 38 | "name": "test_dataset", 39 | "path": "test_path", 40 | "slots": [ 41 | { 42 | "file_name": "slot_file_name", 43 | "slot_name": "slot_name", 44 | } 45 | ], 46 | } 47 | ] 48 | # 
def test_imports_version_without_error() -> None:
    """The package exposes a usable ``__version__`` string."""
    from darwin.version import __version__ as package_version

    assert package_version is not None
    assert isinstance(package_version, str)