├── .git-blame-ignore-revs ├── .github ├── actions │ ├── run-integration-tests │ │ └── action.yml │ ├── run-unit-tests │ │ └── action.yml │ ├── send-slack-notification │ │ └── action.yml │ ├── setup-integration-tests-poetry-environment │ │ └── action.yml │ └── setup-root-poetry-environment │ │ └── action.yml ├── pull_request_template.md └── workflows │ ├── aikido.yml │ ├── publish_sdk.yml │ ├── sdk-pr.yml │ ├── sdk_code_to_docs.yml │ └── test-sdk.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .vscode ├── extensions.json └── settings.json ├── LICENSE ├── README.md ├── docs └── source │ └── code_examples │ └── labels_v2.py ├── encord ├── __init__.py ├── _version.py ├── api │ ├── __init__.py │ └── ml_endpoints.py ├── client.py ├── client_metadata_schema.py ├── collection.py ├── common │ ├── __init__.py │ ├── bitmask_operations │ │ ├── __init__.py │ │ ├── bitmask_operations.py │ │ └── bitmask_operations_numpy.py │ ├── constants.py │ ├── deprecated.py │ ├── enum.py │ ├── range_manager.py │ ├── time_parser.py │ └── utils.py ├── configs.py ├── constants │ ├── __init__.py │ ├── enums.py │ └── string_constants.py ├── dataset.py ├── exceptions.py ├── filter_preset.py ├── http │ ├── __init__.py │ ├── bundle.py │ ├── common.py │ ├── constants.py │ ├── error_utils.py │ ├── limits.py │ ├── querier.py │ ├── query_methods.py │ ├── request.py │ ├── utils.py │ └── v2 │ │ ├── __init__.py │ │ ├── api_client.py │ │ ├── error_utils.py │ │ ├── payloads.py │ │ └── request_signer.py ├── issues │ └── issue_client.py ├── metadata_schema.py ├── ml_models_client.py ├── objects │ ├── __init__.py │ ├── answers.py │ ├── attributes.py │ ├── bitmask.py │ ├── bundled_operations.py │ ├── classification.py │ ├── classification_instance.py │ ├── common.py │ ├── constants.py │ ├── coordinates.py │ ├── frames.py │ ├── html_node.py │ ├── internal_helpers.py │ ├── metadata.py │ ├── ml_models.py │ ├── ontology_element.py │ ├── ontology_labels_impl.py │ ├── ontology_object.py │ ├── ontology_object_instance.py │ ├── ontology_structure.py │ ├── options.py │ ├── project.py │ ├── skeleton_template.py │ └── utils.py ├── ontology.py ├── orm │ ├── __init__.py │ ├── active.py │ ├── analytics.py │ ├── base_dto │ │ ├── __init__.py │ │ ├── base_dto_interface.py │ │ ├── base_dto_pydantic_v1.py │ │ └── base_dto_pydantic_v2.py │ ├── base_orm.py │ ├── bearer_request.py │ ├── client_metadata_schema.py │ ├── cloud_integration.py │ ├── collection.py │ ├── dataset.py │ ├── deidentification.py │ ├── filter_preset.py │ ├── formatter.py │ ├── group.py │ ├── label_log.py │ ├── label_row.py │ ├── labeling_algorithm.py │ ├── ontology.py │ ├── project.py │ ├── project_with_user_role.py │ ├── skeleton_template.py │ ├── storage.py │ └── workflow.py ├── project.py ├── project_ontology │ ├── __init__.py │ ├── classification_attribute.py │ ├── classification_option.py │ ├── classification_type.py │ ├── object_type.py │ ├── ontology.py │ ├── ontology_classification.py │ └── ontology_object.py ├── py.typed ├── storage.py ├── user_client.py ├── utilities │ ├── __init__.py │ ├── client_utilities.py │ ├── coco │ │ ├── __init__.py │ │ ├── datastructure.py │ │ ├── exporter.py │ │ ├── importer.py │ │ └── polygon_utils.py │ ├── common.py │ ├── hash_utilities.py │ ├── label_utilities.py │ ├── ontology_user.py │ ├── project_user.py │ ├── storage │ │ ├── __init__.py │ │ └── cloud_data_migration.py │ └── type_utilities.py └── workflow │ ├── __init__.py │ ├── common.py │ ├── stages │ ├── agent.py │ ├── annotation.py │ ├── consensus_annotation.py │ ├── consensus_review.py │ 
├── final.py │ └── review.py │ └── workflow.py ├── poetry.lock ├── py.typed ├── pyproject.toml ├── scripts └── code_examples_python_to_mdx.py └── tests ├── .DS_Store ├── __init__.py ├── common └── test_datetime_parser.py ├── constants ├── __init__.py └── test_enums.py ├── docs ├── export_labels_all_attributes.py ├── export_labels_all_to_json.py ├── export_labels_checklist_attributes.py ├── export_labels_consensus-labels.py ├── export_labels_frame_range_all_data_units.py ├── export_labels_frame_range_specific_data_unit.py ├── export_labels_radio_buttons.py ├── export_labels_text_attributes.py ├── label_bitmasks_images_videos_example.py ├── label_bitmasks_pdfs_example.py ├── label_bounding_boxes_images_videos_example.py ├── label_bounding_boxes_pdfs_example.py ├── label_classifications_example.py ├── label_keypoints_images_videos_example.py ├── label_polygons_images_videos_example_deprecated.py ├── label_polygons_images_videos_example_donut.py ├── label_polygons_images_videos_example_donut_plus_object.py ├── label_polygons_images_videos_example_multiple.py ├── label_polygons_images_videos_example_simple.py ├── label_polygons_pdfs_example.py ├── label_polylines_images_videos_example.py ├── label_rotatable_bounding_boxes_images_videos_example.py ├── label_skeletons_images_videos_example.py ├── label_text_regions_example.py ├── project_automatic_interpolation.py ├── project_convert_polygon_to_bitmask.py ├── project_convert_polygon_to_bounding_box.py ├── project_copy_project_advanced.py ├── project_copy_project_simple.py ├── project_create_project.py ├── project_cvat_import.py ├── project_dataset_add.py ├── project_dataset_remove.py ├── project_get_details.py ├── project_label_logs_get_logs.py ├── project_list_all_projects.py ├── project_manual_qa_to_workflow_global.py ├── project_manual_qa_to_workflow_us.py ├── project_merge_project_global.py ├── project_merge_project_us.py ├── project_move_all_tasks_to_complete.py ├── project_move_tasks_to_next_stage.py ├── project_reopen_and_list_all_tasks.py ├── project_set_priority_on_tasks.py ├── project_split_project.py ├── project_users_add.py └── project_verify_file_locations.py ├── fixtures.py ├── http ├── test_api_v2_client.py ├── test_api_v2_error_handling.py ├── test_error_handling.py ├── test_payload_deserialisation.py └── test_timeout_overrides_setting.py ├── objects ├── __init__.py ├── common.py ├── data │ ├── all_ontology_types.py │ ├── all_types_ontology_structure.py │ ├── audio_labels.py │ ├── audio_objects.py │ ├── data_1.py │ ├── data_editor_blob.py │ ├── dicom_labels.py │ ├── dicom_labels_with_metadata.py │ ├── dynamic_classifications_ontology.py │ ├── empty_image_group.py │ ├── empty_video.py │ ├── html_text_labels.py │ ├── image_group.py │ ├── image_group_with_reviews.py │ ├── native_image_data.py │ ├── native_image_data_classification_with_no_answer.py │ ├── ontology_with_many_dynamic_classifications.py │ ├── plain_text.py │ ├── skeleton_coordinates.py │ ├── video_with_dynamic_classifications.py │ └── video_with_dynamic_classifications_ui_constructed.py ├── test_bitmask.py ├── test_bitmask_numpy.py ├── test_coordinates.py ├── test_frames.py ├── test_label_parsing.py ├── test_label_structure.py ├── test_label_structure_converter.py ├── test_ontology.py └── test_skeleton_template.py ├── orm ├── test_base_dto.py ├── test_create_dataset_response.py └── test_dataset.py ├── test_analytics.py ├── test_bundled_label_operations.py ├── test_coco_export.py ├── test_data ├── __init__.py ├── label_rows_metadata_blurb.py └── ontology_blurb.py ├── 
test_label_logs.py ├── test_metadata_schema.py ├── test_project.py ├── test_user_client.py ├── test_user_client_auth.py ├── test_version.py ├── test_workflow_actions.py ├── utilities ├── coco │ ├── data │ │ └── exporter.py │ ├── data_test_datastructure.py │ ├── test_datastructure.py │ ├── test_exporter.py │ └── test_polygon_utils.py ├── test_range_manager.py └── test_user_agent_suffix.py └── workflow ├── conftest.py ├── test_project_workflow.py └── test_workflow_actions.py /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # This file contains a list of git hashes of revisions to be ignored by git 2 | # blame. These revisions are considered "unimportant" in that they are unlikely 3 | # to be what you are interested in when blaming. 4 | 5 | # Apply isort 6 | 8de122a3f511350ac47c8fc0aee1fb4a4f6d5c39 7 | -------------------------------------------------------------------------------- /.github/actions/run-integration-tests/action.yml: -------------------------------------------------------------------------------- 1 | name: 'Run integration tests' 2 | description: 'Run the integration tests for a given Python version. These tests come from the cord-backend.' 3 | 4 | inputs: 5 | python-version: 6 | description: 'Python version to use' 7 | default: 3.11 8 | required: false 9 | test-report-file: 10 | description: 'File name to save the test report in' 11 | required: true 12 | private-key: 13 | description: 'Private key for integration tests' 14 | required: true 15 | private-key-non-org: 16 | description: 'Private key of the second (non-org) user for integration tests' 17 | required: true 18 | private-key-service-account: 19 | description: 'Private key for service account user for integration tests' 20 | required: true 21 | integration-tests-location: 22 | description: 'project of the backend repo.' 23 | default: projects/sdk-integration-tests 24 | required: false 25 | test-dir: 26 | description: 'Test directory of the integration tests project.' 
27 | default: ./src/sdk_integration_tests/tests 28 | required: false 29 | environment: 30 | description: 'Encord deployment to run tests' 31 | default: DEV 32 | required: false 33 | sdk-repository-url: 34 | description: 'Git URL to the Encord SDK repository' 35 | default: https://github.com/encord-team/encord-client-python 36 | required: false 37 | 38 | 39 | runs: 40 | using: "composite" 41 | 42 | steps: 43 | - name: Install uv 44 | uses: astral-sh/setup-uv@v5 45 | with: 46 | version: "0.6.16" 47 | enable-cache: 'true' 48 | 49 | - name: Setup FFMPEG 50 | uses: FedericoCarboni/setup-ffmpeg@v2 51 | 52 | - name: Run tests 53 | run: | 54 | cd ${{ inputs.integration-tests-location }} 55 | export PRIVATE_KEY="${{ inputs.private-key }}" 56 | export PRIVATE_KEY_SERVICE_ACCOUNT="${{ inputs.private-key-service-account }}" 57 | export PRIVATE_KEY_NON_ORG='${{ inputs.private-key-non-org }}' 58 | export CORD_ENV="${{ inputs.environment }}" 59 | uv sync 60 | uv add git+${{ inputs.sdk-repository-url }} 61 | source .venv/bin/activate 62 | pytest --timeout=600 -n 4 ${{ inputs.test-dir }} --rootdir=${{ inputs.test-dir }} --verbose --junitxml=${{ inputs.test-report-file }} 63 | shell: bash 64 | 65 | - name: Upload report 66 | uses: actions/upload-artifact@v4 67 | if: always() 68 | with: 69 | name: ${{ inputs.test-report-file }} 70 | path: ${{ inputs.integration-tests-location }}/${{ inputs.test-report-file }} 71 | -------------------------------------------------------------------------------- /.github/actions/run-unit-tests/action.yml: -------------------------------------------------------------------------------- 1 | name: 'Run unit tests' 2 | description: 'Run the unit tests for a given Python version' 3 | 4 | inputs: 5 | python-version: 6 | description: 'Python version to use' 7 | default: 3.8.16 8 | required: false 9 | pydantic-version: 10 | description: 'Pydantic version to use' 11 | default: '' 12 | required: false 13 | poetry-version: 14 | description: 'Poetry version to use' 15 | default: 1.8.3 16 | required: false 17 | test-report-file: 18 | description: 'File name to save the test report in' 19 | required: true 20 | 21 | runs: 22 | using: "composite" 23 | 24 | steps: 25 | - name: Set up python 26 | uses: actions/setup-python@v5 27 | with: 28 | python-version: ${{ inputs.python-version }} 29 | 30 | - uses: snok/install-poetry@v1 31 | with: 32 | version: ${{ inputs.poetry-version }} 33 | virtualenvs-in-project: true 34 | 35 | - name: Check cache 36 | id: cached-poetry 37 | uses: actions/cache@v4 38 | with: 39 | path: .venv 40 | key: unit-${{ hashFiles('poetry.lock') }}-${{ inputs.python-version }}-${{ inputs.pydantic-version }}-1 41 | 42 | - name: Install dependencies 43 | if: steps.cached-poetry.outputs.cache-hit != 'true' 44 | run: poetry install --no-interaction 45 | shell: bash 46 | 47 | - name: Override pydantic version 48 | if: ${{ inputs.pydantic-version }} 49 | run: poetry run python -m pip install pydantic==${{ inputs.pydantic-version }} 50 | shell: bash 51 | 52 | - name: Run tests 53 | run: | 54 | poetry run python -m pytest tests --verbose --junitxml=${{ inputs.test-report-file }} 55 | shell: bash 56 | 57 | - name: Upload report 58 | uses: actions/upload-artifact@v4 59 | if: always() 60 | with: 61 | name: ${{ inputs.test-report-file }} 62 | path: ${{ inputs.test-report-file }} 63 | -------------------------------------------------------------------------------- /.github/actions/send-slack-notification/action.yml: 
-------------------------------------------------------------------------------- 1 | name: "Send Slack notification" 2 | description: "Send Slack message on success/failure parameter" 3 | 4 | inputs: 5 | success-parameter: 6 | description: "Pass success or failure here" 7 | required: true 8 | success-channel: 9 | description: "Channel ID for sending success notifications" 10 | required: false 11 | default: '' 12 | failure-channel: 13 | description: "Channel ID for sending failure notifications" 14 | required: false 15 | default: '' 16 | success-message: 17 | description: "Message body on success" 18 | required: false 19 | default: '' 20 | failure-message: 21 | description: "Message body on failure" 22 | required: false 23 | default: '' 24 | 25 | runs: 26 | using: "composite" 27 | 28 | steps: 29 | - id: commit-message 30 | run: | 31 | echo "${{ github.event.head_commit.message }}" > message.txt 32 | sed -z 's/\n/\\n/g' message.txt > newmessage.txt 33 | echo "message=$(cat newmessage.txt)" >> $GITHUB_OUTPUT 34 | shell: bash 35 | 36 | - uses: slackapi/slack-github-action@v1.18.0 37 | if: inputs.success-parameter == 'success' && inputs.success-channel != '' 38 | with: 39 | channel-id: ${{ inputs.success-channel }} 40 | payload: | 41 | { 42 | "text": "Success", 43 | "blocks": [ 44 | { 45 | "type": "section", 46 | "text": { 47 | "type": "mrkdwn", 48 | "text": ":white_check_mark: Success: \n:memo: Commit: <${{ github.event.head_commit.url }}|${{ github.ref_name }}>\n:hatching_chick: What's new: ${{ steps.commit-message.outputs.message }}\n\n${{ inputs.success-message }}" 49 | } 50 | } 51 | ] 52 | } 53 | 54 | - uses: slackapi/slack-github-action@v1.18.0 55 | if: inputs.success-parameter == 'failure' && inputs.failure-channel != '' 56 | with: 57 | channel-id: ${{ inputs.failure-channel }} 58 | payload: | 59 | { 60 | "text": "Failure", 61 | "blocks": [ 62 | { 63 | "type": "section", 64 | "text": { 65 | "type": "mrkdwn", 66 | "text": ":exclamation: FAILURE: \n:memo: Commit: <${{ github.event.head_commit.url }}|${{ github.ref_name }}>\n:beetle: Issue: ${{ steps.commit-message.outputs.message }}\n\n${{ inputs.failure-message }}" 67 | } 68 | } 69 | ] 70 | } 71 | -------------------------------------------------------------------------------- /.github/actions/setup-integration-tests-poetry-environment/action.yml: -------------------------------------------------------------------------------- 1 | name: "Setup test environment" 2 | description: "Sets up Python, Poetry and dependencies" 3 | 4 | inputs: 5 | python: 6 | description: "Python version to use" 7 | default: 3.9.14 8 | required: false 9 | poetry: 10 | description: "Poetry version to use" 11 | default: 1.8.3 12 | required: false 13 | 14 | runs: 15 | using: "composite" 16 | 17 | steps: 18 | - uses: actions/checkout@v4 19 | with: 20 | repository: ${{ inputs.backend_repository }} 21 | 22 | - uses: actions/setup-python@v5 23 | with: 24 | python-version: ${{ inputs.python }} 25 | 26 | - uses: snok/install-poetry@v1 27 | with: 28 | version: ${{ inputs.poetry }} 29 | virtualenvs-create: true 30 | virtualenvs-in-project: true 31 | installer-parallel: true 32 | 33 | - name: Load cached venv 34 | id: cached-poetry-dependencies 35 | uses: actions/cache@v4 36 | with: 37 | path: .venv 38 | key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} 39 | 40 | - if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' 41 | run: | 42 | poetry lock --no-update 43 | poetry install --no-interaction 44 | 
source .venv/bin/activate 45 | shell: bash 46 | -------------------------------------------------------------------------------- /.github/actions/setup-root-poetry-environment/action.yml: -------------------------------------------------------------------------------- 1 | name: "Setup test environment" 2 | description: "Sets up Python, Poetry and dependencies" 3 | 4 | inputs: 5 | python: 6 | description: "Python version to use" 7 | default: 3.8.16 8 | required: false 9 | poetry: 10 | description: "Poetry version to use" 11 | default: 1.8.3 12 | required: false 13 | environment-location: 14 | description: "Environment location" 15 | required: false 16 | default: '.' 17 | cache-key: 18 | description: "Cache key" 19 | required: true 20 | 21 | runs: 22 | using: "composite" 23 | 24 | steps: 25 | - uses: actions/setup-python@v5 26 | with: 27 | python-version: ${{ inputs.python }} 28 | 29 | - uses: snok/install-poetry@v1 30 | with: 31 | version: ${{ inputs.poetry }} 32 | virtualenvs-create: true 33 | virtualenvs-in-project: true 34 | installer-parallel: true 35 | 36 | - name: Load cached venv 37 | id: cached-poetry-dependencies 38 | uses: actions/cache@v4 39 | with: 40 | path: ${{ inputs.environment-location }}/.venv 41 | key: ${{ inputs.cache-key }} 42 | 43 | - if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' 44 | run: | 45 | cd ${{ inputs.environment-location }} 46 | poetry lock --no-update 47 | poetry install --no-interaction 48 | source .venv/bin/activate 49 | shell: bash 50 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | # Introduction and Explanation 2 | _Some introductory context and overview_ 3 | 4 | # JIRA 5 | 6 | _Link ticket(s)_ 7 | 8 | 9 | # Documentation 10 | _There should be enough internal documentation for a product owner to write customer-facing documentation, or a separate PR linked if writing the customer documentation directly. Link all that are relevant below_. 11 | - Internal: _notion link_ 12 | - Customer docs PR: _link_ 13 | - OpenAPI/SDK 14 | - Generated docs: _link to example if possible_ 15 | - Command to generate: _here_ 16 | 17 | # Tests 18 | 19 | _Make a quick statement and post any relevant links of CI / test results. If the testing infrastructure isn’t yet in place, note that instead._ 20 | 21 | - _What are the critical unit tests?_ 22 | - _Explain the Integration Tests such that it’s clear Correctness is satisfied. Link to test results if possible._ 23 | 24 | # Known issues 25 | 26 | _If there are any known issues with the solution, make a statement about what they are and why they are OK to leave unsolved for now.
Make tickets for the known issues and link them to the original ticket above_ 27 | -------------------------------------------------------------------------------- /.github/workflows/aikido.yml: -------------------------------------------------------------------------------- 1 | on: 2 | schedule: 3 | - cron: '00 11 * * *' 4 | workflow_dispatch: 5 | name: Aikido Scan 6 | jobs: 7 | aikido-local-scan-repo: 8 | runs-on: ubuntu-latest 9 | container: 10 | image: aikidosecurity/local-scanner:v1.0.52 11 | steps: 12 | - uses: actions/checkout@v4 13 | - name: Run scan 14 | run: aikido-local-scanner scan ./ --apikey ${{ secrets.AIKIDO_API_Key }} --repositoryname encord-client-python --branchname master 15 | -------------------------------------------------------------------------------- /.github/workflows/publish_sdk.yml: -------------------------------------------------------------------------------- 1 | name: Publish SDK 2 | 3 | on: 4 | release: 5 | types: [ published ] 6 | 7 | env: 8 | PYTHON: 3.8 9 | POETRY: 1.8.3 10 | PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} 11 | 12 | concurrency: 13 | group: cord-client-${{ github.ref }}-publish 14 | cancel-in-progress: true 15 | 16 | jobs: 17 | publish-sdk: 18 | name: Publish SDK 19 | runs-on: ubuntu-24.04 20 | steps: 21 | - name: Checkout repo 22 | uses: actions/checkout@v4 23 | 24 | - name: Set up Python 25 | uses: actions/setup-python@v5 26 | with: 27 | python-version: ${{ env.PYTHON }} 28 | 29 | - name: Setup Poetry 30 | uses: snok/install-poetry@v1 31 | with: 32 | version: ${{ env.POETRY }} 33 | virtualenvs-in-project: true 34 | 35 | - name: Install dependencies 36 | run: | 37 | poetry install --no-interaction 38 | 39 | - name: Build SDK 40 | run: poetry build 41 | 42 | - name: Publish SDK 43 | run: | 44 | poetry config pypi-token.pypi ${{ env.PYPI_TOKEN }} 45 | poetry publish 46 | 47 | send-slack-notification: 48 | name: Send notification 49 | runs-on: ubuntu-24.04 50 | needs: [ publish-sdk ] 51 | if: always() 52 | env: 53 | SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} 54 | steps: 55 | - name: Checkout code 56 | uses: actions/checkout@v3 57 | 58 | - name: Get workflow status 59 | uses: technote-space/workflow-conclusion-action@v2 60 | 61 | - name: Send Slack notification 62 | uses: ./.github/actions/send-slack-notification 63 | with: 64 | success-parameter: ${{ env.WORKFLOW_CONCLUSION }} 65 | success-channel: ${{ secrets.SLACK_DEPLOYMENT_PROD_CHANNEL_ID }} 66 | failure-channel: ${{ secrets.SLACK_FAILURE_CHANNEL_ID }} 67 | success-message: Deployed to https://pypi.org/project/encord/ 68 | failure-message: This pipeline has failed!
69 | -------------------------------------------------------------------------------- /.github/workflows/sdk_code_to_docs.yml: -------------------------------------------------------------------------------- 1 | name: Update Documentation on SDK Changes 2 | 3 | on: 4 | push: 5 | branches: 6 | - master # Trigger on push to the 'master' branch 7 | 8 | jobs: 9 | update_docs: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - name: Checkout source repository (repo1) 14 | uses: actions/checkout@v2 15 | with: 16 | repository: encord-team/encord-client-python # Replace with the name of your source repo (repo1) 17 | token: ${{ secrets.GITHUB_TOKEN }} 18 | ref: main # Ensures the latest changes from the 'main' branch are pulled 19 | 20 | - name: Checkout destination repository (repo2) 21 | uses: actions/checkout@v2 22 | with: 23 | repository: encord-team/encord-docs-mint # Replace with the name of your destination repo (repo2) 24 | token: ${{ secrets.GITHUB_TOKEN }} 25 | ref: main # Same branch as source repo 26 | 27 | - name: Set up Python 28 | uses: actions/setup-python@v2 29 | with: 30 | python-version: '3.x' 31 | 32 | - name: Install dependencies (if any) 33 | run: | 34 | pip install -r requirements.txt # If you have dependencies for the script 35 | 36 | - name: Run the Python script to update docs 37 | run: | 38 | python3 scripts/code_examples_python_to_mdx.py # Path to your Python script in repo1 39 | 40 | - name: Commit and push updated documentation 41 | run: | 42 | git config --global user.name "GitHub Actions" 43 | git config --global user.email "actions@github.com" 44 | git add . 45 | git commit -m "Update documentation after SDK changes" 46 | git push 47 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | .DS_Store 3 | 4 | # Byte-compiled / optimized / DLL files 5 | __pycache__/ 6 | *.py[cod] 7 | *$py.class 8 | 9 | # C extensions 10 | *.so 11 | 12 | # Distribution / packaging 13 | .Python 14 | build/ 15 | develop-eggs/ 16 | dist/ 17 | downloads/ 18 | eggs/ 19 | .eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | wheels/ 26 | pip-wheel-metadata/ 27 | share/python-wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test_blurb2.py / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .nox/ 47 | .coverage 48 | .coverage.* 49 | .cache 50 | nosetests.xml 51 | coverage.xml 52 | *.cover 53 | *.py,cover 54 | .hypothesis/ 55 | .pytest_cache/ 56 | 57 | # Translations 58 | *.mo 59 | *.pot 60 | 61 | # Django stuff: 62 | *.log 63 | local_settings.py 64 | db.sqlite3 65 | db.sqlite3-journal 66 | 67 | # Flask stuff: 68 | instance/ 69 | .webassets-cache 70 | 71 | # Scrapy stuff: 72 | .scrapy 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | test.py 132 | test_blurb.py 133 | test_blurb2.py 134 | 135 | tests/internal_tests 136 | script.py 137 | 138 | # VIM temporary files 139 | .*.swp 140 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: 2c9f875913ee60ca25ce70243dc24d5b6415598c # 4.6.0 4 | hooks: 5 | - id: check-yaml 6 | - id: check-json 7 | - id: check-toml 8 | - id: end-of-file-fixer 9 | - id: trailing-whitespace 10 | - repo: https://github.com/astral-sh/ruff-pre-commit 11 | rev: v0.8.6 12 | hooks: 13 | - id: ruff 14 | args: 15 | - --fix 16 | - --exit-non-zero-on-fix 17 | - id: ruff-format 18 | - repo: local 19 | hooks: 20 | - id: mypy 21 | name: mypy 22 | entry: poetry run mypy 23 | language: system 24 | files: ^encord/.*py$ 25 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": ["ms-python.python"] 3 | } 4 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.testing.pytestArgs": ["tests"], 3 | "python.testing.unittestEnabled": false, 4 | "python.testing.pytestEnabled": true, 5 | "search.exclude": { 6 | "*.md": true, 7 | "**/*.rst": true, 8 | "poetry.lock": true, 9 | "test.py": true 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
<h1 align="center">Encord Python API Client</h1> 2 | 3 | <!-- Encord logo image --> 4 | 5 | 6 |
7 | 8 | [![license](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) 9 | 10 | # The data engine for computer vision 11 | 12 | ## 💻 Features 13 | 14 | - Minimal low-level Python client that allows you to interact with Encord's API 15 | - Supports Python: `3.8`, `3.9`, `3.10` and `3.11` 16 | 17 | ## ✨ Relevant Links 18 | 19 | * [Encord website](https://encord.com) 20 | * [Encord web app](https://app.encord.com) 21 | * [Encord documentation](https://docs.encord.com) 22 | 23 | ## 💡 Getting Started 24 | 25 | For full documentation, please visit [Encord Python SDK](https://docs.encord.com/reference/installation-sdk). 26 | 27 | First, install the Encord Python API Client using the [pip](https://pip.pypa.io/en/stable/installing) package manager: 28 | 29 | ```bash 30 | python3 -m pip install encord 31 | ``` 32 | 33 | Then, generate a public-private key pair and upload the public key to the [Encord website](https://www.encord.com/). 34 | A detailed guide can be found in the [dedicated manual](https://docs.encord.com/docs/annotate-public-keys). 35 | 36 | By passing the private key to the factory method, you can initialise the Encord client directly. 37 | 38 | ```python 39 | # Import dependencies 40 | from encord import EncordUserClient 41 | 42 | # Authenticate with Encord using the path to your private key. Replace <private_key_path> with the path to your private key. 43 | user_client = EncordUserClient.create_with_ssh_private_key( 44 | ssh_private_key_path="<private_key_path>" 45 | ) 46 | ``` 47 | 48 | For detailed examples and the API reference, please refer to the [Encord SDK documentation](https://python.docs.encord.com/). 49 | 50 | ## 🐛 Troubleshooting 51 | 52 | Please report bugs via [GitHub Issues](https://github.com/encord-team/encord-client-python/issues). 53 | Just make sure you read the [Encord documentation](https://docs.encord.com) and search for related issues first. 54 | -------------------------------------------------------------------------------- /encord/__init__.py: -------------------------------------------------------------------------------- 1 | from encord._version import __version__ 2 | from encord.dataset import Dataset 3 | from encord.project import Project 4 | from encord.user_client import EncordUserClient 5 | -------------------------------------------------------------------------------- /encord/_version.py: -------------------------------------------------------------------------------- 1 | import importlib.metadata as importlib_metadata 2 | 3 | __version__ = importlib_metadata.version("encord") 4 | -------------------------------------------------------------------------------- /encord/api/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/encord-team/encord-client-python/0245e717f07016e69ec062a08797d85026c2febb/encord/api/__init__.py -------------------------------------------------------------------------------- /encord/client_metadata_schema.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Optional 2 | 3 | import encord.orm.client_metadata_schema as orm 4 | from encord.common.deprecated import deprecated 5 | from encord.http.v2.api_client import ApiClient 6 | 7 | 8 | @deprecated("0.1.132") 9 | def set_client_metadata_schema_from_dict(api_client: ApiClient, json_dict: Dict[str, orm.ClientMetadataSchemaTypes]): 10 | """Set the client metadata schema from a dictionary.
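Note: Deprecated since version 0.1.132 (see the @deprecated decorator above).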
11 | 12 | Args: 13 | api_client (ApiClient): The API client to use for the request. 14 | json_dict (Dict[str, orm.ClientMetadataSchemaTypes]): A dictionary containing the client metadata schema types. 15 | 16 | Raises: 17 | NotImplementedError: If an unexpected data type is encountered in the schema. 18 | """ 19 | try: 20 | validated_dict = {key: orm.ClientMetadataSchemaTypes(val) for key, val in json_dict.items()} 21 | except ValueError: 22 | raise NotImplementedError( 23 | f"Got an unexpected data type in schema. Valid data types are: " 24 | f"{', '.join([v for v in orm.ClientMetadataSchemaTypes])}." 25 | ) 26 | payload = orm.ClientMetadataSchemaPayload(metadata_schema=validated_dict) 27 | api_client.post("organisation/client-metadata-schema", params=None, payload=payload, result_type=None) 28 | 29 | 30 | @deprecated("0.1.132") 31 | def get_client_metadata_schema(api_client: ApiClient) -> Optional[Dict[str, orm.ClientMetadataSchemaTypes]]: 32 | """Retrieve the client metadata schema. 33 | 34 | Args: 35 | api_client (ApiClient): The API client to use for the request. 36 | 37 | Returns: 38 | Optional[Dict[str, orm.ClientMetadataSchemaTypes]]: A dictionary containing the client metadata schema types 39 | if available, otherwise None. 40 | """ 41 | client_metadata_schema: Optional[orm.ClientMetadataSchema] = api_client.get( 42 | "organisation/client-metadata-schema", 43 | params=None, 44 | result_type=orm.ClientMetadataSchema, 45 | allow_none=True, 46 | ) 47 | return client_metadata_schema.metadata_schema if client_metadata_schema else None 48 | -------------------------------------------------------------------------------- /encord/common/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/encord-team/encord-client-python/0245e717f07016e69ec062a08797d85026c2febb/encord/common/__init__.py -------------------------------------------------------------------------------- /encord/common/bitmask_operations/__init__.py: -------------------------------------------------------------------------------- 1 | try: 2 | from encord.common.bitmask_operations.bitmask_operations_numpy import ( 3 | _mask_to_rle, 4 | _rle_to_mask, 5 | _rle_to_string, 6 | _string_to_rle, 7 | deserialise_bitmask, 8 | serialise_bitmask, 9 | transpose_bytearray, 10 | ) 11 | except ImportError: 12 | from encord.common.bitmask_operations.bitmask_operations import ( 13 | _mask_to_rle, 14 | _rle_to_mask, 15 | _rle_to_string, 16 | _string_to_rle, 17 | deserialise_bitmask, 18 | serialise_bitmask, 19 | transpose_bytearray, 20 | ) 21 | -------------------------------------------------------------------------------- /encord/common/bitmask_operations/bitmask_operations.py: -------------------------------------------------------------------------------- 1 | from itertools import groupby 2 | from typing import List, Sequence, Tuple 3 | 4 | 5 | def _string_to_rle(mask_string: str) -> List[int]: 6 | """COCO-compatible string to RLE-encoded mask de-serialisation""" 7 | cnts: List[int] = [] 8 | p = 0 9 | 10 | while p < len(mask_string): 11 | x = 0 12 | k = 0 13 | more = 1 14 | 15 | while more and p < len(mask_string): 16 | c = ord(mask_string[p]) - 48 17 | x |= (c & 0x1F) << (5 * k) 18 | more = c & 0x20 19 | p += 1 20 | k += 1 21 | 22 | if not more and (c & 0x10): 23 | x |= -1 << (5 * k) 24 | 25 | if len(cnts) > 2: 26 | x += cnts[-2] 27 | 28 | cnts.append(x) 29 | 30 | return cnts 31 | 32 | 33 | def _rle_to_string(rle: Sequence[int]) -> str: 34 | """COCO-compatible 
RLE-encoded mask to string serialisation""" 35 | rle_string = "" 36 | for i, x in enumerate(rle): 37 | if i > 2: 38 | x -= rle[i - 2] 39 | 40 | more = 1 41 | while more: 42 | c = x & 0x1F 43 | x >>= 5 44 | 45 | if c & 0x10: 46 | more = x != -1 47 | else: 48 | more = x != 0 49 | 50 | if more: 51 | c |= 0x20 52 | 53 | c += 48 54 | rle_string += chr(c) 55 | 56 | return rle_string 57 | 58 | 59 | def _mask_to_rle(mask: bytes) -> List[int]: 60 | """COCO-compatible raw bitmask to COCO-compatible RLE""" 61 | if len(mask) == 0: 62 | return [] 63 | raw_rle = [len(list(group)) for _, group in groupby(mask)] 64 | # note that the odd counts are always the numbers of zeros 65 | if mask[0] == 1: 66 | raw_rle.insert(0, 0) 67 | return raw_rle 68 | 69 | 70 | def _rle_to_mask(rle: List[int], size: int) -> bytes: 71 | """COCO-compatible RLE to bitmask""" 72 | res = bytearray(size) 73 | offset = 0 74 | 75 | for i, c in enumerate(rle): 76 | v = i % 2 77 | while c > 0: 78 | res[offset] = v 79 | offset += 1 80 | c -= 1 81 | 82 | return bytes(res) 83 | 84 | 85 | def serialise_bitmask(bitmask: bytes) -> str: 86 | rle = _mask_to_rle(bitmask) 87 | return _rle_to_string(rle) 88 | 89 | 90 | def deserialise_bitmask(serialised_bitmask: str, length: int) -> bytes: 91 | rle = _string_to_rle(serialised_bitmask) 92 | return _rle_to_mask(rle, length) 93 | 94 | 95 | def transpose_bytearray(byte_data: bytes, shape: Tuple[int, int]) -> bytes: 96 | rows, cols = shape 97 | transposed_byte_data = bytearray(len(byte_data)) 98 | for row in range(rows): 99 | for col in range(cols): 100 | transposed_byte_data[col * rows + row] = byte_data[row * cols + col] 101 | 102 | return transposed_byte_data 103 | -------------------------------------------------------------------------------- /encord/common/bitmask_operations/bitmask_operations_numpy.py: -------------------------------------------------------------------------------- 1 | from typing import List, Sequence, Tuple 2 | 3 | import numpy as np 4 | 5 | # Import the pure-Python implementations of the functions that do not have a numpy implementation 6 | from .bitmask_operations import _rle_to_mask, _rle_to_string, _string_to_rle 7 | 8 | 9 | def _mask_to_rle(mask: bytes) -> List[int]: 10 | """COCO-compatible raw bitmask to COCO-compatible RLE""" 11 | if len(mask) == 0: 12 | return [] 13 | mask_buffer = np.frombuffer(mask, dtype=np.bool_) 14 | changes = np.diff(mask_buffer, prepend=mask_buffer[0], append=mask_buffer[-1]) 15 | change_indices = np.flatnonzero(changes != 0) 16 | run_lengths = np.diff(np.concatenate(([0], change_indices, [len(mask_buffer)]))) 17 | 18 | # note that the odd counts are always the numbers of zeros 19 | if mask_buffer[0]: 20 | run_lengths = np.concatenate(([0], run_lengths)) 21 | 22 | return run_lengths.tolist() 23 | 24 | 25 | def serialise_bitmask(bitmask: bytes) -> str: 26 | rle = _mask_to_rle(bitmask) 27 | return _rle_to_string(rle) 28 | 29 | 30 | def deserialise_bitmask(serialised_bitmask: str, length: int) -> bytes: 31 | rle = _string_to_rle(serialised_bitmask) 32 | return _rle_to_mask(rle, length) 33 | 34 | 35 | def transpose_bytearray(byte_data: bytes, shape: Tuple[int, int]) -> bytes: 36 | np_byte_data = np.frombuffer(byte_data, dtype=np.int8).reshape(shape) 37 | return bytearray(np_byte_data.T.tobytes()) 38 | -------------------------------------------------------------------------------- /encord/common/constants.py: -------------------------------------------------------------------------------- 1 | DATETIME_STRING_FORMAT = "%Y-%m-%d %H:%M:%S" 2 |
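# Example renderings (standard strftime semantics; values illustrative):
#   DATETIME_STRING_FORMAT      -> "2024-03-05 14:30:00"
#   DATETIME_LONG_STRING_FORMAT -> "Tue, 05 Mar 2024 14:30:00 UTC"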
DATETIME_LONG_STRING_FORMAT = "%a, %d %b %Y %H:%M:%S %Z" 3 | -------------------------------------------------------------------------------- /encord/common/deprecated.py: -------------------------------------------------------------------------------- 1 | import functools 2 | import warnings 3 | 4 | 5 | def deprecated(version, alternative=None): 6 | def decorator(func): 7 | @functools.wraps(func) 8 | def new_func(*args, **kwargs): 9 | message = f"Function {func.__name__} is deprecated" 10 | if version: 11 | message += f" since version {version}" 12 | if alternative: 13 | message += f", use {alternative} instead" 14 | warnings.warn(message, category=DeprecationWarning, stacklevel=2) 15 | return func(*args, **kwargs) 16 | 17 | return new_func 18 | 19 | return decorator 20 | -------------------------------------------------------------------------------- /encord/common/enum.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from enum import Enum 4 | from typing import Optional, Type, TypeVar, cast 5 | 6 | T = TypeVar("T", bound="StringEnum") 7 | 8 | 9 | class StringEnum(str, Enum): 10 | """Use this enum class if you need the helper that creates the enum instance from a string.""" 11 | 12 | def __str__(self): 13 | return self.name 14 | 15 | @classmethod 16 | def from_string(cls: Type[T], value: str) -> Optional[T]: 17 | # pylint: disable-next=no-member 18 | return cast(T, cls._value2member_map_.get(value)) 19 | -------------------------------------------------------------------------------- /encord/common/time_parser.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | from datetime import datetime 3 | from functools import lru_cache 4 | from typing import Optional, Union 5 | 6 | from dateutil import parser 7 | 8 | from encord.common.constants import DATETIME_LONG_STRING_FORMAT 9 | 10 | 11 | # Cache: major performance win for large classification ranges 12 | @lru_cache(maxsize=8192) 13 | def parse_datetime(time_string: str) -> datetime: 14 | """Parse datetime strings in the most compatible yet performant way. 15 | 16 | Our labels can contain timestamps in different formats, but applying dateutil.parser.parse straight away is expensive 17 | as it is very smart and tries to guess the time format. 18 | 19 | So instead we're applying parsers with known formats, starting from the formats most likely to occur, 20 | and falling back to the most complicated logic only after all other attempts have failed.
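Example:
    >>> parse_datetime("2023-01-01 10:00:00")
    datetime.datetime(2023, 1, 1, 10, 0)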
21 | """ 22 | with contextlib.suppress(Exception): 23 | return datetime.strptime(time_string, DATETIME_LONG_STRING_FORMAT) 24 | with contextlib.suppress(Exception): 25 | return parser.isoparse(time_string) 26 | with contextlib.suppress(Exception): 27 | return parser.parse(time_string) 28 | 29 | # As a last resort, employ fuzzy parsing, which is most expensive, 30 | # but parses the most obscure timestamp formats 31 | return parser.parse(time_string, fuzzy=True) 32 | 33 | 34 | def parse_datetime_optional(_datetime: Optional[Union[str, datetime]]) -> Optional[datetime]: 35 | if _datetime is None: 36 | return None 37 | elif isinstance(_datetime, datetime): 38 | return _datetime 39 | elif isinstance(_datetime, str): 40 | return parse_datetime(_datetime) 41 | else: 42 | raise ValueError(f"parse_datetime_optional {type(_datetime)=} not supported") 43 | -------------------------------------------------------------------------------- /encord/common/utils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import re 4 | from typing import List, Optional, TypeVar, Union 5 | from uuid import UUID 6 | 7 | 8 | def snake_to_camel(snake_case_str: str) -> str: 9 | camel = re.sub("([0-9A-Za-z])_(?=[0-9A-Z])", lambda m: m.group(1), snake_case_str.title()) 10 | return re.sub("(^_*[A-Z])", lambda m: m.group(1).lower(), camel) 11 | 12 | 13 | T = TypeVar("T") 14 | 15 | 16 | def ensure_list(v: Union[List[T], T, None]) -> Optional[List[T]]: 17 | if v is None or isinstance(v, list): 18 | return v 19 | return [v] 20 | 21 | 22 | def ensure_uuid_list(value: Union[List[UUID], List[str], UUID, str, None]) -> Optional[List[UUID]]: 23 | vs = ensure_list(value) 24 | if vs is None: 25 | return None 26 | 27 | results: List[UUID] = [] 28 | for v in vs: 29 | if isinstance(v, UUID): 30 | results.append(v) 31 | elif isinstance(v, str): 32 | results.append(UUID(v)) 33 | else: 34 | raise AssertionError(f"Can't convert {type(v)} to UUID") 35 | 36 | return results 37 | 38 | 39 | def validate_user_agent_suffix(user_agent_suffix: str) -> str: 40 | """ 41 | Validate a User-Agent string according to RFC 9110, excluding comments. 42 | Returns it whitespace trimmed 43 | Args: 44 | user_agent_suffix: The User-Agent string to validate 45 | 46 | Returns: 47 | The validated User-Agent string 48 | 49 | Raises: 50 | ValueError: If the User-Agent string is invalid 51 | """ 52 | user_agent = user_agent_suffix.strip() 53 | # Define regex components 54 | tchar = r"[-!#$%&'*+.^_`|~0-9A-Za-z]" 55 | token = f"{tchar}+" 56 | product = f"{token}(?:/{token})?" 57 | RWS = r"[ \t]+" 58 | 59 | # Complete User-Agent pattern (just products separated by whitespace) 60 | pattern = re.compile(f"^{product}(?:{RWS}{product})*$") 61 | 62 | if not pattern.match(user_agent): 63 | raise ValueError(f"Invalid User-Agent string: '{user_agent_suffix}'. 
") 64 | 65 | return user_agent 66 | -------------------------------------------------------------------------------- /encord/constants/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/encord-team/encord-client-python/0245e717f07016e69ec062a08797d85026c2febb/encord/constants/__init__.py -------------------------------------------------------------------------------- /encord/constants/enums.py: -------------------------------------------------------------------------------- 1 | """--- 2 | title: "Enums" 3 | slug: "sdk-ref-enums" 4 | hidden: false 5 | metadata: 6 | title: "Enums" 7 | description: "Encord SDK Enums." 8 | category: "64e481b57b6027003f20aaa0" 9 | --- 10 | """ 11 | 12 | from __future__ import annotations 13 | 14 | from typing import Any 15 | 16 | from encord.common.enum import StringEnum 17 | 18 | 19 | class DataType(StringEnum): 20 | VIDEO = "video" 21 | IMG_GROUP = "img_group" 22 | DICOM = "dicom" 23 | IMAGE = "image" 24 | DICOM_STUDY = "dicom_study" 25 | NIFTI = "nifti" 26 | AUDIO = "audio" 27 | PLAIN_TEXT = "plain_text" 28 | PDF = "pdf" 29 | 30 | # will be displayed if the Encord platform has a new data type that is not present in this SDK version. Please upgrade your SDK version 31 | MISSING_DATA_TYPE = "_MISSING_DATA_TYPE_" 32 | 33 | @classmethod 34 | def _missing_(cls, value: Any) -> DataType: 35 | return cls.MISSING_DATA_TYPE 36 | 37 | @staticmethod 38 | def from_upper_case_string(string: str) -> DataType: 39 | for data_type in DataType: 40 | if string == data_type.to_upper_case_string(): 41 | return data_type 42 | 43 | return DataType.MISSING_DATA_TYPE 44 | 45 | def to_upper_case_string(self) -> str: 46 | return self.value.upper() 47 | 48 | 49 | GEOMETRIC_TYPES = { 50 | DataType.VIDEO, 51 | DataType.IMAGE, 52 | DataType.IMG_GROUP, 53 | DataType.DICOM, 54 | DataType.DICOM_STUDY, 55 | DataType.NIFTI, 56 | DataType.PDF, 57 | } 58 | 59 | 60 | def is_geometric(data_type: DataType) -> bool: 61 | return data_type in GEOMETRIC_TYPES 62 | -------------------------------------------------------------------------------- /encord/constants/string_constants.py: -------------------------------------------------------------------------------- 1 | # Label row constants 2 | 3 | LABELS = "labels" 4 | OBJECTS = "objects" 5 | CLASSIFICATIONS = "classifications" 6 | OBJECT_HASH = "objectHash" 7 | CLASSIFICATION_HASH = "classificationHash" 8 | OBJECT_ANSWERS = "object_answers" 9 | CLASSIFICATION_ANSWERS = "classification_answers" 10 | 11 | 12 | # Labeling algorithm names 13 | INTERPOLATION = "interpolation" 14 | 15 | 16 | # Type of Cord API key 17 | TYPE_PROJECT = "project" 18 | TYPE_DATASET = "dataset" 19 | TYPE_ONTOLOGY = "ontology" 20 | -------------------------------------------------------------------------------- /encord/http/__init__.py: -------------------------------------------------------------------------------- 1 | from encord.http.constants import RequestsSettings 2 | -------------------------------------------------------------------------------- /encord/http/common.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Optional 3 | 4 | from encord.exceptions import ExceptionContext 5 | 6 | HEADER_USER_AGENT = "User-Agent" 7 | HEADER_CLOUD_TRACE_CONTEXT = "X-Cloud-Trace-Context" 8 | 9 | 10 | @dataclass 11 | class RequestContext(ExceptionContext): 12 | trace_id: Optional[str] = None 13 | span_id: Optional[str] = 
None 14 | domain: Optional[str] = None 15 | -------------------------------------------------------------------------------- /encord/http/constants.py: -------------------------------------------------------------------------------- 1 | """--- 2 | title: "Constants" 3 | slug: "sdk-ref-constants" 4 | hidden: false 5 | metadata: 6 | title: "Constants" 7 | description: "Encord SDK Constants." 8 | category: "64e481b57b6027003f20aaa0" 9 | --- 10 | """ 11 | 12 | from dataclasses import dataclass 13 | from typing import Callable, Optional 14 | 15 | DEFAULT_CONNECTION_RETRIES = 3 16 | DEFAULT_MAX_RETRIES = 3 17 | DEFAULT_BACKOFF_FACTOR = 1.5 18 | 19 | DEFAULT_READ_TIMEOUT = 180 # In seconds 20 | DEFAULT_WRITE_TIMEOUT = 180 # In seconds 21 | DEFAULT_CONNECT_TIMEOUT = 180 # In seconds 22 | 23 | 24 | @dataclass 25 | class RequestsSettings: 26 | """The settings for all outgoing network requests. These apply for each individual request.""" 27 | 28 | max_retries: int = DEFAULT_MAX_RETRIES 29 | """Number of allowed retries when a request is sent. It only affects idempotent retryable requests.""" 30 | 31 | backoff_factor: float = DEFAULT_BACKOFF_FACTOR 32 | """With each retry, there will be a sleep of backoff_factor * (2 ** (retry_number - 1) )""" 33 | 34 | connection_retries: int = DEFAULT_CONNECTION_RETRIES 35 | """Number of allowed retries to establish TCP connection when a request is sent.""" 36 | 37 | connect_timeout: int = DEFAULT_CONNECT_TIMEOUT 38 | """Maximum number of seconds from connection establishment to the first byte of response received""" 39 | 40 | read_timeout: int = DEFAULT_READ_TIMEOUT 41 | """Maximum number of seconds to obtain full response""" 42 | 43 | write_timeout: int = DEFAULT_WRITE_TIMEOUT 44 | """Maximum number of seconds to send request payload""" 45 | 46 | trace_id_provider: Optional[Callable[[], str]] = None 47 | """Function that supplies trace id for every request issued by the library. Random if not provided.""" 48 | 49 | 50 | DEFAULT_REQUESTS_SETTINGS = RequestsSettings() 51 | -------------------------------------------------------------------------------- /encord/http/limits.py: -------------------------------------------------------------------------------- 1 | LABEL_ROW_BUNDLE_DEFAULT_LIMIT = 1000 2 | LABEL_ROW_BUNDLE_GET_LIMIT = 1000 3 | LABEL_ROW_BUNDLE_CREATE_LIMIT = 1000 4 | LABEL_ROW_BUNDLE_SAVE_LIMIT = 1000 5 | -------------------------------------------------------------------------------- /encord/http/query_methods.py: -------------------------------------------------------------------------------- 1 | from encord.constants.enums import StringEnum 2 | 3 | 4 | class QueryMethods(StringEnum): 5 | GET = "GET" 6 | POST = "POST" 7 | PUT = "PUT" 8 | DELETE = "DELETE" 9 | -------------------------------------------------------------------------------- /encord/http/request.py: -------------------------------------------------------------------------------- 1 | import json 2 | from typing import Any, Dict, List, Optional, Type, Union 3 | 4 | from encord.http.query_methods import QueryMethods 5 | 6 | UIDType = Union[None, int, str, Dict[str, str], Dict[str, object], List[int], List[str], List[Dict[str, str]]] 7 | 8 | 9 | class Request: 10 | """Request object. 
Takes query parameters and prepares them for execution.""" 11 | 12 | def __init__( 13 | self, 14 | query_method: QueryMethods, 15 | db_object_type: Type, 16 | uid: UIDType, 17 | timeout: int, 18 | connect_timeout: int, 19 | payload: Union[None, Dict[str, Any], List[Dict[str, Any]]], 20 | ) -> None: 21 | self.http_method = QueryMethods.POST 22 | self.data: str = json.dumps( 23 | { 24 | "query_type": db_object_type.__name__.lower(), 25 | "query_method": str(query_method), 26 | "values": { 27 | "uid": uid, 28 | "payload": payload, 29 | }, 30 | } 31 | ) 32 | self.timeout = timeout 33 | self.connect_timeout = connect_timeout 34 | 35 | self.headers: Optional[Dict] = None 36 | 37 | def __str__(self): 38 | return f"Request({self.http_method}, {self.data}, {self.headers}, {self.timeout}, {self.connect_timeout})" 39 | 40 | def __repr__(self): 41 | return self.__str__() 42 | 43 | def __eq__(self, other): 44 | return self.__dict__ == other.__dict__ 45 | -------------------------------------------------------------------------------- /encord/http/v2/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/encord-team/encord-client-python/0245e717f07016e69ec062a08797d85026c2febb/encord/http/v2/__init__.py -------------------------------------------------------------------------------- /encord/http/v2/error_utils.py: -------------------------------------------------------------------------------- 1 | from encord.exceptions import ( 2 | AuthenticationError, 3 | AuthorisationError, 4 | InvalidArgumentsError, 5 | MethodNotAllowedError, 6 | ResourceNotFoundError, 7 | UnknownException, 8 | ) 9 | 10 | HTTP_BAD_REQUEST = 400 11 | HTTP_UNAUTHORIZED = 401 12 | HTTP_FORBIDDEN = 403 13 | HTTP_NOT_FOUND = 404 14 | HTTP_METHOD_NOT_ALLOWED = 405 15 | HTTP_GENERAL_ERROR = 500 16 | 17 | 18 | def handle_error_response(status_code: int, message=None, context=None): 19 | """Checks server response. 20 | Called if HTTP response status code is an error response. 
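Raises:
    AuthenticationError: For 401 responses.
    AuthorisationError: For 403 responses.
    ResourceNotFoundError: For 404 responses.
    MethodNotAllowedError: For 405 responses.
    InvalidArgumentsError: For 400 responses.
    UnknownException: For any other error status code.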
21 | """ 22 | if status_code == HTTP_UNAUTHORIZED: 23 | raise AuthenticationError( 24 | message or "You are not authenticated to access the Encord platform.", context=context 25 | ) 26 | 27 | if status_code == HTTP_FORBIDDEN: 28 | raise AuthorisationError(message or "You are not authorised to access this asset.", context=context) 29 | 30 | if status_code == HTTP_NOT_FOUND: 31 | raise ResourceNotFoundError("The requested resource was not found.", context=context) 32 | 33 | if status_code == HTTP_METHOD_NOT_ALLOWED: 34 | raise MethodNotAllowedError("HTTP method is not allowed.", context=context) 35 | 36 | if status_code == HTTP_BAD_REQUEST: 37 | raise InvalidArgumentsError(message or "Provided payload is invalid and can't be processed.", context=context) 38 | 39 | raise UnknownException(message or "An unknown error occurred.", context=context) 40 | -------------------------------------------------------------------------------- /encord/http/v2/payloads.py: -------------------------------------------------------------------------------- 1 | from typing import Generic, List, Optional, TypeVar 2 | 3 | from encord.orm.base_dto import GenericBaseDTO 4 | 5 | T = TypeVar("T") 6 | 7 | 8 | class Page(GenericBaseDTO, Generic[T]): 9 | results: List[T] 10 | next_page_token: Optional[str] = None 11 | -------------------------------------------------------------------------------- /encord/http/v2/request_signer.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import hashlib 3 | from datetime import datetime 4 | from typing import Dict, Union 5 | 6 | from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey 7 | from requests import PreparedRequest 8 | 9 | _SIGNATURE_LABEL = "encord-sig" 10 | 11 | """ 12 | This file implements request signing according to the following RFC draft: 13 | https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-message-signatures 14 | """ 15 | 16 | 17 | def _sfv_str(key: str, value: Union[str, bytes]) -> str: 18 | if isinstance(value, bytes): 19 | return f"{key}=:{base64.b64encode(value).decode('ascii')}:" 20 | return f"{key}={value}" 21 | 22 | 23 | def _sfv_value(value: Union[int, str]) -> str: 24 | if isinstance(value, int): 25 | return str(value) 26 | return f'"{value}"' 27 | 28 | 29 | def _request_body_bytes(request: PreparedRequest) -> bytes: 30 | if isinstance(request.body, str): 31 | return request.body.encode("utf-8") 32 | elif isinstance(request.body, bytes): 33 | return request.body 34 | else: 35 | return b"" 36 | 37 | 38 | def sign_request(request: PreparedRequest, key_id: str, private_key: Ed25519PrivateKey) -> PreparedRequest: 39 | assert request.method is not None 40 | 41 | content_digest = _sfv_str("sha-256", hashlib.sha256(_request_body_bytes(request)).digest()) 42 | # Moving 'created' time to be a bit in the past, since sometimes requests are failing 43 | # due to the clock skew. 44 | # This is to be fixed on server side and removed from here. 
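# For illustration, the covered components below serialise into a signature base
# of the following shape (all values hypothetical):
#   "@method": POST
#   "@request-target": /v2/public/projects
#   "content-digest": sha-256=:...:
#   "@signature-params": ("@method" "@request-target" "content-digest");created=1700000000;keyid="key-id";alg="ed25519"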
45 | signature_params: Dict[str, Union[str, int]] = { 46 | "created": int(datetime.now().timestamp()) - 30, 47 | "keyid": key_id, 48 | "alg": "ed25519", 49 | } 50 | 51 | signature_elements = { 52 | "@method": request.method.upper(), 53 | "@request-target": request.path_url, 54 | "content-digest": content_digest, 55 | } 56 | 57 | covered_elements = [f'"{element_id}"' for element_id in signature_elements] 58 | signature_elements_pairs = [f"{k}={_sfv_value(v)}" for k, v in signature_params.items()] 59 | sig_params_serialised = ";".join([f"({' '.join(covered_elements)})"] + signature_elements_pairs) 60 | 61 | signature_elements["@signature-params"] = sig_params_serialised 62 | 63 | signature_base = "\n".join(f'"{k}": {v}' for k, v in signature_elements.items()) 64 | signature = private_key.sign(signature_base.encode()) 65 | 66 | request.headers["Content-Digest"] = content_digest 67 | request.headers["Signature-Input"] = _sfv_str(_SIGNATURE_LABEL, sig_params_serialised) 68 | request.headers["Signature"] = _sfv_str(_SIGNATURE_LABEL, signature) 69 | 70 | return request 71 | -------------------------------------------------------------------------------- /encord/objects/__init__.py: -------------------------------------------------------------------------------- 1 | from encord.objects.attributes import ChecklistAttribute, RadioAttribute, TextAttribute 2 | from encord.objects.classification import Classification 3 | from encord.objects.classification_instance import ClassificationInstance 4 | from encord.objects.common import Shape 5 | from encord.objects.metadata import DICOMSeriesMetadata, DICOMSliceMetadata 6 | from encord.objects.ontology_labels_impl import LabelRowV2 7 | from encord.objects.ontology_object import Object 8 | from encord.objects.ontology_object_instance import AnswerForFrames, ObjectInstance 9 | from encord.objects.ontology_structure import OntologyStructure 10 | from encord.objects.options import FlatOption, NestableOption, Option 11 | -------------------------------------------------------------------------------- /encord/objects/bundled_operations.py: -------------------------------------------------------------------------------- 1 | """--- 2 | title: "Objects - Bundled Operations" 3 | slug: "sdk-ref-objects-bundled-op" 4 | hidden: false 5 | metadata: 6 | title: "Objects - Bundled Operations" 7 | description: "Encord SDK Objects - Bundled Operations." 8 | category: "64e481b57b6027003f20aaa0" 9 | --- 10 | """ 11 | 12 | from __future__ import annotations 13 | 14 | from dataclasses import dataclass 15 | from typing import Dict, List, Optional, Set, Tuple 16 | 17 | """ 18 | Operation payloads to work with LabelRowV2 bundling. 19 | These are internal helpers and not supposed to be used by external users. 
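Each payload class implements an add() method, which allows the bundler to merge several queued operations of the same kind into a single API call.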
20 | """ 21 | 22 | 23 | @dataclass 24 | class BundledGetRowsPayload: 25 | uids: List[str] 26 | get_signed_url: bool 27 | include_object_feature_hashes: Optional[Set[str]] 28 | include_classification_feature_hashes: Optional[Set[str]] 29 | include_reviews: bool 30 | 31 | def add(self, other: BundledGetRowsPayload) -> BundledGetRowsPayload: 32 | self.uids.extend(other.uids) 33 | return self 34 | 35 | 36 | @dataclass 37 | class BundledCreateRowsPayload: 38 | uids: List[str] 39 | get_signed_url: bool 40 | branch_name: Optional[str] 41 | 42 | def add(self, other: BundledCreateRowsPayload) -> BundledCreateRowsPayload: 43 | self.uids.extend(other.uids) 44 | return self 45 | 46 | 47 | @dataclass 48 | class BundledSaveRowsPayload: 49 | uids: List[str] 50 | payload: List[Dict] 51 | validate_before_saving: Optional[bool] 52 | 53 | def add(self, other: BundledSaveRowsPayload) -> BundledSaveRowsPayload: 54 | self.uids.extend(other.uids) 55 | self.payload.extend(other.payload) 56 | self.validate_before_saving = self.validate_before_saving or other.validate_before_saving 57 | return self 58 | 59 | 60 | @dataclass 61 | class BundledSetPriorityPayload: 62 | priorities: List[Tuple[str, float]] 63 | 64 | def add(self, other: BundledSetPriorityPayload) -> BundledSetPriorityPayload: 65 | self.priorities.extend(other.priorities) 66 | return self 67 | 68 | 69 | @dataclass 70 | class BundledWorkflowCompletePayload: 71 | label_hashes: List[str] 72 | 73 | def add(self, other: BundledWorkflowCompletePayload) -> BundledWorkflowCompletePayload: 74 | self.label_hashes.extend(other.label_hashes) 75 | return self 76 | 77 | 78 | @dataclass 79 | class BundledWorkflowReopenPayload: 80 | label_hashes: List[str] 81 | 82 | def add(self, other: BundledWorkflowReopenPayload) -> BundledWorkflowReopenPayload: 83 | self.label_hashes.extend(other.label_hashes) 84 | return self 85 | -------------------------------------------------------------------------------- /encord/objects/common.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from dataclasses import dataclass 4 | from enum import Enum 5 | from typing import Any, Dict, List, Union 6 | 7 | from encord.common.enum import StringEnum 8 | from encord.objects.attributes import ( # pylint: disable=unused-import 9 | Attribute, 10 | ChecklistAttribute, 11 | RadioAttribute, 12 | TextAttribute, 13 | ) 14 | 15 | # Following imports need to be here for backwards compatibility 16 | from encord.objects.ontology_element import NestedID # pylint: disable=unused-import 17 | from encord.objects.options import ( # pylint: disable=unused-import 18 | FlatOption, 19 | NestableOption, 20 | Option, 21 | ) 22 | 23 | 24 | class PropertyType(StringEnum): 25 | RADIO = "radio" 26 | TEXT = "text" 27 | CHECKLIST = "checklist" 28 | 29 | 30 | class Shape(StringEnum): 31 | BOUNDING_BOX = "bounding_box" 32 | POLYGON = "polygon" 33 | POINT = "point" 34 | SKELETON = "skeleton" 35 | POLYLINE = "polyline" 36 | ROTATABLE_BOUNDING_BOX = "rotatable_bounding_box" 37 | BITMASK = "bitmask" 38 | AUDIO = "audio" 39 | TEXT = "text" 40 | 41 | 42 | class DeidentifyRedactTextMode(Enum): 43 | REDACT_ALL_TEXT = "REDACT_ALL_TEXT" 44 | REDACT_NO_TEXT = "REDACT_NO_TEXT" 45 | REDACT_SENSITIVE_TEXT = "REDACT_SENSITIVE_TEXT" 46 | 47 | 48 | class SaveDeidentifiedDicomConditionType(Enum): 49 | NOT_SUBSTR = "NOT_SUBSTR" 50 | IN = "IN" 51 | 52 | 53 | @dataclass 54 | class SaveDeidentifiedDicomConditionIn: 55 | value: List[str] 56 | dicom_tag: str 57 | 
condition_type: SaveDeidentifiedDicomConditionType = SaveDeidentifiedDicomConditionType.IN 58 | 59 | def to_dict(self) -> Dict[str, Any]: 60 | return { 61 | "value": self.value, 62 | "dicom_tag": self.dicom_tag, 63 | "condition_type": self.condition_type.value, 64 | } 65 | 66 | 67 | @dataclass 68 | class SaveDeidentifiedDicomConditionNotSubstr: 69 | value: str 70 | dicom_tag: str 71 | condition_type: SaveDeidentifiedDicomConditionType = SaveDeidentifiedDicomConditionType.NOT_SUBSTR 72 | 73 | def to_dict(self) -> Dict[str, Any]: 74 | return { 75 | "value": self.value, 76 | "dicom_tag": self.dicom_tag, 77 | "condition_type": self.condition_type.value, 78 | } 79 | 80 | 81 | SaveDeidentifiedDicomCondition = Union[ 82 | SaveDeidentifiedDicomConditionNotSubstr, 83 | SaveDeidentifiedDicomConditionIn, 84 | ] 85 | -------------------------------------------------------------------------------- /encord/objects/constants.py: -------------------------------------------------------------------------------- 1 | """--- 2 | title: "Objects - Constants" 3 | slug: "sdk-ref-objects-constant" 4 | hidden: false 5 | metadata: 6 | title: "Objects - Constants" 7 | description: "Encord SDK Objects - Constants." 8 | category: "64e481b57b6027003f20aaa0" 9 | --- 10 | """ 11 | 12 | from __future__ import annotations 13 | 14 | # for backwards compatibility 15 | from encord.common.constants import DATETIME_LONG_STRING_FORMAT 16 | 17 | DEFAULT_CONFIDENCE = 1.0 18 | DEFAULT_MANUAL_ANNOTATION = True 19 | AVAILABLE_COLORS = ( 20 | "#D33115", 21 | "#E27300", 22 | "#16406C", 23 | "#FE9200", 24 | "#FCDC00", 25 | "#DBDF00", 26 | "#A4DD00", 27 | "#68CCCA", 28 | "#73D8FF", 29 | "#AEA1FF", 30 | "#FCC400", 31 | "#B0BC00", 32 | "#68BC00", 33 | "#16A5A5", 34 | "#009CE0", 35 | "#7B64FF", 36 | "#FA28FF", 37 | "#B3B3B3", 38 | "#9F0500", 39 | "#C45100", 40 | "#FB9E00", 41 | "#808900", 42 | "#194D33", 43 | "#0C797D", 44 | "#0062B1", 45 | "#653294", 46 | "#AB149E", 47 | ) 48 | -------------------------------------------------------------------------------- /encord/objects/html_node.py: -------------------------------------------------------------------------------- 1 | """--- 2 | title: "Objects - HTML Node" 3 | slug: "sdk-ref-objects-html-node" 4 | hidden: false 5 | metadata: 6 | title: "Objects - HTML Node" 7 | description: "Encord SDK Objects - HTML Node." 8 | category: "64e481b57b6027003f20aaa0" 9 | --- 10 | """ 11 | 12 | from __future__ import annotations 13 | 14 | from dataclasses import dataclass 15 | from typing import Collection, List, Union, cast 16 | 17 | from encord.orm.base_dto import BaseDTO 18 | 19 | 20 | class HtmlNode(BaseDTO): 21 | """A class representing a single HTML node, with the xpath and offset. 22 | 23 | Attributes: 24 | xpath (str): The xpath of the node 25 | offset (int): The offset of the content from the xpath 26 | """ 27 | 28 | xpath: str 29 | offset: int 30 | 31 | def __repr__(self): 32 | return f"(Node: {self.xpath} Offset: {self.offset})" 33 | 34 | 35 | class HtmlRange(BaseDTO): 36 | """A class representing a section of HTML with a start and end node. 37 | 38 | Attributes: 39 | start (HtmlNode): The starting node of the range. 40 | end (HtmlNode): The ending node of the range. 
41 | """ 42 | 43 | start: HtmlNode 44 | end: HtmlNode 45 | 46 | def __repr__(self): 47 | return f"({self.start} - {self.end})" 48 | 49 | def to_dict(self): 50 | return { 51 | "start": {"xpath": self.start.xpath, "offset": self.start.offset}, 52 | "end": {"xpath": self.end.xpath, "offset": self.end.offset}, 53 | } 54 | 55 | def __hash__(self): 56 | return f"{self.start.xpath}-{self.start.offset}-{self.end.xpath}-{self.end.offset}" 57 | 58 | @classmethod 59 | def from_dict(cls, d: dict): 60 | return HtmlRange( 61 | start=HtmlNode(xpath=d["start"]["xpath"], offset=d["start"]["offset"]), 62 | end=HtmlNode(xpath=d["end"]["xpath"], offset=d["end"]["offset"]), 63 | ) 64 | 65 | 66 | HtmlRanges = List[HtmlRange] 67 | -------------------------------------------------------------------------------- /encord/objects/metadata.py: -------------------------------------------------------------------------------- 1 | """--- 2 | title: "Objects - DICOM Metadata" 3 | slug: "sdk-ref-objects-dicom-metadata" 4 | hidden: false 5 | metadata: 6 | title: "Objects - DICOM Metadata" 7 | description: "Encord SDK Objects - DICOM Metadata." 8 | category: "64e481b57b6027003f20aaa0" 9 | --- 10 | """ 11 | 12 | from typing import Optional 13 | 14 | from encord.orm.base_dto import BaseDTO 15 | 16 | 17 | class DICOMSeriesMetadata(BaseDTO): 18 | """Metadata for a DICOM series. 19 | 20 | Attributes: 21 | patient_id (Optional[str]): The ID of the patient. This attribute is optional. 22 | study_uid (str): The unique identifier for the study. 23 | series_uid (str): The unique identifier for the series. 24 | """ 25 | 26 | patient_id: Optional[str] 27 | study_uid: str 28 | series_uid: str 29 | 30 | 31 | class DICOMSliceMetadata(BaseDTO): 32 | """Metadata for a slice in a DICOM series. 33 | 34 | Attributes: 35 | dicom_instance_uid (str): The unique identifier for the DICOM instance. 36 | multiframe_frame_number (Optional[int]): The frame number if the DICOM instance is a multiframe image. This attribute is optional. 37 | file_uri (str): The URI to the file containing the slice. 38 | width (int): The width of the slice in pixels. 39 | height (int): The height of the slice in pixels. 40 | """ 41 | 42 | dicom_instance_uid: str 43 | multiframe_frame_number: Optional[int] 44 | file_uri: str 45 | width: int 46 | height: int 47 | -------------------------------------------------------------------------------- /encord/objects/project.py: -------------------------------------------------------------------------------- 1 | # Import for backward compatibility 2 | from encord.orm.project import ProjectDataset 3 | -------------------------------------------------------------------------------- /encord/objects/skeleton_template.py: -------------------------------------------------------------------------------- 1 | """--- 2 | title: "Objects - Skeleton Objects" 3 | slug: "sdk-ref-objects-skelly" 4 | hidden: false 5 | metadata: 6 | title: "Objects - Skeleton Objects" 7 | description: "Encord SDK Objects - Skeleton Objects." 
8 | category: "64e481b57b6027003f20aaa0"
9 | ---
10 | """
11 |
12 | from __future__ import annotations
13 |
14 | from dataclasses import dataclass
15 | from typing import Any, List, Optional, Set, Type
16 |
17 | from encord.objects.coordinates import SkeletonCoordinate, SkeletonCoordinates
18 | from encord.orm.skeleton_template import SkeletonTemplate as SkeletonTemplateORM
19 | from encord.orm.skeleton_template import SkeletonTemplateCoordinate
20 |
21 |
22 | class SkeletonTemplate(SkeletonTemplateORM):
23 | @property
24 | def required_vertices(self) -> Set[str]:
25 | """Get the set of required vertex names for the skeleton.
26 |
27 | Returns:
28 | Set[str]: A set containing the names of the required vertices.
29 | """
30 | return {coordinate.name for coordinate in self.skeleton.values()}
31 |
32 | def create_instance(self, provided_coordinates: List[SkeletonCoordinate]) -> SkeletonCoordinates:
33 | """Create an instance of SkeletonCoordinates with the provided coordinates.
34 |
35 | Args:
36 | provided_coordinates (List[SkeletonCoordinate]): A list of SkeletonCoordinate objects to align.
37 |
38 | Returns:
39 | SkeletonCoordinates: An instance of SkeletonCoordinates with aligned coordinates.
40 |
41 | Raises:
42 | ValueError: If the provided vertices do not match the required vertices.
43 | """
44 | provided_vertices = {provided_coordinate.name for provided_coordinate in provided_coordinates}
45 | if provided_vertices != self.required_vertices:
46 | difference = provided_vertices.symmetric_difference(self.required_vertices)
47 | raise ValueError(f"Provided vertices do not match the required vertices; the difference is {difference}")
48 | aligned_coordinates = []
49 | for coord in provided_coordinates:
50 | partner = [x for x in self.skeleton.values() if x.name == coord.name][0]
51 | aligned_coordinate = SkeletonCoordinate(
52 | x=coord.x, y=coord.y, name=coord.name, feature_hash=partner.feature_hash
53 | )
54 | aligned_coordinates.append(aligned_coordinate)
55 | return SkeletonCoordinates(values=aligned_coordinates, name=self.name)
56 |
--------------------------------------------------------------------------------
/encord/objects/utils.py:
--------------------------------------------------------------------------------
1 | """---
2 | title: "Objects - Utils"
3 | slug: "sdk-ref-objects-utils"
4 | hidden: false
5 | metadata:
6 | title: "Objects - Utils"
7 | description: "Encord SDK Objects - Utils."
8 | category: "64e481b57b6027003f20aaa0" 9 | --- 10 | """ 11 | 12 | from __future__ import annotations 13 | 14 | import base64 15 | import re 16 | import uuid 17 | from typing import Any, Iterable, List, Optional, Type, TypeVar, cast 18 | 19 | 20 | def short_uuid_str() -> str: 21 | """This is being used as a condensed uuid.""" 22 | return base64.b64encode(uuid.uuid4().bytes[:6]).decode("utf-8") 23 | 24 | 25 | def _lower_snake_case(s: str): 26 | return s.lower().replace(" ", "_") 27 | 28 | 29 | def check_type(obj: Any, type_: Optional[Type[Any]]) -> None: 30 | if not does_type_match(obj, type_): 31 | raise TypeError(f"Expected {type_}, got {type(obj)}") 32 | 33 | 34 | def does_type_match(obj: Any, type_: Optional[Type[Any]]) -> bool: 35 | return True if type_ is None else isinstance(obj, type_) 36 | 37 | 38 | T = TypeVar("T") 39 | 40 | 41 | def checked_cast(obj: Any, type_: Optional[Type[T]]) -> T: 42 | check_type(obj, type_) 43 | return cast(T, obj) 44 | 45 | 46 | def filter_by_type(objects: Iterable[Any], type_: Optional[Type[T]]) -> List[T]: 47 | return [object_ for object_ in objects if does_type_match(object_, type_)] 48 | 49 | 50 | def is_valid_email(email: str) -> bool: 51 | """Validate that an email is a valid one""" 52 | regex = r"[^@]+@[^@]+\.[^@]+" 53 | return bool(re.match(regex, email)) 54 | 55 | 56 | def check_email(email: str) -> None: 57 | if not is_valid_email(email): 58 | raise ValueError("Invalid email address") 59 | -------------------------------------------------------------------------------- /encord/orm/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/encord-team/encord-client-python/0245e717f07016e69ec062a08797d85026c2febb/encord/orm/__init__.py -------------------------------------------------------------------------------- /encord/orm/active.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from typing import Optional 3 | 4 | from encord.orm.base_dto import BaseDTO 5 | 6 | 7 | class ActiveProjectMode(Enum): 8 | DATA = "data" 9 | LABEL = "label" 10 | METRIC = "metric" 11 | ADVANCED = "advanced" 12 | 13 | 14 | class ActiveProjectImportPayload(BaseDTO): 15 | project_mode: ActiveProjectMode 16 | video_sampling_rate: Optional[float] = None 17 | -------------------------------------------------------------------------------- /encord/orm/analytics.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from enum import Enum, auto 3 | from typing import List, Literal, Optional 4 | from uuid import UUID 5 | 6 | from encord.common.utils import snake_to_camel 7 | from encord.orm.base_dto import BaseDTO 8 | from encord.orm.workflow import WorkflowNode, WorkflowStageType 9 | from encord.utilities.project_user import ProjectUserRole 10 | 11 | 12 | class CamelStrEnum(str, Enum): 13 | # noinspection PyMethodParameters 14 | def _generate_next_value_(name, start, count, last_values) -> str: # type: ignore 15 | return snake_to_camel(name) 16 | 17 | 18 | class CollaboratorTimersGroupBy(CamelStrEnum): 19 | DATA_UNIT = auto() 20 | PROJECT = auto() 21 | 22 | 23 | class CollaboratorTimerParams(BaseDTO): 24 | project_hash: str 25 | after: datetime 26 | before: Optional[datetime] = None 27 | group_by: CollaboratorTimersGroupBy = CollaboratorTimersGroupBy.DATA_UNIT 28 | page_size: int = 100 29 | page_token: Optional[str] = None 30 | 31 | 32 | class CollaboratorTimer(BaseDTO): 33 | 
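# One row of the collaborator-timers analytics response; whether times are grouped
# per data unit or per project is controlled by CollaboratorTimersGroupBy above.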
user_email: str 34 | user_role: ProjectUserRole 35 | data_title: Optional[str] = None 36 | time_seconds: float 37 | 38 | 39 | class TimeSpentParams(BaseDTO): 40 | project_uuid: str 41 | after: datetime 42 | before: Optional[datetime] = None 43 | workflow_stage_uuids: Optional[List[UUID]] = None 44 | user_emails: Optional[List[str]] = None 45 | dataset_uuids: Optional[List[UUID]] = None 46 | data_uuids: Optional[List[UUID]] = None 47 | data_title: Optional[str] = None 48 | page_token: Optional[str] = None 49 | 50 | 51 | class TimeSpent(BaseDTO): 52 | period_start_time: datetime 53 | period_end_time: datetime 54 | time_spent_seconds: int 55 | user_email: str 56 | project_user_role: ProjectUserRole 57 | data_uuid: UUID 58 | data_title: str 59 | dataset_uuid: UUID 60 | dataset_title: str 61 | workflow_task_uuid: Optional[UUID] = None 62 | workflow_stage: WorkflowNode 63 | -------------------------------------------------------------------------------- /encord/orm/base_dto/__init__.py: -------------------------------------------------------------------------------- 1 | import importlib.metadata as importlib_metadata 2 | 3 | from encord.orm.base_dto.base_dto_interface import BaseDTOInterface 4 | 5 | pydantic_version_str = importlib_metadata.version("pydantic") 6 | 7 | pydantic_version = int(pydantic_version_str.split(".")[0]) 8 | if pydantic_version < 2: 9 | from encord.orm.base_dto.base_dto_pydantic_v1 import ( 10 | BaseDTO, 11 | BaseDTOWithExtra, 12 | Field, 13 | GenericBaseDTO, 14 | PrivateAttr, 15 | RootModelDTO, 16 | dto_validator, 17 | ) 18 | else: 19 | from encord.orm.base_dto.base_dto_pydantic_v2 import ( # type: ignore[assignment] 20 | BaseDTO, 21 | BaseDTOWithExtra, 22 | Field, 23 | GenericBaseDTO, 24 | PrivateAttr, 25 | RootModelDTO, 26 | dto_validator, 27 | ) 28 | -------------------------------------------------------------------------------- /encord/orm/base_dto/base_dto_interface.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from typing import Any, Dict, Type, TypeVar 3 | 4 | T = TypeVar("T", bound="BaseDTOInterface") 5 | 6 | 7 | class BaseDTOInterface(ABC): 8 | @classmethod 9 | @abstractmethod 10 | def from_dict(cls: Type[T], d: Dict[str, Any]) -> T: 11 | pass 12 | 13 | @abstractmethod 14 | def to_dict(self, by_alias=True, exclude_none=True) -> Dict[str, Any]: 15 | pass 16 | -------------------------------------------------------------------------------- /encord/orm/bearer_request.py: -------------------------------------------------------------------------------- 1 | from encord.orm.base_dto import BaseDTO 2 | 3 | 4 | class BearerTokenResponse(BaseDTO): 5 | token: str 6 | -------------------------------------------------------------------------------- /encord/orm/client_metadata_schema.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from typing import Any, Dict 3 | from uuid import UUID 4 | 5 | from encord.common.enum import StringEnum 6 | from encord.orm.base_dto import BaseDTO 7 | 8 | 9 | class ClientMetadataSchemaTypes(StringEnum): 10 | NUMBER = "number" 11 | STRING = "string" 12 | BOOLEAN = "boolean" 13 | DATETIME = "datetime" 14 | GEOSPATIAL = "geospatial" 15 | ENUM = "enum" 16 | EMBEDDING = "embedding" 17 | LONG_STRING = "long_string" 18 | 19 | 20 | class ClientMetadataSchema(BaseDTO): 21 | uuid: UUID 22 | metadata_schema: Dict[str, ClientMetadataSchemaTypes] 23 | organisation_id: int 24 | created_at: datetime 25 | 
updated_at: datetime 26 | 27 | 28 | class ClientMetadataSchemaPayload(BaseDTO): 29 | metadata_schema: Dict[str, ClientMetadataSchemaTypes] 30 | 31 | def to_dict(self, by_alias=True, exclude_none=True) -> Dict[str, Any]: 32 | return self.metadata_schema 33 | -------------------------------------------------------------------------------- /encord/orm/cloud_integration.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional 2 | from uuid import UUID 3 | 4 | from encord.orm.base_dto import BaseDTO 5 | 6 | 7 | class CloudIntegration(BaseDTO): 8 | id: str 9 | title: str 10 | 11 | 12 | class CloudIntegrationV2(BaseDTO): 13 | integration_uuid: UUID 14 | title: str 15 | 16 | 17 | class GetCloudIntegrationsResponse(BaseDTO): 18 | result: List[CloudIntegrationV2] 19 | 20 | 21 | class GetCloudIntegrationsParams(BaseDTO): 22 | filter_integration_uuids: Optional[List[UUID]] = None 23 | filter_integration_titles: Optional[List[str]] = None 24 | include_org_access: bool = False 25 | -------------------------------------------------------------------------------- /encord/orm/deidentification.py: -------------------------------------------------------------------------------- 1 | from enum import Enum, auto 2 | from typing import List, Optional, Union 3 | from uuid import UUID 4 | 5 | from encord.orm.analytics import CamelStrEnum 6 | from encord.orm.base_dto import BaseDTO 7 | 8 | 9 | class DicomDeIdRedactTextMode(CamelStrEnum): 10 | REDACT_ALL_TEXT = auto() 11 | REDACT_NO_TEXT = auto() 12 | REDACT_SENSITIVE_TEXT = auto() 13 | 14 | 15 | class DicomDeIdSaveConditionType(CamelStrEnum): 16 | NOT_SUBSTR = auto() 17 | IN = auto() 18 | 19 | 20 | class DicomDeIdSaveCondition(BaseDTO): 21 | value: Union[str, List[str]] 22 | condition_type: DicomDeIdSaveConditionType 23 | dicom_tag: str 24 | 25 | 26 | class DicomDeIdStartPayload(BaseDTO): 27 | integration_uuid: UUID 28 | dicom_urls: List[str] 29 | redact_dicom_tags: bool = True 30 | redact_pixels_mode: DicomDeIdRedactTextMode = DicomDeIdRedactTextMode.REDACT_NO_TEXT 31 | save_conditions: Optional[List[DicomDeIdSaveCondition]] = None 32 | upload_dir: Optional[str] = None 33 | 34 | 35 | class DicomDeIdGetResultParams(BaseDTO): 36 | timeout_seconds: int 37 | 38 | 39 | class DicomDeIdGetResultLongPollingStatus(str, Enum): 40 | DONE = "DONE" 41 | ERROR = "ERROR" 42 | PENDING = "PENDING" 43 | 44 | 45 | class DicomDeIdGetResultResponse(BaseDTO): 46 | status: DicomDeIdGetResultLongPollingStatus 47 | urls: Optional[List[str]] = None 48 | -------------------------------------------------------------------------------- /encord/orm/filter_preset.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | from datetime import datetime 3 | from typing import Dict, List, Optional 4 | from uuid import UUID 5 | 6 | from encord.orm.base_dto import BaseDTO, Field, dto_validator 7 | 8 | 9 | class GetPresetParams(BaseDTO): 10 | top_level_folder_uuid: Optional[UUID] = Field(default=None, alias="topLevelFolderUuid") 11 | preset_uuids: Optional[List[UUID]] = Field(default=[], alias="uuids") 12 | page_token: Optional[str] = Field(default=None, alias="pageToken") 13 | page_size: Optional[int] = Field(default=None, alias="pageSize") 14 | 15 | 16 | class FilterPreset(BaseDTO): 17 | uuid: uuid.UUID 18 | name: str 19 | description: Optional[str] 20 | created_at: Optional[datetime] = Field(default=None, alias="createdAt") 21 | last_updated_at: Optional[datetime] = Field(default=None, 
alias="lastUpdatedAt") 22 | 23 | 24 | class GetProjectFilterPresetParams(BaseDTO): 25 | preset_uuids: Optional[List[uuid.UUID]] = Field(default=[]) 26 | page_token: Optional[str] = Field(default=None) 27 | page_size: Optional[int] = Field(default=None) 28 | 29 | 30 | class ProjectFilterPreset(BaseDTO): 31 | preset_uuid: uuid.UUID = Field(alias="presetUuid") 32 | name: str 33 | created_at: Optional[datetime] = Field(default=None) 34 | updated_at: Optional[datetime] = Field(default=None) 35 | 36 | 37 | class FilterDefinition(BaseDTO): 38 | filters: List[Dict] = Field(default_factory=list) 39 | 40 | 41 | # Note alias is strictly required as these are stored in the Annotate DB as unstructured objects 42 | # Stored not in camelCase like most Models 43 | class IndexFilterPresetDefinition(BaseDTO): 44 | local_filters: Dict[str, FilterDefinition] = Field( 45 | default_factory=lambda: {str(uuid.UUID(int=0)): FilterDefinition()}, 46 | alias="local_filters", 47 | ) 48 | global_filters: FilterDefinition = Field(default_factory=FilterDefinition, alias="global_filters") 49 | 50 | 51 | class ActiveFilterPresetDefinition(BaseDTO): 52 | local_filters: Dict[str, FilterDefinition] = Field( 53 | default_factory=lambda: {str(uuid.UUID(int=0)): FilterDefinition()}, 54 | ) 55 | global_filters: FilterDefinition = Field(default_factory=FilterDefinition) 56 | 57 | 58 | class GetPresetsResponse(BaseDTO): 59 | results: List[FilterPreset] 60 | 61 | 62 | class CreatePresetParams(BaseDTO): 63 | top_level_folder_uuid: UUID = Field(default=UUID(int=0), alias="topLevelFolderUuid") 64 | 65 | 66 | class IndexCreatePresetPayload(BaseDTO): 67 | name: str 68 | filter_preset_json: Dict 69 | description: Optional[str] = "" 70 | 71 | 72 | class ActiveCreatePresetPayload(BaseDTO): 73 | name: str 74 | filter_preset_json: Dict 75 | 76 | 77 | class IndexUpdatePresetPayload(BaseDTO): 78 | name: Optional[str] = None 79 | description: Optional[str] = "" 80 | filter_preset: Optional[IndexFilterPresetDefinition] = None 81 | 82 | 83 | class ActiveUpdatePresetPayload(BaseDTO): 84 | name: Optional[str] = None 85 | filter_preset: Optional[ActiveFilterPresetDefinition] = None 86 | -------------------------------------------------------------------------------- /encord/orm/formatter.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from typing import Any, Dict, Type, TypeVar 3 | 4 | T = TypeVar("T", bound="Formatter") 5 | 6 | 7 | class Formatter(ABC): 8 | @classmethod 9 | @abstractmethod 10 | def from_dict(cls: Type[T], json_dict: Dict[str, Any]) -> T: 11 | pass 12 | -------------------------------------------------------------------------------- /encord/orm/group.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from typing import List 3 | from uuid import UUID 4 | 5 | from encord.orm.base_dto import BaseDTO 6 | from encord.orm.dataset import DatasetUserRole 7 | from encord.orm.storage import StorageUserRole 8 | from encord.utilities.ontology_user import OntologyUserRole 9 | from encord.utilities.project_user import ProjectUserRole 10 | 11 | 12 | class Group(BaseDTO): 13 | group_hash: UUID 14 | name: str 15 | description: str 16 | created_at: datetime 17 | 18 | 19 | class EntityGroup(Group): 20 | is_same_organisation: bool 21 | 22 | 23 | class ProjectGroup(EntityGroup): 24 | user_role: ProjectUserRole 25 | 26 | 27 | class OntologyGroup(EntityGroup): 28 | user_role: OntologyUserRole 29 | 30 | 31 
| class DatasetGroup(EntityGroup): 32 | user_role: DatasetUserRole 33 | 34 | 35 | class StorageFolderGroup(EntityGroup): 36 | user_role: StorageUserRole 37 | 38 | 39 | class AddGroupsPayload(BaseDTO): 40 | group_hash_list: List[UUID] 41 | 42 | 43 | class AddProjectGroupsPayload(AddGroupsPayload): 44 | user_role: ProjectUserRole 45 | 46 | 47 | class AddDatasetGroupsPayload(AddGroupsPayload): 48 | user_role: DatasetUserRole 49 | 50 | 51 | class AddOntologyGroupsPayload(AddGroupsPayload): 52 | user_role: OntologyUserRole 53 | 54 | 55 | class AddStorageFolderGroupsPayload(AddGroupsPayload): 56 | user_role: StorageUserRole 57 | 58 | 59 | class RemoveGroupsParams(BaseDTO): 60 | group_hash_list: List[UUID] 61 | -------------------------------------------------------------------------------- /encord/orm/label_log.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from datetime import datetime 4 | from enum import IntEnum 5 | from typing import Optional 6 | 7 | from encord.common.deprecated import deprecated 8 | from encord.orm.base_dto import BaseDTO 9 | 10 | 11 | class Action(IntEnum): 12 | ADD = 0 13 | EDIT = 1 14 | DELETE = 2 15 | START = 3 16 | END = 4 17 | MARK_AS_NOT_LABELLED = 5 18 | MARK_AS_IN_PROGRESS = 6 19 | MARK_AS_LABELLED = 7 20 | MARK_AS_REVIEW_REQUIRED = 8 21 | MARK_AS_REVIEWED = 9 22 | MARK_AS_REVIEWED_TWICE = 10 23 | SUBMIT_TASK = 11 24 | APPROVE_LABEL = 12 25 | REJECT_LABEL = 13 26 | CLICK_SAVE = 14 27 | CLICK_UNDO = 15 28 | CLICK_REDO = 16 29 | CLICK_BULK = 17 30 | CLICK_ZOOM = 19 31 | CLICK_BRIGHTNESS = 20 32 | CLICK_HOTKEYS = 21 33 | CLICK_SETTINGS = 22 34 | ADD_ATTRIBUTE = 23 35 | EDIT_ATTRIBUTE = 24 36 | DELETE_ATTRIBUTE = 25 37 | APPROVE_NESTED_ATTRIBUTE = 26 38 | REJECT_NESTED_ATTRIBUTE = 27 39 | SUBMIT_LABEL = 28 40 | SUBMIT_NESTED_ATTRIBUTE = 29 41 | BUFFERING_OVERLAY_SHOWN = 30 42 | BITRATE_WARNING_SHOWN = 31 43 | SEEKING_OVERLAY_SHOWN = 32 44 | APPROVE_TASK = 33 45 | REJECT_TASK = 34 46 | CONSENSUS_APPROVE_LABEL = 35 47 | CONSENSUS_CANCEL_APPROVE_LABEL = 36 48 | CONSENSUS_IN_AGREEMENT = 37 49 | 50 | 51 | class LabelLog(BaseDTO): 52 | log_hash: str 53 | user_hash: str 54 | user_email: str 55 | data_hash: str 56 | action: Action 57 | created_at: datetime 58 | identifier: Optional[str] 59 | feature_hash: Optional[str] 60 | label_name: Optional[str] 61 | time_taken: Optional[int] 62 | frame: Optional[int] 63 | 64 | @property 65 | @deprecated(version="0.1.100", alternative="LabelLog.identifier") 66 | def annotation_hash(self) -> Optional[str]: 67 | """DEPRECATED: this field is only provided for backwards compatibility, and will be removed in the future versions. 68 | Please use :attr:`identifier ` instead. 69 | """ 70 | return self.identifier 71 | 72 | 73 | class LabelLogParams(BaseDTO): 74 | user_hash: Optional[str] 75 | data_hash: Optional[str] 76 | start_timestamp: Optional[int] 77 | end_timestamp: Optional[int] 78 | user_email: Optional[str] 79 | # Flag for backwards compatibility 80 | include_user_email_and_interface_key: bool = True 81 | -------------------------------------------------------------------------------- /encord/orm/labeling_algorithm.py: -------------------------------------------------------------------------------- 1 | from collections import OrderedDict 2 | 3 | from encord.orm import base_orm 4 | 5 | 6 | class LabelingAlgorithm(base_orm.BaseORM): 7 | """Labeling algorithm base ORM. 
8 | 9 | ORM: 10 | 11 | algorithm_name, 12 | algorithm_params 13 | 14 | """ 15 | 16 | DB_FIELDS = OrderedDict( 17 | [ 18 | ("algorithm_name", str), 19 | ("algorithm_parameters", dict), # Algorithm params 20 | ] 21 | ) 22 | 23 | 24 | class ObjectInterpolationParams(base_orm.BaseORM): 25 | """Labeling algorithm parameters for interpolation algorithm 26 | 27 | ORM: 28 | 29 | key_frames, 30 | objects_to_interpolate 31 | 32 | """ 33 | 34 | DB_FIELDS = OrderedDict( 35 | [ 36 | ("key_frames", dict), 37 | ("objects_to_interpolate", list), 38 | ] 39 | ) 40 | 41 | 42 | class BoundingBoxFittingParams(base_orm.BaseORM): 43 | """Labeling algorithm parameters for bounding box fitting algorithm 44 | 45 | ORM: 46 | 47 | labels, 48 | video 49 | 50 | """ 51 | 52 | DB_FIELDS = OrderedDict( 53 | [ 54 | ("labels", dict), 55 | ("video", dict), 56 | ] 57 | ) 58 | -------------------------------------------------------------------------------- /encord/orm/ontology.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from datetime import datetime 4 | from enum import IntEnum 5 | from typing import Dict, Optional 6 | 7 | # pylint: disable=unused-import 8 | from encord.objects.ontology_structure import OntologyStructure 9 | from encord.orm.base_dto import BaseDTO, dto_validator 10 | from encord.orm.formatter import Formatter 11 | from encord.utilities.ontology_user import OntologyUserRole 12 | 13 | 14 | class Ontology(dict, Formatter): 15 | def __init__( 16 | self, 17 | title: str, 18 | structure: OntologyStructure, 19 | ontology_hash: str, 20 | created_at: datetime, 21 | last_edited_at: datetime, 22 | description: Optional[str] = None, 23 | user_role: Optional[OntologyUserRole] = None, 24 | ): 25 | """DEPRECATED - prefer using the :class:`encord.ontology.Ontology` class instead. 26 | 27 | This class has dict-style accessors for backwards compatibility. 28 | Clients who are using this class for the first time are encouraged to use the property accessors and setters 29 | instead of the underlying dictionary. 30 | The mixed use of the `dict` style member functions and the property accessors and setters is discouraged. 31 | 32 | WARNING: Do NOT use the `.data` member of this class. Its usage could corrupt the correctness of the 33 | datastructure. 
34 | """ 35 | super().__init__( 36 | { 37 | "ontology_hash": ontology_hash, 38 | "title": title, 39 | "description": description, 40 | "structure": structure, 41 | "created_at": created_at, 42 | "last_edited_at": last_edited_at, 43 | "user_role": user_role, 44 | } 45 | ) 46 | 47 | @property 48 | def ontology_hash(self) -> str: 49 | return self["ontology_hash"] 50 | 51 | @property 52 | def title(self) -> str: 53 | return self["title"] 54 | 55 | @title.setter 56 | def title(self, value: str) -> None: 57 | self["title"] = value 58 | 59 | @property 60 | def description(self) -> str: 61 | return self["description"] 62 | 63 | @description.setter 64 | def description(self, value: str) -> None: 65 | self["description"] = value 66 | 67 | @property 68 | def structure(self) -> OntologyStructure: 69 | return self["structure"] 70 | 71 | @structure.setter 72 | def structure(self, value: OntologyStructure) -> None: 73 | self["structure"] = value 74 | 75 | @property 76 | def created_at(self) -> datetime: 77 | return self["created_at"] 78 | 79 | @property 80 | def last_edited_at(self) -> datetime: 81 | return self["last_edited_at"] 82 | 83 | @property 84 | def user_role(self) -> OntologyUserRole: 85 | return self["user_role"] 86 | 87 | @classmethod 88 | def from_dict(cls, json_dict: Dict) -> Ontology: 89 | return Ontology( 90 | title=json_dict["title"], 91 | description=json_dict["description"], 92 | ontology_hash=json_dict["ontology_hash"], 93 | structure=OntologyStructure.from_dict(json_dict["editor"]), 94 | created_at=json_dict["created_at"], 95 | last_edited_at=json_dict["last_edited_at"], 96 | user_role=json_dict.get("user_role"), # has to be like this to support the legacy endpoints for tests 97 | ) 98 | 99 | 100 | class CreateOrUpdateOntologyPayload(BaseDTO): 101 | title: str 102 | description: str 103 | editor: dict 104 | -------------------------------------------------------------------------------- /encord/orm/project_with_user_role.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | 4 | @dataclass(frozen=True) 5 | class ProjectWithUserRole: 6 | """This is a helper class denoting the relationship between the current user an a project""" 7 | 8 | user_role: int 9 | project: dict 10 | -------------------------------------------------------------------------------- /encord/orm/skeleton_template.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Optional 2 | 3 | from pydantic import Field 4 | 5 | from encord.orm.base_dto import BaseDTO 6 | 7 | 8 | class SkeletonTemplateCoordinate(BaseDTO): 9 | x: float 10 | y: float 11 | name: str 12 | color: Optional[str] = "#00000" 13 | value: Optional[str] = "" 14 | feature_hash: Optional[str] = None 15 | 16 | 17 | class SkeletonTemplate(BaseDTO): 18 | name: str 19 | width: float 20 | height: float 21 | skeleton: Dict[str, SkeletonTemplateCoordinate] 22 | skeleton_edges: Dict[str, Dict[str, Dict[str, str]]] # start-end-color-hex 23 | feature_node_hash: Optional[str] = Field(default=None, alias="feature_node_hash") 24 | shape: Optional[str] = "skeleton" 25 | -------------------------------------------------------------------------------- /encord/orm/workflow.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from typing import Any, List, Union 3 | from uuid import UUID 4 | 5 | from typing_extensions import Annotated 6 | 7 | from encord.orm.base_dto import 
8 |
9 |
10 | class WorkflowAction(str, Enum):
11 | REOPEN = "reopen"
12 | COMPLETE = "complete"
13 |
14 |
15 | class WorkflowStageType(str, Enum):
16 | ANNOTATION = "ANNOTATION"
17 | REVIEW = "REVIEW"
18 | USER_ROUTER = "USER_ROUTER"
19 | PERCENTAGE_ROUTER = "PERCENTAGE_ROUTER"
20 | CONSENSUS_ANNOTATION = "CONSENSUS_ANNOTATION"
21 | CONSENSUS_REVIEW = "CONSENSUS_REVIEW"
22 | DONE = "DONE"
23 | AGENT = "AGENT"
24 |
25 |
26 | class LabelWorkflowGraphNode:
27 | """
28 | This class is only required to indicate the correct request type to the basic querier;
29 | don't use it anywhere else.
30 | """
31 |
32 | pass
33 |
34 |
35 | class LabelWorkflowGraphNodePayload(BaseDTO):
36 | action: WorkflowAction
37 |
38 |
39 | class BaseWorkflowNode(BaseDTO):
40 | stage_type: WorkflowStageType
41 | uuid: UUID
42 | title: str
43 |
44 |
45 | class WorkflowNode(BaseWorkflowNode):
46 | @dto_validator(mode="before")
47 | def check_stage_type_not_agent(cls, v: Any) -> Any:
48 | # Handle creation of Object from dictionary or from cls() call
49 | stage_type = v.stage_type if isinstance(v, BaseWorkflowNode) else (v.get("stageType") or v.get("stage_type"))
50 | if stage_type == WorkflowStageType.AGENT:
51 | raise ValueError("stage_type cannot be AGENT for WorkflowNode")
52 | return v
53 |
54 |
55 | class AgentNodePathway(BaseDTO):
56 | uuid: UUID
57 | title: str
58 | destination_uuid: UUID
59 |
60 |
61 | class WorkflowAgentNode(BaseWorkflowNode):
62 | @dto_validator(mode="before")
63 | def check_stage_type_agent(cls, v: Any) -> Any:
64 | # Handle creation of Object from dictionary or from cls() call
65 | stage_type = v.stage_type if isinstance(v, BaseWorkflowNode) else (v.get("stageType") or v.get("stage_type"))
66 | if stage_type != WorkflowStageType.AGENT:
67 | raise ValueError("stage_type must be AGENT for WorkflowAgentNode")
68 | return v
69 |
70 | pathways: List[AgentNodePathway]
71 |
72 |
73 | class WorkflowDTO(BaseDTO):
74 | stages: List[Union[WorkflowAgentNode, WorkflowNode]]
75 |
--------------------------------------------------------------------------------
/encord/project_ontology/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/encord-team/encord-client-python/0245e717f07016e69ec062a08797d85026c2febb/encord/project_ontology/__init__.py
--------------------------------------------------------------------------------
/encord/project_ontology/classification_attribute.py:
--------------------------------------------------------------------------------
1 | """---
2 | title: "ClassificationAttribute DEPRECATED"
3 | slug: "sdk-ref-classification-attribute-deprecated"
4 | hidden: false
5 | metadata:
6 | title: "ClassificationAttribute DEPRECATED"
7 | description: "Encord SDK ClassificationAttribute DEPRECATED."
8 | category: "64e481b57b6027003f20aaa0"
9 | ---
10 | """
11 |
12 | from dataclasses import dataclass
13 | from typing import Iterable, Optional
14 |
15 | from encord.project_ontology.classification_option import ClassificationOption
16 | from encord.project_ontology.classification_type import ClassificationType
17 |
18 |
19 | @dataclass
20 | class ClassificationAttribute:
21 | """DEPRECATED: prefer using :class:`encord.ontology.Ontology`
22 |
23 | A dataclass which holds classification attributes.
24 | """
25 |
26 | #: A unique (to the ontology) identifier of the attribute.
27 | id: str
28 | #: The descriptive name of the attribute.
29 | name: str
30 | #: What type of attribute it is, e.g., checklist or radio button.
31 | classification_type: ClassificationType 32 | #: Whether annotating this attribute is required. 33 | required: bool 34 | #: An 8-character hex string uniquely defining the attribute. 35 | feature_node_hash: str 36 | #: Nested classification options. 37 | options: Optional[Iterable[ClassificationOption]] = None 38 | 39 | def __setattr__(self, name, value): 40 | if (name == "classification_type" and value == ClassificationType.TEXT and self.__dict__.get("options")) or ( 41 | name == "options" and value and self.__dict__.get("classification_type") == ClassificationType.TEXT 42 | ): 43 | raise Exception("cannot assign options to a classification text") 44 | self.__dict__[name] = value 45 | -------------------------------------------------------------------------------- /encord/project_ontology/classification_option.py: -------------------------------------------------------------------------------- 1 | """--- 2 | title: "ClassificationOption DEPRECATED" 3 | slug: "sdk-ref-classification-option-deprecated" 4 | hidden: false 5 | metadata: 6 | title: "ClassificationOption DEPRECATED" 7 | description: "Encord SDK ClassificationOption DEPRECATED." 8 | category: "64e481b57b6027003f20aaa0" 9 | --- 10 | """ 11 | 12 | from dataclasses import dataclass 13 | 14 | 15 | @dataclass 16 | class ClassificationOption: 17 | """DEPRECATED: prefer using :class:`encord.ontology.Ontology` 18 | 19 | A dataclass which holds nested options for the :class:`.ClassificationAttribute`. 20 | """ 21 | 22 | #: A unique (to the ontology) identifier of the option. 23 | id: str 24 | #: A description of the option. 25 | label: str 26 | #: A snake-case concatenated version of the label. 27 | value: str 28 | #: An 8-character hex string uniquely defining the option. 29 | feature_node_hash: str 30 | -------------------------------------------------------------------------------- /encord/project_ontology/classification_type.py: -------------------------------------------------------------------------------- 1 | """--- 2 | title: "ClassificationType DEPRECATED" 3 | slug: "sdk-ref-classification-type-deprecated" 4 | hidden: false 5 | metadata: 6 | title: "ClassificationType DEPRECATED" 7 | description: "Encord SDK ClassificationType DEPRECATED." 8 | category: "64e481b57b6027003f20aaa0" 9 | --- 10 | """ 11 | 12 | from enum import Enum 13 | 14 | 15 | class ClassificationType(Enum): 16 | """DEPRECATED: prefer using :class:`encord.ontology.Ontology` 17 | 18 | Enum used to define classification type in ontologies. 19 | """ 20 | 21 | #: Single select option 22 | RADIO = "radio" 23 | #: Text option for free text input. 
24 | TEXT = "text" 25 | #: Multi select option 26 | CHECKLIST = "checklist" 27 | -------------------------------------------------------------------------------- /encord/project_ontology/object_type.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class ObjectShape(Enum): 5 | POLYGON = "polygon" 6 | POLYLINE = "polyline" 7 | BOUNDING_BOX = "bounding_box" 8 | KEY_POINT = "point" 9 | SKELETON = "skeleton" 10 | ROTATABLE_BOUNDING_BOX = "rotatable_bounding_box" 11 | -------------------------------------------------------------------------------- /encord/project_ontology/ontology_classification.py: -------------------------------------------------------------------------------- 1 | """--- 2 | title: "OntologyClassification DEPRECATED" 3 | slug: "sdk-ref-ontology-classification-deprecated" 4 | hidden: false 5 | metadata: 6 | title: "OntologyClassification DEPRECATED" 7 | description: "Encord SDK OntologyClassification DEPRECATED." 8 | category: "64e481b57b6027003f20aaa0" 9 | --- 10 | """ 11 | 12 | from dataclasses import dataclass 13 | from typing import List 14 | 15 | from encord.project_ontology.classification_attribute import ClassificationAttribute 16 | 17 | 18 | @dataclass 19 | class OntologyClassification: 20 | """DEPRECATED: prefer using :class:`encord.ontology.Ontology` 21 | 22 | A dataclass which holds classifications of the :class:`encord.project_ontology.Ontology`. 23 | """ 24 | 25 | #: A unique (to the ontology) identifier of the classification. 26 | id: str 27 | #: An 8-character hex string uniquely defining the option. 28 | feature_node_hash: str 29 | #: A List of attributes for the classification. 30 | attributes: List[ClassificationAttribute] 31 | -------------------------------------------------------------------------------- /encord/project_ontology/ontology_object.py: -------------------------------------------------------------------------------- 1 | """--- 2 | title: "OntologyObject DEPRECATED" 3 | slug: "sdk-ref-ontology-object-deprecated" 4 | hidden: false 5 | metadata: 6 | title: "OntologyObject DEPRECATED" 7 | description: "Encord SDK OntologyObject DEPRECATED." 8 | category: "64e481b57b6027003f20aaa0" 9 | --- 10 | """ 11 | 12 | from dataclasses import dataclass 13 | 14 | from encord.project_ontology.object_type import ObjectShape 15 | 16 | 17 | @dataclass 18 | class OntologyObject: 19 | """DEPRECATED: prefer using :class:`encord.ontology.Ontology` 20 | 21 | A dataclass which holds an object for of the :class:`encord.project_ontology.Ontology`. 22 | """ 23 | 24 | #: A unique (to the ontology) identifier of the classification. 25 | id: str 26 | #: The color which is displayed in the web-app. 27 | color: str 28 | #: The name of the object. 29 | name: str 30 | #: The shape of the object. E.g., polygon, polyline, and bounding_box. 31 | shape: ObjectShape 32 | #: An 8-character hex string uniquely defining the option. 
33 | feature_node_hash: str
34 |
--------------------------------------------------------------------------------
/encord/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/encord-team/encord-client-python/0245e717f07016e69ec062a08797d85026c2febb/encord/py.typed
--------------------------------------------------------------------------------
/encord/utilities/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/encord-team/encord-client-python/0245e717f07016e69ec062a08797d85026c2febb/encord/utilities/__init__.py
--------------------------------------------------------------------------------
/encord/utilities/client_utilities.py:
--------------------------------------------------------------------------------
1 | """---
2 | title: "Utilities - Client"
3 | slug: "sdk-ref-utilities-client"
4 | hidden: false
5 | metadata:
6 | title: "Utilities - Client"
7 | description: "Encord SDK Utilities - Client."
8 | category: "64e481b57b6027003f20aaa0"
9 | ---
10 | """
11 |
12 | import pprint
13 | from dataclasses import dataclass
14 | from datetime import datetime
15 | from typing import Callable, List, Optional, Set, TypeVar, Union
16 |
17 | from encord.common.time_parser import parse_datetime
18 |
19 |
20 | def pretty_print(data):
21 | return pprint.pformat(data, indent=4, width=10)
22 |
23 |
24 | @dataclass
25 | class LocalImport:
26 | """file_path:
27 | Supply the path of the exported folder which contains the images and `annotations.xml` file. Make
28 | sure to select "Save images" when exporting your CVAT Task or Project.
29 | map_filename_to_cvat_name:
30 | Encord expects file names (including their relative paths) to exactly match the
31 | "name" parameter of the CVAT image.
32 | If they do not, the user can supply a `map_filename_to_cvat_name` function that maps the file name to the CVAT image name.
33 | """
34 |
35 | file_path: str
36 | map_filename_to_cvat_name: Optional[Callable[[str], str]] = None
37 |
38 |
39 | ImportMethod = LocalImport
40 | """Using images/videos in cloud storage as an alternative import method will be supported in the future."""
41 |
42 |
43 | @dataclass
44 | class Issue:
45 | """For each `issue_type` there may be multiple occurrences which are documented in the `instances`. The `instances`
46 | list can provide additional information on how the issue was encountered. If there is no additional information
47 | available, the `instances` list will be empty.
48 | """
49 |
50 | issue_type: str
51 | instances: List[str]
52 |
53 |
54 | @dataclass
55 | class Issues:
56 | """Any issues that came up during importing a project. These usually come from incompatibilities between data saved
57 | on different platforms.
58 | """ 59 | 60 | errors: List[Issue] 61 | warnings: List[Issue] 62 | infos: List[Issue] 63 | 64 | @staticmethod 65 | def from_dict(d: dict) -> "Issues": 66 | errors, warnings, infos = [], [], [] 67 | for error in d["errors"]: 68 | issue = Issue(issue_type=error["issue_type"], instances=error["instances"]) 69 | errors.append(issue) 70 | for warning in d["warnings"]: 71 | issue = Issue(issue_type=warning["issue_type"], instances=warning["instances"]) 72 | warnings.append(issue) 73 | for info in d["infos"]: 74 | issue = Issue(issue_type=info["issue_type"], instances=info["instances"]) 75 | infos.append(issue) 76 | return Issues(errors=errors, warnings=warnings, infos=infos) 77 | 78 | 79 | @dataclass 80 | class CvatImporterSuccess: 81 | project_hash: str 82 | dataset_hash: str 83 | issues: Issues 84 | 85 | 86 | @dataclass 87 | class CvatImporterError: 88 | issues: Issues 89 | 90 | 91 | def optional_datetime_to_iso_str(key: str, val: Optional[Union[str, datetime]]) -> Optional[str]: 92 | if not val: 93 | return None 94 | if isinstance(val, str): 95 | return parse_datetime(val).isoformat() 96 | if isinstance(val, datetime): 97 | return val.isoformat() 98 | else: 99 | raise ValueError(f"Value for {key} should be a datetime") 100 | 101 | 102 | T = TypeVar("T") 103 | 104 | 105 | def optional_set_to_list(s: Optional[Set[T]]) -> Optional[List[T]]: 106 | if s is None: 107 | return s 108 | else: 109 | return list(s) 110 | -------------------------------------------------------------------------------- /encord/utilities/coco/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/encord-team/encord-client-python/0245e717f07016e69ec062a08797d85026c2febb/encord/utilities/coco/__init__.py -------------------------------------------------------------------------------- /encord/utilities/coco/polygon_utils.py: -------------------------------------------------------------------------------- 1 | from typing import Any, List 2 | 3 | from encord.common.bitmask_operations import deserialise_bitmask 4 | from encord.objects.coordinates import PolygonCoordinates 5 | 6 | 7 | def find_contours(mask: Any) -> List[List[List[float]]]: 8 | """ 9 | Find all contours in the given binary mask, including inner contours. 10 | Returns polygons in GeoJSON format: triple nested list where: 11 | - Top level = polygon 12 | - Second level = list of rings (first is outer contour, rest are inner/holes) 13 | - Third level = flat list of coordinates [x1, y1, x2, y2, ...] 
14 |
15 | Args:
16 | mask: np.ndarray
17 |
18 | Returns:
19 | List of polygons in GeoJSON format
20 | """
21 | try:
22 | import cv2
23 | import numpy as np
24 | except ImportError as e:
25 | raise ImportError(
26 | "The 'cv2' and 'numpy' packages are required to import polygons from COCO RLE strings. "
27 | "Install them with: `pip install encord[coco]`"
28 | ) from e
29 |
30 | contours, hierarchy = cv2.findContours(
31 | mask.astype(np.uint8),
32 | cv2.RETR_CCOMP, # Retrieves all contours and organizes them into a two-level hierarchy
33 | cv2.CHAIN_APPROX_SIMPLE,
34 | )
35 |
36 | polygons: List[List[List[float]]] = []
37 |
38 | # First, identify all outer contours (hierarchy[i][3] == -1)
39 | if hierarchy is not None and len(hierarchy) > 0:
40 | hierarchy = hierarchy[0]
41 |
42 | for i, contour in enumerate(contours):
43 | if hierarchy[i][3] == -1: # This is an outer contour
44 | # Create a new polygon with this outer contour as the first ring
45 | polygon = []
46 |
47 | # Add outer contour as first ring
48 | outer_contour = contour.reshape(-1).tolist()
49 | polygon.append(outer_contour)
50 |
51 | # Find all holes for this contour
52 | for j, inner_contour in enumerate(contours):
53 | # hierarchy[j][3] == i means this contour is a direct child of the outer contour
54 | if hierarchy[j][3] == i:
55 | # Add this inner contour (hole) to the polygon
56 | inner_points = inner_contour.reshape(-1).tolist()
57 | polygon.append(inner_points)
58 |
59 | polygons.append(polygon)
60 |
61 | return polygons
62 |
63 |
64 | def rle_to_polygons_coordinates(*, counts: str, height: int, width: int) -> PolygonCoordinates:
65 | try:
66 | import numpy as np
67 | except ImportError as e:
68 | raise ImportError(
69 | "The 'cv2' and 'numpy' packages are required to import polygons from COCO RLE strings. "
70 | "Install them with: `pip install encord[coco]`"
71 | ) from e
72 | buffer = deserialise_bitmask(counts, height * width)
73 | data: np.ndarray = np.frombuffer(buffer, dtype=np.uint8).reshape((height, width))
74 | polygons = find_contours(data)
75 | # make coordinates relative to image size
76 | for polygon in polygons:
77 | for ring in polygon:
78 | for i in range(0, len(ring), 2):
79 | ring[i] /= width
80 | ring[i + 1] /= height
81 | return PolygonCoordinates.from_polygons_list(polygons)
--------------------------------------------------------------------------------
/encord/utilities/common.py:
--------------------------------------------------------------------------------
1 | def _get_dict_without_none_keys(d: dict) -> dict:
2 | """Remove keys with value None from a dictionary.
Does not change the dict in place."""
3 | return {k: v for k, v in d.items() if v is not None}
--------------------------------------------------------------------------------
/encord/utilities/hash_utilities.py:
--------------------------------------------------------------------------------
1 | from typing import Union
2 | from uuid import UUID
3 |
4 |
5 | def convert_to_uuid(uuid: Union[UUID, str]) -> UUID:
6 | if not isinstance(uuid, UUID):
7 | uuid = UUID(uuid)
8 | return uuid
9 |
--------------------------------------------------------------------------------
/encord/utilities/label_utilities.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, cast
2 |
3 | from encord.constants.enums import DataType
4 | from encord.constants.string_constants import (
5 | CLASSIFICATION_ANSWERS,
6 | CLASSIFICATION_HASH,
7 | CLASSIFICATIONS,
8 | LABELS,
9 | OBJECT_ANSWERS,
10 | OBJECT_HASH,
11 | OBJECTS,
12 | )
13 | from encord.orm.label_row import LabelRow
14 |
15 |
16 | def construct_answer_dictionaries(label_row) -> LabelRow:
17 | """Adds answer object and classification answer dictionaries to a label row if they do not exist.
18 | Integrity checks are conducted upon saving of labels.
19 |
20 | Args:
21 | label_row: A label row.
22 |
23 | Returns:
24 | LabelRow: A label row instance with updated answer dictionaries
25 | """
26 | label_row = LabelRow(label_row) # Cast to label row ORM
27 | data_type = label_row.data_type
28 | data_units = cast(Dict[str, Dict[str, Dict]], label_row.data_units)
29 |
30 | object_answers = label_row.object_answers
31 | classification_answers = label_row.classification_answers
32 |
33 | for du in data_units: # Iterate over data units in label row
34 | data_unit = data_units[du]
35 |
36 | if LABELS in data_unit:
37 | labels = data_unit[LABELS]
38 |
39 | if data_type in {DataType.IMG_GROUP.value, DataType.IMAGE.value}: # Go through images
40 | items = labels.get(OBJECTS, []) + labels.get(CLASSIFICATIONS, []) # default to empty lists so '+' concatenates
41 | add_answers_to_items(items, classification_answers, object_answers)
42 |
43 | elif data_type in (DataType.VIDEO.value, DataType.DICOM.value):
44 | for frame in labels: # Go through frames
45 | items = labels[frame].get(OBJECTS, []) + labels[frame].get(CLASSIFICATIONS, [])
46 | add_answers_to_items(items, classification_answers, object_answers)
47 |
48 | label_row[OBJECT_ANSWERS] = object_answers
49 | label_row[CLASSIFICATION_ANSWERS] = classification_answers
50 | return label_row
51 |
52 |
53 | # ---------------------------------------------------------
54 | # Helper functions
55 | # ---------------------------------------------------------
56 | def add_answers_to_items(items, classification_answers, object_answers):
57 | """If object_hash (uid) or classification_hash (uid) are not in answer dictionaries,
58 | add key entry with empty classification list.
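Both answer dictionaries are mutated in place.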
59 | """ 60 | for item in items: 61 | if OBJECT_HASH in item: 62 | object_hash = item.get(OBJECT_HASH) 63 | if object_hash not in object_answers: 64 | object_answers[object_hash] = { 65 | OBJECT_HASH: object_hash, 66 | CLASSIFICATIONS: [], 67 | } 68 | 69 | if CLASSIFICATION_HASH in item: 70 | classification_hash = item.get(CLASSIFICATION_HASH) 71 | if classification_hash not in classification_answers: 72 | classification_answers[classification_hash] = { 73 | CLASSIFICATION_HASH: classification_hash, 74 | CLASSIFICATIONS: [], 75 | } 76 | -------------------------------------------------------------------------------- /encord/utilities/ontology_user.py: -------------------------------------------------------------------------------- 1 | """--- 2 | title: "Utilities - Ontology Helper" 3 | slug: "sdk-ref-utilities-ont-helper" 4 | hidden: false 5 | metadata: 6 | title: "Utilities - Ontology Helper" 7 | description: "Encord SDK Utilities - Ontology Helper." 8 | category: "64e481b57b6027003f20aaa0" 9 | --- 10 | """ 11 | 12 | from dataclasses import dataclass 13 | from datetime import datetime 14 | from enum import IntEnum 15 | from typing import Optional, Union 16 | from uuid import UUID 17 | 18 | from encord.orm.base_dto import BaseDTO 19 | 20 | 21 | class OntologyUserRole(IntEnum): 22 | ADMIN = 0 23 | USER = 1 24 | 25 | 26 | class OntologyWithUserRole(BaseDTO): 27 | """An on-the-wire representation from /v2/public/ontologies endpoints""" 28 | 29 | ontology_uuid: UUID 30 | title: str 31 | description: str 32 | editor: dict 33 | created_at: datetime 34 | last_edited_at: datetime 35 | user_role: Optional[OntologyUserRole] 36 | 37 | 38 | class OntologiesFilterParams(BaseDTO): 39 | """Filter parameters for the /v2/public/ontologies endpoint""" 40 | 41 | title_eq: Optional[str] = None 42 | title_like: Optional[str] = None 43 | desc_eq: Optional[str] = None 44 | desc_like: Optional[str] = None 45 | created_before: Optional[Union[str, datetime]] = None 46 | created_after: Optional[Union[str, datetime]] = None 47 | edited_before: Optional[Union[str, datetime]] = None 48 | edited_after: Optional[Union[str, datetime]] = None 49 | include_org_access: bool = False 50 | -------------------------------------------------------------------------------- /encord/utilities/project_user.py: -------------------------------------------------------------------------------- 1 | from enum import IntEnum 2 | 3 | from encord.orm.base_dto import BaseDTO 4 | 5 | 6 | class ProjectUserRole(IntEnum): 7 | """Enumeration for user roles within a project. 8 | 9 | Attributes: 10 | ADMIN (int): Represents an admin user with value 0. 11 | ANNOTATOR (int): Represents an annotator user with value 1. 12 | REVIEWER (int): Represents a reviewer user with value 2. 13 | ANNOTATOR_REVIEWER (int): Represents a user who is both an annotator and a reviewer with value 3. 14 | TEAM_MANAGER (int): Represents a team manager user with value 4. 15 | """ 16 | 17 | ADMIN = (0,) 18 | ANNOTATOR = (1,) 19 | REVIEWER = (2,) 20 | ANNOTATOR_REVIEWER = (3,) 21 | TEAM_MANAGER = 4 22 | 23 | 24 | class ProjectUser(BaseDTO): 25 | """Data transfer object representing a user within a project. 26 | 27 | Attributes: 28 | user_email (str): The email address of the user. 29 | user_role (ProjectUserRole): The role of the user in the project, defined by the ProjectUserRole enumeration. 30 | project_hash (str): A unique identifier for the project. 
31 | """ 32 | 33 | user_email: str 34 | user_role: ProjectUserRole 35 | project_hash: str 36 | -------------------------------------------------------------------------------- /encord/utilities/storage/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/encord-team/encord-client-python/0245e717f07016e69ec062a08797d85026c2febb/encord/utilities/storage/__init__.py -------------------------------------------------------------------------------- /encord/utilities/type_utilities.py: -------------------------------------------------------------------------------- 1 | from typing import NoReturn, Optional 2 | 3 | 4 | def exhaustive_guard(value: NoReturn, message: Optional[str]) -> NoReturn: 5 | # This also works in runtime as well: 6 | error_message = message if message is not None else f"This code should never be reached, got: [{value}]" 7 | raise TypeError(error_message) 8 | -------------------------------------------------------------------------------- /encord/workflow/__init__.py: -------------------------------------------------------------------------------- 1 | from encord.workflow.stages.agent import AgentStage 2 | from encord.workflow.stages.annotation import AnnotationStage, AnnotationTask, AnnotationTaskStatus 3 | from encord.workflow.stages.consensus_annotation import ConsensusAnnotationStage, ConsensusAnnotationTask 4 | from encord.workflow.stages.consensus_review import ConsensusReviewStage, ConsensusReviewTask, ConsensusReviewTaskStatus 5 | from encord.workflow.stages.final import FinalStage, FinalStageTask 6 | from encord.workflow.stages.review import ReviewStage, ReviewTask, ReviewTaskStatus 7 | from encord.workflow.workflow import Workflow 8 | -------------------------------------------------------------------------------- /encord/workflow/stages/final.py: -------------------------------------------------------------------------------- 1 | """--- 2 | title: "Final Stage" 3 | slug: "sdk-ref-stage-final" 4 | hidden: false 5 | metadata: 6 | title: "Final Stage" 7 | description: "Encord SDK Final Stages: Complete and Archive." 8 | category: "64e481b57b6027003f20aaa0" 9 | --- 10 | """ 11 | 12 | from __future__ import annotations 13 | 14 | from typing import Iterable, List, Literal, Optional, Union 15 | from uuid import UUID 16 | 17 | from encord.common.utils import ensure_uuid_list 18 | from encord.orm.workflow import WorkflowStageType 19 | from encord.workflow.common import TasksQueryParams, WorkflowStageBase, WorkflowTask 20 | 21 | 22 | class _FinalTasksQueryParams(TasksQueryParams): 23 | data_hashes: Optional[List[UUID]] = None 24 | dataset_hashes: Optional[List[UUID]] = None 25 | data_title_contains: Optional[str] = None 26 | 27 | 28 | class FinalStage(WorkflowStageBase): 29 | stage_type: Literal[WorkflowStageType.DONE] = WorkflowStageType.DONE 30 | 31 | """ 32 | Final stage for a task in Consensus and non-Consensus Projects. The final stages are COMPLETE or ARCHIVE. 33 | """ 34 | 35 | def get_tasks( 36 | self, 37 | data_hash: Union[List[UUID], UUID, List[str], str, None] = None, 38 | dataset_hash: Union[List[UUID], UUID, List[str], str, None] = None, 39 | data_title: Optional[str] = None, 40 | ) -> Iterable[FinalStageTask]: 41 | """Retrieves tasks for the FinalStage. 42 | 43 | **Parameters** 44 | 45 | - `data_hash` (Union[List[UUID], UUID, List[str], str, None]): Unique ID(s) for the data unit(s). 
46 | - `dataset_hash` (Union[List[UUID], UUID, List[str], str, None]): Unique ID(s) for the dataset(s) that the data unit(s) belongs to. 47 | - `data_title` (Optional[str]): A string to filter tasks by the data unit's name. 48 | 49 | **Returns** 50 | 51 | An iterable of `FinalStageTask` instances with the following information: 52 | - `uuid`: Unique identifier for the task. 53 | - `created_at`: Time and date the task was created. 54 | - `updated_at`: Time and date the task was last edited. 55 | - `data_hash`: Unique identifier for the data unit. 56 | - `data_title`: Name/title of the data unit. 57 | """ 58 | params = _FinalTasksQueryParams( 59 | data_hashes=ensure_uuid_list(data_hash), 60 | dataset_hashes=ensure_uuid_list(dataset_hash), 61 | data_title_contains=data_title, 62 | ) 63 | 64 | yield from self._workflow_client.get_tasks(self.uuid, params, type_=FinalStageTask) 65 | 66 | 67 | class FinalStageTask(WorkflowTask): 68 | """ 69 | Represents tasks in a FinalStage, which can only be queried. No actions can be taken on the task. 70 | 71 | **Attributes** 72 | 73 | - `data_hash` (UUID): Unique ID for the data unit. 74 | - `data_title` (str): Name of the data unit. 75 | """ 76 | 77 | data_hash: UUID 78 | data_title: str 79 | -------------------------------------------------------------------------------- /py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/encord-team/encord-client-python/0245e717f07016e69ec062a08797d85026c2febb/py.typed -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "encord" 3 | version = "0.1.167" 4 | description = "Encord Python SDK Client" 5 | authors = ["Cord Technologies Limited "] 6 | license = "Apache Software License" 7 | keywords = ["encord"] 8 | packages = [ 9 | { include = "encord" }, 10 | ] 11 | readme = "README.md" 12 | repository = "https://github.com/encord-team/encord-client-python" 13 | documentation = "https://python.docs.encord.com/" 14 | classifiers = [ 15 | "Programming Language :: Python :: 3", 16 | "License :: OSI Approved :: Apache Software License", 17 | "Operating System :: OS Independent", 18 | ] 19 | include = ["encord/py.typed"] 20 | 21 | 22 | [tool.poetry.dependencies] 23 | python = ">=3.8,<3.9.0 || >3.9.0,<3.9.1 || >3.9.1" 24 | python-dateutil = "^2.8.2" 25 | requests = "^2.25.0" 26 | cryptography = ">=43.0.0" 27 | tqdm = "^4.32.1" 28 | pydantic = ">=1.10.14" 29 | orjson = ">=2" 30 | pycocotools = {version = "^2.0.7", optional = true} 31 | shapely = {version = "^2.0.4", optional = true} 32 | opencv-python = {version = "^4.11.0.86", optional = true} 33 | numpy = [ 34 | {version = "^1.24", python = "<3.12", optional = true}, 35 | {version = "^1.26", python = ">=3.12", optional = true} 36 | ] 37 | 38 | [tool.poetry.extras] 39 | coco = ["pycocotools", "shapely", "opencv-python", "numpy"] 40 | 41 | [tool.poetry.group.dev.dependencies] 42 | pytest = "^7.4.1" 43 | pre-commit = "^3.5.0" 44 | deepdiff = "^6.2.1" 45 | types-requests = "^2.25.0" 46 | mypy = "^1.11.1" 47 | types-python-dateutil = "^2.8.19" 48 | types-tqdm = "^4.32.1" 49 | pyright = "^1.1.374" 50 | numpy = [ 51 | {version = "^1.24", python = "<3.12"}, 52 | {version = "^1.26", python = ">=3.12"} 53 | ] 54 | opencv-python = {version = "^4.11.0.86"} 55 | shapely = {version = "^2.0.4"} 56 | pycocotools = {version = "^2.0.7"} 57 | ruff = "^0.8.6" 58 | 59 | 
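# A consumer-side sketch of the optional `coco` group declared in [tool.poetry.extras] above:
#   pip install "encord[coco]"
# installs the SDK together with the pycocotools, shapely, opencv-python and numpy extras
# that gate the COCO import/export utilities.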
[build-system] 60 | requires = ["poetry-core>=1.3.2"] 61 | build-backend = "poetry.core.masonry.api" 62 | 63 | [tool.mypy] 64 | ignore_missing_imports = true 65 | 66 | [tool.ruff] 67 | target-version = "py38" 68 | line-length = 120 69 | 70 | [tool.ruff.lint] 71 | select = ["W", "Q", "I", "D417"] 72 | 73 | [tool.ruff.lint.pydocstyle] 74 | ignore-var-parameters = true 75 | 76 | [tool.ruff.lint.per-file-ignores] 77 | "encord/*" = ["F401", "E402"] 78 | -------------------------------------------------------------------------------- /scripts/code_examples_python_to_mdx.py: -------------------------------------------------------------------------------- 1 | import ast 2 | import os 3 | import subprocess 4 | 5 | # Clone the repositories only if they don't exist 6 | if not os.path.exists("encord-client-python"): 7 | subprocess.run(["git", "clone", "https://github.com/encord-team/encord-client-python.git"]) 8 | if not os.path.exists("encord-docs-mint"): 9 | subprocess.run(["git", "clone", "https://github.com/encord-team/encord-docs-mint.git"]) 10 | 11 | # Now set the paths 12 | source_repo_path = "encord-client-python" # After cloning, the repo will be in this directory 13 | destination_repo_path = "encord-docs-mint" # Same for the destination repo 14 | 15 | # Folder paths inside the repos 16 | source_folder = "tests/docs" 17 | destination_folder = "snippets/SDKCodeExamples" 18 | 19 | # Define the paths to the Python files in the source repo and the corresponding .mdx output in the destination repo 20 | python_files = [ 21 | os.path.join(source_repo_path, source_folder, file) # All Python files in the source folder 22 | for file in os.listdir(os.path.join(source_repo_path, source_folder)) 23 | if file.endswith(".py") # Only select .py files 24 | ] 25 | 26 | # Ensure the destination folder exists in the destination repo 27 | destination_folder_path = os.path.join(destination_repo_path, destination_folder) 28 | if not os.path.exists(destination_folder_path): 29 | os.makedirs(destination_folder_path) 30 | 31 | 32 | # Function to extract the code block name from a Python file's docstring 33 | def get_code_block_name(python_file_path): 34 | with open(python_file_path, "r") as file: 35 | # Parse the file and read its module docstring (ast.Str is deprecated and removed in Python 3.12, so use ast.get_docstring) 36 | docstring = ast.get_docstring(ast.parse(file.read())) 37 | if docstring: 38 | return docstring.splitlines()[0].replace("Code Block Name:", "").strip() 39 | return "Untitled Code Block" 40 | 41 | 42 | # Loop through each Python file and create a corresponding .mdx file 43 | for python_file_path in python_files: 44 | # Ensure the source file exists before reading it 45 | if not os.path.exists(python_file_path): 46 | print(f"Error: Python file not found at {python_file_path}") 47 | continue 48 | 49 | # Read the content of the Python file 50 | with open(python_file_path, "r") as python_file: 51 | python_content = python_file.read() 52 | 53 | # Extract the code block name from the docstring 54 | code_block_name = get_code_block_name(python_file_path) 55 | 56 | # Create a corresponding .mdx file in the destination repo 57 | mdx_file_name = ( 58 | os.path.splitext(os.path.basename(python_file_path))[0] + ".mdx" 59 | ) # Use the Python file name, but with .mdx extension 60 | mdx_file_path = os.path.join(destination_folder_path, mdx_file_name) 61 | 62 | # Initialize the .mdx file with the Python code inside a code block, and add the tab name from the docstring 63 | mdx_content = f"```python 
{code_block_name}\n{python_content}\n```" 64 | 65 | # Write the content to the corresponding .mdx file 66 | with open(mdx_file_path, "w") as mdx_file: 67 | mdx_file.write(mdx_content) 68 | 69 | print( 70 | f"Python code from {python_file_path} has been successfully written to {mdx_file_path} with tab name '{code_block_name}'" 71 | ) 72 | -------------------------------------------------------------------------------- /tests/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/encord-team/encord-client-python/0245e717f07016e69ec062a08797d85026c2febb/tests/.DS_Store -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/encord-team/encord-client-python/0245e717f07016e69ec062a08797d85026c2febb/tests/__init__.py -------------------------------------------------------------------------------- /tests/common/test_datetime_parser.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta, timezone 2 | 3 | from encord.common.constants import DATETIME_LONG_STRING_FORMAT 4 | from encord.common.time_parser import parse_datetime 5 | 6 | 7 | def test_can_parse_iso_timestamp() -> None: 8 | expected_datetime = datetime.now() 9 | 10 | datetime_as_iso = expected_datetime.isoformat() 11 | parsed_datetime = parse_datetime(datetime_as_iso) 12 | 13 | assert parsed_datetime == expected_datetime 14 | 15 | 16 | def test_can_parse_encord_time_format() -> None: 17 | original_datetime = datetime.now() 18 | 19 | datetime_as_encord_string = original_datetime.strftime(DATETIME_LONG_STRING_FORMAT) 20 | parsed_datetime = parse_datetime(datetime_as_encord_string) 21 | 22 | # The long string format doesn't include milliseconds, so dropping them 23 | expected_datetime = original_datetime.replace(microsecond=0) 24 | assert parsed_datetime == expected_datetime 25 | 26 | 27 | def test_can_parse_js_date_string() -> None: 28 | original_datetime = "Thu Jan 11 2024 12:09:51 GMT+0600 (Bangladesh Standard Time)" 29 | parsed_datetime = parse_datetime(original_datetime) 30 | 31 | # The "GMT+HHMM" designator is interpreted with the POSIX sign convention, hence the negative offset 32 | expected_datetime = datetime(2024, 1, 11, 12, 9, 51, tzinfo=timezone(timedelta(hours=-6))) 33 | assert parsed_datetime == expected_datetime 34 | -------------------------------------------------------------------------------- /tests/constants/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/encord-team/encord-client-python/0245e717f07016e69ec062a08797d85026c2febb/tests/constants/__init__.py -------------------------------------------------------------------------------- /tests/constants/test_enums.py: -------------------------------------------------------------------------------- 1 | from encord.constants.enums import DataType 2 | 3 | 4 | def test_data_type_consistency() -> None: 5 | values_from_string = set() 6 | for value in DataType: 7 | string_representation = value.to_upper_case_string() 8 | value_from_string = DataType.from_upper_case_string(string_representation) 9 | values_from_string.add(value_from_string) 10 | assert value == value_from_string 11 | 12 | assert len(values_from_string) == len(DataType) 13 | 14 | 15 | def test_data_type_missing() -> None: 16 | assert DataType.from_upper_case_string("new_cord_type").value == "_MISSING_DATA_TYPE_" 17 | assert 
DataType.from_upper_case_string("new_cord_type") == "_MISSING_DATA_TYPE_" 18 | assert DataType("new_cord_type").value == "_MISSING_DATA_TYPE_" 19 | assert DataType("new_cord_type") == "_MISSING_DATA_TYPE_" 20 | -------------------------------------------------------------------------------- /tests/docs/export_labels_all_to_json.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: Export labels to JSON 3 | """ 4 | 5 | # Import dependencies 6 | import json 7 | import os 8 | 9 | from encord import EncordUserClient 10 | 11 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 12 | PROJECT_ID = "8d73bec0-ac61-4d28-b45a-7bffdf4c6b8e" 13 | BUNDLE_SIZE = 100 # Customize as needed 14 | 15 | # Create user client using SSH key 16 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 17 | ssh_private_key_path=SSH_PATH, 18 | # For US platform users use "https://api.us.encord.com" 19 | domain="https://api.encord.com", 20 | ) 21 | 22 | # Specify Project 23 | project = user_client.get_project(PROJECT_ID) 24 | assert project is not None, f"Project with ID {PROJECT_ID} could not be loaded" 25 | 26 | # Get label rows for your Project 27 | label_rows = project.list_label_rows_v2() 28 | assert label_rows, f"No label rows found in project {PROJECT_ID}" 29 | 30 | # Initialize label rows using bundles 31 | with project.create_bundle(bundle_size=BUNDLE_SIZE) as bundle: 32 | for label_row in label_rows: 33 | label_row.initialise_labels(bundle=bundle) 34 | 35 | # Collect all label row data 36 | all_label_rows = [label_row.to_encord_dict() for label_row in label_rows] 37 | assert all_label_rows, "No label row data collected for export" 38 | 39 | # Save the collected label rows data to a JSON file 40 | output_file = "/Users/chris-encord/export-label-rows.json" 41 | assert output_file.endswith(".json"), "Output file must be a .json file" 42 | 43 | with open(output_file, "w") as file: 44 | json.dump(all_label_rows, file, indent=4) 45 | 46 | print(f"Label rows have been saved to {output_file}.") 47 | -------------------------------------------------------------------------------- /tests/docs/export_labels_consensus-labels.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: All labels all branches 3 | """ 4 | 5 | import json 6 | 7 | from encord import EncordUserClient 8 | 9 | # User input 10 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 11 | PROJECT_ID = "8d73bec0-ac61-4d28-b45a-7bffdf4c6b8e" 12 | OUTPUT_FILE_PATH = "/Users/chris-encord/frame_range_output.json" 13 | BUNDLE_SIZE = 100 14 | 15 | # Create user client using SSH key 16 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 17 | ssh_private_key_path=SSH_PATH, 18 | # For US platform users use "https://api.us.encord.com" 19 | domain="https://api.encord.com", 20 | ) 21 | 22 | # Specify Project. Replace with the ID of the Project you want to export labels for. 
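# A sketch of what the branch-aware export below yields: with
# include_all_label_branches=True every branch surfaces as its own LabelRowV2
# carrying a branch_name, so filtering back to the default export is a
# one-liner ("main" as the default branch name is an assumption based on the
# MAIN-branch comment further down):
#     main_only = [lr for lr in label_rows if lr.branch_name == "main"]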
23 | project = user_client.get_project(PROJECT_ID) 24 | 25 | # Downloads a local copy of all the labels 26 | # Without the include_all_label_branches flag only the MAIN branch labels export 27 | label_rows = project.list_label_rows_v2(include_all_label_branches=True) 28 | 29 | output_data = [] # This will hold the output data to be saved as JSON 30 | 31 | # Initialize label rows using bundles 32 | with project.create_bundle(bundle_size=BUNDLE_SIZE) as bundle: 33 | for label_row in label_rows: 34 | label_row.initialise_labels(bundle=bundle) 35 | 36 | # Collecting data for JSON output 37 | for label_row in label_rows: 38 | label_row_data = { 39 | "title": label_row.data_title, 40 | "branch": label_row.branch_name, 41 | "objects": [], 42 | "classifications": [], 43 | } 44 | 45 | # Collect object instances 46 | for object_instance in label_row.get_object_instances(): 47 | object_data = { 48 | "object_hash": object_instance.object_hash, 49 | "object_name": object_instance.object_name, 50 | "feature_hash": object_instance.feature_hash, 51 | "ontology_item": { 52 | "uid": object_instance.ontology_item.uid, 53 | "color": object_instance.ontology_item.color, 54 | "shape": object_instance.ontology_item.shape, 55 | "attributes": [ 56 | {"name": attribute.name, "value": attribute.value} 57 | for attribute in object_instance.ontology_item.attributes 58 | ], 59 | }, 60 | "annotations": [ 61 | {"frame": annotation.frame, "coordinates": annotation.coordinates} 62 | for annotation in object_instance.get_annotations() 63 | ], 64 | } 65 | label_row_data["objects"].append(object_data) 66 | 67 | # Collect classification instances 68 | for classification_instance in label_row.get_classification_instances(): 69 | classification_data = { 70 | "classification_hash": classification_instance.classification_hash, 71 | "classification_name": classification_instance.classification_name, 72 | "feature_hash": classification_instance.feature_hash, 73 | "classification_answer": { 74 | "value": classification_instance.get_answer().value, 75 | "hash": classification_instance.get_answer().feature_node_hash, 76 | }, 77 | "annotations": [{"frame": annotation.frame} for annotation in classification_instance.get_annotations()], 78 | } 79 | label_row_data["classifications"].append(classification_data) 80 | 81 | # Add label row data to the output list 82 | output_data.append(label_row_data) 83 | 84 | # Saving to JSON file 85 | output_file_path = OUTPUT_FILE_PATH # Replace with the desired file path 86 | with open(output_file_path, "w") as json_file: 87 | json.dump(output_data, json_file, indent=4) 88 | 89 | print(f"Output saved to {output_file_path}") 90 | -------------------------------------------------------------------------------- /tests/docs/export_labels_text_attributes.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: Text attributes 3 | """ 4 | 5 | # Import dependencies 6 | import json 7 | 8 | from encord import EncordUserClient 9 | from encord.objects import ObjectInstance 10 | from encord.objects.attributes import Attribute, TextAttribute 11 | 12 | # User input 13 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 14 | PROJECT_ID = "8d73bec0-ac61-4d28-b45a-7bffdf4c6b8e" 15 | DATA_UNIT = "cherries-is" 16 | OUTPUT_FILE_PATH = "/Users/chris-encord/text_attributes_output.json" 17 | BUNDLE_SIZE = 100 18 | 19 | # Create user client using SSH key 20 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 21 | 
ssh_private_key_path=SSH_PATH, 22 | # For US platform users use "https://api.us.encord.com" 23 | domain="https://api.encord.com", 24 | ) 25 | 26 | # Specify Project 27 | project = user_client.get_project(PROJECT_ID) 28 | assert project is not None, f"Project with ID {PROJECT_ID} could not be loaded" 29 | 30 | # Filter label rows for a specific data title 31 | label_rows = project.list_label_rows_v2(data_title_eq=DATA_UNIT) 32 | assert label_rows, f"No label rows found for data title '{DATA_UNIT}'" 33 | 34 | # Initialize labels using bundle 35 | with project.create_bundle(bundle_size=BUNDLE_SIZE) as bundle: 36 | for label_row in label_rows: 37 | label_row.initialise_labels(bundle=bundle) 38 | 39 | 40 | # Function to extract text attributes and store in structured format 41 | def extract_text_attributes(attribute: Attribute, object_instance: ObjectInstance, frame_number: int): 42 | if isinstance(attribute, TextAttribute): 43 | text_answer = object_instance.get_answer(attribute) 44 | return { 45 | "frame": frame_number + 1, 46 | "attribute_name": attribute.title, 47 | "attribute_hash": attribute.feature_node_hash, 48 | "text_answer": text_answer, 49 | } 50 | return None 51 | 52 | 53 | # Collect results for saving 54 | results = [] 55 | 56 | # Iterate through all object instances and collect text attribute data 57 | for label_row in label_rows: 58 | object_instances = label_row.get_object_instances() 59 | assert object_instances, f"No object instances found in label row {label_row.uid}" 60 | 61 | for object_instance in object_instances: 62 | annotations = object_instance.get_annotations() 63 | assert annotations, f"No annotations found for object instance {object_instance.object_hash}" 64 | 65 | ontology_item = object_instance.ontology_item 66 | assert ( 67 | ontology_item and ontology_item.attributes 68 | ), f"Missing ontology item or attributes for object {object_instance.object_hash}" 69 | 70 | for annotation in annotations: 71 | for attribute in ontology_item.attributes: 72 | attr_data = extract_text_attributes(attribute, object_instance, annotation.frame) 73 | if attr_data: 74 | results.append(attr_data) 75 | # Optional: also print to console 76 | print(f"Frame {attr_data['frame']}:") 77 | print(f"Text Attribute name: {attr_data['attribute_name']}") 78 | print(f"Text Attribute hash: {attr_data['attribute_hash']}") 79 | print(f"Text Attribute Answer: {attr_data['text_answer']}") 80 | 81 | # Save results to JSON 82 | assert OUTPUT_FILE_PATH.endswith(".json"), "Output file path must end with .json" 83 | 84 | with open(OUTPUT_FILE_PATH, "w") as f: 85 | json.dump(results, f, indent=4) 86 | 87 | print(f"\nText attribute data saved to: {OUTPUT_FILE_PATH}") 88 | -------------------------------------------------------------------------------- /tests/docs/project_automatic_interpolation.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: Automatic Interpolation 3 | """ 4 | 5 | # Import dependencies 6 | from encord import EncordUserClient 7 | from encord.objects import LabelRowV2 8 | from encord.objects.coordinates import PointCoordinate, PolygonCoordinates 9 | 10 | # User input 11 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 12 | PROJECT_ID = "f7890e41-6de8-4e66-be06-9fbe182df457" 13 | DATA_UNIT_TITLE = "cherries-vid-001.mp4" 14 | DATA_UNIT_ID = "1041da61-d63c-4489-9001-4a56fe37f1f3" # The data_hash for the data unit 15 | LABEL_ROW_ID = "6e9a3f80-0663-4af4-9143-bc1328a15735" # The label_hash for the data unit 16 | LABEL_ID = 
"noQksGhW" # The objectHash for the label 17 | 18 | # Create user client using SSH key 19 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 20 | ssh_private_key_path=SSH_PATH, 21 | # For US platform users use "https://api.us.encord.com" 22 | domain="https://api.encord.com", 23 | ) 24 | 25 | # Open the project you want to work on by specifying the Project ID 26 | project = user_client.get_project(PROJECT_ID) 27 | 28 | # Get the label row for a specific data unit 29 | label_row = project.get_label_row(LABEL_ROW_ID) 30 | 31 | print(label_row) 32 | # Prepare interpolation 33 | key_frames = label_row["data_units"][DATA_UNIT_ID]["labels"] 34 | objects_to_interpolate = [LABEL_ID] 35 | 36 | # Run interpolation 37 | interpolation_result = project.object_interpolation(key_frames, objects_to_interpolate) 38 | print(interpolation_result) 39 | -------------------------------------------------------------------------------- /tests/docs/project_copy_project_advanced.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: Copy Project - Advanced 3 | """ 4 | 5 | # Import dependencies 6 | from encord import EncordUserClient 7 | from encord.orm.project import CopyDatasetAction, CopyDatasetOptions, CopyLabelsOptions, ReviewApprovalState 8 | 9 | # User input 10 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 11 | PROJECT_ID = "4b8756eb-eecb-415f-a212-4fb57c95b218" 12 | NEW_PROJECT_TITLE = "My New Project 001" # Specify the new Project's title 13 | NEW_PROJECT_DECSCRIPTION = "This new Project is for use with ACME Co." # Specify the new Project's description 14 | NEW_DATASET_TITLE = "My New Dataset 001" # Specify the new Dataset's title 15 | NEW_DATASET_DESCRIPTION = "This new Dataset is for use with My New Project 001" # Specify the new Dataset's description 16 | DATASET_ID_01 = "538d9b35-ba44-4a29-86ea-0cfb366cb9fd" # Specify a Dataset belonging to the Project being copied 17 | DATASET_ID_02 = "5214c000-687d-4122-85b8-e567f5ccc65c" # Specify a Dataset belonging to the Project being copied 18 | DATA_UNIT_ID_01 = ( 19 | "06b485bf-77be-40df-96c6-b61e8714f1b6" # Specify a data_hash for a data unit belonging to a Dataset being copied 20 | ) 21 | DATA_UNIT_ID_02 = ( 22 | "2e3f10c3-fe67-4d93-844d-a7030f510ab1" # Specify a data_hash for a data unit belonging to a Dataset being copied 23 | ) 24 | DATA_UNIT_ID_03 = ( 25 | "1041da61-d63c-4489-9001-4a56fe37f1f3" # Specify a data_hash for a data unit belonging to a Dataset being copied 26 | ) 27 | DATA_UNIT_ID_04 = ( 28 | "5c981777-7324-47f2-80c9-c27018db0a34" # Specify a data_hash for a data unit belonging to a Dataset being copied 29 | ) 30 | DATA_UNIT_ID_05 = ( 31 | "42e6aaf9-4cd8-4647-b9a6-37ba229c7e54" # Specify a data_hash for a data unit belonging to a Dataset being copied 32 | ) 33 | DATA_UNIT_ID_06 = ( 34 | "84f41e2a-f1fd-4b4d-91d0-bb4f3c64c41a" # Specify a data_hash for a data unit belonging to a Dataset being copied 35 | ) 36 | LABEL_ID_01 = "DX2mzwdT" # Specify an objectHash for a label/classification on a data unit being copied 37 | LABEL_ID_02 = "WNkfZ/7u" # Specify an objectHash for a label/classification on a data unit being copied 38 | 39 | 40 | # Create user client using SSH key 41 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 42 | ssh_private_key_path=SSH_PATH, 43 | # For US platform users use "https://api.us.encord.com" 44 | domain="https://api.encord.com", 45 | ) 46 | 47 | # Open the project you want to work on by specifying 
the Project ID 48 | project = user_client.get_project(PROJECT_ID) 49 | 50 | # Copy the project 51 | project.copy_project( 52 | new_title=NEW_PROJECT_TITLE, 53 | new_description=NEW_PROJECT_DESCRIPTION, 54 | copy_collaborators=True, # Specify whether Project collaborators are copied to the new Project 55 | copy_datasets=CopyDatasetOptions( 56 | action=CopyDatasetAction.CLONE, # This also creates a new Dataset 57 | dataset_title=NEW_DATASET_TITLE, 58 | dataset_description=NEW_DATASET_DESCRIPTION, 59 | datasets_to_data_hashes_map={ 60 | DATASET_ID_01: [DATA_UNIT_ID_01, DATA_UNIT_ID_02, DATA_UNIT_ID_03], 61 | DATASET_ID_02: [DATA_UNIT_ID_04, DATA_UNIT_ID_05, DATA_UNIT_ID_06], 62 | }, 63 | ), 64 | copy_labels=CopyLabelsOptions( 65 | accepted_label_statuses=[ReviewApprovalState.APPROVED], # Copy all labels in the 'Approved' state. 66 | accepted_label_hashes=[LABEL_ID_01, LABEL_ID_02], # Copy labels with the listed IDs. 67 | ), 68 | ) 69 | -------------------------------------------------------------------------------- /tests/docs/project_copy_project_simple.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: Copy Project - Simple 3 | """ 4 | 5 | # Import dependencies 6 | from encord import EncordUserClient, Project 7 | 8 | # User input 9 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 10 | PROJECT_ID = "4b8756eb-eecb-415f-a212-4fb57c95b218" 11 | 12 | # Create user client using SSH key 13 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 14 | ssh_private_key_path=SSH_PATH, 15 | # For US platform users use "https://api.us.encord.com" 16 | domain="https://api.encord.com", 17 | ) 18 | 19 | # Open the project you want to work on by specifying the Project ID 20 | project = user_client.get_project(PROJECT_ID) 21 | 22 | # Copy the Project with attached datasets and collaborators 23 | new_project_id = project.copy_project( 24 | copy_datasets=True, 25 | copy_collaborators=True, 26 | copy_models=False, # Not strictly needed 27 | ) 28 | # Print the new Project's ID 29 | print(new_project_id) 30 | -------------------------------------------------------------------------------- /tests/docs/project_create_project.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: Create Project 3 | """ 4 | 5 | # Import dependencies 6 | from encord.user_client import EncordUserClient 7 | 8 | # User input 9 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 10 | PROJECT_TITLE = "My Project 01" 11 | DATASET_ID_01 = "ccb9438b-d9d3-4351-a243-61948f74d062" 12 | DATASET_ID_02 = "4fc8934a-8728-4a80-9b4d-2954afe1a0b5" 13 | DATASET_ID_03 = "26a8c7e2-9259-4853-bf0c-1b7610d4e057" 14 | WORKFLOW_TEMPLATE_ID = "79f68604-7998-4cd3-9c68-d170b690dbb9" 15 | 16 | # Create user client using SSH key 17 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 18 | ssh_private_key_path=SSH_PATH, 19 | # For US platform users use "https://api.us.encord.com" 20 | domain="https://api.encord.com", 21 | ) 22 | 23 | # Create a project 24 | project = user_client.create_project( 25 | project_title=PROJECT_TITLE, 26 | dataset_hashes=[DATASET_ID_01, DATASET_ID_02, DATASET_ID_03], 27 | workflow_template_hash=WORKFLOW_TEMPLATE_ID, 28 | ) 29 | 30 | # Prints the Project ID of the Project you just created 31 | print(project) 32 | -------------------------------------------------------------------------------- /tests/docs/project_cvat_import.py: 
-------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import prettyprinter as pp 4 | 5 | from encord import EncordUserClient 6 | from encord.utilities.client_utilities import CvatImporterSuccess, LocalImport 7 | 8 | # User input 9 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 10 | 11 | # Create user client using SSH key 12 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 13 | ssh_private_key_path=SSH_PATH, 14 | # For US platform users use "https://api.us.encord.com" 15 | domain="https://api.encord.com", 16 | ) 17 | 18 | # Increase networking timeouts for this long-running operation. 19 | timeout = 1800 20 | user_client.user_config.read_timeout = timeout 21 | user_client.user_config.write_timeout = timeout 22 | user_client.user_config.connect_timeout = timeout 23 | 24 | # We have placed the unzipped Pizza Project directory into a 25 | # `data` folder relative to this script 26 | data_folder = "data/Pizza Project" 27 | dataset_name = "Pizza Images Dataset" 28 | cvat_importer_ret = user_client.create_project_from_cvat(LocalImport(file_path=data_folder), dataset_name) 29 | 30 | # Check if the import was a success and inspect the return value 31 | if isinstance(cvat_importer_ret, CvatImporterSuccess): 32 | print(f"project_hash = {cvat_importer_ret.project_hash}") 33 | print(f"dataset_hash = {cvat_importer_ret.dataset_hash}") 34 | pp.pprint(cvat_importer_ret.issues) 35 | -------------------------------------------------------------------------------- /tests/docs/project_dataset_add.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: Add Datasets to Project 3 | """ 4 | 5 | from encord import EncordUserClient, Project 6 | 7 | # User input 8 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 9 | PROJECT_ID = "f7890e41-6de8-4e66-be06-9fbe182df457" 10 | DATASET_ID_01 = "6ffd7c78-f585-434b-8897-1178b147aeaa" 11 | DATASET_ID_02 = "4100d33d-e109-4e53-9c84-ba4b6bdfea79" 12 | DATASET_ID_03 = "e0623a14-a8fd-4ed3-b491-8f785a2ba28d" 13 | 14 | # Create user client using SSH key 15 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 16 | ssh_private_key_path=SSH_PATH, 17 | # For US platform users use "https://api.us.encord.com" 18 | domain="https://api.encord.com", 19 | ) 20 | 21 | # Open the project you want to work on by specifying the Project ID 22 | project = user_client.get_project(PROJECT_ID) 23 | 24 | add_these_datasets = project.add_datasets( 25 | [ 26 | DATASET_ID_01, 27 | DATASET_ID_02, 28 | DATASET_ID_03, 29 | # ... 
30 | ] 31 | ) 32 | print(f"All Datasets for Project= {project.list_datasets()}") 33 | -------------------------------------------------------------------------------- /tests/docs/project_dataset_remove.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: Remove Datasets from Project 3 | """ 4 | 5 | from encord import EncordUserClient, Project 6 | 7 | # User input 8 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 9 | PROJECT_ID = "f7890e41-6de8-4e66-be06-9fbe182df457" 10 | DATASET_ID_01 = "6ffd7c78-f585-434b-8897-1178b147aeaa" 11 | DATASET_ID_02 = "4100d33d-e109-4e53-9c84-ba4b6bdfea79" 12 | DATASET_ID_03 = "e0623a14-a8fd-4ed3-b491-8f785a2ba28d" 13 | 14 | # Create user client using SSH key 15 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 16 | ssh_private_key_path=SSH_PATH, 17 | # For US platform users use "https://api.us.encord.com" 18 | domain="https://api.encord.com", 19 | ) 20 | 21 | # Open the project you want to work on by specifying the Project ID 22 | project = user_client.get_project(PROJECT_ID) 23 | 24 | remove_these_datasets = project.remove_datasets( 25 | [ 26 | DATASET_ID_01, 27 | DATASET_ID_02, 28 | DATASET_ID_03, 29 | # ... 30 | ] 31 | ) 32 | print(f"All Datasets in the Project= {project.list_datasets()}") 33 | -------------------------------------------------------------------------------- /tests/docs/project_get_details.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: View Project details 3 | """ 4 | 5 | # Import dependencies 6 | from encord import EncordUserClient 7 | 8 | # User input 9 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 10 | PROJECT_ID = "f7890e41-6de8-4e66-be06-9fbe182df457" 11 | 12 | # Create user client using SSH key 13 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 14 | ssh_private_key_path=SSH_PATH, 15 | # For US platform users use "https://api.us.encord.com" 16 | domain="https://api.encord.com", 17 | ) 18 | 19 | # Open the project you want to work on by specifying the Project ID 20 | project = user_client.get_project(PROJECT_ID) 21 | 22 | # Prints relevant Project information 23 | print(f"Project Title: {project.title}") 24 | print(f"Description: {project.description}") 25 | print(f"Created at: {project.created_at}") 26 | print(f"Ontology ID: {project.ontology_hash}") 27 | print(f"Datasets: {project.list_datasets()}") 28 | print(f"Workflow: {project.workflow.stages}") 29 | -------------------------------------------------------------------------------- /tests/docs/project_label_logs_get_logs.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: View label logs 3 | """ 4 | 5 | # Import dependencies 6 | from encord import EncordUserClient 7 | 8 | # User input 9 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 10 | PROJECT_ID = "f7890e41-6de8-4e66-be06-9fbe182df457" 11 | DATA_UNIT_ID = "1041da61-d63c-4489-9001-4a56fe37f1f3" # The unique identifier (data_hash) for the data unit 12 | 13 | 14 | # Create user client using SSH key 15 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 16 | ssh_private_key_path=SSH_PATH, 17 | # For US platform users use "https://api.us.encord.com" 18 | domain="https://api.encord.com", 19 | ) 20 | 21 | # Open the project you want to work on by specifying the Project ID 22 | project = user_client.get_project(PROJECT_ID) 23 | 24 | # Check 
if the method get_label_logs exists on the project 25 | if hasattr(project, "get_label_logs"): 26 | try: 27 | # Fetch the logs for the given DATA_UNIT_ID 28 | logs = project.get_label_logs(data_hash=DATA_UNIT_ID) 29 | 30 | # Check if logs were returned 31 | if logs: 32 | for log in logs: 33 | print(log) 34 | break # print the first log only 35 | else: 36 | print("No logs found for this data unit.") 37 | except Exception as e: 38 | print(f"An error occurred while fetching logs: {e}") 39 | else: 40 | print("The project does not have a method to fetch label logs.") 41 | -------------------------------------------------------------------------------- /tests/docs/project_list_all_projects.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: List all Projects 3 | """ 4 | 5 | # Import dependencies 6 | from encord.user_client import EncordUserClient 7 | 8 | # User input 9 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 10 | 11 | # Create user client using SSH key 12 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 13 | ssh_private_key_path=SSH_PATH, 14 | # For US platform users use "https://api.us.encord.com" 15 | domain="https://api.encord.com", 16 | ) 17 | 18 | # List existing projects 19 | projects = user_client.get_projects() 20 | print(projects) 21 | -------------------------------------------------------------------------------- /tests/docs/project_merge_project_global.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: Merge Projects - Global 3 | """ 4 | 5 | # Import dependencies 6 | from tqdm import tqdm 7 | 8 | from encord import EncordUserClient 9 | 10 | # Instantiate the client. Replace the empty ssh_private_key_path string with the path to the file containing your private key 11 | user_client = EncordUserClient.create_with_ssh_private_key(ssh_private_key_path="") 12 | 13 | # Specify Projects to merge 14 | project_hashes_to_merge = ["", ""] # Include as many Projects as you need 15 | 16 | 17 | # Create target Project 18 | def create_target_project(user_client, project_hashes_to_merge): 19 | dataset_hashes = set() 20 | ontology_hash = "" 21 | 22 | for p_hash in project_hashes_to_merge: 23 | p = user_client.get_project(p_hash) 24 | new_dataset_hashes = {x["dataset_hash"] for x in p.datasets} 25 | if new_dataset_hashes.intersection(dataset_hashes): 26 | raise Exception("Source projects should not share datasets!") 27 | dataset_hashes.update(new_dataset_hashes) 28 | 29 | if not ontology_hash: 30 | ontology_hash = p.ontology_hash 31 | elif ontology_hash != p.ontology_hash: 32 | raise Exception( 33 | "All projects must share the same ontology but " 34 | f"https://app.encord.com/projects/view/{p_hash}/summary does not!" 
35 | ) 36 | 37 | project_hash = user_client.create_project( 38 | "Merged Project", 39 | list(dataset_hashes), 40 | f"Merged Projects: {project_hashes_to_merge}", 41 | ontology_hash=ontology_hash, 42 | ) 43 | 44 | return user_client.get_project(project_hash) 45 | 46 | 47 | # Main function 48 | def main(project_hashes_to_merge): 49 | target_project = create_target_project(user_client, project_hashes_to_merge) 50 | 51 | for source_p_hash in project_hashes_to_merge: 52 | print(f"Merging in project {source_p_hash}") 53 | source_project = user_client.get_project(source_p_hash) 54 | for lr_s in tqdm(source_project.list_label_rows_v2()): 55 | matches = target_project.list_label_rows_v2(data_hashes=[lr_s.data_hash]) 56 | if len(matches) != 1: 57 | print(f"Something went wrong, zero or multiple matches found {matches}") 58 | print(lr_s) 59 | assert len(matches) == 1 60 | lr_t = matches[0] 61 | lr_s.initialise_labels() 62 | lr_t.initialise_labels() 63 | 64 | for obj in lr_s.get_object_instances(): 65 | lr_t.add_object_instance(obj.copy()) 66 | 67 | for cl in lr_s.get_classification_instances(): 68 | lr_t.add_classification_instance(cl.copy()) 69 | lr_t.save() 70 | print("Done!") 71 | print(f"Access project on https://app.encord.com/projects/view/{target_project.project_hash}/summary") 72 | 73 | 74 | # Run the main function 75 | if __name__ == "__main__": 76 | main(project_hashes_to_merge) 77 | -------------------------------------------------------------------------------- /tests/docs/project_merge_project_us.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: Merge Projects - US 3 | """ 4 | 5 | # Import dependencies 6 | from tqdm import tqdm 7 | 8 | from encord import EncordUserClient 9 | 10 | # Instantiate the client. Replace the empty ssh_private_key_path string with the path to the file containing your private key 11 | user_client = EncordUserClient.create_with_ssh_private_key(ssh_private_key_path="") 12 | 13 | # Specify Projects to merge 14 | project_hashes_to_merge = ["", ""] # Include as many Projects as you need 15 | 16 | 17 | # Create target Project 18 | def create_target_project(user_client, project_hashes_to_merge): 19 | dataset_hashes = set() 20 | ontology_hash = "" 21 | 22 | for p_hash in project_hashes_to_merge: 23 | p = user_client.get_project(p_hash) 24 | new_dataset_hashes = {x["dataset_hash"] for x in p.datasets} 25 | if new_dataset_hashes.intersection(dataset_hashes): 26 | raise Exception("Source projects should not share datasets!") 27 | dataset_hashes.update(new_dataset_hashes) 28 | 29 | if not ontology_hash: 30 | ontology_hash = p.ontology_hash 31 | elif ontology_hash != p.ontology_hash: 32 | raise Exception( 33 | "All projects must share the same ontology but " 34 | f"https://app.us.encord.com/projects/view/{p_hash}/summary does not!" 
35 | ) 36 | 37 | project_hash = user_client.create_project( 38 | "Merged Project", 39 | list(dataset_hashes), 40 | f"Merged Projects: {project_hashes_to_merge}", 41 | ontology_hash=ontology_hash, 42 | ) 43 | 44 | return user_client.get_project(project_hash) 45 | 46 | 47 | # Main function 48 | def main(project_hashes_to_merge): 49 | target_project = create_target_project(user_client, project_hashes_to_merge) 50 | 51 | for source_p_hash in project_hashes_to_merge: 52 | print(f"Merging in project {source_p_hash}") 53 | source_project = user_client.get_project(source_p_hash) 54 | for lr_s in tqdm(source_project.list_label_rows_v2()): 55 | matches = target_project.list_label_rows_v2(data_hashes=[lr_s.data_hash]) 56 | if len(matches) != 1: 57 | print(f"Something went wrong, zero or multiple matches found {matches}") 58 | print(lr_s) 59 | assert len(matches) == 1 60 | lr_t = matches[0] 61 | lr_s.initialise_labels() 62 | lr_t.initialise_labels() 63 | 64 | for obj in lr_s.get_object_instances(): 65 | lr_t.add_object_instance(obj.copy()) 66 | 67 | for cl in lr_s.get_classification_instances(): 68 | lr_t.add_classification_instance(cl.copy()) 69 | lr_t.save() 70 | print("Done!") 71 | print(f"Access project on https://app.us.encord.com/projects/view/{target_project.project_hash}/summary") 72 | 73 | 74 | # Run the main function 75 | if __name__ == "__main__": 76 | main(project_hashes_to_merge) 77 | -------------------------------------------------------------------------------- /tests/docs/project_move_all_tasks_to_complete.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: Move all tasks to COMPLETE 3 | """ 4 | 5 | # Import dependencies 6 | from encord.user_client import EncordUserClient 7 | 8 | # User input 9 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 10 | PROJECT_ID = "d59828bb-d60f-4a66-b4b0-5681c5684d5d" 11 | 12 | # Create user client using SSH key 13 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 14 | ssh_private_key_path=SSH_PATH, 15 | # For US platform users use "https://api.us.encord.com" 16 | domain="https://api.encord.com", 17 | ) 18 | 19 | # Open the project you want to work on by specifying the Project ID 20 | project = user_client.get_project(PROJECT_ID) 21 | 22 | # Create a bundle 23 | with project.create_bundle() as bundle: 24 | # Move all tasks into the final stage of the Workflow 25 | for label_row in project.list_label_rows_v2(): 26 | label_row.workflow_complete(bundle=bundle) 27 | -------------------------------------------------------------------------------- /tests/docs/project_move_tasks_to_next_stage.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: Move tasks to the next stage 3 | """ 4 | 5 | # Import dependencies 6 | from encord import EncordUserClient, Project 7 | from encord.workflow import AnnotationStage 8 | 9 | # User input 10 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 11 | PROJECT_ID = "4b8756eb-eecb-415f-a212-4fb57c95b218" 12 | WORKFLOW_STAGE_NAME = "Annotate 1" 13 | WORKFLOW_STAGE_TYPE = AnnotationStage 14 | BUNDLE_SIZE = 100 # You can adjust this value as needed, but keep it <= 1000 15 | 16 | # Create user client using SSH key 17 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 18 | ssh_private_key_path=SSH_PATH, 19 | # For US platform users use "https://api.us.encord.com" 20 | domain="https://api.encord.com", 21 | ) 22 | 23 | # Open the project you want to 
work on by specifying the Project ID 24 | project = user_client.get_project(PROJECT_ID) 25 | 26 | # Get the specific stage (in this case, "Annotate 1") 27 | stage = project.workflow.get_stage(name=WORKFLOW_STAGE_NAME, type_=WORKFLOW_STAGE_TYPE) 28 | 29 | # Create a bundle and move tasks 30 | with project.create_bundle(bundle_size=BUNDLE_SIZE) as bundle: 31 | for task in stage.get_tasks(): 32 | # The task is submitted as the user who is currently assigned to the task. 33 | # With retain_assignee=True an error occurs if there are tasks without an assignee. 34 | task.submit(retain_assignee=True, bundle=bundle) 35 | print(f"Task: {task}") 36 | 37 | print("All tasks have been processed and moved to the next stage.") 38 | -------------------------------------------------------------------------------- /tests/docs/project_reopen_and_list_all_tasks.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: Reopen and list all tasks 3 | """ 4 | 5 | import json 6 | 7 | from encord.user_client import EncordUserClient 8 | 9 | # User input 10 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 11 | PROJECT_ID = "4b8756eb-eecb-415f-a212-4fb57c95b218" 12 | OUTPUT_FILE_PATH = "/Users/chris-encord/all-tasks-output.json" # Specify the path where you want to save the JSON 13 | 14 | # Create user client using SSH key 15 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 16 | ssh_private_key_path=SSH_PATH, 17 | # For US platform users use "https://api.us.encord.com" 18 | domain="https://api.encord.com", 19 | ) 20 | 21 | # Open the project you want to work on by specifying the Project ID 22 | project = user_client.get_project(PROJECT_ID) 23 | 24 | # Collect data to be saved 25 | output_data = [] 26 | 27 | # Create a bundle and automatically execute everything attached to the bundle 28 | with project.create_bundle() as bundle: 29 | # Return all data units in the task back to the first Workflow stage 30 | for label_row in project.list_label_rows_v2(): 31 | label_row.workflow_reopen(bundle=bundle) 32 | # Append the label row data to output_data 33 | output_data.append({"label_row_id": label_row.label_hash, "data": label_row.data_title}) 34 | 35 | # Save output data to JSON file 36 | with open(OUTPUT_FILE_PATH, "w") as json_file: 37 | json.dump(output_data, json_file, indent=4) 38 | 39 | print(f"Data successfully saved to {OUTPUT_FILE_PATH}") 40 | -------------------------------------------------------------------------------- /tests/docs/project_set_priority_on_tasks.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: Set Priority on tasks 3 | """ 4 | 5 | # Import dependencies 6 | from encord.user_client import EncordUserClient 7 | 8 | # User input 9 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 10 | PROJECT_ID = "4b8756eb-eecb-415f-a212-4fb57c95b218" 11 | BUNDLE_SIZE = 100 # You can adjust this value as needed, but keep it <= 1000 12 | 13 | # Create user client using SSH key 14 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 15 | ssh_private_key_path=SSH_PATH, 16 | # For US platform users use "https://api.us.encord.com" 17 | domain="https://api.encord.com", 18 | ) 19 | 20 | # Open the project you want to work on by specifying the Project ID 21 | project = user_client.get_project(PROJECT_ID) 22 | 23 | # Create a bundle 24 | with project.create_bundle(bundle_size=BUNDLE_SIZE) as bundle: 25 | # Set the priority of all tasks in 
the project to 0.88 26 | for label_row in project.list_label_rows_v2(): 27 | label_row.set_priority(0.88, bundle=bundle) 28 | 29 | print("All task priorities changed.") 30 | -------------------------------------------------------------------------------- /tests/docs/project_users_add.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: Add users to Project 3 | """ 4 | 5 | # Import dependencies 6 | from encord import EncordUserClient, Project 7 | from encord.utilities.project_user import ProjectUserRole 8 | 9 | # User input 10 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 11 | PROJECT_ID = "f7890e41-6de8-4e66-be06-9fbe182df457" 12 | USER_01 = "example-user-01@encord.com" # Email address for the user you want to add to the Project 13 | USER_02 = "example-user-02@encord.com" # Email address for the user you want to add to the Project 14 | USER_03 = "example-user-03@encord.com" # Email address for the user you want to add to the Project 15 | 16 | # Create user client using SSH key 17 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 18 | ssh_private_key_path=SSH_PATH, 19 | # For US platform users use "https://api.us.encord.com" 20 | domain="https://api.encord.com", 21 | ) 22 | 23 | # Open the project you want to work on by specifying the Project ID 24 | project = user_client.get_project(PROJECT_ID) 25 | 26 | # Add users by specifying their email addresses, as well as the role these users should have. 27 | added_users = project.add_users( 28 | [USER_01, USER_02, USER_03], 29 | # Specify the role the users have in the Project 30 | ProjectUserRole.ANNOTATOR, 31 | ) 32 | 33 | # Print the new users added to the project 34 | print(added_users) 35 | -------------------------------------------------------------------------------- /tests/docs/project_verify_file_locations.py: -------------------------------------------------------------------------------- 1 | """ 2 | Code Block Name: Verify where Project files are stored 3 | """ 4 | 5 | import json 6 | 7 | from encord import EncordUserClient 8 | 9 | # User input 10 | SSH_PATH = "/Users/chris-encord/ssh-private-key.txt" 11 | PROJECT_ID = "d59828bb-d60f-4a66-b4b0-5681c5684d5d" 12 | OUTPUT_FILE_PATH = "/Users/chris-encord/file-locations-output.json" # Specify the path where you want to save the JSON 13 | 14 | # Create user client using SSH key 15 | user_client: EncordUserClient = EncordUserClient.create_with_ssh_private_key( 16 | ssh_private_key_path=SSH_PATH, 17 | # For US platform users use "https://api.us.encord.com" 18 | domain="https://api.encord.com", 19 | ) 20 | 21 | # Specify Project. Replace with the ID of the Project whose file locations you want to verify. 
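# A rough sketch of what the loop further down produces: get_data returns the
# data unit's video/image records, and with get_signed_url=True each record
# should carry a signed link to the underlying file, which is what lets this
# script report where the files are stored.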
22 | project = user_client.get_project(PROJECT_ID) 23 | 24 | # Collect data to be saved 25 | output_data = [] 26 | 27 | # Retrieve and store the label rows 28 | for log_line in project.list_label_rows_v2(): 29 | data_list = project.get_data(log_line.data_hash, get_signed_url=True) 30 | output_data.append(data_list) 31 | 32 | # Save output data to JSON file 33 | with open(OUTPUT_FILE_PATH, "w") as json_file: 34 | json.dump(output_data, json_file, indent=4) 35 | 36 | print(f"Data successfully saved to {OUTPUT_FILE_PATH}") 37 | -------------------------------------------------------------------------------- /tests/fixtures.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | from datetime import datetime 3 | from unittest.mock import MagicMock, patch 4 | 5 | import pytest 6 | from cryptography.hazmat.primitives import serialization 7 | from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey 8 | 9 | from encord import EncordUserClient, Project 10 | from encord.client import EncordClientProject 11 | from encord.ontology import Ontology 12 | from encord.orm.ontology import Ontology as OrmOntology 13 | from encord.orm.project import ProjectDTO, ProjectStatus, ProjectType 14 | from tests.test_data.ontology_blurb import ONTOLOGY_BLURB 15 | 16 | PRIVATE_KEY = Ed25519PrivateKey.generate() 17 | 18 | PRIVATE_KEY_PEM = PRIVATE_KEY.private_bytes( 19 | encoding=serialization.Encoding.PEM, 20 | format=serialization.PrivateFormat.OpenSSH, 21 | encryption_algorithm=serialization.NoEncryption(), 22 | ).decode("utf-8") 23 | 24 | 25 | @pytest.fixture 26 | def ontology() -> Ontology: 27 | return Ontology(OrmOntology.from_dict(ONTOLOGY_BLURB), MagicMock()) 28 | 29 | 30 | @pytest.fixture 31 | def user_client() -> EncordUserClient: 32 | return EncordUserClient.create_with_ssh_private_key(PRIVATE_KEY_PEM) 33 | 34 | 35 | @pytest.fixture 36 | @patch.object(EncordClientProject, "get_project_v2") 37 | @patch.object(EncordUserClient, "get_ontology") 38 | def project( 39 | client_ontology_mock: MagicMock, 40 | client_project_mock: MagicMock, 41 | user_client: EncordUserClient, 42 | ontology: Ontology, 43 | ) -> Project: 44 | client_ontology_mock.return_value = ontology 45 | 46 | client_project_mock.return_value = ProjectDTO( 47 | project_hash=uuid.uuid4(), 48 | project_type=ProjectType.MANUAL_QA, 49 | status=ProjectStatus.IN_PROGRESS, 50 | title="Dummy project", 51 | description="", 52 | created_at=datetime.now(), 53 | last_edited_at=datetime.now(), 54 | ontology_hash="dummy-ontology-hash", 55 | editor_ontology=ONTOLOGY_BLURB, 56 | workflow=None, 57 | ) 58 | 59 | return user_client.get_project("dummy-project-hash") 60 | -------------------------------------------------------------------------------- /tests/http/test_api_v2_error_handling.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import MagicMock, patch 2 | 3 | import pytest 4 | from requests import Response, Session 5 | 6 | from encord.configs import ENCORD_DOMAIN, SshConfig 7 | from encord.exceptions import ( 8 | AuthenticationError, 9 | AuthorisationError, 10 | EncordException, 11 | InvalidArgumentsError, 12 | ResourceNotFoundError, 13 | SshKeyNotFound, 14 | UnknownException, 15 | ) 16 | from encord.http.v2.api_client import ApiClient 17 | from encord.orm.analytics import CollaboratorTimer 18 | from tests.fixtures import PRIVATE_KEY 19 | 20 | 21 | @pytest.fixture 22 | def api_client(): 23 | return 
ApiClient(config=SshConfig(PRIVATE_KEY)) 24 | 25 | 26 | @patch.object(Session, "send") 27 | def test_response_mapping_status_codes_to_exception_type_500(send: MagicMock, api_client: ApiClient): 28 | res = Response() 29 | res.status_code = 500 30 | send.return_value = res 31 | 32 | with pytest.raises(UnknownException): 33 | api_client.get("/", params=None, result_type=CollaboratorTimer) 34 | 35 | 36 | @patch.object(Session, "send") 37 | def test_response_mapping_status_codes_to_exception_type_401(send: MagicMock, api_client: ApiClient): 38 | res = Response() 39 | res.status_code = 401 40 | send.return_value = res 41 | 42 | with pytest.raises(AuthenticationError): 43 | api_client.get("/", params=None, result_type=CollaboratorTimer) 44 | 45 | 46 | @patch.object(Session, "send") 47 | def test_response_mapping_status_codes_to_exception_type_403(send: MagicMock, api_client: ApiClient): 48 | res = Response() 49 | res.status_code = 403 50 | send.return_value = res 51 | 52 | with pytest.raises(AuthorisationError): 53 | api_client.get("/", params=None, result_type=CollaboratorTimer) 54 | 55 | 56 | @patch.object(Session, "send") 57 | def test_response_mapping_status_codes_to_exception_type_404(send: MagicMock, api_client: ApiClient): 58 | res = Response() 59 | res.status_code = 404 60 | send.return_value = res 61 | 62 | with pytest.raises(ResourceNotFoundError): 63 | api_client.get("/", params=None, result_type=CollaboratorTimer) 64 | 65 | 66 | @patch.object(Session, "send") 67 | def test_response_mapping_status_codes_to_exception_type_unknown(send: MagicMock, api_client: ApiClient): 68 | res = Response() 69 | res.status_code = 66 70 | send.return_value = res 71 | 72 | with pytest.raises(UnknownException): 73 | api_client.get("/", params=None, result_type=CollaboratorTimer) 74 | 75 | 76 | @patch.object(Session, "send") 77 | def test_response_error_message_including_domain(send: MagicMock, api_client: ApiClient): 78 | res = Response() 79 | res.status_code = 400 80 | send.return_value = res 81 | 82 | with pytest.raises(EncordException) as e_info: 83 | api_client.get("/", params=None, result_type=CollaboratorTimer) 84 | assert ENCORD_DOMAIN in str(e_info.value) 85 | -------------------------------------------------------------------------------- /tests/http/test_error_handling.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timezone 2 | from unittest.mock import MagicMock, patch 3 | 4 | import pytest 5 | from requests import Response, Session 6 | 7 | from encord.configs import ENCORD_DOMAIN, SshConfig 8 | from encord.exceptions import EncordException, SshKeyNotFound 9 | from encord.http.querier import HEADER_CLOUD_TRACE_CONTEXT, Querier 10 | from tests.fixtures import PRIVATE_KEY 11 | 12 | 13 | @pytest.fixture 14 | def querier(): 15 | return Querier(config=SshConfig(PRIVATE_KEY)) 16 | 17 | 18 | @patch.object(Session, "send") 19 | @patch("encord.exceptions.datetime", wraps=datetime) 20 | def test_failed_http_request_prints_out_trace_id(dt: MagicMock, send: MagicMock, querier): 21 | fake_time = datetime(2023, 1, 1, 0, 0, 0, 0, tzinfo=timezone.utc) 22 | dt.now.return_value = fake_time 23 | 24 | res = Response() 25 | res.status_code = 500 26 | send.return_value = res 27 | 28 | try: 29 | querier.basic_getter(object) 30 | assert False, "Should never get here, previous line is expected to raise" 31 | except EncordException as e: 32 | trace_header = send.call_args_list[0].args[0].headers[HEADER_CLOUD_TRACE_CONTEXT] 33 | trace_id, span_id = 
trace_header.split(";")[0].split("/") 34 | 35 | assert ( 36 | f"Error parsing JSON response: timestamp='2023-01-01T00:00:00+00:00' trace_id='{trace_id}' span_id='{span_id}'" 37 | in str(e) 38 | ) 39 | 40 | 41 | @patch.object(Session, "send") 42 | def test_response_error_message_including_domain(send: MagicMock, querier: Querier): 43 | res = Response() 44 | res.status_code = 400 45 | res._content = b'{"status":400,"response":["SSH_KEY_NOT_FOUND_ERROR"],"payload":"Your used SSH key does not exist. Please add this SSH key to your user profile."}' 46 | send.return_value = res 47 | 48 | with pytest.raises(SshKeyNotFound) as e_info: 49 | querier.basic_getter(object) 50 | assert ENCORD_DOMAIN in str(e_info.value) 51 | -------------------------------------------------------------------------------- /tests/http/test_payload_deserialisation.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import MagicMock, patch 2 | 3 | import pytest 4 | from requests import Response, Session 5 | 6 | from encord.configs import SshConfig 7 | from encord.exceptions import EncordException 8 | from encord.http.v2.api_client import ApiClient 9 | from encord.orm.analytics import CollaboratorTimer 10 | from tests.fixtures import PRIVATE_KEY 11 | 12 | 13 | @pytest.fixture 14 | def api_client(): 15 | return ApiClient(config=SshConfig(PRIVATE_KEY)) 16 | 17 | 18 | @patch.object(Session, "send") 19 | def test_deserialise_payload_raises_on_wrong_payload(send: MagicMock, api_client: ApiClient): 20 | res = Response() 21 | res.status_code = 200 22 | res._content = b'{ "some_wrong_key": "some_wrong_value" }' 23 | send.return_value = res 24 | 25 | with pytest.raises(EncordException): 26 | api_client.get("/", params=None, result_type=CollaboratorTimer) 27 | 28 | 29 | @patch.object(Session, "send") 30 | def test_deserialise_payload_ok_on_correct_payload(send: MagicMock, api_client: ApiClient): 31 | res = Response() 32 | res.status_code = 200 33 | res._content = b""" 34 | { 35 | "user_email": "noone@nowhere.com", 36 | "user_role": 0, 37 | "data_title": "some title", 38 | "time_seconds": 123.456 39 | } 40 | """ 41 | send.return_value = res 42 | 43 | res = api_client.get("/", params=None, result_type=CollaboratorTimer) 44 | 45 | assert isinstance(res, CollaboratorTimer) 46 | assert res == CollaboratorTimer( 47 | user_email="noone@nowhere.com", user_role=0, data_title="some title", time_seconds=123.456 48 | ) 49 | 50 | 51 | @patch.object(Session, "send") 52 | def test_deserialise_payload_ok_on_extra_keys(send: MagicMock, api_client: ApiClient): 53 | res = Response() 54 | res.status_code = 200 55 | res._content = b""" 56 | { 57 | "user_email": "noone@nowhere.com", 58 | "user_role": 0, 59 | "data_title": "some title", 60 | "time_seconds": 123.456, 61 | "extra_key": "extra_value" 62 | } 63 | """ 64 | send.return_value = res 65 | 66 | res = api_client.get("/", params=None, result_type=CollaboratorTimer) 67 | 68 | assert isinstance(res, CollaboratorTimer) 69 | assert res == CollaboratorTimer( 70 | user_email="noone@nowhere.com", user_role=0, data_title="some title", time_seconds=123.456 71 | ) 72 | -------------------------------------------------------------------------------- /tests/objects/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/encord-team/encord-client-python/0245e717f07016e69ec062a08797d85026c2febb/tests/objects/__init__.py 
-------------------------------------------------------------------------------- /tests/objects/common.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | from encord.orm.label_row import AnnotationTaskStatus, LabelRowMetadata, LabelStatus 4 | 5 | FAKE_LABEL_ROW_METADATA = LabelRowMetadata( 6 | label_hash="", 7 | branch_name="main", 8 | created_at=datetime.datetime.now(), 9 | last_edited_at=datetime.datetime.now(), 10 | data_hash="", 11 | data_title="", 12 | data_type="VIDEO", 13 | data_link="", 14 | dataset_hash="", 15 | dataset_title="", 16 | label_status=LabelStatus.NOT_LABELLED, 17 | annotation_task_status=AnnotationTaskStatus.QUEUED, 18 | workflow_graph_node=None, 19 | is_shadow_data=False, 20 | duration=100, 21 | frames_per_second=25, 22 | number_of_frames=100 * 25, 23 | height=100, 24 | width=10, 25 | audio_codec=None, 26 | audio_bit_depth=None, 27 | audio_num_channels=None, 28 | audio_sample_rate=None, 29 | ) 30 | -------------------------------------------------------------------------------- /tests/objects/data/empty_video.py: -------------------------------------------------------------------------------- 1 | labels = { 2 | "label_hash": "28f0e9d2-51e0-459d-8ffa-2e214da653a9", 3 | "branch_name": "main", 4 | "created_at": "2023-02-09 14:12:03", 5 | "last_edited_at": "2023-02-09 14:12:03", 6 | "data_hash": "cd57cf5c-2541-4a46-a836-444540ee987a", 7 | "dataset_hash": "b0d93919-a5e8-4418-8dd5-2c51e3977de8", 8 | "dataset_title": "Dataset with 2 frame video", 9 | "data_title": "two-frame-video.mp4", 10 | "data_type": "video", 11 | "annotation_task_status": "QUEUED", 12 | "is_shadow_data": False, 13 | "object_answers": {}, 14 | "classification_answers": {}, 15 | "object_actions": {}, 16 | "label_status": "LABEL_IN_PROGRESS", 17 | "data_units": { 18 | "cd57cf5c-2541-4a46-a836-444540ee987a": { 19 | "data_hash": "cd57cf5c-2541-4a46-a836-444540ee987a", 20 | "data_title": "two-frame-video.mp4", 21 | "data_link": "cord-videos-dev/lFW59RQ9jcT4vHZeG14m8QWJKug1/cd57cf5c-2541-4a46-a836-444540ee987a", 22 | "data_type": "video/mp4", 23 | "data_sequence": 0, 24 | "width": 1200, 25 | "height": 924, 26 | "labels": {}, 27 | "data_duration": 0.08, 28 | "data_fps": 25.0, 29 | } 30 | }, 31 | } 32 | -------------------------------------------------------------------------------- /tests/objects/data/native_image_data_classification_with_no_answer.py: -------------------------------------------------------------------------------- 1 | labels = { 2 | "label_hash": "66454013-7382-4418-a9ca-41c089d8997f", 3 | "branch_name": "main", 4 | "created_at": "2023-02-09 14:12:03", 5 | "last_edited_at": "2023-02-09 14:12:03", 6 | "data_hash": "aaa6bc82-9f89-4545-adbb-f271bf28cf99", 7 | "annotation_task_status": "QUEUED", 8 | "is_shadow_data": False, 9 | "dataset_hash": "77f9c59a-89d3-4635-a20e-5da9a01d5138", 10 | "dataset_title": "Dataset with one single image annotated", 11 | "data_title": "rousseau_2.jpg", 12 | "data_type": "image", 13 | "data_units": { 14 | "649aba74-365b-45fa-a375-3036ee0eda78": { 15 | "data_hash": "649aba74-365b-45fa-a375-3036ee0eda78", 16 | "data_title": "rousseau_2.jpg", 17 | "data_type": "image/jpeg", 18 | "data_sequence": 0, 19 | "labels": { 20 | "objects": [], 21 | "classifications": [ 22 | { 23 | "name": "Radio classification 1", 24 | "value": "radio_classification_1", 25 | "createdAt": "Tue, 17 Jan 2023 11:44:53 GMT", 26 | "createdBy": "denis@cord.tech", 27 | "confidence": 1, 28 | "featureHash": "NzIxNTU1", 29 | "lastEditedAt": 
"Tue, 17 Jan 2023 11:44:53 GMT", 30 | "classificationHash": "3AqiIPrF", 31 | "manualAnnotation": True, 32 | }, 33 | { 34 | "name": "Text classification", 35 | "value": "text_classification", 36 | "createdAt": "Tue, 17 Jan 2023 11:45:01 GMT", 37 | "createdBy": "denis@cord.tech", 38 | "confidence": 1, 39 | "featureHash": "jPOcEsbw", 40 | "lastEditedAt": "Tue, 17 Jan 2023 11:45:01 GMT", 41 | "classificationHash": "HVwOV4hB", 42 | "manualAnnotation": True, 43 | }, 44 | { 45 | "name": "Checklist classification", 46 | "value": "checklist_classification", 47 | "createdAt": "Tue, 17 Jan 2023 11:45:07 GMT", 48 | "createdBy": "denis@cord.tech", 49 | "confidence": 1, 50 | "featureHash": "3DuQbFxo", 51 | "lastEditedAt": "Tue, 17 Jan 2023 11:45:07 GMT", 52 | "classificationHash": "37vDK5u0", 53 | "manualAnnotation": True, 54 | }, 55 | ], 56 | }, 57 | "data_link": "cord-images-dev/lFW59RQ9jcT4vHZeG14m8QWJKug1/649aba74-365b-45fa-a375-3036ee0eda78", 58 | "width": 1200, 59 | "height": 924, 60 | } 61 | }, 62 | "object_answers": {}, 63 | "classification_answers": {}, 64 | "object_actions": {}, 65 | "label_status": "LABEL_IN_PROGRESS", 66 | } 67 | -------------------------------------------------------------------------------- /tests/objects/data/ontology_with_many_dynamic_classifications.py: -------------------------------------------------------------------------------- 1 | ontology = { 2 | "objects": [ 3 | { 4 | "id": "1", 5 | "name": "box", 6 | "color": "#D33115", 7 | "shape": "bounding_box", 8 | "featureNodeHash": "LJNV5PVK", 9 | "attributes": [ 10 | { 11 | "id": "1.1", 12 | "name": "dynamic text", 13 | "type": "text", 14 | "featureNodeHash": "5RGh3YIU", 15 | "required": False, 16 | "dynamic": True, 17 | }, 18 | { 19 | "id": "1.2", 20 | "name": "dynamic classification", 21 | "type": "checklist", 22 | "featureNodeHash": "ZJoiBceM", 23 | "required": False, 24 | "dynamic": True, 25 | "options": [ 26 | { 27 | "id": "1.2.1", 28 | "label": "classification 1", 29 | "value": "classification_1", 30 | "featureNodeHash": "aOQGJvce", 31 | }, 32 | { 33 | "id": "1.2.2", 34 | "label": "classification 2", 35 | "value": "classification_2", 36 | "featureNodeHash": "tyjScPrk", 37 | }, 38 | ], 39 | }, 40 | { 41 | "id": "1.3", 42 | "name": "dynamic radio", 43 | "type": "radio", 44 | "featureNodeHash": "OMpf1zPf", 45 | "required": False, 46 | "dynamic": True, 47 | "options": [ 48 | {"id": "1.3.1", "label": "radio 1", "value": "radio_1", "featureNodeHash": "ps0dwXfI"}, 49 | {"id": "1.3.2", "label": "radio 2", "value": "radio_2", "featureNodeHash": "IwCHSdeL"}, 50 | ], 51 | }, 52 | ], 53 | }, 54 | { 55 | "id": "2", 56 | "name": "box 2", 57 | "color": "#E27300", 58 | "shape": "bounding_box", 59 | "featureNodeHash": "GaAKWvvg", 60 | "attributes": [ 61 | { 62 | "id": "2.1", 63 | "name": "radio 2", 64 | "type": "radio", 65 | "featureNodeHash": "TWPByH4G", 66 | "required": False, 67 | "dynamic": True, 68 | "options": [ 69 | {"id": "2.1.1", "label": "radio 2 1", "value": "radio_2_1", "featureNodeHash": "zhrqAYjQ"}, 70 | {"id": "2.1.2", "label": "radio 2 2", "value": "radio_2_2", "featureNodeHash": "J/I/karX"}, 71 | ], 72 | } 73 | ], 74 | }, 75 | ], 76 | "classifications": [], 77 | } 78 | -------------------------------------------------------------------------------- /tests/objects/test_coordinates.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from copy import copy 3 | 4 | import pytest 5 | 6 | from encord.exceptions import LabelRowError 7 | from encord.objects import 
Shape 8 | from encord.objects.coordinates import ACCEPTABLE_COORDINATES_FOR_ONTOLOGY_ITEMS, PointCoordinate, PolygonCoordinates 9 | 10 | 11 | def test_acceptable_coordinates_for_ontology_items() -> None: 12 | all_mappings = copy(ACCEPTABLE_COORDINATES_FOR_ONTOLOGY_ITEMS) 13 | for shape in Shape: 14 | assert shape in all_mappings 15 | del all_mappings[shape] 16 | assert not all_mappings 17 | 18 | 19 | def test_polygon_coordinates(caplog): 20 | c1 = PolygonCoordinates(values=[PointCoordinate(x=0, y=0), PointCoordinate(x=1, y=1)]) 21 | assert len(c1.polygons) == 1 # 1 polygon 22 | assert len(c1.polygons[0]) == 1 # 1 ring 23 | assert c1.polygons[0][0] == [PointCoordinate(x=0, y=0), PointCoordinate(x=1, y=1)] 24 | 25 | c2 = PolygonCoordinates( 26 | polygons=[ 27 | [[PointCoordinate(x=0, y=0), PointCoordinate(x=1, y=1)]], 28 | [[PointCoordinate(x=2, y=2), PointCoordinate(x=3, y=3)]], 29 | ] 30 | ) 31 | assert len(c2.polygons) == 2 # 2 polygons 32 | assert c2.values == [ 33 | PointCoordinate(x=0, y=0), 34 | PointCoordinate(x=1, y=1), 35 | ] # only contains the first polygon, second is ignored 36 | 37 | # inconsistent values being provided should log a warning & set to polygons value 38 | caplog.set_level(logging.WARNING) # Set the log level to capture warnings 39 | c3 = PolygonCoordinates(values=[PointCoordinate(x=0, y=0)], polygons=[[[PointCoordinate(x=1, y=1)]]]) 40 | assert "`values` and `polygons` are not consistent, defaulting to polygons value" in caplog.text 41 | # Assert that the values are now equal based on polygons value 42 | assert c3.values == [PointCoordinate(x=1, y=1)] 43 | 44 | # Not providing either values or polygons should raise an error 45 | with pytest.raises(LabelRowError): 46 | PolygonCoordinates() 47 | 48 | 49 | def test_polygon_coordinates_from_and_to_dict(): 50 | c1 = PolygonCoordinates.from_dict({"polygon": [{"x": 0, "y": 0}, {"x": 1, "y": 1}]}) 51 | assert c1.values == [PointCoordinate(x=0, y=0), PointCoordinate(x=1, y=1)] 52 | assert c1.polygons[0][0] == [PointCoordinate(x=0, y=0), PointCoordinate(x=1, y=1)] 53 | assert c1.to_dict() == {"0": {"x": 0, "y": 0}, "1": {"x": 1, "y": 1}} 54 | assert c1.to_dict("multiple_polygons") == [[[0, 0, 1, 1]]] 55 | 56 | c2 = PolygonCoordinates.from_dict({"polygons": [[[0, 0, 1, 1]], [[2, 2, 3, 3]]]}) 57 | assert c2.values == [PointCoordinate(x=0, y=0), PointCoordinate(x=1, y=1)] 58 | assert len(c2.polygons) == 2 59 | assert c2.polygons[0][0] == [PointCoordinate(x=0, y=0), PointCoordinate(x=1, y=1)] 60 | assert c2.polygons[1][0] == [PointCoordinate(x=2, y=2), PointCoordinate(x=3, y=3)] 61 | assert c2.to_dict("multiple_polygons") == [[[0, 0, 1, 1]], [[2, 2, 3, 3]]] 62 | assert c1.to_dict() == {"0": {"x": 0, "y": 0}, "1": {"x": 1, "y": 1}} 63 | 64 | 65 | def test_polygon_coordinates_from_polygons_list(): 66 | c1 = PolygonCoordinates.from_polygons_list([[[0, 0, 1, 1, 2, 2]], [[3, 3, 4, 4, 5, 5]]]) 67 | assert c1.polygons == [ 68 | [[PointCoordinate(x=0, y=0), PointCoordinate(x=1, y=1), PointCoordinate(x=2, y=2)]], 69 | [[PointCoordinate(x=3, y=3), PointCoordinate(x=4, y=4), PointCoordinate(x=5, y=5)]], 70 | ] 71 | -------------------------------------------------------------------------------- /tests/objects/test_frames.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from encord.objects.frames import Range, frames_class_to_frames_list 4 | 5 | 6 | def test_frames_class_to_frames_list(): 7 | # Single frame 8 | assert frames_class_to_frames_list(1) == [1] 9 | assert 
frames_class_to_frames_list(10) == [10] 10 | 11 | # Range 12 | assert frames_class_to_frames_list(Range(1, 4)) == [1, 2, 3, 4] 13 | assert frames_class_to_frames_list(Range(0, 1)) == [0, 1] 14 | assert frames_class_to_frames_list(Range(4, 4)) == [4] 15 | 16 | # List of Range 17 | list_of_range_1 = [Range(2, 3), Range(3, 5), Range(7, 7)] 18 | assert frames_class_to_frames_list(list_of_range_1) == [2, 3, 4, 5, 7] 19 | list_of_range_2 = [Range(10, 12), Range(3, 5), Range(8, 9), Range(8, 9)] 20 | assert frames_class_to_frames_list(list_of_range_2) == [3, 4, 5, 8, 9, 10, 11, 12] 21 | 22 | # List of integer frame numbers 23 | # Empty List 24 | assert frames_class_to_frames_list([]) == [] 25 | list_of_integers_1 = [4, 5, 6, 24, 60] 26 | assert frames_class_to_frames_list(list_of_integers_1) == list_of_integers_1 27 | assert frames_class_to_frames_list([4]) == [4] 28 | assert frames_class_to_frames_list([9, 8, 7]) == [7, 8, 9] 29 | assert frames_class_to_frames_list([9, 2, 4, 4, 14]) == [2, 4, 9, 14] 30 | 31 | # Exception 32 | with pytest.raises(RuntimeError): 33 | frames_class_to_frames_list([4, Range(4, 6)]) 34 | with pytest.raises(RuntimeError): 35 | frames_class_to_frames_list({3, 5, 7}) 36 | -------------------------------------------------------------------------------- /tests/objects/test_skeleton_template.py: -------------------------------------------------------------------------------- 1 | from deepdiff import DeepDiff 2 | 3 | from encord.objects.skeleton_template import SkeletonTemplate, SkeletonTemplateCoordinate 4 | 5 | SKELETON_TEMPLATE_COORDINATES = [ 6 | SkeletonTemplateCoordinate(x=0, y=0, name="point_0"), 7 | SkeletonTemplateCoordinate(x=1, y=1, name="point_1"), 8 | ] 9 | SKELETON_TEMPLATE_LINE = SkeletonTemplate.from_dict( 10 | { 11 | "name": "Line", 12 | "width": 100, 13 | "height": 100, 14 | "skeleton": {str(i): x for (i, x) in enumerate(SKELETON_TEMPLATE_COORDINATES)}, 15 | "skeletonEdges": {"0": {"1": {"color": "#00000"}}}, 16 | "feature_node_hash": "c67522ee", 17 | "shape": "skeleton", 18 | } 19 | ) 20 | 21 | 22 | def test_skeleton_template_round_trip_internal(): 23 | dict_template = SKELETON_TEMPLATE_LINE.to_dict() 24 | assert SkeletonTemplate.from_dict(dict_template) == SKELETON_TEMPLATE_LINE 25 | 26 | 27 | SKELETON_TEMPLATE_TRIANGLE_JSON = { 28 | "name": "Triangle", 29 | "shape": "skeleton", 30 | "width": 0.235, 31 | "height": 0.25, 32 | "skeleton": { 33 | "0": { 34 | "x": 0.5148689289969273, 35 | "y": 0.5, 36 | "name": "point_0", 37 | "color": "#000000", 38 | "value": "point_0", 39 | "featureHash": "A9sGmBcx", 40 | }, 41 | "1": { 42 | "x": 0.75, 43 | "y": 0.5, 44 | "name": "point_1", 45 | "color": "#000000", 46 | "value": "point_1", 47 | "featureHash": "UWKgC/Dy", 48 | }, 49 | "2": { 50 | "x": 0.675, 51 | "y": 0.25, 52 | "name": "point_2", 53 | "color": "#000000", 54 | "value": "point_2", 55 | "featureHash": "mBt9AAhC", 56 | }, 57 | }, 58 | "skeletonEdges": { 59 | "0": {"1": {"color": "#000000"}}, 60 | "1": {"2": {"color": "#000000"}}, 61 | "2": {"0": {"color": "#000000"}}, 62 | }, 63 | "feature_node_hash": "GSc3nz5D", 64 | } 65 | 66 | 67 | def test_skeleton_template_round_trip_external(): 68 | st = SkeletonTemplate.from_dict(SKELETON_TEMPLATE_TRIANGLE_JSON) 69 | assert not DeepDiff(st.to_dict(), SKELETON_TEMPLATE_TRIANGLE_JSON, ignore_order=True) 70 | -------------------------------------------------------------------------------- /tests/orm/test_create_dataset_response.py: -------------------------------------------------------------------------------- 1 | """Unit 
tests for the dataset class""" 2 | 3 | import uuid 4 | 5 | from encord.orm.dataset import CreateDatasetResponse, StorageLocation 6 | 7 | DATASET_RESPONSE_JSON = { 8 | "title": "CVAT imported baking dataset", 9 | "type": 0, 10 | "dataset_hash": "460505dd-89ea-485a-b4ea-417558a26889", 11 | "backing_folder_uuid": "434df998-3aac-423d-bc29-1af33040e583", 12 | "user_hash": "yiA5JxmLEGSoEcJAuxr3AJdDDXE2", 13 | } 14 | 15 | 16 | def test_create_dataset_response_conversions(): 17 | create_dataset_response = CreateDatasetResponse.from_dict(DATASET_RESPONSE_JSON) 18 | 19 | assert isinstance(create_dataset_response["backing_folder_uuid"], uuid.UUID) 20 | create_dataset_response["backing_folder_uuid"] = str(create_dataset_response["backing_folder_uuid"]) 21 | 22 | assert create_dataset_response == DATASET_RESPONSE_JSON 23 | 24 | 25 | def test_create_dataset_response_fields(): 26 | create_dataset_response = CreateDatasetResponse.from_dict(DATASET_RESPONSE_JSON) 27 | 28 | assert create_dataset_response.title == DATASET_RESPONSE_JSON["title"] 29 | assert create_dataset_response.storage_location == StorageLocation.CORD_STORAGE 30 | assert create_dataset_response.dataset_hash == DATASET_RESPONSE_JSON["dataset_hash"] 31 | assert create_dataset_response.user_hash == DATASET_RESPONSE_JSON["user_hash"] 32 | 33 | 34 | def test_create_dataset_response_setters_and_getters(): 35 | create_dataset_response = CreateDatasetResponse.from_dict(DATASET_RESPONSE_JSON) 36 | title = "New title" 37 | storage_location = StorageLocation.AWS 38 | dataset_hash = "123456" 39 | user_hash = "abcdef" 40 | 41 | create_dataset_response.title = title 42 | create_dataset_response.storage_location = storage_location 43 | create_dataset_response.dataset_hash = dataset_hash 44 | create_dataset_response.user_hash = user_hash 45 | 46 | assert create_dataset_response.title == title 47 | assert create_dataset_response.storage_location == storage_location 48 | assert create_dataset_response.dataset_hash == dataset_hash 49 | assert create_dataset_response.user_hash == user_hash 50 | 51 | 52 | def test_create_dataset_response_backwards_compatibility(): 53 | create_dataset_response = CreateDatasetResponse.from_dict(DATASET_RESPONSE_JSON) 54 | 55 | assert "title" in create_dataset_response 56 | 57 | # all the following ones are available 58 | create_dataset_response.items() 59 | create_dataset_response.keys() 60 | create_dataset_response.values() 61 | 62 | assert create_dataset_response["title"] == DATASET_RESPONSE_JSON["title"] 63 | assert create_dataset_response["type"] == DATASET_RESPONSE_JSON["type"] 64 | assert create_dataset_response["dataset_hash"] == DATASET_RESPONSE_JSON["dataset_hash"] 65 | assert create_dataset_response["user_hash"] == DATASET_RESPONSE_JSON["user_hash"] 66 | -------------------------------------------------------------------------------- /tests/test_analytics.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import MagicMock, patch 2 | 3 | from encord import Project 4 | from encord.common.time_parser import parse_datetime 5 | from encord.http.v2.api_client import ApiClient 6 | from encord.http.v2.payloads import Page 7 | from encord.orm.analytics import ( 8 | CollaboratorTimer, 9 | CollaboratorTimerParams, 10 | CollaboratorTimersGroupBy, 11 | ) 12 | from encord.utilities.project_user import ProjectUserRole 13 | from tests.fixtures import ontology, project, user_client 14 | 15 | assert user_client and project and ontology 16 | 17 | COLLABORATOR_TIMERS_PATH = 
"analytics/collaborators/timers" 18 | 19 | 20 | def construct_timer( 21 | user_email="noone@nowhere.com", user_role=ProjectUserRole.ADMIN, data_title="data title 1", time_seconds=22.0 22 | ) -> CollaboratorTimer: 23 | return CollaboratorTimer( 24 | user_email=user_email, 25 | user_role=user_role, 26 | data_title=data_title, 27 | time_seconds=time_seconds, 28 | ) 29 | 30 | 31 | @patch.object(ApiClient, "get") 32 | def test_project_collaborator_timers_empty_page(api_client_get: MagicMock, project: Project): 33 | after_time = parse_datetime("2023-01-01T21:00:00") 34 | 35 | api_client_get.return_value = Page[CollaboratorTimer](results=[]) 36 | 37 | timers = list(project.list_collaborator_timers(after=after_time)) 38 | 39 | api_client_get.assert_called_once() 40 | assert [] == timers 41 | 42 | 43 | @patch.object(ApiClient, "get") 44 | def test_project_collaborator_timers_single_page(api_client_get: MagicMock, project: Project): 45 | after_time = parse_datetime("2023-01-01T21:00:00") 46 | 47 | return_value = Page[CollaboratorTimer]( 48 | results=[ 49 | construct_timer(data_title="data title 1"), 50 | construct_timer(data_title="data title 2"), 51 | ] 52 | ) 53 | 54 | api_client_get.return_value = return_value 55 | 56 | timers = list(project.list_collaborator_timers(after=after_time)) 57 | api_client_get.assert_called_once_with( 58 | COLLABORATOR_TIMERS_PATH, 59 | params=CollaboratorTimerParams( 60 | project_hash=project.project_hash, 61 | after=after_time, 62 | before=None, 63 | group_by=CollaboratorTimersGroupBy.DATA_UNIT, 64 | page_size=100, 65 | ), 66 | result_type=Page[CollaboratorTimer], 67 | allow_none=False, 68 | ) 69 | 70 | assert return_value.results == timers 71 | 72 | 73 | @patch.object(ApiClient, "get") 74 | def test_project_collaborator_timers_multi_page(api_client_get: MagicMock, project: Project): 75 | after_time = parse_datetime("2023-01-01T21:00:00") 76 | 77 | return_value_page_1 = Page[CollaboratorTimer]( 78 | results=[ 79 | construct_timer(data_title="data title 1"), 80 | construct_timer(data_title="data title 2"), 81 | ], 82 | next_page_token="page-token-1", 83 | ) 84 | 85 | return_value_page_2 = Page[CollaboratorTimer]( 86 | results=[ 87 | construct_timer(data_title="data title 3"), 88 | construct_timer(data_title="data title 4"), 89 | ] 90 | ) 91 | 92 | api_client_get.side_effect = [return_value_page_1, return_value_page_2] 93 | 94 | timers = list(project.list_collaborator_timers(after=after_time)) 95 | 96 | assert api_client_get.call_count == 2 97 | assert timers == return_value_page_1.results + return_value_page_2.results 98 | 99 | # Check that the get method was called with the correct page token 100 | second_call_args, second_call_kwargs = api_client_get.call_args_list[1] 101 | assert second_call_kwargs["params"].page_token == "page-token-1" 102 | -------------------------------------------------------------------------------- /tests/test_data/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/encord-team/encord-client-python/0245e717f07016e69ec062a08797d85026c2febb/tests/test_data/__init__.py -------------------------------------------------------------------------------- /tests/test_data/ontology_blurb.py: -------------------------------------------------------------------------------- 1 | ONTOLOGY_BLURB = { 2 | "ontology_hash": "bab95d1e-3070-48fb-9d40-ad168b55310e", 3 | "title": "Test ontology", 4 | "description": "", 5 | "created_at": "Thu, 18 Feb 2021 22:24:59 GMT", 6 | "last_edited_at": 
"Thu, 18 Feb 2021 22:24:59 GMT", 7 | "editor": { 8 | "objects": [ 9 | { 10 | "id": "1", 11 | "name": "Apple", 12 | "color": "#D33115", 13 | "shape": "bounding_box", 14 | "featureNodeHash": "qWm5E3j4", 15 | "attributes": [ 16 | { 17 | "id": "1.1", 18 | "name": "Big", 19 | "type": "text", 20 | "required": False, 21 | "dynamic": False, 22 | "featureNodeHash": "LNRORV/z", 23 | }, 24 | { 25 | "id": "1.2", 26 | "name": "Small", 27 | "type": "text", 28 | "required": False, 29 | "dynamic": False, 30 | "featureNodeHash": "hQPM/8Ax", 31 | }, 32 | ], 33 | }, 34 | {"id": "2", "name": "Pear", "color": "#E27300", "shape": "bounding_box", "featureNodeHash": "no8RyWRY"}, 35 | ], 36 | "classifications": [ 37 | { 38 | "id": "1", 39 | "featureNodeHash": "4BjoumKd", 40 | "attributes": [ 41 | { 42 | "id": "1.1", 43 | "name": "hot dog", 44 | "type": "radio", 45 | "required": False, 46 | "featureNodeHash": "t/prlg4Z", 47 | "options": [ 48 | {"id": "1.1.1", "label": "hot dog", "value": "hot_dog", "featureNodeHash": "8fjrfCJy"}, 49 | { 50 | "id": "1.1.2", 51 | "label": "not hot dog", 52 | "value": "not_hot_dog", 53 | "featureNodeHash": "MpKwvogL", 54 | }, 55 | ], 56 | } 57 | ], 58 | } 59 | ], 60 | }, 61 | } 62 | -------------------------------------------------------------------------------- /tests/test_label_logs.py: -------------------------------------------------------------------------------- 1 | import json 2 | from datetime import datetime 3 | from typing import Any 4 | from unittest.mock import MagicMock, patch 5 | 6 | import pytest 7 | from deepdiff import DeepDiff 8 | 9 | from encord.common.time_parser import parse_datetime 10 | from encord.http.querier import Querier, RequestContext 11 | from encord.project import Project 12 | from tests.fixtures import ontology, project, user_client 13 | 14 | assert project and user_client and ontology # Need to import all fixtures 15 | 16 | 17 | def get_mocked_answer(payload: Any) -> MagicMock: 18 | mock_response = MagicMock() 19 | mock_response.status_code = 200 20 | mock_response.json.return_value = {"status": 200, "response": payload} 21 | mock_response.content = json.dumps({"status": 200, "response": payload}) 22 | return mock_response 23 | 24 | 25 | @patch.object(Querier, "_execute") 26 | def test_get_label_logs_filter_by_datetime(querier_mock: MagicMock, project: Project): 27 | after_time = parse_datetime("2023-01-01T21:00:00") 28 | before_time = parse_datetime("2023-01-02T21:00:00") 29 | querier_mock.return_value = ([], RequestContext()) 30 | 31 | _ = project.get_label_logs(after=after_time, before=before_time) 32 | 33 | querier_mock.assert_called_once() 34 | 35 | request = querier_mock.call_args[0][0] 36 | request_data = json.loads(request.data) 37 | 38 | assert request_data["query_type"] == "labellog" 39 | assert not DeepDiff( 40 | request_data["values"]["payload"], 41 | { 42 | "start_timestamp": int(after_time.timestamp()), 43 | "end_timestamp": int(before_time.timestamp()), 44 | "include_user_email_and_interface_key": True, 45 | }, 46 | ignore_order=True, 47 | ) 48 | 49 | 50 | @patch.object(Querier, "_execute") 51 | def test_get_label_logs_filter_by_unix_timestamp(querier_mock: MagicMock, project: Project): 52 | after_timestamp = 22 53 | before_timestamp = 33 54 | querier_mock.return_value = ([], RequestContext()) 55 | 56 | _ = project.get_label_logs(from_unix_seconds=after_timestamp, to_unix_seconds=before_timestamp) 57 | 58 | querier_mock.assert_called_once() 59 | 60 | request = querier_mock.call_args[0][0] 61 | request_data = json.loads(request.data) 62 
| 63 | assert request_data["query_type"] == "labellog" 64 | assert not DeepDiff( 65 | request_data["values"]["payload"], 66 | { 67 | "start_timestamp": after_timestamp, 68 | "end_timestamp": before_timestamp, 69 | "include_user_email_and_interface_key": True, 70 | }, 71 | ignore_order=True, 72 | ) 73 | 74 | 75 | @patch.object(Querier, "_execute") 76 | def test_get_label_logs_raises_when_both_time_filter_specified(querier_mock: MagicMock, project: Project): 77 | with pytest.raises(ValueError): 78 | _ = project.get_label_logs(from_unix_seconds=22, after=datetime.now()) 79 | 80 | with pytest.raises(ValueError): 81 | _ = project.get_label_logs(to_unix_seconds=22, before=datetime.now()) 82 | 83 | querier_mock.assert_not_called() 84 | -------------------------------------------------------------------------------- /tests/test_project.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | from unittest.mock import MagicMock, PropertyMock, patch 3 | 4 | from encord.client import EncordClientProject 5 | from encord.http.v2.api_client import ApiClient 6 | from encord.http.v2.payloads import Page 7 | from encord.orm.label_row import LabelRow 8 | from encord.orm.project import Project as OrmProject 9 | from encord.orm.project import ProjectDataset 10 | from encord.project import Project 11 | from tests.fixtures import ontology, project, user_client 12 | 13 | assert user_client and project and ontology 14 | 15 | 16 | UID = "d958ddbb-fcd0-477a-adf9-de14431dbbd2" 17 | 18 | 19 | @patch.object(EncordClientProject, "get_project") 20 | def test_label_rows_property_queries_metadata(project_client_mock: MagicMock, project: Project): 21 | project_current_orm_mock = MagicMock(spec=OrmProject) 22 | type(project_current_orm_mock).label_rows = PropertyMock(return_value=None) 23 | project._project_instance = project_current_orm_mock 24 | 25 | project_orm_mock = MagicMock(spec=OrmProject) 26 | project_client_mock.return_value = project_orm_mock 27 | type(project_orm_mock).label_rows = PropertyMock(return_value=[LabelRow({"data_title": "abc"})]) 28 | 29 | project_client_mock.assert_not_called() 30 | 31 | rows = project.label_rows 32 | 33 | # Expect project data query to happen during the property call 34 | project_client_mock.assert_called_once() 35 | 36 | assert project_client_mock.call_args[1] == {"include_labels_metadata": True} 37 | 38 | assert len(rows) == 1 39 | assert rows[0].data_title == "abc" 40 | 41 | # Expect label rows metadata to be cached, so data query doesn't happen again 42 | _ = project.label_rows 43 | 44 | 45 | @patch.object(ApiClient, "get") 46 | def test_project_datasets(api_get: MagicMock, project: Project) -> None: 47 | dataset_hash = uuid.uuid4() 48 | expected_dataset = ProjectDataset(dataset_hash=dataset_hash, title="test dataset", description="my test dataset") 49 | api_get.return_value = Page(results=[expected_dataset]) 50 | 51 | assert len(list(project.list_datasets())) == 1 52 | assert list(project.list_datasets()) == [expected_dataset] 53 | 54 | # Correctly serialised for legacy endpoint 55 | assert len(project.datasets) == 1 56 | assert project.datasets[0] == { 57 | "dataset_hash": str(dataset_hash), 58 | "title": "test dataset", 59 | "description": "my test dataset", 60 | } 61 | -------------------------------------------------------------------------------- /tests/test_user_client.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | from tempfile import 
TemporaryDirectory 4 | 5 | import pytest 6 | 7 | import encord.exceptions 8 | from encord.configs import _ENCORD_SSH_KEY, _ENCORD_SSH_KEY_FILE 9 | from encord.user_client import EncordUserClient 10 | from tests.fixtures import PRIVATE_KEY_PEM 11 | 12 | 13 | def teardown_function(): 14 | if _ENCORD_SSH_KEY_FILE in os.environ: 15 | del os.environ[_ENCORD_SSH_KEY_FILE] 16 | if _ENCORD_SSH_KEY in os.environ: 17 | del os.environ[_ENCORD_SSH_KEY] 18 | 19 | 20 | def test_initialise_without_env_variables_or_arguments(): 21 | assert _ENCORD_SSH_KEY not in os.environ 22 | assert _ENCORD_SSH_KEY_FILE not in os.environ 23 | with pytest.raises(expected_exception=encord.exceptions.ResourceNotFoundError): 24 | EncordUserClient.create_with_ssh_private_key() 25 | 26 | 27 | def test_initialise_with_wrong_ssh_file_path(): 28 | os.environ[_ENCORD_SSH_KEY_FILE] = "some_wrong/file/path" 29 | with pytest.raises(expected_exception=encord.exceptions.ResourceNotFoundError): 30 | EncordUserClient.create_with_ssh_private_key() 31 | 32 | 33 | def test_initialise_with_correct_ssh_file_path_from_env(): 34 | with TemporaryDirectory() as tmpdir_name: 35 | tmp_dir_path = Path(tmpdir_name) 36 | tmp_key_path = tmp_dir_path / "key" 37 | 38 | with open(tmp_key_path, "w") as f: 39 | f.write(PRIVATE_KEY_PEM) 40 | 41 | os.environ[_ENCORD_SSH_KEY_FILE] = str(tmp_key_path.resolve()) 42 | user_client = EncordUserClient.create_with_ssh_private_key() 43 | assert isinstance(user_client, EncordUserClient) 44 | 45 | 46 | def test_initialise_with_correct_ssh_file_content(): 47 | user_client = EncordUserClient.create_with_ssh_private_key(PRIVATE_KEY_PEM) 48 | assert isinstance(user_client, EncordUserClient) 49 | 50 | 51 | def test_initialise_with_custom_user_agent(): 52 | custom_agent_suffix = "CustomAgentSuffix/1.1.2" 53 | user_client = EncordUserClient.create_with_ssh_private_key(PRIVATE_KEY_PEM, user_agent_suffix=custom_agent_suffix) 54 | assert isinstance(user_client, EncordUserClient) 55 | user_agent_header = user_client._config.config._user_agent() 56 | assert custom_agent_suffix in user_agent_header 57 | 58 | 59 | def test_initialise_with_correct_ssh_file_content_from_env(): 60 | assert _ENCORD_SSH_KEY_FILE not in os.environ 61 | os.environ[_ENCORD_SSH_KEY] = PRIVATE_KEY_PEM 62 | user_client = EncordUserClient.create_with_ssh_private_key() 63 | assert isinstance(user_client, EncordUserClient) 64 | 65 | 66 | def test_initialise_with_wrong_ssh_file_content(): 67 | with pytest.raises(expected_exception=ValueError): 68 | EncordUserClient.create_with_ssh_private_key("Some random content.") 69 | 70 | 71 | def test_initialise_with_wrong_ssh_file_content_from_env(): 72 | assert _ENCORD_SSH_KEY_FILE not in os.environ 73 | os.environ[_ENCORD_SSH_KEY] = "Some random content." 
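# No explicit key argument: the client falls back to the _ENCORD_SSH_KEY value set above, so the malformed content must be rejected at construction time.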
74 | with pytest.raises(expected_exception=ValueError): 75 | EncordUserClient.create_with_ssh_private_key() 76 | -------------------------------------------------------------------------------- /tests/test_version.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | 4 | def test_package_version_available(): 5 | from encord import __version__ 6 | 7 | expected_pattern = re.compile(r"\d+\.\d+\.\d+")  # \d+ so multi-digit components such as "0.10.2" also match 8 | assert expected_pattern.match(__version__) 9 | -------------------------------------------------------------------------------- /tests/utilities/coco/test_datastructure.py: -------------------------------------------------------------------------------- 1 | import copy 2 | from typing import Any 3 | 4 | import numpy as np 5 | import pytest 6 | 7 | from encord.utilities.coco.datastructure import ( 8 | CocoAnnotationModel, 9 | CocoBoundingBox, 10 | CocoPolygon, 11 | CocoRLE, 12 | CocoRootModel, 13 | ) 14 | from tests.utilities.coco.data_test_datastructure import DATA_TEST_DATASTRUCTURE_COCO 15 | 16 | 17 | def get_bbox_from_polygon(polygon: CocoPolygon) -> CocoBoundingBox: 18 | min_x = float("inf") 19 | min_y = float("inf") 20 | max_x = -float("inf") 21 | max_y = -float("inf") 22 | for poly in polygon.values: 23 | for point in poly: 24 | min_x = min(min_x, point.x) 25 | min_y = min(min_y, point.y) 26 | max_x = max(max_x, point.x) 27 | max_y = max(max_y, point.y) 28 | return CocoBoundingBox(x=min_x, y=min_y, w=max_x - min_x, h=max_y - min_y) 29 | 30 | 31 | def polygon_area(polygon: CocoPolygon) -> float: 32 | area = 0.0 33 | for poly in polygon.values: 34 | for i in range(len(poly)): 35 | area += poly[i - 1].x * (poly[i].y - poly[i - 2].y) 36 | return abs(area / 2) 37 | 38 | 39 | def test_coco_annotation_model_with_missing_segmentation_field() -> None: 40 | for ann in copy.deepcopy(DATA_TEST_DATASTRUCTURE_COCO)["annotations"]: 41 | ann.pop("segmentation") 42 | ann_model = CocoAnnotationModel.from_dict(ann) 43 | # Assert the generated segmentation is a single polygon with 4 points 44 | assert isinstance(ann_model.segmentation, CocoPolygon) and len(ann_model.segmentation.values[0]) == 4 45 | # Assert the bounding box containing the generated polygon is the same as the input bounding box 46 | containing_bbox = get_bbox_from_polygon(ann_model.segmentation) 47 | assert np.allclose(containing_bbox, ann_model.bbox) 48 | # Assert the area of the generated polygon is the same as the area of the input bounding box 49 | assert np.isclose(polygon_area(ann_model.segmentation), ann_model.bbox.w * ann_model.bbox.h) 50 | 51 | 52 | @pytest.mark.parametrize( 53 | ["field_name", "field_value"], 54 | [ 55 | ("text_value", "abc"), 56 | ("int_value", 42), 57 | ("confidence", 0.5), 58 | ("score", 0.9), 59 | ("confidence_score", 0.01), 60 | ], 61 | ) 62 | def test_coco_annotation_model_with_extra_fields(field_name: str, field_value: Any) -> None: 63 | for i, ann in enumerate(copy.deepcopy(DATA_TEST_DATASTRUCTURE_COCO)["annotations"]): 64 | # Test 1: Missing extra field 65 | ann_model = CocoAnnotationModel.from_dict(ann) 66 | assert ann_model.get_extra(field_name) is None, f"Expected None for missing extra field in annotation {i}" 67 | 68 | # Test 2: Existing extra field 69 | ann[field_name] = field_value 70 | ann_model = CocoAnnotationModel.from_dict(ann) 71 | assert ann_model.get_extra(field_name) == field_value, f"Extra field value mismatch in annotation {i}" 72 | 73 | 74 | def test_coco_model_label_validation() -> None: 75 | coco_model = 
CocoRootModel.from_dict(copy.deepcopy(DATA_TEST_DATASTRUCTURE_COCO)) 76 | assert sum(isinstance(ann.segmentation, CocoRLE) for ann in coco_model.annotations) == 6 77 | assert sum(isinstance(ann.segmentation, CocoPolygon) for ann in coco_model.annotations) == 466 78 | -------------------------------------------------------------------------------- /tests/utilities/coco/test_exporter.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, List 2 | 3 | import pytest 4 | from deepdiff import DeepDiff 5 | from shapely.geometry import MultiPolygon 6 | 7 | from encord.utilities.coco.exporter import CocoExporter, OntologyStructure 8 | from tests.utilities.coco.data.exporter import ( 9 | COCO_EXPORTER_EXPECTED_RES, 10 | LABELS_LIST, 11 | MULTIPOLYGON, 12 | MULTIPOLYGON_EXPECTED_COCO_SEGMENTATION, 13 | MULTIPOLYGON_WITH_ENCLOSED_POLYGONS, 14 | MULTIPOLYGON_WITH_ENCLOSED_POLYGONS_EXPECTED_COCO_SEGMENTATION, 15 | ONTOLOGY_STRUCTURE_DICT, 16 | ) 17 | 18 | 19 | @pytest.fixture 20 | def coco_exporter() -> CocoExporter: 21 | return CocoExporter( 22 | labels_list=LABELS_LIST, 23 | ontology=OntologyStructure.from_dict(ONTOLOGY_STRUCTURE_DICT), 24 | include_videos=True, 25 | ) 26 | 27 | 28 | def test_coco_exporter_extra_complex_nested_ontology_all_data_types(coco_exporter: CocoExporter) -> None: 29 | coco_dict = coco_exporter.export() 30 | assert not DeepDiff(COCO_EXPORTER_EXPECTED_RES, coco_dict) 31 | 32 | 33 | def test_get_polygon_from_dict_or_list(coco_exporter: CocoExporter) -> None: 34 | w, h = 100, 100 35 | 36 | polygon_list: List = [{"x": 1, "y": 2}, {"x": 2, "y": 3}] 37 | polygon_dict: Dict = {str(i): point for i, point in enumerate(polygon_list)} 38 | 39 | expected = [(100, 200), (200, 300)] 40 | 41 | points_dict = coco_exporter.get_polygon_from_dict_or_list(polygon_dict, w, h) 42 | points_list = coco_exporter.get_polygon_from_dict_or_list(polygon_list, w, h) 43 | 44 | assert points_list == expected 45 | assert points_dict == expected 46 | assert points_dict == points_list 47 | 48 | 49 | def test_get_multipolygon_from_list(coco_exporter: CocoExporter) -> None: 50 | w, h = 100, 100 51 | 52 | polygons = [ 53 | [ 54 | # Polygon 1 - outer ring 55 | [0.1, 0.1, 0.2, 0.1, 0.2, 0.2, 0.1, 0.2], 56 | # Polygon 1 - hole 57 | [0.12, 0.12, 0.18, 0.12, 0.18, 0.18, 0.12, 0.18], 58 | ], 59 | [ 60 | # Polygon 2 - outer ring 61 | [0.1, 0.4, 0.2, 0.4, 0.2, 0.5, 0.1, 0.5] 62 | ], 63 | ] 64 | 65 | points = coco_exporter.get_multipolygon_from_polygons(polygons, w, h) 66 | 67 | expected = [ 68 | ( 69 | # Polygon 1 - outer ring 70 | ((10, 10), (20, 10), (20, 20), (10, 20)), 71 | # Polygon 1 - holes 72 | [((12, 12), (18, 12), (18, 18), (12, 18))], 73 | ), 74 | ( 75 | # Polygon 2 - outer ring 76 | ((10, 40), (20, 40), (20, 50), (10, 50)), 77 | ), 78 | ] 79 | 80 | assert points == expected 81 | 82 | 83 | @pytest.mark.parametrize( 84 | "polygons, expected_segmentation", 85 | [ 86 | (MULTIPOLYGON, MULTIPOLYGON_EXPECTED_COCO_SEGMENTATION), 87 | (MULTIPOLYGON_WITH_ENCLOSED_POLYGONS, MULTIPOLYGON_WITH_ENCLOSED_POLYGONS_EXPECTED_COCO_SEGMENTATION), 88 | ], 89 | ) 90 | def test_get_rle_segmentation_from_multipolygon( 91 | coco_exporter: CocoExporter, 92 | polygons, 93 | expected_segmentation: Dict[str, Any], 94 | ) -> None: 95 | w, h = 4032, 3024 96 | 97 | multipolygon = MultiPolygon(coco_exporter.get_multipolygon_from_polygons(polygons, w, h)) 98 | segmentation = coco_exporter.get_rle_segmentation_from_multipolygon(multipolygon, w, h) 99 | 100 | assert not 
DeepDiff(segmentation, expected_segmentation) 101 | -------------------------------------------------------------------------------- /tests/utilities/coco/test_polygon_utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | from encord.common.bitmask_operations import serialise_bitmask 5 | from encord.objects.coordinates import PointCoordinate, PolygonCoordinates 6 | from encord.utilities.coco.polygon_utils import find_contours, rle_to_polygons_coordinates 7 | 8 | 9 | @pytest.fixture 10 | def polygon_with_hole() -> np.ndarray: 11 | array = np.zeros((100, 100), dtype=np.uint8) 12 | # Draw outer square (50x50 white square) 13 | array[25:75, 25:75] = 255 14 | # Draw inner square (hole - 20x20 black square inside the white one) 15 | array[40:60, 40:60] = 0 16 | 17 | return array 18 | 19 | 20 | @pytest.fixture 21 | def two_disjoint_polygons() -> np.ndarray: 22 | array = np.zeros((100, 100), dtype=np.uint8) 23 | # Draw two 20x20 white squares 24 | array[10:30, 10:30] = 255 25 | array[40:60, 40:60] = 255 26 | 27 | return array 28 | 29 | 30 | def test_find_contours_with_holes(polygon_with_hole: np.ndarray) -> None: 31 | polygons = find_contours(polygon_with_hole) 32 | assert len(polygons) == 1, "Should have one polygon" 33 | assert polygons == [ 34 | # the hole is a bit "jagged" from the pixelisation, but it's correct. 35 | [[25, 25, 25, 74, 74, 74, 74, 25], [39, 40, 40, 39, 59, 39, 60, 40, 60, 59, 59, 60, 40, 60, 39, 59]] 36 | ] 37 | 38 | 39 | def test_find_contours_multipolygons(two_disjoint_polygons: np.ndarray) -> None: 40 | polygons = find_contours(two_disjoint_polygons) 41 | assert len(polygons) == 2, "Should have two polygons" 42 | assert polygons == [[[40, 40, 40, 59, 59, 59, 59, 40]], [[10, 10, 10, 29, 29, 29, 29, 10]]] 43 | 44 | 45 | def test_rle_to_polygons_coordinates(polygon_with_hole: np.ndarray) -> None: 46 | counts = serialise_bitmask(polygon_with_hole.tobytes()) 47 | res = rle_to_polygons_coordinates(counts=counts, height=100, width=100) 48 | assert isinstance(res, PolygonCoordinates) 49 | assert res.polygons == [ 50 | [ 51 | [ 52 | PointCoordinate(x=0.25, y=0.25), 53 | PointCoordinate(x=0.25, y=0.74), 54 | PointCoordinate(x=0.74, y=0.74), 55 | PointCoordinate(x=0.74, y=0.25), 56 | ], 57 | [ 58 | PointCoordinate(x=0.39, y=0.4), 59 | PointCoordinate(x=0.4, y=0.39), 60 | PointCoordinate(x=0.59, y=0.39), 61 | PointCoordinate(x=0.6, y=0.4), 62 | PointCoordinate(x=0.6, y=0.59), 63 | PointCoordinate(x=0.59, y=0.6), 64 | PointCoordinate(x=0.4, y=0.6), 65 | PointCoordinate(x=0.39, y=0.59), 66 | ], 67 | ] 68 | ] 69 | -------------------------------------------------------------------------------- /tests/utilities/test_range_manager.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from encord.common.range_manager import RangeManager 4 | from encord.objects.frames import Range 5 | 6 | 7 | @pytest.fixture 8 | def range_manager() -> RangeManager: 9 | initial_ranges = [Range(start=2, end=5), Range(start=10, end=20)] 10 | return RangeManager(frame_class=initial_ranges) 11 | 12 | 13 | def test_initialize_ranges(range_manager: RangeManager) -> None: 14 | actual_ranges = range_manager.get_ranges() 15 | assert actual_ranges[0].start == 2 16 | assert actual_ranges[0].end == 5 17 | assert actual_ranges[1].start == 10 18 | assert actual_ranges[1].end == 20 19 | 20 | 21 | def test_add_ranges(range_manager: RangeManager) -> None: 22 | 
range_manager.add_ranges([Range(start=5, end=7), Range(start=21, end=22)]) 23 | 24 | actual_ranges = range_manager.get_ranges() 25 | assert actual_ranges[0].start == 2 26 | assert actual_ranges[0].end == 7 27 | assert actual_ranges[1].start == 10 28 | assert actual_ranges[1].end == 20 29 | assert actual_ranges[2].start == 21 30 | assert actual_ranges[2].end == 22 31 | 32 | 33 | def test_remove_ranges(range_manager: RangeManager) -> None: 34 | range_manager.remove_ranges([Range(start=4, end=5), Range(start=16, end=19)]) 35 | 36 | actual_ranges = range_manager.get_ranges() 37 | assert actual_ranges[0].start == 2 38 | assert actual_ranges[0].end == 3 39 | assert actual_ranges[1].start == 10 40 | assert actual_ranges[1].end == 15 41 | assert actual_ranges[2].start == 20 42 | assert actual_ranges[2].end == 20 43 | 44 | 45 | def test_get_ranges_as_frames(range_manager: RangeManager) -> None: 46 | frames = range_manager.get_ranges_as_frames() 47 | assert frames == {2, 3, 4, 5, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20} 48 | 49 | 50 | def test_get_intersection_with_other_frames(range_manager: RangeManager) -> None: 51 | other_frames = [1, 3, 16, 25] 52 | intersecting_ranges = range_manager.intersection(other_frames) 53 | assert intersecting_ranges[0].start == 3 54 | assert intersecting_ranges[0].end == 3 55 | assert intersecting_ranges[1].start == 16 56 | assert intersecting_ranges[1].end == 16 57 | 58 | other_ranges = [Range(start=0, end=3), Range(start=19, end=22)] 59 | intersecting_ranges = range_manager.intersection(other_ranges) 60 | assert intersecting_ranges[0].start == 2 61 | assert intersecting_ranges[0].end == 3 62 | assert intersecting_ranges[1].start == 19 63 | assert intersecting_ranges[1].end == 20 64 | -------------------------------------------------------------------------------- /tests/utilities/test_user_agent_suffix.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | 3 | import pytest 4 | 5 | from encord.common.utils import validate_user_agent_suffix 6 | 7 | valid_examples = [ 8 | "CERN-LineMode/2.15 libwww/2.17b3", 9 | "Mozilla/5.0", 10 | "Simple-Bot", 11 | "Product/1.0 AnotherProduct/2.0", 12 | "Chrome/91.0.4472.124 Safari/537.36", 13 | ] 14 | 15 | invalid_examples = [ 16 | "", # Empty string 17 | "/", # Just a slash 18 | "Invalid/Version/", # Trailing slash 19 | "Product/1.0/", # Trailing slash 20 | "Product//1.0", # Double slash 21 | "/1.0", # Missing product name 22 | ] 23 | 24 | 25 | @pytest.mark.parametrize( 26 | ["user_agent_suffix", "should_raise"], 27 | [(example, False) for example in valid_examples] + [(example, True) for example in invalid_examples], 28 | ) 29 | def test_user_agent_suffix_validation(user_agent_suffix: str, should_raise: bool) -> None: 30 | context = pytest.raises(ValueError) if should_raise else contextlib.nullcontext() 31 | with context: 32 | validate_user_agent_suffix(user_agent_suffix) 33 | -------------------------------------------------------------------------------- /tests/workflow/conftest.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | from unittest.mock import MagicMock 3 | 4 | from pytest import fixture 5 | 6 | from encord.orm.workflow import WorkflowDTO, WorkflowNode, WorkflowStageType 7 | from encord.workflow import Workflow 8 | 9 | WORKFLOW_ANNOTATION_UUID = uuid.uuid4() 10 | WORKFLOW_REVIEW_UUID = uuid.uuid4() 11 | WORKFLOW_COMPLETE_UUID = uuid.uuid4() 12 | 13 | workflow_dto = WorkflowDTO( 14 | stages=[ 15 | 
WorkflowNode(uuid=WORKFLOW_ANNOTATION_UUID, stage_type=WorkflowStageType.ANNOTATION, title="Annotation 1"), 16 | WorkflowNode(uuid=WORKFLOW_REVIEW_UUID, stage_type=WorkflowStageType.REVIEW, title="Review 1"), 17 | WorkflowNode(uuid=WORKFLOW_COMPLETE_UUID, stage_type=WorkflowStageType.DONE, title="Complete"), 18 | ] 19 | ) 20 | 21 | 22 | @fixture 23 | def workflow() -> Workflow: 24 | return Workflow(MagicMock(), uuid.uuid4(), workflow_dto) 25 | -------------------------------------------------------------------------------- /tests/workflow/test_project_workflow.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict 2 | from unittest.mock import MagicMock 3 | from uuid import uuid4 4 | 5 | import pytest 6 | 7 | from encord.orm.workflow import WorkflowAgentNode, WorkflowDTO 8 | from encord.workflow import Workflow 9 | from encord.workflow.stages.agent import AgentStage 10 | from encord.workflow.stages.review import ReviewStage 11 | 12 | raw_agent_workflow: Dict[str, Any] = { 13 | "stages": [ 14 | { 15 | "stageType": "AGENT", 16 | "uuid": "9d6edee0-ba3e-423c-8c96-0140b07c93ed", 17 | "title": "Pre-labeler", 18 | "pathways": [ 19 | { 20 | "uuid": "ae956722-561f-4586-b6d6-145e2f64b18a", 21 | "title": "Pre-labelling", 22 | "destinationUuid": "1252fdb8-93cb-41a2-b341-b83ab97b5277", 23 | } 24 | ], 25 | }, 26 | {"stageType": "ANNOTATION", "uuid": "1252fdb8-93cb-41a2-b341-b83ab97b5277", "title": "Annotate"}, 27 | {"stageType": "REVIEW", "uuid": "8cac6636-2ef1-4bbe-848a-b5ffee158c34", "title": "Review"}, 28 | {"stageType": "DONE", "uuid": "7e7598de-612c-40c4-ba08-5dfec8c3ae8f", "title": "Complete"}, 29 | ] 30 | } 31 | 32 | 33 | def test_project_workflow_get_stage(workflow: Workflow) -> None: 34 | stage_name_to_uuid = {stage.title: stage.uuid for stage in workflow.stages} 35 | review_stage_uuid = stage_name_to_uuid["Review 1"] 36 | 37 | assert workflow.get_stage(uuid=review_stage_uuid).title == "Review 1" 38 | assert workflow.get_stage(uuid=str(review_stage_uuid)).title == "Review 1" 39 | assert workflow.get_stage(name="Review 1").uuid == review_stage_uuid 40 | assert isinstance(workflow.get_stage(name="Review 1"), ReviewStage) 41 | with pytest.raises(ValueError): 42 | workflow.get_stage(name="NAME NOT FOUND") 43 | with pytest.raises(ValueError): 44 | workflow.get_stage(uuid=uuid4()) 45 | 46 | 47 | def test_agent_project_with_pathway_serialisation_deserialisation() -> None: 48 | workflow_dto = WorkflowDTO.from_dict(raw_agent_workflow) 49 | assert workflow_dto.stages 50 | agent_stage = workflow_dto.stages[0] 51 | assert isinstance(agent_stage, WorkflowAgentNode) 52 | assert agent_stage.pathways 53 | workflow_obj = Workflow(MagicMock(), uuid4(), workflow_orm=workflow_dto) 54 | agent_stage = workflow_obj.get_stage(name="Pre-labeler") 55 | assert isinstance(agent_stage, AgentStage) 56 | assert agent_stage.pathways 57 | -------------------------------------------------------------------------------- /tests/workflow/test_workflow_actions.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | from datetime import datetime 3 | from unittest.mock import MagicMock 4 | 5 | from encord.http.bundle import Bundle 6 | from encord.workflow import AnnotationStage, AnnotationTask, AnnotationTaskStatus, Workflow 7 | 8 | 9 | def test_bulk_assign_annotations(workflow: Workflow) -> None: 10 | annotation_stage = workflow.get_stage(name="Annotation 1", type_=AnnotationStage) 11 | 12 | 
annotation_stage._workflow_client.api_client.get_paged_iterator.return_value = iter( 13 | [ 14 | AnnotationTask( 15 | uuid=uuid.uuid4(), 16 | created_at=datetime.now(), 17 | updated_at=datetime.now(), 18 | data_hash=uuid.uuid4(), 19 | data_title=f"data unit {x}", 20 | label_branch_name="main", 21 | assignee=None, 22 | status=AnnotationTaskStatus.NEW, 23 | ) 24 | for x in range(0, 3) 25 | ] 26 | ) 27 | 28 | action_resource = MagicMock() 29 | annotation_stage._workflow_client.api_client.post = action_resource 30 | 31 | bundle = Bundle() 32 | for task in annotation_stage.get_tasks(): 33 | task.assign("test-user@encord.com", bundle=bundle) 34 | 35 | assert not action_resource.called 36 | 37 | bundle.execute() 38 | 39 | assert action_resource.called 40 | assert isinstance(action_resource.call_args[1]["payload"], list) 41 | assert len(action_resource.call_args[1]["payload"]) == 3 42 | --------------------------------------------------------------------------------
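Closing note: a usage sketch of the batching behaviour that test_bulk_assign_annotations pins down — task actions handed a `Bundle` are only queued, and a single batched request goes out when `execute()` is called. The `project.workflow` accessor is an assumption here (the tests build a `Workflow` directly around a mock); `get_stage`, `get_tasks`, `task.assign(..., bundle=...)`, and `bundle.execute()` all appear verbatim in the tests above.

from encord.http.bundle import Bundle
from encord.workflow import AnnotationStage


def bulk_assign(project, stage_name: str, assignee_email: str) -> None:
    # Assumed accessor: a Project exposing its Workflow as `project.workflow`.
    stage = project.workflow.get_stage(name=stage_name, type_=AnnotationStage)
    bundle = Bundle()
    for task in stage.get_tasks():
        # Queued only; no API call happens until the bundle executes.
        task.assign(assignee_email, bundle=bundle)
    # One batched request carries every queued assignment.
    bundle.execute()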