├── tests ├── __init__.py ├── e2e_tests │ ├── __init__.py │ ├── pipelines │ │ ├── mocks │ │ │ ├── inputs │ │ │ │ └── anonymized.png │ │ │ └── outputs │ │ │ │ ├── expected_iris_orb_pipeline_output.pickle │ │ │ │ └── expected_iris_debug_pipeline_output.pickle │ │ └── test_e2e_iris_pipeline.py │ ├── nodes │ │ ├── geometry_estimation │ │ │ ├── mocks │ │ │ │ ├── eye_center.pickle │ │ │ │ ├── smoothing_result.pickle │ │ │ │ ├── e2e_expected_result_fusion_extrapolation.pickle │ │ │ │ ├── e2e_expected_result_linear_extrapolation.pickle │ │ │ │ └── e2e_expected_result_lsq_ellipse_fit_with_refinement.pickle │ │ │ ├── test_e2e_lsq_ellipse_fit_with_refinement.py │ │ │ ├── test_e2e_fusion_extrapolation.py │ │ │ └── test_e2e_linear_extrapolation.py │ │ ├── encoder │ │ │ ├── mocks │ │ │ │ └── iris_encoder │ │ │ │ │ ├── iris_response.pickle │ │ │ │ │ └── e2e_expected_result.pickle │ │ │ └── test_e2e_iris_encoder.py │ │ ├── vectorization │ │ │ ├── mocks │ │ │ │ └── contouring │ │ │ │ │ ├── geometry_mask.pickle │ │ │ │ │ └── e2e_expected_result.pickle │ │ │ └── test_e2e_contouring.py │ │ ├── geometry_refinement │ │ │ ├── mocks │ │ │ │ ├── smoothing │ │ │ │ │ ├── eye_center.pickle │ │ │ │ │ ├── before_smoothing.pickle │ │ │ │ │ └── e2e_expected_result.pickle │ │ │ │ ├── contour_point_filter │ │ │ │ │ ├── noise_mask.pickle │ │ │ │ │ ├── e2e_expected_result.pickle │ │ │ │ │ └── interpolated_polygons.pickle │ │ │ │ └── contour_interpolation │ │ │ │ │ ├── e2e_expected_result.pickle │ │ │ │ │ └── not_interpolated_polygons.pickle │ │ │ ├── test_e2e_smoothing.py │ │ │ ├── test_e2e_contour_interpolation.py │ │ │ └── test_e2e_contour_point_filter.py │ │ ├── iris_response │ │ │ ├── mocks │ │ │ │ ├── image_filters │ │ │ │ │ ├── gabor_filter.pickle │ │ │ │ │ ├── gabor2_filter.pickle │ │ │ │ │ └── loggabor_filter.pickle │ │ │ │ └── conv_filter_bank │ │ │ │ │ ├── normalized_iris.pickle │ │ │ │ │ └── e2e_expected_result.pickle │ │ │ └── image_filters │ │ │ │ └── test_e2e_gabor_filters.py │ │ ├── 
matcher │ │ │ ├── mocks │ │ │ │ └── hamming_distance_matcher │ │ │ │ │ └── iris_template.pickle │ │ │ └── test_e2e_hamming_distance_matcher.py │ │ ├── normalization │ │ │ ├── mocks │ │ │ │ ├── nonlinear_normalization │ │ │ │ │ ├── ir_image.pickle │ │ │ │ │ ├── noise_mask.pickle │ │ │ │ │ ├── eye_orientation.pickle │ │ │ │ │ ├── nonlinear_grids.pickle │ │ │ │ │ ├── e2e_expected_result.pickle │ │ │ │ │ └── extrapolated_polygons.pickle │ │ │ │ └── perspective_normalization │ │ │ │ │ ├── ir_image.pickle │ │ │ │ │ ├── noise_mask.pickle │ │ │ │ │ ├── eye_orientation.pickle │ │ │ │ │ ├── e2e_expected_result.pickle │ │ │ │ │ └── extrapolated_polygons.pickle │ │ │ ├── test_e2e_perspective_normalization.py │ │ │ └── test_e2e_nonlinear_normalization.py │ │ ├── eye_properties_estimation │ │ │ ├── mocks │ │ │ │ ├── bisectors_method │ │ │ │ │ ├── geometry_polygons.pickle │ │ │ │ │ └── e2e_expected_result.pickle │ │ │ │ └── occlusion_calculator │ │ │ │ │ ├── eye_center_1.pickle │ │ │ │ │ ├── eye_center_2.pickle │ │ │ │ │ ├── noise_mask_1.pickle │ │ │ │ │ ├── noise_mask_2.pickle │ │ │ │ │ ├── eye_center_cropped.pickle │ │ │ │ │ ├── eye_orientation_1.pickle │ │ │ │ │ ├── eye_orientation_2.pickle │ │ │ │ │ ├── noise_mask_cropped.pickle │ │ │ │ │ ├── extrapolated_polygons_1.pickle │ │ │ │ │ ├── extrapolated_polygons_2.pickle │ │ │ │ │ ├── eye_orientation_cropped.pickle │ │ │ │ │ └── extrapolated_polygons_cropped.pickle │ │ │ ├── test_e2e_moment_of_area.py │ │ │ ├── test_e2e_pupil_iris_property_calculator.py │ │ │ ├── test_e2e_bisectors_method.py │ │ │ └── test_e2e_occlusion_calculator.py │ │ └── iris_response_refinement │ │ │ ├── mocks │ │ │ └── fragile_bits │ │ │ │ ├── artificial_iris_responses_polar.pickle │ │ │ │ ├── artificial_iris_responses_cartesian.pickle │ │ │ │ ├── artificial_mask_responses_polar_expected_refinement.pickle │ │ │ │ └── artificial_mask_responses_cartesian_expected_refinement.pickle │ │ │ └── test_e2e_fragile_bits_refinement.py │ ├── orchestration │ │ ├── mocks 
│ │ │ ├── mock_iris_pipeline_call_trace.pickle │ │ │ ├── expected_iris_pipeline_debug_output.pickle │ │ │ ├── expected_iris_pipeline_orb_output.pickle │ │ │ └── expected_iris_pipeline_simple_output.pickle │ │ └── test_e2e_output_builder.py │ └── conftest.py └── unit_tests │ ├── __init__.py │ ├── pipelines │ └── mocks │ │ ├── anonymized.png │ │ ├── incoherent_pipeline_1.yml │ │ └── incoherent_pipeline_2.yml │ ├── conftest.py │ ├── nodes │ ├── encoder │ │ └── test_iris_encoder.py │ ├── iris_response_refinement │ │ └── test_fragile_bits_refinement.py │ ├── eye_properties_estimation │ │ └── test_moment_of_area.py │ ├── aggregation │ │ └── test_geometry_mask_aggregator.py │ ├── geometry_estimation │ │ ├── test_lsq_ellipse_fit_with_refinement.py │ │ └── test_linear_extrapolation.py │ ├── normalization │ │ ├── test_nonlinear_normalization.py │ │ ├── test_linear_normalization.py │ │ └── test_normalization_utils.py │ ├── binarization │ │ └── test_specular_reflection_detection.py │ ├── matcher │ │ └── test_hamming_distance_matcher.py │ ├── iris_response │ │ ├── image_filters │ │ │ └── test_image_filter_interface.py │ │ └── probe_schemas │ │ │ ├── test_probe_schema_interface.py │ │ │ └── test_regular_probe_schema.py │ ├── geometry_refinement │ │ ├── test_contour_interpolation.py │ │ └── test_contour_point_filter.py │ └── segmentation │ │ └── test_onnx_multilabel_model_segmentation.py │ ├── orchestration │ └── test_environment.py │ ├── utils │ ├── test_base64_encoding.py │ └── test_common.py │ ├── callbacks │ └── test_callback_api.py │ └── io │ └── test_class_configs.py ├── src └── iris │ ├── io │ ├── __init__.py │ ├── errors.py │ └── class_configs.py │ ├── nodes │ ├── __init__.py │ ├── aggregation │ │ ├── __init__.py │ │ └── noise_mask_union.py │ ├── encoder │ │ ├── __init__.py │ │ └── iris_encoder.py │ ├── matcher │ │ ├── __init__.py │ │ ├── hamming_distance_matcher_interface.py │ │ ├── simple_hamming_distance_matcher.py │ │ └── hamming_distance_matcher.py │ ├── validators │ 
│ └── __init__.py │ ├── binarization │ │ ├── __init__.py │ │ ├── specular_reflection_detection.py │ │ └── multilabel_binarization.py │ ├── iris_response │ │ ├── __init__.py │ │ ├── image_filters │ │ │ ├── __init__.py │ │ │ └── image_filter_interface.py │ │ └── probe_schemas │ │ │ ├── __init__.py │ │ │ └── probe_schema_interface.py │ ├── normalization │ │ └── __init__.py │ ├── segmentation │ │ ├── assets │ │ │ └── .gitkeep │ │ ├── __init__.py │ │ └── multilabel_segmentation_interface.py │ ├── vectorization │ │ └── __init__.py │ ├── geometry_estimation │ │ ├── __init__.py │ │ ├── fusion_extrapolation.py │ │ └── linear_extrapolation.py │ ├── geometry_refinement │ │ ├── __init__.py │ │ ├── contour_interpolation.py │ │ └── contour_points_filter.py │ ├── eye_properties_estimation │ │ ├── __init__.py │ │ ├── moment_of_area.py │ │ └── pupil_iris_property_calculator.py │ └── iris_response_refinement │ │ ├── __init__.py │ │ └── fragile_bits_refinement.py │ ├── utils │ ├── __init__.py │ ├── common.py │ └── base64_encoding.py │ ├── callbacks │ ├── __init__.py │ └── callback_interface.py │ ├── pipelines │ └── __init__.py │ ├── orchestration │ ├── __init__.py │ ├── error_managers.py │ ├── environment.py │ ├── validators.py │ └── pipeline_dataclasses.py │ └── _version.py ├── .github ├── CODEOWNERS ├── workflows │ ├── relyance-sci.yml │ ├── check-release-version.yml │ └── ci-testing.yml ├── issue_template.md └── pull_request_template.md ├── MANIFEST.in ├── requirements ├── orb.txt ├── server.txt ├── base.txt └── dev.txt ├── docs ├── model_card │ ├── anonymized.png │ └── overlayed_segmaps.png ├── source │ ├── _code_subpages │ │ ├── modules.rst │ │ ├── iris.rst │ │ ├── iris.pipelines.rst │ │ ├── iris.nodes.encoder.rst │ │ ├── iris.nodes.vectorization.rst │ │ ├── iris.nodes.aggregation.rst │ │ ├── iris.nodes.iris_response_refinement.rst │ │ ├── iris.callbacks.rst │ │ ├── iris.nodes.matcher.rst │ │ ├── iris.nodes.rst │ │ ├── iris.nodes.iris_response.rst │ │ ├── 
iris.nodes.validators.rst │ │ ├── iris.nodes.binarization.rst │ │ ├── iris.io.rst │ │ ├── iris.nodes.iris_response.image_filters.rst │ │ ├── iris.nodes.iris_response.probe_schemas.rst │ │ ├── iris.utils.rst │ │ ├── iris.nodes.geometry_refinement.rst │ │ ├── iris.nodes.segmentation.rst │ │ ├── iris.nodes.geometry_estimation.rst │ │ ├── iris.nodes.normalization.rst │ │ ├── iris.orchestration.rst │ │ └── iris.nodes.eye_properties_estimation.rst │ ├── images │ │ └── logos │ │ │ └── wld.png │ ├── issues_note.rst │ ├── quickstart │ │ ├── running_inference.rst │ │ ├── setup_for_development.rst │ │ └── installation.rst │ ├── examples │ │ └── matching_entities.rst │ ├── index.rst │ └── conf.py ├── performance_card │ ├── lg4000_3a.png │ ├── lg4000_3b.png │ ├── comparison_1a.png │ ├── comparison_1b.png │ ├── comparison_2a.png │ ├── comparison_2b.png │ ├── wld_dataset_4a.png │ ├── wld_dataset_4b.png │ └── ice2005_results.png └── Makefile ├── conda └── environment_dev.yml ├── .pre-commit-config.yaml ├── Makefile ├── scripts ├── common │ └── run_iris_pipeline.py └── ops │ └── check_version_tag.py ├── LICENSE ├── pyproject.toml └── .gitignore /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/io/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/nodes/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/callbacks/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/pipelines/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/nodes/aggregation/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/nodes/encoder/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/nodes/matcher/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/nodes/validators/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/orchestration/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/nodes/binarization/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/nodes/iris_response/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/nodes/normalization/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /src/iris/nodes/segmentation/assets/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/nodes/vectorization/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @worldcoin/ml-admin 2 | -------------------------------------------------------------------------------- /src/iris/_version.py: -------------------------------------------------------------------------------- 1 | __version__ = "1.1.1" 2 | -------------------------------------------------------------------------------- /src/iris/nodes/geometry_estimation/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/nodes/geometry_refinement/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include src/iris/pipelines/confs/* 2 | -------------------------------------------------------------------------------- /src/iris/nodes/eye_properties_estimation/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/nodes/iris_response_refinement/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /src/iris/nodes/iris_response/image_filters/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/iris/nodes/iris_response/probe_schemas/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements/orb.txt: -------------------------------------------------------------------------------- 1 | pycuda>=2021.1 2 | tensorrt>=8.5.2.2 3 | -------------------------------------------------------------------------------- /tests/e2e_tests/__init__.py: -------------------------------------------------------------------------------- 1 | from tests.e2e_tests import utils 2 | -------------------------------------------------------------------------------- /tests/unit_tests/__init__.py: -------------------------------------------------------------------------------- 1 | from tests.unit_tests import utils 2 | -------------------------------------------------------------------------------- /requirements/server.txt: -------------------------------------------------------------------------------- 1 | onnx==1.15.0 2 | onnxruntime==1.16.3 3 | opencv-python==4.7.0.68 4 | -------------------------------------------------------------------------------- /docs/model_card/anonymized.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/docs/model_card/anonymized.png -------------------------------------------------------------------------------- /docs/source/_code_subpages/modules.rst: -------------------------------------------------------------------------------- 1 | iris 2 | ==== 3 | 4 | .. 
toctree:: 5 | :maxdepth: 4 6 | 7 | iris 8 | -------------------------------------------------------------------------------- /docs/source/images/logos/wld.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/docs/source/images/logos/wld.png -------------------------------------------------------------------------------- /docs/performance_card/lg4000_3a.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/docs/performance_card/lg4000_3a.png -------------------------------------------------------------------------------- /docs/performance_card/lg4000_3b.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/docs/performance_card/lg4000_3b.png -------------------------------------------------------------------------------- /docs/model_card/overlayed_segmaps.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/docs/model_card/overlayed_segmaps.png -------------------------------------------------------------------------------- /requirements/base.txt: -------------------------------------------------------------------------------- 1 | huggingface-hub==0.19.4 2 | matplotlib==3.7.4 3 | numpy==1.24.4 4 | pydantic==1.10.13 5 | pyyaml==6.0.1 6 | -------------------------------------------------------------------------------- /docs/performance_card/comparison_1a.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/docs/performance_card/comparison_1a.png -------------------------------------------------------------------------------- /docs/performance_card/comparison_1b.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/docs/performance_card/comparison_1b.png -------------------------------------------------------------------------------- /docs/performance_card/comparison_2a.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/docs/performance_card/comparison_2a.png -------------------------------------------------------------------------------- /docs/performance_card/comparison_2b.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/docs/performance_card/comparison_2b.png -------------------------------------------------------------------------------- /docs/performance_card/wld_dataset_4a.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/docs/performance_card/wld_dataset_4a.png -------------------------------------------------------------------------------- /docs/performance_card/wld_dataset_4b.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/docs/performance_card/wld_dataset_4b.png -------------------------------------------------------------------------------- /docs/performance_card/ice2005_results.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/docs/performance_card/ice2005_results.png -------------------------------------------------------------------------------- /tests/unit_tests/pipelines/mocks/anonymized.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/unit_tests/pipelines/mocks/anonymized.png -------------------------------------------------------------------------------- /tests/e2e_tests/pipelines/mocks/inputs/anonymized.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/pipelines/mocks/inputs/anonymized.png -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/geometry_estimation/mocks/eye_center.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/geometry_estimation/mocks/eye_center.pickle -------------------------------------------------------------------------------- /conda/environment_dev.yml: -------------------------------------------------------------------------------- 1 | name: iris_dev 2 | 3 | channels: 4 | - conda-forge 5 | 6 | dependencies: 7 | - python==3.8 8 | - pip 9 | - pip: 10 | - --editable "../." 
11 | -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/encoder/mocks/iris_encoder/iris_response.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/encoder/mocks/iris_encoder/iris_response.pickle -------------------------------------------------------------------------------- /requirements/dev.txt: -------------------------------------------------------------------------------- 1 | black 2 | coverage 3 | furo 4 | isort 5 | jupyterlab 6 | nb-clean 7 | pre-commit 8 | pydocstyle[toml] 9 | pytest 10 | python-dotenv 11 | ruff 12 | sphinx 13 | twine 14 | -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/geometry_estimation/mocks/smoothing_result.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/geometry_estimation/mocks/smoothing_result.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/vectorization/mocks/contouring/geometry_mask.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/vectorization/mocks/contouring/geometry_mask.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/orchestration/mocks/mock_iris_pipeline_call_trace.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/orchestration/mocks/mock_iris_pipeline_call_trace.pickle -------------------------------------------------------------------------------- 
/tests/e2e_tests/nodes/encoder/mocks/iris_encoder/e2e_expected_result.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/encoder/mocks/iris_encoder/e2e_expected_result.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/geometry_refinement/mocks/smoothing/eye_center.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/geometry_refinement/mocks/smoothing/eye_center.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/iris_response/mocks/image_filters/gabor_filter.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/iris_response/mocks/image_filters/gabor_filter.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/iris_response/mocks/image_filters/gabor2_filter.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/iris_response/mocks/image_filters/gabor2_filter.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/iris_response/mocks/image_filters/loggabor_filter.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/iris_response/mocks/image_filters/loggabor_filter.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/orchestration/mocks/expected_iris_pipeline_debug_output.pickle: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/orchestration/mocks/expected_iris_pipeline_debug_output.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/orchestration/mocks/expected_iris_pipeline_orb_output.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/orchestration/mocks/expected_iris_pipeline_orb_output.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/vectorization/mocks/contouring/e2e_expected_result.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/vectorization/mocks/contouring/e2e_expected_result.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/orchestration/mocks/expected_iris_pipeline_simple_output.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/orchestration/mocks/expected_iris_pipeline_simple_output.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/pipelines/mocks/outputs/expected_iris_orb_pipeline_output.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/pipelines/mocks/outputs/expected_iris_orb_pipeline_output.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/geometry_refinement/mocks/smoothing/before_smoothing.pickle: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/geometry_refinement/mocks/smoothing/before_smoothing.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/iris_response/mocks/conv_filter_bank/normalized_iris.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/iris_response/mocks/conv_filter_bank/normalized_iris.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/matcher/mocks/hamming_distance_matcher/iris_template.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/matcher/mocks/hamming_distance_matcher/iris_template.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/normalization/mocks/nonlinear_normalization/ir_image.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/normalization/mocks/nonlinear_normalization/ir_image.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/normalization/mocks/nonlinear_normalization/noise_mask.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/normalization/mocks/nonlinear_normalization/noise_mask.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/normalization/mocks/perspective_normalization/ir_image.pickle: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/normalization/mocks/perspective_normalization/ir_image.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/pipelines/mocks/outputs/expected_iris_debug_pipeline_output.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/pipelines/mocks/outputs/expected_iris_debug_pipeline_output.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/geometry_refinement/mocks/smoothing/e2e_expected_result.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/geometry_refinement/mocks/smoothing/e2e_expected_result.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/iris_response/mocks/conv_filter_bank/e2e_expected_result.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/iris_response/mocks/conv_filter_bank/e2e_expected_result.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/normalization/mocks/perspective_normalization/noise_mask.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/normalization/mocks/perspective_normalization/noise_mask.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/geometry_refinement/mocks/contour_point_filter/noise_mask.pickle: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/geometry_refinement/mocks/contour_point_filter/noise_mask.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/normalization/mocks/nonlinear_normalization/eye_orientation.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/normalization/mocks/nonlinear_normalization/eye_orientation.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/normalization/mocks/nonlinear_normalization/nonlinear_grids.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/normalization/mocks/nonlinear_normalization/nonlinear_grids.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | script_path = os.path.abspath(__file__) 5 | tests_dir = os.path.dirname(script_path) 6 | project_root = os.path.dirname(tests_dir) 7 | 8 | sys.path.append(project_root) 9 | -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/normalization/mocks/perspective_normalization/eye_orientation.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/normalization/mocks/perspective_normalization/eye_orientation.pickle -------------------------------------------------------------------------------- /tests/unit_tests/conftest.py: 
-------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | script_path = os.path.abspath(__file__) 5 | tests_dir = os.path.dirname(script_path) 6 | project_root = os.path.dirname(tests_dir) 7 | 8 | sys.path.append(project_root) 9 | -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/normalization/mocks/nonlinear_normalization/e2e_expected_result.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/normalization/mocks/nonlinear_normalization/e2e_expected_result.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/eye_properties_estimation/mocks/bisectors_method/geometry_polygons.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/eye_properties_estimation/mocks/bisectors_method/geometry_polygons.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/eye_center_1.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/eye_center_1.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/eye_center_2.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/eye_center_2.pickle 
-------------------------------------------------------------------------------- /tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/noise_mask_1.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/noise_mask_1.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/noise_mask_2.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/noise_mask_2.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/geometry_estimation/mocks/e2e_expected_result_fusion_extrapolation.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/geometry_estimation/mocks/e2e_expected_result_fusion_extrapolation.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/geometry_estimation/mocks/e2e_expected_result_linear_extrapolation.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/geometry_estimation/mocks/e2e_expected_result_linear_extrapolation.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/geometry_refinement/mocks/contour_point_filter/e2e_expected_result.pickle: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/geometry_refinement/mocks/contour_point_filter/e2e_expected_result.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/normalization/mocks/nonlinear_normalization/extrapolated_polygons.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/normalization/mocks/nonlinear_normalization/extrapolated_polygons.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/normalization/mocks/perspective_normalization/e2e_expected_result.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/normalization/mocks/perspective_normalization/e2e_expected_result.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/eye_properties_estimation/mocks/bisectors_method/e2e_expected_result.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/eye_properties_estimation/mocks/bisectors_method/e2e_expected_result.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/geometry_refinement/mocks/contour_interpolation/e2e_expected_result.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/geometry_refinement/mocks/contour_interpolation/e2e_expected_result.pickle -------------------------------------------------------------------------------- 
/tests/e2e_tests/nodes/geometry_refinement/mocks/contour_point_filter/interpolated_polygons.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/geometry_refinement/mocks/contour_point_filter/interpolated_polygons.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/normalization/mocks/perspective_normalization/extrapolated_polygons.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/normalization/mocks/perspective_normalization/extrapolated_polygons.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/eye_center_cropped.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/eye_center_cropped.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/eye_orientation_1.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/eye_orientation_1.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/eye_orientation_2.pickle: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/eye_orientation_2.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/noise_mask_cropped.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/noise_mask_cropped.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/geometry_refinement/mocks/contour_interpolation/not_interpolated_polygons.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/geometry_refinement/mocks/contour_interpolation/not_interpolated_polygons.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/extrapolated_polygons_1.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/extrapolated_polygons_1.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/extrapolated_polygons_2.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/extrapolated_polygons_2.pickle -------------------------------------------------------------------------------- 
/tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/eye_orientation_cropped.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/eye_orientation_cropped.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/iris_response_refinement/mocks/fragile_bits/artificial_iris_responses_polar.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/iris_response_refinement/mocks/fragile_bits/artificial_iris_responses_polar.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/geometry_estimation/mocks/e2e_expected_result_lsq_ellipse_fit_with_refinement.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/geometry_estimation/mocks/e2e_expected_result_lsq_ellipse_fit_with_refinement.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/iris_response_refinement/mocks/fragile_bits/artificial_iris_responses_cartesian.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/iris_response_refinement/mocks/fragile_bits/artificial_iris_responses_cartesian.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/extrapolated_polygons_cropped.pickle: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/eye_properties_estimation/mocks/occlusion_calculator/extrapolated_polygons_cropped.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/iris_response_refinement/mocks/fragile_bits/artificial_mask_responses_polar_expected_refinement.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/iris_response_refinement/mocks/fragile_bits/artificial_mask_responses_polar_expected_refinement.pickle -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/iris_response_refinement/mocks/fragile_bits/artificial_mask_responses_cartesian_expected_refinement.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/venkat-0706/open-iris/HEAD/tests/e2e_tests/nodes/iris_response_refinement/mocks/fragile_bits/artificial_mask_responses_cartesian_expected_refinement.pickle -------------------------------------------------------------------------------- /docs/source/issues_note.rst: -------------------------------------------------------------------------------- 1 | Contact 2 | ======= 3 | 4 | If you have any questions, suggestions, or feedback, feel free to reach out to us: 5 | 6 | - Email: `iris@toolsforhumanity.com` 7 | - GitHub Issues: `Open an issue `_ 8 | - Contributors: Feel free to reach out to any project `contributor `_ directly! 
9 | -------------------------------------------------------------------------------- /src/iris/nodes/segmentation/__init__.py: -------------------------------------------------------------------------------- 1 | try: 2 | from iris.nodes.segmentation import tensorrt_multilabel_segmentation 3 | 4 | MultilabelSegmentation = tensorrt_multilabel_segmentation.TensorRTMultilabelSegmentation 5 | except ModuleNotFoundError: 6 | from iris.nodes.segmentation import onnx_multilabel_segmentation 7 | 8 | MultilabelSegmentation = onnx_multilabel_segmentation.ONNXMultilabelSegmentation 9 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.rst: -------------------------------------------------------------------------------- 1 | iris package 2 | ============ 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | :maxdepth: 4 9 | 10 | iris.callbacks 11 | iris.io 12 | iris.nodes 13 | iris.orchestration 14 | iris.pipelines 15 | iris.utils 16 | 17 | Module contents 18 | --------------- 19 | 20 | .. automodule:: iris 21 | :members: 22 | :undoc-members: 23 | :show-inheritance: 24 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.pipelines.rst: -------------------------------------------------------------------------------- 1 | iris.pipelines package 2 | ====================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | iris.pipelines.iris\_pipeline module 8 | ------------------------------------ 9 | 10 | .. automodule:: iris.pipelines.iris_pipeline 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | Module contents 16 | --------------- 17 | 18 | .. 
automodule:: iris.pipelines 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.nodes.encoder.rst: -------------------------------------------------------------------------------- 1 | iris.nodes.encoder package 2 | ========================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | iris.nodes.encoder.iris\_encoder module 8 | --------------------------------------- 9 | 10 | .. automodule:: iris.nodes.encoder.iris_encoder 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | Module contents 16 | --------------- 17 | 18 | .. automodule:: iris.nodes.encoder 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.nodes.vectorization.rst: -------------------------------------------------------------------------------- 1 | iris.nodes.vectorization package 2 | ================================ 3 | 4 | Submodules 5 | ---------- 6 | 7 | iris.nodes.vectorization.contouring module 8 | ------------------------------------------ 9 | 10 | .. automodule:: iris.nodes.vectorization.contouring 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | Module contents 16 | --------------- 17 | 18 | .. automodule:: iris.nodes.vectorization 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.nodes.aggregation.rst: -------------------------------------------------------------------------------- 1 | iris.nodes.aggregation package 2 | ============================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | iris.nodes.aggregation.noise\_mask\_union module 8 | ------------------------------------------------ 9 | 10 | .. 
automodule:: iris.nodes.aggregation.noise_mask_union 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | Module contents 16 | --------------- 17 | 18 | .. automodule:: iris.nodes.aggregation 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | -------------------------------------------------------------------------------- /src/iris/callbacks/callback_interface.py: -------------------------------------------------------------------------------- 1 | import abc 2 | from typing import Any 3 | 4 | 5 | class Callback(abc.ABC): 6 | """Base class of the Callback API.""" 7 | 8 | def on_execute_start(self, *args: Any, **kwargs: Any) -> None: 9 | """Execute this method called before node execute method.""" 10 | pass 11 | 12 | def on_execute_end(self, result: Any) -> None: 13 | """Execute this method called after node execute method. 14 | 15 | Args: 16 | result (Any): execute method output. 17 | """ 18 | pass 19 | -------------------------------------------------------------------------------- /.github/workflows/relyance-sci.yml: -------------------------------------------------------------------------------- 1 | name: Relyance SCI Scan 2 | 3 | on: 4 | schedule: 5 | - cron: "0 20 * * *" 6 | workflow_dispatch: 7 | 8 | jobs: 9 | execute-relyance-sci: 10 | name: Relyance SCI Job 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout 15 | uses: actions/checkout@v4 16 | 17 | - name: Pull and run SCI binary 18 | run: |- 19 | docker pull gcr.io/relyance-ext/compliance_inspector:release && \ 20 | docker run --rm -v `pwd`:/repo --env API_KEY='${{ secrets.DPP_SCI_KEY }}' gcr.io/relyance-ext/compliance_inspector:release 21 | -------------------------------------------------------------------------------- /tests/unit_tests/nodes/encoder/test_iris_encoder.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pydantic import ValidationError 3 | 4 | from iris.nodes.encoder.iris_encoder import 
IrisEncoder 5 | 6 | 7 | @pytest.mark.parametrize( 8 | "mask_threshold", 9 | [pytest.param(-0.5), pytest.param(1.5)], 10 | ids=[ 11 | "mask_threshold should not be negative", 12 | "mask_threshold should not be larger than 1", 13 | ], 14 | ) 15 | def test_iris_encoder_threshold_raises_an_exception( 16 | mask_threshold: float, 17 | ) -> None: 18 | with pytest.raises(ValidationError): 19 | _ = IrisEncoder(mask_threshold) 20 | -------------------------------------------------------------------------------- /tests/unit_tests/pipelines/mocks/incoherent_pipeline_1.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | pipeline_name: iris_pipeline 3 | iris_version: 1.5.1 4 | 5 | pipeline: 6 | - name: segmentation 7 | algorithm: 8 | class_name: iris.nodes.segmentation.MultilabelSegmentation 9 | params: {} 10 | inputs: 11 | - name: image 12 | source_node: input 13 | callbacks: 14 | 15 | - name: segmentation_binarization 16 | algorithm: 17 | class_name: iris.nodes.segmentation.multilabel_binarization.MultilabelSegmentationBinarization 18 | params: {} 19 | inputs: 20 | - name: segmentation_map 21 | source_node: does_not_exist 22 | callbacks: -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.nodes.iris_response_refinement.rst: -------------------------------------------------------------------------------- 1 | iris.nodes.iris\_response\_refinement package 2 | ============================================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | iris.nodes.iris\_response\_refinement.fragile\_bits\_refinement module 8 | ---------------------------------------------------------------------- 9 | 10 | .. automodule:: iris.nodes.iris_response_refinement.fragile_bits_refinement 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | Module contents 16 | --------------- 17 | 18 | .. 
automodule:: iris.nodes.iris_response_refinement 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.callbacks.rst: -------------------------------------------------------------------------------- 1 | iris.callbacks package 2 | ====================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | iris.callbacks.callback\_interface module 8 | ----------------------------------------- 9 | 10 | .. automodule:: iris.callbacks.callback_interface 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | iris.callbacks.pipeline\_trace module 16 | ------------------------------------- 17 | 18 | .. automodule:: iris.callbacks.pipeline_trace 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | Module contents 24 | --------------- 25 | 26 | .. automodule:: iris.callbacks 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.nodes.matcher.rst: -------------------------------------------------------------------------------- 1 | iris.nodes.matcher package 2 | ========================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | iris.nodes.matcher.hamming\_distance\_matcher module 8 | ---------------------------------------------------- 9 | 10 | .. automodule:: iris.nodes.matcher.hamming_distance_matcher 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | iris.nodes.matcher.utils module 16 | ------------------------------- 17 | 18 | .. automodule:: iris.nodes.matcher.utils 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | Module contents 24 | --------------- 25 | 26 | .. automodule:: iris.nodes.matcher 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.nodes.rst: -------------------------------------------------------------------------------- 1 | iris.nodes package 2 | ================== 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | :maxdepth: 4 9 | 10 | iris.nodes.aggregation 11 | iris.nodes.binarization 12 | iris.nodes.encoder 13 | iris.nodes.eye_properties_estimation 14 | iris.nodes.geometry_estimation 15 | iris.nodes.geometry_refinement 16 | iris.nodes.iris_response 17 | iris.nodes.iris_response_refinement 18 | iris.nodes.matcher 19 | iris.nodes.normalization 20 | iris.nodes.segmentation 21 | iris.nodes.validators 22 | iris.nodes.vectorization 23 | 24 | Module contents 25 | --------------- 26 | 27 | .. 
automodule:: iris.nodes 28 | :members: 29 | :undoc-members: 30 | :show-inheritance: 31 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.nodes.iris_response.rst: -------------------------------------------------------------------------------- 1 | iris.nodes.iris\_response package 2 | ================================= 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | :maxdepth: 4 9 | 10 | iris.nodes.iris_response.image_filters 11 | iris.nodes.iris_response.probe_schemas 12 | 13 | Submodules 14 | ---------- 15 | 16 | iris.nodes.iris\_response.conv\_filter\_bank module 17 | --------------------------------------------------- 18 | 19 | .. automodule:: iris.nodes.iris_response.conv_filter_bank 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | 24 | Module contents 25 | --------------- 26 | 27 | .. automodule:: iris.nodes.iris_response 28 | :members: 29 | :undoc-members: 30 | :show-inheritance: 31 | -------------------------------------------------------------------------------- /src/iris/utils/common.py: -------------------------------------------------------------------------------- 1 | from typing import Tuple 2 | 3 | import cv2 4 | import numpy as np 5 | 6 | 7 | def contour_to_mask(vertices: np.ndarray, mask_shape: Tuple[int, int]) -> np.ndarray: 8 | """Generate binary mask based on polygon's vertices. 9 | 10 | Args: 11 | vertices (np.ndarray): Vertices points array. 12 | mask_shape (Tuple[int, int]): Tuple with output mask dimension (weight, height). 13 | 14 | Returns: 15 | np.ndarray: Binary mask. 
16 | """ 17 | width, height = mask_shape 18 | mask = np.zeros(shape=(height, width, 3)) 19 | 20 | vertices = np.round(vertices).astype(np.int32) 21 | cv2.fillPoly(mask, pts=[vertices], color=(255, 0, 0)) 22 | 23 | mask = mask[..., 0] 24 | mask = mask.astype(bool) 25 | 26 | return mask 27 | -------------------------------------------------------------------------------- /tests/unit_tests/orchestration/test_environment.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pydantic import ValidationError 3 | 4 | from iris.orchestration.environment import Environment 5 | 6 | 7 | def test_environment_object_creation() -> None: 8 | def build_func(trace): 9 | return trace["doesntmatter"] 10 | 11 | def err_man_func(trace, exception): 12 | pass 13 | 14 | def call_trace_init_func(nodes, pipelines): 15 | pass 16 | 17 | _ = Environment( 18 | pipeline_output_builder=build_func, error_manager=err_man_func, call_trace_initialiser=call_trace_init_func 19 | ) 20 | 21 | 22 | def test_environment_raises_an_error_when_build_function_not_provided() -> None: 23 | with pytest.raises(ValidationError): 24 | _ = Environment(pipeline_output_builder=None) 25 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.nodes.validators.rst: -------------------------------------------------------------------------------- 1 | iris.nodes.validators package 2 | ============================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | iris.nodes.validators.cross\_object\_validators module 8 | ------------------------------------------------------ 9 | 10 | .. automodule:: iris.nodes.validators.cross_object_validators 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | iris.nodes.validators.object\_validators module 16 | ----------------------------------------------- 17 | 18 | .. 
automodule:: iris.nodes.validators.object_validators 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | Module contents 24 | --------------- 25 | 26 | .. automodule:: iris.nodes.validators 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.nodes.binarization.rst: -------------------------------------------------------------------------------- 1 | iris.nodes.binarization package 2 | =============================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | iris.nodes.binarization.multilabel\_binarization module 8 | ------------------------------------------------------- 9 | 10 | .. automodule:: iris.nodes.binarization.multilabel_binarization 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | iris.nodes.binarization.specular\_reflection\_detection module 16 | -------------------------------------------------------------- 17 | 18 | .. automodule:: iris.nodes.binarization.specular_reflection_detection 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | Module contents 24 | --------------- 25 | 26 | .. automodule:: iris.nodes.binarization 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | -------------------------------------------------------------------------------- /.github/issue_template.md: -------------------------------------------------------------------------------- 1 | # {{Issue name here}} 2 | 3 | ## Context 4 | 5 | Please provide any relevant information about your setup. This is important in case the issue is not reproducible except for under certain conditions. 6 | 7 | * Firmware Version: 8 | * Operating System: 9 | * SDK version: 10 | * Toolchain version: 11 | 12 | 13 | ## Expected Behavior 14 | 15 | Please describe the behavior you are expecting 16 | 17 | ## Current Behavior 18 | 19 | What is the current behavior? 
20 | 21 | ## Failure Information (for bugs) 22 | 23 | Please help provide information about the failure if this is a bug. If it is not a bug, please remove the rest of this template. 24 | 25 | ### Steps to Reproduce 26 | 27 | Please provide detailed steps for reproducing the issue. 28 | 29 | 1. step 1 30 | 2. step 2 31 | 3. you get it... 32 | 33 | ### Failure Logs 34 | 35 | Please include any relevant log snippets or files here. 36 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/psf/black 3 | rev: 23.3.0 4 | hooks: 5 | - id: black 6 | 7 | - repo: https://github.com/astral-sh/ruff-pre-commit 8 | rev: v0.0.272 9 | hooks: 10 | - id: ruff 11 | 12 | - repo: https://github.com/pycqa/pydocstyle 13 | rev: 6.3.0 14 | hooks: 15 | - id: pydocstyle 16 | args: [--match=iris/*.py] 17 | 18 | - repo: https://github.com/pycqa/isort 19 | rev: 5.12.0 20 | hooks: 21 | - id: isort 22 | name: isort (python) 23 | 24 | - repo: https://github.com/pre-commit/pre-commit-hooks 25 | rev: v4.4.0 26 | hooks: 27 | - id: check-added-large-files 28 | args: ['--maxkb=5000'] 29 | - id: no-commit-to-branch 30 | args: ['--branch', 'main'] 31 | - id: requirements-txt-fixer 32 | - id: check-merge-conflict 33 | - id: detect-private-key 34 | - id: trailing-whitespace 35 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: report-coverage html-coverage build-docs format-code check-code check-ruff check-docstring 2 | 3 | all: report-coverage html-coverage build-docs format-code check-code check-ruff check-docstring 4 | 5 | report-coverage: 6 | coverage run -m pytest 7 | coverage report --ignore-errors --show-missing --skip-empty 8 | 9 | html-coverage: 10 | coverage run -m pytest 11 | coverage 
html -i 12 | open ./htmlcov/index.html 13 | 14 | build-docs: 15 | cd docs/source/_code_subpages/; rm -f *.rst 16 | cd docs; sphinx-apidoc -o ./source/_code_subpages ../src/iris; make clean html 17 | 18 | format-code: 19 | isort src/iris tests 20 | black src/iris tests docs 21 | 22 | check-code: check-ruff check-docstrings 23 | 24 | check-ruff: 25 | @printf "Running check-ruff\n" 26 | ruff check src/iris scripts tests setup.py 27 | @printf "\n" 28 | 29 | check-docstrings: 30 | pydocstyle --explain src/iris scripts setup.py 31 | -------------------------------------------------------------------------------- /src/iris/orchestration/error_managers.py: -------------------------------------------------------------------------------- 1 | from iris.callbacks.pipeline_trace import PipelineCallTraceStorage 2 | 3 | 4 | def raise_error_manager(call_trace: PipelineCallTraceStorage, exception: Exception) -> None: 5 | """Error manager for the Orb. 6 | 7 | Args: 8 | call_trace (PipelineCallTraceStorage): Pipeline call results storage. 9 | exception (Exception): Exception raised during the pipeline call. 10 | 11 | Raises: 12 | Exception: Reraise the `exception` parameter. 13 | """ 14 | raise exception 15 | 16 | 17 | def store_error_manager(call_trace: PipelineCallTraceStorage, exception: Exception) -> None: 18 | """Error manager for debugging. 19 | 20 | Args: 21 | call_trace (PipelineCallTraceStorage): Pipeline call results storage. 22 | exception (Exception): Exception raised during the pipeline call. 
23 | """ 24 | call_trace.write_error(exception) 25 | -------------------------------------------------------------------------------- /tests/unit_tests/nodes/iris_response_refinement/test_fragile_bits_refinement.py: -------------------------------------------------------------------------------- 1 | from typing import Literal, Tuple 2 | 3 | import pytest 4 | from pydantic import ValidationError, confloat 5 | 6 | from iris.nodes.iris_response_refinement.fragile_bits_refinement import FragileBitRefinement 7 | 8 | 9 | @pytest.mark.parametrize( 10 | "value_threshold,fragile_type", 11 | [ 12 | pytest.param([-0.6, -0.3], "cartesian"), 13 | pytest.param([-0.2, -0.5], "polar"), 14 | pytest.param([0, 0], "elliptical"), 15 | ], 16 | ids=["error_threshold_cartesian", "error_threshold_polar", "error_fragile_type"], 17 | ) 18 | def test_iris_encoder_threshold_raises_an_exception( 19 | value_threshold: Tuple[confloat(ge=0), confloat(ge=0)], fragile_type: Literal["cartesian", "polar"] 20 | ) -> None: 21 | with pytest.raises(ValidationError): 22 | _ = FragileBitRefinement(value_threshold, fragile_type) 23 | -------------------------------------------------------------------------------- /scripts/common/run_iris_pipeline.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | import os 4 | 5 | import cv2 6 | from iris.pipelines.iris_pipeline import IRISPipeline 7 | 8 | if __name__ == "__main__": 9 | logging.basicConfig(format="%(asctime)s - %(message)s", level=logging.INFO) 10 | logging.info("Run iris pipeline inference script STARTED") 11 | 12 | parser = argparse.ArgumentParser("Perform IRISPipeline inference for a given image.") 13 | parser.add_argument( 14 | "-i", 15 | "--in_img", 16 | type=str, 17 | default=os.path.join("tests", "e2e_tests", "pipelines", "mocks", "inputs", "anonymized.png"), 18 | ) 19 | args = parser.parse_args() 20 | 21 | iris_pipeline = IRISPipeline() 22 | 23 | img_data = 
cv2.imread(args.in_img, cv2.IMREAD_GRAYSCALE) 24 | 25 | out = iris_pipeline(img_data, "right") 26 | 27 | logging.info("Run iris pipeline inference script FINISHED") 28 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.io.rst: -------------------------------------------------------------------------------- 1 | iris.io package 2 | =============== 3 | 4 | Submodules 5 | ---------- 6 | 7 | iris.io.class\_configs module 8 | ----------------------------- 9 | 10 | .. automodule:: iris.io.class_configs 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | iris.io.dataclasses module 16 | -------------------------- 17 | 18 | .. automodule:: iris.io.dataclasses 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | iris.io.errors module 24 | --------------------- 25 | 26 | .. automodule:: iris.io.errors 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | iris.io.validators module 32 | ------------------------- 33 | 34 | .. automodule:: iris.io.validators 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | Module contents 40 | --------------- 41 | 42 | .. automodule:: iris.io 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.nodes.iris_response.image_filters.rst: -------------------------------------------------------------------------------- 1 | iris.nodes.iris\_response.image\_filters package 2 | ================================================ 3 | 4 | Submodules 5 | ---------- 6 | 7 | iris.nodes.iris\_response.image\_filters.gabor\_filters module 8 | -------------------------------------------------------------- 9 | 10 | .. 
automodule:: iris.nodes.iris_response.image_filters.gabor_filters 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | iris.nodes.iris\_response.image\_filters.image\_filter\_interface module 16 | ------------------------------------------------------------------------ 17 | 18 | .. automodule:: iris.nodes.iris_response.image_filters.image_filter_interface 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | Module contents 24 | --------------- 25 | 26 | .. automodule:: iris.nodes.iris_response.image_filters 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | -------------------------------------------------------------------------------- /.github/workflows/check-release-version.yml: -------------------------------------------------------------------------------- 1 | name: check-release-version 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | jobs: 8 | check-version-tag: 9 | runs-on: ubuntu-latest 10 | 11 | steps: 12 | - name: Checkout 13 | uses: actions/checkout@v3 14 | with: 15 | lfs: true 16 | 17 | - name: Set up Python 3.8 18 | uses: actions/setup-python@v3 19 | with: 20 | python-version: 3.8 21 | 22 | - run: echo "REPOSITORY_NAME=$(echo '${{ github.repository }}' | awk -F '/' '{print $2}')" >> $GITHUB_ENV 23 | shell: bash 24 | 25 | - name: Set env 26 | run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV 27 | 28 | - name: Install package 29 | run: | 30 | IRIS_ENV=SERVER python -m pip install . 31 | 32 | - name: Check release tag 33 | run: | 34 | PYTHONPATH=. 
python scripts/ops/check_version_tag.py --release $RELEASE_VERSION 35 | -------------------------------------------------------------------------------- /.github/workflows/ci-testing.yml: -------------------------------------------------------------------------------- 1 | name: ci-testing 2 | 3 | on: 4 | push: 5 | branches: [main, dev] 6 | pull_request: 7 | branches: [main, dev] 8 | schedule: 9 | - cron: '0 0 * * *' # Runs at 00:00 UTC every day 10 | 11 | jobs: 12 | pytest: 13 | strategy: 14 | matrix: 15 | python-version: [3.8, 3.9, '3.10'] 16 | fail-fast: false 17 | 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - name: Checkout to branch 22 | uses: actions/checkout@v3 23 | 24 | - name: Set up Python ${{ matrix.python-version }} 25 | uses: actions/setup-python@v3 26 | with: 27 | python-version: ${{ matrix.python-version }} 28 | 29 | - name: Update pip 30 | run: python -m pip install --upgrade pip 31 | 32 | - name: Install package 33 | run: | 34 | IRIS_ENV=SERVER python -m pip install -e . 35 | python -m pip install pytest 36 | 37 | - name: Run tests 38 | run: pytest 39 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.nodes.iris_response.probe_schemas.rst: -------------------------------------------------------------------------------- 1 | iris.nodes.iris\_response.probe\_schemas package 2 | ================================================ 3 | 4 | Submodules 5 | ---------- 6 | 7 | iris.nodes.iris\_response.probe\_schemas.probe\_schema\_interface module 8 | ------------------------------------------------------------------------ 9 | 10 | .. automodule:: iris.nodes.iris_response.probe_schemas.probe_schema_interface 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | iris.nodes.iris\_response.probe\_schemas.regular\_probe\_schema module 16 | ---------------------------------------------------------------------- 17 | 18 | .. 
automodule:: iris.nodes.iris_response.probe_schemas.regular_probe_schema 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | Module contents 24 | --------------- 25 | 26 | .. automodule:: iris.nodes.iris_response.probe_schemas 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | -------------------------------------------------------------------------------- /docs/source/quickstart/running_inference.rst: -------------------------------------------------------------------------------- 1 | Running inference 2 | ================================ 3 | 4 | A simple inference run can be achieved by running source code below. 5 | 6 | .. code-block:: python 7 | 8 | import cv2 9 | import iris 10 | 11 | # 1. Create IRISPipeline object 12 | iris_pipeline = iris.IRISPipeline() 13 | 14 | # 2. Load IR image of an eye 15 | img_pixels = cv2.imread("/path/to/ir/image", cv2.IMREAD_GRAYSCALE) 16 | 17 | # 3. Perform inference 18 | # Options for the `eye_side` argument are: ["left", "right"] 19 | output = iris_pipeline(img_data=img_pixels, eye_side="left") 20 | 21 | To fully explore and understand the extensive capabilities of the iris package, visit the `Examples` subpages. Here, you'll find a collection of Jupyter Notebooks that serve as valuable resources, offering practical guides and real-world examples to provide a comprehensive insight into the rich functionalities and potential applications of the ``iris`` package. 22 | 23 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.utils.rst: -------------------------------------------------------------------------------- 1 | iris.utils package 2 | ================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | iris.utils.base64\_encoding module 8 | ---------------------------------- 9 | 10 | .. 
import numpy as np
import pytest

from iris.io.dataclasses import EyeOrientation, GeometryPolygons
from iris.nodes.eye_properties_estimation.moment_of_area import MomentOfArea
from tests.unit_tests.utils import rotated_elliptical_contour


@pytest.mark.parametrize(
    "input_contour,expected_eye_orientation",
    [(rotated_elliptical_contour(theta=0.142857), EyeOrientation(angle=0.142857))],
    ids=["regular"],
)
def test_first_order_area(input_contour: np.ndarray, expected_eye_orientation: EyeOrientation) -> None:
    # The estimated orientation of a rotated elliptical eyeball contour must match
    # the ellipse's rotation angle to within 1/360 (i.e. sub-degree precision).
    triangle = np.array([[0, 0], [0, 1], [1, 0]])
    input_geometry_polygon = GeometryPolygons(pupil_array=triangle, iris_array=triangle, eyeball_array=input_contour)

    moments_of_area = MomentOfArea(eccentricity_threshold=0)
    # Fixed typo: `computed_eye_orientaiton` -> `computed_eye_orientation`.
    computed_eye_orientation = moments_of_area(input_geometry_polygon)

    assert np.abs(computed_eye_orientation.angle - expected_eye_orientation.angle) < 1 / 360
import numpy as np
import pytest

from iris.io.dataclasses import GeometryPolygons
from iris.io.errors import EyeOrientationEstimationError
from iris.nodes.eye_properties_estimation.moment_of_area import MomentOfArea
from tests.unit_tests.utils import rotated_elliptical_contour


@pytest.mark.parametrize(
    "input_contour,eccentricity_threshold",
    [(rotated_elliptical_contour(a=1, b=1, theta=0), 0.5)],
    ids=["eccentricity < threshold"],
)
def test_first_order_area_fail_eccentricity_threshold(
    input_contour: np.ndarray, eccentricity_threshold: float
) -> None:
    # Fixed typo in test name: `threhsold` -> `threshold`.
    # A circle (a == b) has eccentricity 0, below the threshold, so orientation
    # estimation must be rejected with EyeOrientationEstimationError.
    triangle = np.array([[0, 0], [0, 1], [1, 0]])
    input_geometry_polygon = GeometryPolygons(pupil_array=triangle, iris_array=triangle, eyeball_array=input_contour)

    with pytest.raises(EyeOrientationEstimationError):
        moments_of_area = MomentOfArea(eccentricity_threshold=eccentricity_threshold)
        moments_of_area(input_geometry_polygon)
import os
import pickle
from typing import Any

import numpy as np

from iris.nodes.geometry_refinement.contour_interpolation import ContourInterpolation


def load_mock_pickle(name: str) -> Any:
    """Load a mock object by name from this test's `mocks/contour_interpolation` directory.

    Args:
        name (str): Mock pickle file name without extension.

    Returns:
        Any: Unpickled mock object.
    """
    testdir = os.path.join(os.path.dirname(__file__), "mocks", "contour_interpolation")

    mock_path = os.path.join(testdir, f"{name}.pickle")

    # Use a context manager so the file handle is closed deterministically
    # (the previous `pickle.load(open(...))` leaked the descriptor).
    with open(mock_path, "rb") as f:
        return pickle.load(f)


def test_e2e_contour_interpolation() -> None:
    # Interpolating the raw polygons must reproduce the stored expected arrays exactly.
    mock_input = load_mock_pickle(name="not_interpolated_polygons")

    expected_result = load_mock_pickle(name="e2e_expected_result")

    algorithm = ContourInterpolation(max_distance_between_boundary_points=0.01)
    result = algorithm(polygons=mock_input)

    np.testing.assert_equal(expected_result.pupil_array, result.pupil_array)
    np.testing.assert_equal(expected_result.iris_array, result.iris_array)
    np.testing.assert_equal(expected_result.eyeball_array, result.eyeball_array)
import os
import pickle
from typing import Any

import numpy as np

from iris.nodes.geometry_estimation.lsq_ellipse_fit_with_refinement import LSQEllipseFitWithRefinement


def load_mock_pickle(name: str) -> Any:
    """Load a mock object by name from this test's `mocks` directory.

    Args:
        name (str): Mock pickle file name without extension.

    Returns:
        Any: Unpickled mock object.
    """
    testdir = os.path.join(os.path.dirname(__file__), "mocks")

    mock_path = os.path.join(testdir, f"{name}.pickle")

    # Use a context manager so the file handle is closed deterministically
    # (the previous `pickle.load(open(...))` leaked the descriptor).
    with open(mock_path, "rb") as f:
        return pickle.load(f)


def test_e2e_lsq_ellipse_fit_with_refinement() -> None:
    # Ellipse fitting on the smoothed polygons must reproduce the stored expected arrays exactly.
    mock_input = load_mock_pickle(name="smoothing_result")

    expected_result = load_mock_pickle(name="e2e_expected_result_lsq_ellipse_fit_with_refinement")

    algorithm = LSQEllipseFitWithRefinement(dphi=1.0)
    result = algorithm(input_polygons=mock_input)

    np.testing.assert_equal(expected_result.pupil_array, result.pupil_array)
    np.testing.assert_equal(expected_result.iris_array, result.iris_array)
    np.testing.assert_equal(expected_result.eyeball_array, result.eyeball_array)
import os
import pickle
from typing import Any

import numpy as np

from iris.nodes.geometry_estimation.fusion_extrapolation import FusionExtrapolation


def load_mock_pickle(name: str) -> Any:
    """Load a mock object by name from this test's `mocks` directory.

    Args:
        name (str): Mock pickle file name without extension.

    Returns:
        Any: Unpickled mock object.
    """
    testdir = os.path.join(os.path.dirname(__file__), "mocks")

    mock_path = os.path.join(testdir, f"{name}.pickle")

    # Use a context manager so the file handle is closed deterministically
    # (the previous `pickle.load(open(...))` leaked the descriptor).
    with open(mock_path, "rb") as f:
        return pickle.load(f)


def test_e2e_fusion_extrapolation() -> None:
    # Fusion extrapolation on the smoothed polygons must reproduce the stored expected arrays exactly.
    mock_input = load_mock_pickle(name="smoothing_result")
    mock_eye_center = load_mock_pickle(name="eye_center")

    expected_result = load_mock_pickle(name="e2e_expected_result_fusion_extrapolation")

    algorithm = FusionExtrapolation()
    result = algorithm(input_polygons=mock_input, eye_center=mock_eye_center)

    np.testing.assert_equal(expected_result.pupil_array, result.pupil_array)
    np.testing.assert_equal(expected_result.iris_array, result.iris_array)
    np.testing.assert_equal(expected_result.eyeball_array, result.eyeball_array)
5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
import os
import pickle
from typing import Any

import numpy as np

from iris.nodes.geometry_estimation.linear_extrapolation import LinearExtrapolation


def load_mock_pickle(name: str) -> Any:
    """Load a mock object by name from this test's `mocks` directory.

    Args:
        name (str): Mock pickle file name without extension.

    Returns:
        Any: Unpickled mock object.
    """
    testdir = os.path.join(os.path.dirname(__file__), "mocks")

    mock_path = os.path.join(testdir, f"{name}.pickle")

    # Use a context manager so the file handle is closed deterministically
    # (the previous `pickle.load(open(...))` leaked the descriptor).
    with open(mock_path, "rb") as f:
        return pickle.load(f)


def test_e2e_linear_extrapolation() -> None:
    # Linear extrapolation on the smoothed polygons must reproduce the stored expected arrays exactly.
    mock_input = load_mock_pickle(name="smoothing_result")
    mock_eye_center = load_mock_pickle(name="eye_center")

    expected_result = load_mock_pickle(name="e2e_expected_result_linear_extrapolation")

    algorithm = LinearExtrapolation(dphi=1.0)
    result = algorithm(input_polygons=mock_input, eye_center=mock_eye_center)

    np.testing.assert_equal(expected_result.pupil_array, result.pupil_array)
    np.testing.assert_equal(expected_result.iris_array, result.iris_array)
    np.testing.assert_equal(expected_result.eyeball_array, result.eyeball_array)
automodule:: iris.nodes.geometry_refinement.contour_interpolation 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | iris.nodes.geometry\_refinement.contour\_points\_filter module 16 | -------------------------------------------------------------- 17 | 18 | .. automodule:: iris.nodes.geometry_refinement.contour_points_filter 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | iris.nodes.geometry\_refinement.smoothing module 24 | ------------------------------------------------ 25 | 26 | .. automodule:: iris.nodes.geometry_refinement.smoothing 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | Module contents 32 | --------------- 33 | 34 | .. automodule:: iris.nodes.geometry_refinement 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | -------------------------------------------------------------------------------- /tests/unit_tests/nodes/aggregation/test_geometry_mask_aggregator.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from iris.io.dataclasses import NoiseMask 7 | from iris.nodes.aggregation.noise_mask_union import NoiseMaskUnion 8 | 9 | 10 | def array_to_NoiseMask(input_array: list) -> NoiseMask: 11 | return NoiseMask(mask=np.array(input_array).astype(bool)) 12 | 13 | 14 | @pytest.mark.parametrize( 15 | "geometry_masks,expected_output", 16 | [ 17 | ([array_to_NoiseMask([[0, 1], [1, 0]])], array_to_NoiseMask([[0, 1], [1, 0]])), 18 | ( 19 | [array_to_NoiseMask([[0, 0, 1], [1, 0, 0]]), array_to_NoiseMask([[0, 0, 0], [1, 1, 0]])], 20 | array_to_NoiseMask([[0, 0, 1], [1, 1, 0]]), 21 | ), 22 | ], 23 | ids=["1 NoiseMask", "2 NoiseMasks"], 24 | ) 25 | def test_geometry_mask_aggregate_mask(geometry_masks: List[NoiseMask], expected_output: NoiseMask) -> None: 26 | aggregation_node = NoiseMaskUnion() 27 | 28 | aggregated_geometry_mask = aggregation_node.execute(geometry_masks) 29 | 
30 | np.testing.assert_equal(aggregated_geometry_mask.mask, expected_output.mask) 31 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | # {{Pull Request name here}} 2 | 3 | ## PR description 4 | 5 | Please, give a brief description of what was changed or fixed and how you did it. 6 | 7 | ### Issue 8 | 9 | 10 | ### Solution 11 | 12 | 13 | ### Limitations 14 | 15 | 16 | ## Type 17 | 18 | 19 | - [ ] Feature 20 | - [ ] Refactoring 21 | - [ ] Bugfix 22 | - [ ] DevOps 23 | - [ ] Testing 24 | 25 | ## Checklist 26 | 27 | 28 | - [ ] I've made sure that my code works as expected by writing unit tests. 29 | - [ ] I've checked if my code doesn't generate warnings or errors. 30 | - [ ] I've performed a self-review of my code. 31 | - [ ] I've made sure that my code follows the style guidelines of the project. 32 | - [ ] I've commented hard-to-understand parts of my code. 33 | - [ ] I've made appropriate changes in the documentation. 34 | -------------------------------------------------------------------------------- /src/iris/orchestration/environment.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Callable, Dict, List 2 | 3 | from iris.callbacks.pipeline_trace import PipelineCallTraceStorage 4 | from iris.io.class_configs import Algorithm, ImmutableModel 5 | from iris.orchestration.pipeline_dataclasses import PipelineNode 6 | 7 | 8 | class Environment(ImmutableModel): 9 | """Data holder for the pipeline environment properties. 10 | 11 | call_trace_initialiser is responsible for initialising the PipelineCallTraceStorage instance in the pipeline. 12 | 13 | pipeline_output_builder is responsible for building the pipeline output from the call_trace, which kept all intermediary results so far. 
14 | 15 | error_manager is responsible for the pipeline's behaviour in case of an exception 16 | 17 | disabled_qa stores a list of Algorithm and/or Callbacks types to be disabled. 18 | """ 19 | 20 | call_trace_initialiser: Callable[[Dict[str, Algorithm], List[PipelineNode]], PipelineCallTraceStorage] 21 | pipeline_output_builder: Callable[[PipelineCallTraceStorage], Any] 22 | error_manager: Callable[[PipelineCallTraceStorage, Exception], None] 23 | disabled_qa: List[type] = [] 24 | -------------------------------------------------------------------------------- /scripts/ops/check_version_tag.py: -------------------------------------------------------------------------------- 1 | ################################################################################# 2 | # The script reads and checks if release's version matches package's version. # 3 | # Used in `check-pre-release` Github Action. # 4 | # Author: Worldcoin AI # 5 | ################################################################################# 6 | 7 | import argparse 8 | import importlib 9 | 10 | 11 | class ReleaseVersionMismatchError(Exception): 12 | """Release version mismatch error class.""" 13 | 14 | pass 15 | 16 | 17 | if __name__ == "__main__": 18 | parser = argparse.ArgumentParser() 19 | 20 | parser.add_argument("-r", "--release", help="Desired version tag", required=True) 21 | 22 | args = parser.parse_args() 23 | 24 | module_name = "iris" 25 | module = importlib.import_module(module_name) 26 | 27 | if f"v{module.__version__}" != args.release: 28 | raise ReleaseVersionMismatchError( 29 | f"Check your release tag and module version {module_name}. 
import math
import os
import pickle
from typing import Any

from iris.nodes.eye_properties_estimation.bisectors_method import BisectorsMethod
from iris.nodes.eye_properties_estimation.pupil_iris_property_calculator import PupilIrisPropertyCalculator


def load_mock_pickle(name: str) -> Any:
    """Load a mock object by name from this test's `mocks/bisectors_method` directory.

    Args:
        name (str): Mock pickle file name without extension.

    Returns:
        Any: Unpickled mock object.
    """
    testdir = os.path.join(os.path.dirname(__file__), "mocks", "bisectors_method")
    mock_path = os.path.join(testdir, f"{name}.pickle")
    # Use a context manager so the file handle is closed deterministically
    # (the previous `pickle.load(open(...))` leaked the descriptor).
    with open(mock_path, "rb") as f:
        return pickle.load(f)


def test_precomputed_pupil_iris_property() -> None:
    # Pupil/iris diameter and center-distance ratios computed from the mock polygons
    # must match the precomputed reference values.
    mock_polygons = load_mock_pickle(name="geometry_polygons")

    eye_center_obj = BisectorsMethod()
    eye_center = eye_center_obj(mock_polygons)

    pupil_iris_property_obj = PupilIrisPropertyCalculator()
    p2i_property = pupil_iris_property_obj(mock_polygons, eye_center)

    assert math.isclose(p2i_property.pupil_to_iris_diameter_ratio, 0.543019583685283)
    assert math.isclose(p2i_property.pupil_to_iris_center_dist_ratio, 0.032786957796171405)
import os
import pickle
from typing import Any

import numpy as np
import pytest

from iris.nodes.vectorization.contouring import ContouringAlgorithm, filter_polygon_areas


def load_mock_pickle(name: str) -> Any:
    """Load a mock object by name from this test's `mocks/contouring` directory.

    Args:
        name (str): Mock pickle file name without extension.

    Returns:
        Any: Unpickled mock object.
    """
    testdir = os.path.join(os.path.dirname(__file__), "mocks", "contouring")

    mock_path = os.path.join(testdir, f"{name}.pickle")

    # Use a context manager so the file handle is closed deterministically
    # (the previous `pickle.load(open(...))` leaked the descriptor).
    with open(mock_path, "rb") as f:
        return pickle.load(f)


@pytest.fixture
def algorithm() -> ContouringAlgorithm:
    """Provide a ContouringAlgorithm configured with the area-based polygon filter."""
    return ContouringAlgorithm(contour_filters=[filter_polygon_areas])


def test_e2e_vectorization_algorithm(algorithm: ContouringAlgorithm) -> None:
    # Vectorizing the mock geometry mask must reproduce the stored expected arrays exactly.
    mock_geometry_mask = load_mock_pickle(name="geometry_mask")

    expected_result = load_mock_pickle(name="e2e_expected_result")

    result = algorithm(geometry_mask=mock_geometry_mask)

    np.testing.assert_equal(expected_result.pupil_array, result.pupil_array)
    np.testing.assert_equal(expected_result.iris_array, result.iris_array)
    np.testing.assert_equal(expected_result.eyeball_array, result.eyeball_array)
# pytest only reads [tool.pytest.ini_options] in pyproject.toml; a bare
# [tool.pytest] table is ignored, so these options were silently inactive.
[tool.pytest.ini_options]
minversion = "6.0"
testpaths = ["tests"]
xfail_strict = true
log_auto_indent = true
class NoiseMaskUnion(Algorithm):
    """Aggregate several NoiseMask into one by computing their union. I.E. For every bit of the NoiseMask, the output is an OR of the same bit across all NoiseMasks."""

    def run(self, elements: List[NoiseMask]) -> NoiseMask:
        """Compute the union of a list of NoiseMask.

        Args:
            elements (List[NoiseMask]): input NoiseMasks.

        Raises:
            ValueError: if `elements` is empty, or if the NoiseMask.mask shapes differ.

        Returns:
            NoiseMask: aggregated NoiseMasks
        """
        # Guard the empty case explicitly; `elements[0]` below would otherwise
        # raise an opaque IndexError.
        if not elements:
            raise ValueError("NoiseMaskUnion requires at least one NoiseMask to aggregate.")

        if not all(mask.mask.shape == elements[0].mask.shape for mask in elements):
            raise ValueError(
                f"Every NoiseMask.mask must have the same shape to be aggregated. "
                f"Received {[mask.mask.shape for mask in elements]}"
            )

        # Summing the stacked boolean masks and thresholding at > 0 is an
        # elementwise OR across all masks.
        noise_union = np.sum([mask.mask for mask in elements], axis=0) > 0

        return NoiseMask(mask=noise_union)
def test_iris_encoder_constructor() -> None:
    """End-to-end check of IrisEncoder output against precomputed expected results.

    NOTE(review): despite the name, this exercises the full encoding run,
    not just the constructor; name kept for backward-compatible test selection.
    """
    iris_response = load_mock_pickle("iris_response")
    expected_result = load_mock_pickle("e2e_expected_result")

    iris_encoder = IrisEncoder(mask_threshold=0.5)
    result = iris_encoder(iris_response)

    assert len(result.iris_codes) == len(expected_result.iris_codes)
    assert len(result.mask_codes) == len(expected_result.mask_codes)
    # Compare the version strings for equality: comparing their *lengths*
    # (the previous check) would let e.g. "1.0.0" pass against "2.0.1".
    assert result.iris_code_version == expected_result.iris_code_version

    for i, (i_iris_code, i_mask_code) in enumerate(zip(result.iris_codes, result.mask_codes)):
        assert np.allclose(expected_result.iris_codes[i], i_iris_code, rtol=1e-05, atol=1e-07)
        assert np.allclose(expected_result.mask_codes[i], i_mask_code, rtol=1e-05, atol=1e-07)
    # Test that encoding and decoding round-trip (decoding inverts encoding)
automodule:: iris.nodes.normalization 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | -------------------------------------------------------------------------------- /tests/unit_tests/pipelines/mocks/incoherent_pipeline_2.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | pipeline_name: iris_pipeline 3 | iris_version: 1.5.1 4 | 5 | pipeline: 6 | - name: segmentation 7 | algorithm: 8 | class_name: iris.nodes.segmentation.MultilabelSegmentation 9 | params: {} 10 | inputs: 11 | - name: image 12 | source_node: input 13 | callbacks: 14 | 15 | - name: segmentation_binarization 16 | algorithm: 17 | class_name: iris.nodes.segmentation.multilabel_binarization.MultilabelSegmentationBinarization 18 | params: {} 19 | inputs: 20 | - name: segmentation_map 21 | source_node: segmentation 22 | callbacks: 23 | 24 | - name: specular_reflection_detection 25 | algorithm: 26 | class_name: iris.nodes.binarization.specular_reflection_detection.SpecularReflectionDetection 27 | params: {} 28 | inputs: 29 | - name: ir_image 30 | source_node: input 31 | callbacks: 32 | 33 | - name: noise_masks_aggregation 34 | algorithm: 35 | class_name: iris.nodes.aggregation.noise_mask_union.NoiseMaskUnion 36 | params: {} 37 | inputs: 38 | - name: elements 39 | source_node: 40 | - name: segmentation_binarization 41 | index: 1 42 | - name: specular_reflection_detection 43 | - name: does_not_exist 44 | callbacks: 45 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.orchestration.rst: -------------------------------------------------------------------------------- 1 | iris.orchestration package 2 | ========================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | iris.orchestration.environment module 8 | ------------------------------------- 9 | 10 | .. 
automodule:: iris.orchestration.environment 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | iris.orchestration.error\_managers module 16 | ----------------------------------------- 17 | 18 | .. automodule:: iris.orchestration.error_managers 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | iris.orchestration.output\_builders module 24 | ------------------------------------------ 25 | 26 | .. automodule:: iris.orchestration.output_builders 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | iris.orchestration.pipeline\_dataclasses module 32 | ----------------------------------------------- 33 | 34 | .. automodule:: iris.orchestration.pipeline_dataclasses 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | iris.orchestration.validators module 40 | ------------------------------------ 41 | 42 | .. automodule:: iris.orchestration.validators 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | Module contents 48 | --------------- 49 | 50 | .. 
automodule:: iris.orchestration 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | -------------------------------------------------------------------------------- /tests/unit_tests/nodes/geometry_estimation/test_lsq_ellipse_fit_with_refinement.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | from iris.nodes.geometry_estimation.lsq_ellipse_fit_with_refinement import LSQEllipseFitWithRefinement 5 | 6 | 7 | @pytest.fixture 8 | def algorithm() -> LSQEllipseFitWithRefinement: 9 | return LSQEllipseFitWithRefinement() 10 | 11 | 12 | @pytest.mark.parametrize( 13 | "src_pt, dst_pts, expected_index", 14 | [ 15 | (np.array([0.0, 0.0]), np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]]), 0), 16 | (np.array([1.0, 1.0]), np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]]), 1), 17 | (np.array([2.0, 2.0]), np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]]), 2), 18 | (np.array([0.2, 0.2]), np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]]), 0), 19 | (np.array([0.8, 0.8]), np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]]), 1), 20 | (np.array([1.4, 1.4]), np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]]), 1), 21 | (np.array([1.9, 1.6]), np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]]), 2), 22 | (np.array([3.4, 3.4]), np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]]), 2), 23 | ], 24 | ) 25 | def test_find_correspondence( 26 | algorithm: LSQEllipseFitWithRefinement, src_pt: np.ndarray, dst_pts: np.ndarray, expected_index: int 27 | ) -> None: 28 | result_idx = algorithm._find_correspondence(src_pt, dst_pts) 29 | 30 | assert result_idx == expected_index 31 | -------------------------------------------------------------------------------- /src/iris/orchestration/validators.py: -------------------------------------------------------------------------------- 1 | from collections import Counter 2 | from typing import Any, List 3 | 4 | from pydantic import fields 5 | 6 | from iris._version import __version__ 7 | from 
def pipeline_config_duplicate_node_name_check(cls: type, v: List[Any], field: fields.ModelField) -> List[Any]:
    """Check if all pipeline nodes have distinct names.

    Args:
        cls (type): Class type.
        v (List[Any]): Value to check.
        field (fields.ModelField): Field descriptor.

    Raises:
        IRISPipelineError: Raised if pipeline nodes aren't unique.

    Returns:
        List[Any]: `v` sent for further processing.
    """
    # Counter preserves first-occurrence order, so the reported mapping is
    # identical to building the name list first and counting afterwards.
    name_counts = Counter(node.name for node in v)

    if any(count > 1 for count in name_counts.values()):
        raise IRISPipelineError(f"Pipeline node name must be unique. Received {dict(name_counts)}")

    return v
class MockCallback(Callback):
    """Test double for Callback that records the call sequence in a string buffer.

    The companion test asserts the buffer's exact contents, so the marker
    constants and separator below are part of the expected output format.
    """

    # Markers appended to the buffer so the test can verify call ordering.
    ON_EXECUTE_START_MSG = "on_execute_start"
    ON_EXECUTE_END_MSG = "on_execute_end"
    MSG_SEP = " "

    def __init__(self):
        """Start with an empty buffer."""
        self.buffer = ""

    def on_execute_start(self) -> None:
        """Reset the buffer, then record that execution started."""
        self._clean_buffer()
        self.buffer = MockCallback.ON_EXECUTE_START_MSG

    def on_execute_end(self, result: str) -> None:
        """Append the algorithm's result and the end marker to the buffer.

        Args:
            result (str): Value returned by the wrapped Algorithm's run().
        """
        self.buffer = (
            f"{self.buffer}{MockCallback.MSG_SEP}{result}{MockCallback.MSG_SEP}{MockCallback.ON_EXECUTE_END_MSG}"
        )

    def _clean_buffer(self) -> None:
        """Empty the buffer."""
        self.buffer = ""
@pytest.mark.parametrize(
    "mock_vertices,expected",
    [
        (np.array([[10.0, 10.0], [20.0, 10.0], [20.0, 20.0], [10.0, 20.0]]), "expected_result"),
        (np.array([[10.0, 10.0], [20.0, 10.0], [20.0, 20.0], [10.0, 20.0], [10.0, 10.0]]), "expected_result"),
        (np.array([[10.0, 10.0], [20.0, 10.0]]), "expected_result_line"),
        (np.array([[10.0, 10.0], [15.0, 10.0], [20.0, 10.0]]), "expected_result_line"),
    ],
    ids=["standard", "loop", "2 vertices line", "3 vertices line"],
)
def test_contour_to_mask(mock_vertices: np.ndarray, expected: str, request: FixtureRequest) -> None:
    """Check contour_to_mask rasterization against the expected boolean masks.

    Args:
        mock_vertices (np.ndarray): Polygon vertices to rasterize.
        expected (str): Name of the fixture holding the expected mask.
        request (FixtureRequest): pytest fixture-request, used for indirect fixture lookup.
    """
    expected_result = request.getfixturevalue(expected)
    result = common.contour_to_mask(mock_vertices, MOCK_MASK_SHAPE)

    # assert_array_equal reports the first mismatching indices on failure,
    # unlike the bare `np.all(...)` comparison it replaces; it also matches
    # the np.testing style used throughout the other tests.
    np.testing.assert_array_equal(expected_result, result)
21 | ({"res_in_r": 0}), 22 | ], 23 | ) 24 | def test_constructor_raises_exception(wrong_param: dict) -> None: 25 | with pytest.raises((NormalizationError, ValidationError)): 26 | _ = NonlinearNormalization(**wrong_param) 27 | 28 | 29 | def test_generate_correspondences(algorithm: NonlinearNormalization) -> None: 30 | pupil_points = (generate_arc(3.0, 5.0, 5.0, 0.0, 2 * np.pi, 3),) 31 | iris_points = (generate_arc(10.0, 4.8, 5.1, 0.0, 2 * np.pi, 3),) 32 | 33 | expected_correspondences = np.array( 34 | [ 35 | [[10, 5], [3, 9], [3, 1]], 36 | [[13, 5], [1, 12], [1, -2]], 37 | ] 38 | ) 39 | 40 | result = algorithm._generate_correspondences( 41 | pupil_points=pupil_points[0], 42 | iris_points=iris_points[0], 43 | ) 44 | 45 | np.testing.assert_allclose(result, expected_correspondences, rtol=1e-05) 46 | -------------------------------------------------------------------------------- /docs/source/quickstart/installation.rst: -------------------------------------------------------------------------------- 1 | Installation 2 | ================================ 3 | 4 | Installation is as simple as running ``pip install`` with specifying ``IRIS_ENV`` installation global flag (``IRIS_ENV`` flag may be skipped if ``iris`` is installed from PyPl server but this option is only available when ``iris`` is installed on local machine). The ``IRIS_ENV`` flag is used to indicate an "environment" in which package is meant to work. Possible options are: 5 | 6 | #. ``SERVER`` - For installing ``iris`` package with dependencies required for running an inference on a local machines. 7 | 8 | .. code:: bash 9 | 10 | # On a local machine 11 | pip install open-iris 12 | # or directly from GitHub 13 | IRIS_ENV=SERVER pip install git+https://github.com/worldcoin/open-iris.git 14 | 15 | #. ``ORB`` - For installing ``iris`` package with dependencies required for running an inference on the Orb. 16 | 17 | .. 
code:: bash 18 | 19 | # On the Orb 20 | IRIS_ENV=ORB pip install git+https://github.com/worldcoin/open-iris.git 21 | 22 | #. ``DEV`` - For installing iris package together with packages necessary for development of ``iris`` package. 23 | 24 | .. code:: bash 25 | 26 | # For development 27 | IRIS_ENV=DEV pip install git+https://github.com/worldcoin/open-iris.git 28 | 29 | After successfully installing ``iris``, verify your installation by attempting to import. 30 | 31 | .. code:: bash 32 | 33 | python3 -c "import iris; print(iris.__version__)" 34 | -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/normalization/test_e2e_perspective_normalization.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pickle 3 | from typing import Any 4 | 5 | import numpy as np 6 | import pytest 7 | 8 | from iris.nodes.normalization.perspective_normalization import PerspectiveNormalization 9 | 10 | 11 | def load_mock_pickle(name: str) -> Any: 12 | testdir = os.path.join(os.path.dirname(__file__), "mocks", "perspective_normalization") 13 | 14 | mock_path = os.path.join(testdir, f"{name}.pickle") 15 | 16 | return pickle.load(open(mock_path, "rb")) 17 | 18 | 19 | @pytest.fixture 20 | def algorithm() -> PerspectiveNormalization: 21 | return PerspectiveNormalization( 22 | res_in_phi=400, 23 | res_in_r=100, 24 | skip_boundary_points=10, 25 | intermediate_radiuses=np.linspace(0.0, 1.0, 10), 26 | ) 27 | 28 | 29 | def test_e2e_perspective_normalization(algorithm: PerspectiveNormalization) -> None: 30 | ir_image = load_mock_pickle("ir_image") 31 | noise_mask = load_mock_pickle("noise_mask") 32 | eye_orientation = load_mock_pickle("eye_orientation") 33 | extrapolated_polygons = load_mock_pickle("extrapolated_polygons") 34 | 35 | e2e_expected_result = load_mock_pickle("e2e_expected_result") 36 | 37 | result = algorithm(ir_image, noise_mask, extrapolated_polygons, eye_orientation) 38 | 39 | 
class SpecularReflectionDetection(Algorithm):
    """Apply a threshold to the IR Image to detect specular reflections."""

    class Parameters(Algorithm.Parameters):
        """Parameter class for SpecularReflectionDetection class."""

        # Minimum pixel brightness treated as specular reflection (valid range 0-255).
        reflection_threshold: int = Field(..., ge=0, le=255)

    __parameters_type__ = Parameters

    def __init__(self, reflection_threshold: int = 254) -> None:
        """Assign parameters.

        Args:
            reflection_threshold (int, optional): Specular Reflection minimal brightness threshold. Defaults to 254.
        """
        super().__init__(reflection_threshold=reflection_threshold)

    def run(self, ir_image: IRImage) -> NoiseMask:
        """Thresholds an IRImage to detect Specular Reflection.

        Args:
            ir_image (IRImage): Infrared image object.

        Returns:
            NoiseMask: a binary map of the thresholded IRImage.
        """
        # cv2.threshold with THRESH_BINARY maps pixels strictly above the
        # threshold to 255 and everything else to 0.
        _, reflection_segmap = cv2.threshold(
            ir_image.img_data, self.params.reflection_threshold, 255, cv2.THRESH_BINARY
        )
        # Convert the 0/255 map into a boolean mask.
        reflection_segmap = (reflection_segmap / 255.0).astype(bool)

        return NoiseMask(mask=reflection_segmap)
class ImageFilter(Algorithm):
    """Image filter abstract class.

    Subclasses implement `compute_kernel_values`; the kernel is computed once
    at construction time and exposed read-only through `kernel_values`.
    """

    class Parameters(Algorithm.Parameters):
        """Default ImageFilter parameters."""

        pass

    __parameters_type__ = Parameters

    def __init__(self, **kwargs: Any) -> None:
        """Init function."""
        super().__init__(**kwargs)
        # Double underscore name-mangles the attribute (to _ImageFilter__kernel_values)
        # so subclasses cannot accidentally shadow the cached kernel.
        self.__kernel_values = self.compute_kernel_values()

    @property
    def kernel_values(self) -> np.ndarray:
        """Get kernel values.

        Returns:
            np.ndarray: Filter kernel values.
        """
        return self.__kernel_values

    @kernel_values.setter
    def kernel_values(self, value: Any) -> None:
        """Prevent overwriting generated kernel values.

        Args:
            value (Any): New kernel values.

        Raises:
            ImageFilterError: Raised always since overwriting is forbidden.
        """
        raise ImageFilterError("ImageFilter kernel_values are immutable.")

    @abc.abstractmethod
    def compute_kernel_values(self) -> np.ndarray:
        """Compute values of filter kernel.

        Returns:
            np.ndarray: Computed kernel values.
        """
        pass
3 | import numpy as np 4 | import pytest 5 | 6 | from iris.io.dataclasses import IRImage 7 | from iris.nodes.binarization.specular_reflection_detection import SpecularReflectionDetection 8 | 9 | 10 | def _generate_chessboard(shape: Tuple[int, int]) -> None: 11 | chessboard = np.zeros(shape, dtype=np.uint8) 12 | 13 | chessboard[1::2, ::2] = 1 14 | chessboard[::2, ::2] = 1 15 | 16 | return chessboard 17 | 18 | 19 | @pytest.mark.parametrize( 20 | "img_data,expected_result,reflection_threshold", 21 | [ 22 | (np.zeros(shape=(1080, 1440), dtype=np.uint8), np.zeros(shape=(1080, 1440), dtype=np.uint8), 254), 23 | (np.ones(shape=(1080, 1440), dtype=np.uint8) * 255, np.ones(shape=(1080, 1440), dtype=np.uint8), 254), 24 | (_generate_chessboard(shape=(1080, 1440)) * 255, _generate_chessboard(shape=(1080, 1440)), 254), 25 | ( 26 | np.linspace([0] * 100, [255] * 100, 100).astype(np.uint8), 27 | np.linspace([0] * 100, [255] * 100, 100) > 120, 28 | 120, 29 | ), 30 | ], 31 | ids=["all zeros", "all 255", "chessboard test", "linear gradient image"], 32 | ) 33 | def test_segment_specular_reflections( 34 | img_data: np.ndarray, 35 | expected_result: np.ndarray, 36 | reflection_threshold: int, 37 | ) -> None: 38 | spec_ref_algo = SpecularReflectionDetection(reflection_threshold=reflection_threshold) 39 | ir_image = IRImage(img_data=img_data, eye_side="right") 40 | result = spec_ref_algo.run(ir_image) 41 | 42 | np.testing.assert_equal(result.mask, expected_result) 43 | -------------------------------------------------------------------------------- /tests/unit_tests/nodes/matcher/test_hamming_distance_matcher.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | import numpy as np 4 | import pytest 5 | from pydantic import ValidationError 6 | 7 | from iris.nodes.matcher.hamming_distance_matcher import HammingDistanceMatcher 8 | 9 | 10 | @pytest.mark.parametrize( 11 | "rotation_shift, nm_dist", 12 | [ 13 | 
pytest.param(-0.5, 0.45), 14 | pytest.param(1.5, None), 15 | pytest.param(200, "a"), 16 | pytest.param(100, -0.2), 17 | pytest.param(10, 1.3), 18 | ], 19 | ids=[ 20 | "rotation_shift should not be negative", 21 | "rotation_shift should not be floating points", 22 | "nm_dist should be float", 23 | "nm_dist should not be negative", 24 | "nm_dist should not be more than 1", 25 | ], 26 | ) 27 | def test_iris_matcher_raises_an_exception1( 28 | rotation_shift: int, 29 | nm_dist: bool, 30 | ) -> None: 31 | with pytest.raises(ValidationError): 32 | _ = HammingDistanceMatcher(rotation_shift, nm_dist) 33 | 34 | 35 | @pytest.mark.parametrize( 36 | "rotation_shift, nm_dist, weights", 37 | [ 38 | pytest.param(5, 0.4, 3), 39 | pytest.param(15, None, np.zeros((3, 4))), 40 | pytest.param(200, 0.45, [("a", 13)]), 41 | ], 42 | ids=[ 43 | "weights should be a list of arrays", 44 | "weights should be a list of arrays", 45 | "n_rows need to be int or float", 46 | ], 47 | ) 48 | def test_iris_matcher_raises_an_exception2( 49 | rotation_shift: int, 50 | nm_dist: float, 51 | weights: List[np.ndarray], 52 | ) -> None: 53 | with pytest.raises(ValidationError): 54 | _ = HammingDistanceMatcher(rotation_shift, nm_dist, weights) 55 | -------------------------------------------------------------------------------- /src/iris/orchestration/pipeline_dataclasses.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Dict, List, Optional, Tuple, Union 4 | 5 | from pydantic import validator 6 | 7 | from iris.io.class_configs import ImmutableModel 8 | from iris.orchestration.validators import pipeline_metadata_version_check 9 | 10 | 11 | class PipelineMetadata(ImmutableModel): 12 | """Data holder for input config's metadata.""" 13 | 14 | pipeline_name: str 15 | iris_version: str 16 | 17 | _version_check = validator("iris_version", allow_reuse=True)(pipeline_metadata_version_check) 18 | 19 | 20 | class 
PipelineValue(ImmutableModel): 21 | """Data holder for pipeline value that flows through the system with optional index to specify value this holder refers to if value is of Iterable type.""" 22 | 23 | name: str 24 | index: Optional[int] 25 | 26 | 27 | class PipelineInput(PipelineValue): 28 | """Data holder for the reference to an input node.""" 29 | 30 | source_node: Union[str, List[Union[str, PipelineValue]]] 31 | 32 | 33 | class PipelineClass(ImmutableModel): 34 | """Data holder for the reference to any class: Algorithm, Callback, etc.""" 35 | 36 | class_name: str 37 | params: Dict[ 38 | str, 39 | Union[ 40 | int, 41 | float, 42 | str, 43 | PipelineClass, 44 | Tuple[int, float, str, PipelineClass], 45 | List[Union[int, float, str, PipelineClass]], 46 | ], 47 | ] 48 | 49 | 50 | class PipelineNode(ImmutableModel): 51 | """Data holder for one node in a declared pipeline.""" 52 | 53 | name: str 54 | algorithm: PipelineClass 55 | inputs: List[PipelineInput] 56 | callbacks: Optional[List[PipelineClass]] 57 | seed: Optional[str] = None 58 | -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/iris_response/image_filters/test_e2e_gabor_filters.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pickle 3 | 4 | import numpy as np 5 | import pytest 6 | 7 | from iris.nodes.iris_response.image_filters import gabor_filters 8 | 9 | 10 | @pytest.fixture 11 | def precomputed_filters_dirpath() -> str: 12 | return os.path.join(os.path.dirname(os.path.dirname(__file__)), "mocks") 13 | 14 | 15 | def test_computed_kernel_values(precomputed_filters_dirpath: str) -> None: 16 | filename = os.path.join(precomputed_filters_dirpath, "image_filters", "gabor_filter.pickle") 17 | expected_gabor_filter = pickle.load(open(filename, "rb")) 18 | 19 | filename = os.path.join(precomputed_filters_dirpath, "image_filters", "gabor2_filter.pickle") 20 | expected_gabor2_filter = 
pickle.load(open(filename, "rb")) 21 | 22 | filename = os.path.join(precomputed_filters_dirpath, "image_filters", "loggabor_filter.pickle") 23 | expected_loggabor_filter = pickle.load(open(filename, "rb")) 24 | 25 | first_result = gabor_filters.GaborFilter( 26 | kernel_size=(21, 21), sigma_phi=2, sigma_rho=4, theta_degrees=45, lambda_phi=10, dc_correction=True 27 | ) 28 | second_result = gabor_filters.GaborFilter( 29 | kernel_size=(15, 17), sigma_phi=1.5, sigma_rho=2.5, theta_degrees=90, lambda_phi=8, dc_correction=True 30 | ) 31 | third_result = gabor_filters.LogGaborFilter( 32 | kernel_size=(19, 17), sigma_phi=np.pi / 10, sigma_rho=0.8, theta_degrees=5.5, lambda_rho=8.5 33 | ) 34 | 35 | assert np.allclose(expected_gabor_filter, first_result.kernel_values, rtol=1e-05, atol=1e-07) 36 | assert np.allclose(expected_gabor2_filter, second_result.kernel_values, rtol=1e-05, atol=1e-07) 37 | assert np.allclose(expected_loggabor_filter, third_result.kernel_values, rtol=1e-05, atol=1e-07) 38 | -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/normalization/test_e2e_nonlinear_normalization.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pickle 3 | from typing import Any 4 | 5 | import numpy as np 6 | import pytest 7 | 8 | from iris.nodes.normalization.common import getgrids 9 | from iris.nodes.normalization.nonlinear_normalization import NonlinearNormalization 10 | 11 | 12 | def load_mock_pickle(name: str) -> Any: 13 | testdir = os.path.join(os.path.dirname(__file__), "mocks", "nonlinear_normalization") 14 | 15 | mock_path = os.path.join(testdir, f"{name}.pickle") 16 | 17 | return pickle.load(open(mock_path, "rb")) 18 | 19 | 20 | @pytest.fixture 21 | def algorithm() -> NonlinearNormalization: 22 | return NonlinearNormalization( 23 | res_in_r=100, 24 | ) 25 | 26 | 27 | def test_getgrids() -> None: 28 | grids30, grids49, grids70 = 
load_mock_pickle("nonlinear_grids") 29 | results30 = getgrids(100, 30) 30 | results49 = getgrids(100, 49) 31 | results70 = getgrids(120, 70) 32 | 33 | np.testing.assert_equal(results30, grids30) 34 | np.testing.assert_equal(results49, grids49) 35 | np.testing.assert_equal(results70, grids70) 36 | 37 | 38 | def test_e2e_perspective_normalization_nonlinear(algorithm: NonlinearNormalization) -> None: 39 | ir_image = load_mock_pickle("ir_image") 40 | noise_mask = load_mock_pickle("noise_mask") 41 | eye_orientation = load_mock_pickle("eye_orientation") 42 | extrapolated_polygons = load_mock_pickle("extrapolated_polygons") 43 | 44 | e2e_expected_result = load_mock_pickle("e2e_expected_result") 45 | 46 | result = algorithm(ir_image, noise_mask, extrapolated_polygons, eye_orientation) 47 | 48 | np.testing.assert_equal(result.normalized_image, e2e_expected_result.normalized_image) 49 | np.testing.assert_equal(result.normalized_mask, e2e_expected_result.normalized_mask) 50 | -------------------------------------------------------------------------------- /tests/unit_tests/nodes/iris_response/image_filters/test_image_filter_interface.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | from iris.nodes.iris_response.image_filters.image_filter_interface import ImageFilter, ImageFilterError 5 | 6 | 7 | # Mock ImageFilters implementation for testing mechanism purposes 8 | class AverageFilter(ImageFilter): 9 | class AverageFilterParameters(ImageFilter.Parameters): 10 | weight: float 11 | 12 | __parameters_type__ = AverageFilterParameters 13 | 14 | def __init__(self, weight: float) -> None: 15 | super().__init__(weight=weight) 16 | 17 | def compute_kernel_values(self) -> np.ndarray: 18 | kernel_value = np.ones(shape=(3, 3)) * self.params.weight 19 | return kernel_value / np.linalg.norm(kernel_value, ord="fro") 20 | 21 | 22 | def test_parameters_assignment() -> None: 23 | expected_param_class_name = 
"AverageFilterParameters" 24 | expected_num_params = 1 25 | expected_param_name = "weight" 26 | expected_param_type = float 27 | expected_param_value = 3.0 28 | 29 | mock_filter = AverageFilter(weight=3.0) 30 | filter_params = mock_filter.params 31 | 32 | assert filter_params.__class__.__name__ == expected_param_class_name 33 | assert len(filter_params.__dict__) == expected_num_params 34 | assert list(filter_params.__dict__.keys())[0] == expected_param_name 35 | assert type(filter_params.weight) == expected_param_type 36 | assert filter_params.weight == expected_param_value 37 | 38 | 39 | def test_setting_kernel_values_raise_an_error() -> None: 40 | mock_filter = AverageFilter(weight=3.0) 41 | expected_err_msg = "ImageFilter kernel_values are immutable." 42 | 43 | with pytest.raises(ImageFilterError) as e: 44 | mock_filter.kernel_values = np.ones(shape=(3, 3)) 45 | 46 | assert str(e.value) == expected_err_msg 47 | -------------------------------------------------------------------------------- /src/iris/utils/base64_encoding.py: -------------------------------------------------------------------------------- 1 | import base64 2 | from typing import Tuple 3 | 4 | import numpy as np 5 | 6 | 7 | def base64_encode_array(array2encode: np.ndarray) -> bytes: 8 | """Convert a numpy array to a packed base64 string. 9 | 10 | Args: 11 | array2encode (np.ndarray): The array to convert. 12 | 13 | Returns: 14 | bytes: The packed base64 string. 15 | """ 16 | co_pack = np.packbits(array2encode) 17 | 18 | return base64.b64encode(co_pack.tobytes()) 19 | 20 | 21 | def base64_decode_array(bytes_array: str, array_shape: Tuple[int, int, int, int] = (16, 256, 2, 2)) -> np.ndarray: 22 | """Convert a packed base64 string to a numpy array. 23 | 24 | Args: 25 | bytes_array (str): The packed base64 string. 26 | array_shape (Tuple[int, int, int, int], optional): The shape of the array. Defaults to (16, 256, 2, 2). 27 | 28 | Returns: 29 | np.ndarray: The array. 
30 | """ 31 | decoded_bytes = base64.b64decode(bytes_array) 32 | 33 | deserialized_bytes = np.frombuffer(decoded_bytes, dtype=np.uint8) 34 | unpacked_bits = np.unpackbits(deserialized_bytes) 35 | 36 | return unpacked_bits.reshape(*array_shape).astype(bool) 37 | 38 | 39 | def base64_encode_str(input_str: str) -> str: 40 | """Convert a string to base64 string. Both input and output are string, but base64 encoded vs non-encoded. 41 | 42 | Args: 43 | input_str (str): The string to encode. 44 | 45 | Returns: 46 | str: the encoded base64 string. 47 | """ 48 | return base64.b64encode(input_str.encode()).decode() 49 | 50 | 51 | def base64_decode_str(base64_str: str) -> str: 52 | """Convert base64-encoded string to decoded string. Both input and output are string, but base64 encoded vs non-encoded. 53 | 54 | Args: 55 | base64_str (str): The base64-encoded string 56 | 57 | Returns: 58 | str: the decoded string 59 | """ 60 | return base64.b64decode(base64_str).decode() 61 | -------------------------------------------------------------------------------- /tests/e2e_tests/pipelines/test_e2e_iris_pipeline.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pickle 3 | from typing import Any, Dict 4 | 5 | import cv2 6 | import numpy as np 7 | import pytest 8 | 9 | from iris.pipelines.iris_pipeline import IRISPipeline 10 | from tests.e2e_tests.utils import compare_debug_pipeline_outputs, compare_iris_pipeline_outputs 11 | 12 | 13 | @pytest.fixture 14 | def ir_image() -> np.ndarray: 15 | ir_image_path = os.path.join(os.path.dirname(__file__), "mocks", "inputs", "anonymized.png") 16 | img_data = cv2.imread(ir_image_path, cv2.IMREAD_GRAYSCALE) 17 | return img_data 18 | 19 | 20 | @pytest.fixture 21 | def expected_iris_pipeline_output() -> Dict[str, Any]: 22 | expected_iris_code_path = os.path.join( 23 | os.path.dirname(__file__), "mocks", "outputs", "expected_iris_orb_pipeline_output.pickle" 24 | ) 25 | return 
pickle.load(open(expected_iris_code_path, "rb")) 26 | 27 | 28 | @pytest.fixture 29 | def expected_debug_pipeline_output() -> Dict[str, Any]: 30 | expected_iris_code_path = os.path.join( 31 | os.path.dirname(__file__), "mocks", "outputs", "expected_iris_debug_pipeline_output.pickle" 32 | ) 33 | return pickle.load(open(expected_iris_code_path, "rb")) 34 | 35 | 36 | def test_e2e_iris_pipeline(ir_image: np.ndarray, expected_iris_pipeline_output: Dict[str, Any]) -> None: 37 | """End-to-end test of the IRISPipeline in the Orb setup""" 38 | iris_pipeline = IRISPipeline(env=IRISPipeline.ORB_ENVIRONMENT) 39 | computed_pipeline_output = iris_pipeline(img_data=ir_image, eye_side="right") 40 | 41 | compare_iris_pipeline_outputs(computed_pipeline_output, expected_iris_pipeline_output) 42 | 43 | 44 | def test_e2e_debug_pipeline(ir_image: np.ndarray, expected_debug_pipeline_output: Dict[str, Any]) -> None: 45 | """End-to-end test of the IRISPipeline in the debug setup""" 46 | iris_pipeline = IRISPipeline(env=IRISPipeline.DEBUGGING_ENVIRONMENT) 47 | 48 | computed_pipeline_output = iris_pipeline(img_data=ir_image, eye_side="right") 49 | 50 | compare_debug_pipeline_outputs(computed_pipeline_output, expected_debug_pipeline_output) 51 | -------------------------------------------------------------------------------- /tests/unit_tests/nodes/normalization/test_linear_normalization.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | from pydantic import ValidationError 4 | 5 | from iris.io.errors import NormalizationError 6 | from iris.nodes.normalization.linear_normalization import LinearNormalization 7 | from tests.unit_tests.utils import generate_arc 8 | 9 | 10 | @pytest.mark.parametrize( 11 | "wrong_param", 12 | [ 13 | ({"res_in_r": -1}), 14 | ({"res_in_r": 0}), 15 | ], 16 | ) 17 | def test_constructor_raises_exception(wrong_param: dict) -> None: 18 | with pytest.raises((NormalizationError, 
ValidationError)): 19 | _ = LinearNormalization(**wrong_param) 20 | 21 | 22 | @pytest.mark.parametrize( 23 | "pupil_points, iris_points, expected_correspondences", 24 | [ 25 | ( 26 | generate_arc(3.0, 5.0, 5.0, 0.0, 2 * np.pi, 3), 27 | generate_arc(10.0, 4.8, 5.1, 0.0, 2 * np.pi, 3), 28 | np.array( 29 | [ 30 | [[8, 5], [4, 8], [3, 2]], 31 | [[15, 5], [0, 14], [0, -4]], 32 | ] 33 | ), 34 | ), 35 | ( 36 | generate_arc(50.0, 0.0, 0.0, 0.0, 2 * np.pi, 8), 37 | generate_arc(100.0, 0.0, 0.0, 0.0, 2 * np.pi, 8), 38 | np.array( 39 | [ 40 | [[50, 0], [35, 35], [0, 50], [-35, 35], [-50, 0], [-35, -35], [0, -50], [35, -35]], 41 | [[100, 0], [71, 71], [0, 100], [-71, 71], [-100, 0], [-71, -71], [0, -100], [71, -71]], 42 | ] 43 | ), 44 | ), 45 | ], 46 | ids=[ 47 | "test1", 48 | "test2", 49 | ], 50 | ) 51 | def test_generate_correspondences( 52 | pupil_points: np.ndarray, iris_points: np.ndarray, expected_correspondences: np.ndarray 53 | ) -> None: 54 | algorithm = LinearNormalization( 55 | res_in_r=2, 56 | ) 57 | result = algorithm._generate_correspondences( 58 | pupil_points=pupil_points, 59 | iris_points=iris_points, 60 | ) 61 | 62 | np.testing.assert_allclose(result, expected_correspondences, rtol=1e-05) 63 | -------------------------------------------------------------------------------- /docs/source/_code_subpages/iris.nodes.eye_properties_estimation.rst: -------------------------------------------------------------------------------- 1 | iris.nodes.eye\_properties\_estimation package 2 | ============================================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | iris.nodes.eye\_properties\_estimation.bisectors\_method module 8 | --------------------------------------------------------------- 9 | 10 | .. 
automodule:: iris.nodes.eye_properties_estimation.bisectors_method 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | iris.nodes.eye\_properties\_estimation.eccentricity\_offgaze\_estimation module 16 | ------------------------------------------------------------------------------- 17 | 18 | .. automodule:: iris.nodes.eye_properties_estimation.eccentricity_offgaze_estimation 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | iris.nodes.eye\_properties\_estimation.iris\_bbox\_calculator module 24 | -------------------------------------------------------------------- 25 | 26 | .. automodule:: iris.nodes.eye_properties_estimation.iris_bbox_calculator 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | iris.nodes.eye\_properties\_estimation.moment\_of\_area module 32 | -------------------------------------------------------------- 33 | 34 | .. automodule:: iris.nodes.eye_properties_estimation.moment_of_area 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | iris.nodes.eye\_properties\_estimation.occlusion\_calculator module 40 | ------------------------------------------------------------------- 41 | 42 | .. automodule:: iris.nodes.eye_properties_estimation.occlusion_calculator 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | iris.nodes.eye\_properties\_estimation.pupil\_iris\_property\_calculator module 48 | ------------------------------------------------------------------------------- 49 | 50 | .. automodule:: iris.nodes.eye_properties_estimation.pupil_iris_property_calculator 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | 55 | Module contents 56 | --------------- 57 | 58 | .. 
automodule:: iris.nodes.eye_properties_estimation 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/matcher/test_e2e_hamming_distance_matcher.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pickle 3 | from copy import deepcopy 4 | from typing import Any, List, Optional 5 | 6 | import numpy as np 7 | import pytest 8 | 9 | from iris.nodes.matcher.hamming_distance_matcher import HammingDistanceMatcher 10 | 11 | 12 | def load_mock_pickle(name: str) -> Any: 13 | testdir = os.path.join(os.path.dirname(__file__), "mocks", "hamming_distance_matcher") 14 | 15 | mock_path = os.path.join(testdir, f"{name}.pickle") 16 | 17 | return pickle.load(open(mock_path, "rb")) 18 | 19 | 20 | @pytest.mark.parametrize( 21 | "rotation_shift,normalise,nm_dist,weights,expected_result", 22 | [ 23 | pytest.param(10, False, 0.45, None, 0.0), 24 | pytest.param(15, False, 0.45, None, 0.0), 25 | pytest.param(10, True, 0.45, None, 0.0123), 26 | pytest.param(15, True, 0.45, None, 0.0123), 27 | pytest.param(10, False, 0.45, [np.ones([16, 256, 2]), np.ones([16, 256, 2])], 0.0), 28 | pytest.param(15, False, 0.45, [np.ones([16, 256, 2]), np.ones([16, 256, 2])], 0.0), 29 | pytest.param(10, True, 0.45, [np.ones([16, 256, 2]), np.ones([16, 256, 2])], 0.0492), 30 | pytest.param(15, True, 0.45, [np.ones([16, 256, 2]), np.ones([16, 256, 2])], 0.0492), 31 | ], 32 | ids=[ 33 | "regular1", 34 | "regular2", 35 | "regular_normalized1", 36 | "regular_normalized2", 37 | "regular_weighted1", 38 | "regular_weighted2", 39 | "regular_normalizedweighted1", 40 | "regular_normalizedweighted2", 41 | ], 42 | ) 43 | def test_e2e_iris_matcher( 44 | rotation_shift: int, 45 | normalise: bool, 46 | nm_dist: float, 47 | weights: Optional[List[np.ndarray]], 48 | expected_result: float, 49 | ) -> None: 50 | first_template = 
load_mock_pickle("iris_template") 51 | second_template = deepcopy(first_template) 52 | 53 | matcher = HammingDistanceMatcher( 54 | rotation_shift=rotation_shift, 55 | normalise=normalise, 56 | nm_dist=nm_dist, 57 | weights=weights, 58 | ) 59 | result = matcher.run(first_template, second_template) 60 | 61 | assert round(result, 4) == expected_result 62 | -------------------------------------------------------------------------------- /src/iris/nodes/iris_response/probe_schemas/probe_schema_interface.py: -------------------------------------------------------------------------------- 1 | import abc 2 | from typing import Any, Tuple 3 | 4 | import numpy as np 5 | 6 | from iris.io.class_configs import Algorithm 7 | from iris.io.errors import ProbeSchemaError 8 | 9 | 10 | class ProbeSchema(Algorithm): 11 | """Probe schema abstract class.""" 12 | 13 | class ProbeSchemaParameters(Algorithm.Parameters): 14 | """Default ProbeSchema parameters.""" 15 | 16 | __parameters_type__ = ProbeSchemaParameters 17 | 18 | def __init__(self, **kwargs: Any) -> None: 19 | """Init function.""" 20 | super().__init__(**kwargs) 21 | self.__rhos, self.__phis = self.generate_schema() 22 | 23 | @property 24 | def rhos(self) -> np.ndarray: 25 | """Get rhos' position values. 26 | 27 | Returns: 28 | np.ndarray: rhos' position values. 29 | """ 30 | return self.__rhos 31 | 32 | @rhos.setter 33 | def rhos(self, value: Any) -> None: 34 | """Prevent overwriting generated rhos' positions values. 35 | 36 | Args: 37 | value (Any): New rhos' position values. 38 | 39 | Raises: 40 | ProbeSchemaError: Raised always since overwriting is forbidden. 41 | """ 42 | raise ProbeSchemaError("ProbeSchema rhos values are immutable.") 43 | 44 | @property 45 | def phis(self) -> np.ndarray: 46 | """Get phis' position values. 47 | 48 | Returns: 49 | np.ndarray: phis' position values. 
50 | """ 51 | return self.__phis 52 | 53 | @phis.setter 54 | def phis(self, value: Any) -> None: 55 | """Prevent overwriting generated phis' positions values. 56 | 57 | Args: 58 | value (Any): New phis' position values. 59 | 60 | Raises: 61 | ProbeSchemaError: Raised always since overwriting is forbidden. 62 | """ 63 | raise ProbeSchemaError("ProbeSchema phis values are immutable.") 64 | 65 | @abc.abstractmethod 66 | def generate_schema(self) -> Tuple[np.ndarray, np.ndarray]: 67 | """Generate rhos' and phis' positions values. 68 | 69 | Returns: 70 | Tuple[np.ndarray, np.ndarray]: Tuple with generated schema (rhos, phis). 71 | """ 72 | pass 73 | -------------------------------------------------------------------------------- /src/iris/nodes/encoder/iris_encoder.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | import numpy as np 4 | from pydantic import Field 5 | 6 | from iris.callbacks.callback_interface import Callback 7 | from iris.io.class_configs import Algorithm 8 | from iris.io.dataclasses import IrisFilterResponse, IrisTemplate 9 | 10 | 11 | class IrisEncoder(Algorithm): 12 | """Binarize IrisFilterResponse to generate iris code using Daugman's method. 13 | 14 | Algorithm steps: 15 | 1) Binarize iris response by comparing real and imaginary parts to zero. 16 | 2) Binarize mask response by comparing real and imaginary parts to a given parameter: mask_threshold. 17 | 18 | Reference: 19 | [1] https://www.robots.ox.ac.uk/~az/lectures/est/iris.pdf. 20 | """ 21 | 22 | class Parameters(Algorithm.Parameters): 23 | """IrisEncoder parameters.""" 24 | 25 | mask_threshold: float = Field(..., ge=0.0, le=1.0) 26 | 27 | __parameters_type__ = Parameters 28 | 29 | def __init__(self, mask_threshold: float = 0.9, callbacks: List[Callback] = []) -> None: 30 | """Assign parameters. 31 | 32 | Args: 33 | mask_threshold (float): threshold to binarize mask_responses, in the range of [0,1]. Defaults to 0.9. 
34 | callbacks (List[Callback]): callbacks list. Defaults to []. 35 | """ 36 | super().__init__(mask_threshold=mask_threshold, callbacks=callbacks) 37 | 38 | def run(self, response: IrisFilterResponse) -> IrisTemplate: 39 | """Encode iris code and mask code. 40 | 41 | Args: 42 | response (IrisFilterResponse): Filter responses. 43 | 44 | Returns: 45 | IrisTemplate: Final iris template. 46 | """ 47 | iris_codes: List[np.ndarray] = [] 48 | mask_codes: List[np.ndarray] = [] 49 | 50 | for iris_response, mask_response in zip(response.iris_responses, response.mask_responses): 51 | mask_code = mask_response >= self.params.mask_threshold 52 | 53 | iris_code = np.stack([iris_response.real > 0, iris_response.imag > 0], axis=-1) 54 | mask_code = np.stack([mask_code, mask_code], axis=-1) 55 | 56 | iris_codes.append(iris_code) 57 | mask_codes.append(mask_code) 58 | 59 | return IrisTemplate(iris_codes=iris_codes, mask_codes=mask_codes, iris_code_version=response.iris_code_version) 60 | -------------------------------------------------------------------------------- /tests/e2e_tests/nodes/eye_properties_estimation/test_e2e_occlusion_calculator.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pickle 3 | from typing import Any 4 | 5 | import numpy as np 6 | import pytest 7 | 8 | from iris.io.dataclasses import EyeOcclusion 9 | from iris.nodes.eye_properties_estimation.occlusion_calculator import OcclusionCalculator 10 | 11 | 12 | def load_mock_pickle(name: str) -> Any: 13 | testdir = os.path.join(os.path.dirname(__file__), "mocks", "occlusion_calculator") 14 | 15 | mock_path = os.path.join(testdir, f"{name}.pickle") 16 | 17 | return pickle.load(open(mock_path, "rb")) 18 | 19 | 20 | @pytest.fixture 21 | def algorithm() -> OcclusionCalculator: 22 | return OcclusionCalculator(quantile_angle=30.0) 23 | 24 | 25 | @pytest.mark.parametrize( 26 | "extrapolated_polygons_name, noise_mask_name, eye_orientation_name, eye_center_name, 
expected_result", 27 | [ 28 | ( 29 | "extrapolated_polygons_1", 30 | "noise_mask_1", 31 | "eye_orientation_1", 32 | "eye_center_1", 33 | EyeOcclusion(visible_fraction=0.9953), 34 | ), 35 | ( 36 | "extrapolated_polygons_2", 37 | "noise_mask_2", 38 | "eye_orientation_2", 39 | "eye_center_2", 40 | EyeOcclusion(visible_fraction=0.9904), 41 | ), 42 | ( 43 | "extrapolated_polygons_cropped", 44 | "noise_mask_cropped", 45 | "eye_orientation_cropped", 46 | "eye_center_cropped", 47 | EyeOcclusion(visible_fraction=0.5652), 48 | ), 49 | ], 50 | ids=["regular 1", "regular 2", "heavily cropped iris"], 51 | ) 52 | def test_e2e_occlusion_calculator( 53 | extrapolated_polygons_name, noise_mask_name, eye_orientation_name, eye_center_name, expected_result 54 | ) -> None: 55 | mock_extrapolated_polygons = load_mock_pickle(extrapolated_polygons_name) 56 | mock_noise_mask = load_mock_pickle(noise_mask_name) 57 | mock_eye_orientation = load_mock_pickle(eye_orientation_name) 58 | mock_eye_center = load_mock_pickle(eye_center_name) 59 | 60 | algorithm = OcclusionCalculator(quantile_angle=30.0) 61 | 62 | result = algorithm(mock_extrapolated_polygons, mock_noise_mask, mock_eye_orientation, mock_eye_center) 63 | 64 | np.testing.assert_almost_equal(result.visible_fraction, expected_result.visible_fraction, decimal=4) 65 | -------------------------------------------------------------------------------- /tests/unit_tests/nodes/geometry_refinement/test_contour_interpolation.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | from pydantic import ValidationError 4 | 5 | from iris.nodes.geometry_refinement.contour_interpolation import ContourInterpolation 6 | 7 | 8 | @pytest.fixture 9 | def algorithm() -> ContourInterpolation: 10 | return ContourInterpolation(max_distance_between_boundary_points=0.01) 11 | 12 | 13 | def test_constructor() -> None: 14 | mock_max_distance_between_boundary_points = 0.01 15 | 16 | _ = 
ContourInterpolation(mock_max_distance_between_boundary_points) 17 | 18 | 19 | @pytest.mark.parametrize( 20 | "max_distance_between_boundary_points", 21 | [(-1.0), (0.0)], 22 | ids=[ 23 | "wrong max_distance_between_boundary_points < 0", 24 | "wrong max_distance_between_boundary_points = 0", 25 | ], 26 | ) 27 | def test_constructor_raises_an_exception(max_distance_between_boundary_points: float) -> None: 28 | with pytest.raises(ValidationError): 29 | _ = ContourInterpolation(max_distance_between_boundary_points) 30 | 31 | 32 | @pytest.mark.parametrize( 33 | "mock_polygon,mock_distance_between_points,expected_result", 34 | [ 35 | ( 36 | np.array([[0.0, 0.0], [50.0, 0.0], [100.0, 0.0]], dtype=np.int32), 37 | 25.0, 38 | np.array([[0.0, 0.0], [25.0, 0.0], [50.0, 0.0], [75.0, 0.0], [100.0, 0.0]]), 39 | ), 40 | ( 41 | np.array([[0.0, 0.0], [0.0, 100.0], [100.0, 100.0], [100.0, 0.0]], dtype=np.int32), 42 | 50.0, 43 | np.array( 44 | [[0.0, 0.0], [0.0, 50.0], [0.0, 100.0], [50.0, 100.0], [100.0, 100.0], [100.0, 50.0], [100.0, 0.0]] 45 | ), 46 | ), 47 | ( 48 | np.array([[0.0, 0.0], [0.0, 10.0], [0.0, 15.0]], dtype=np.int32), 49 | 7.0, 50 | np.array([[0.0, 0.0], [0.0, 5.0], [0.0, 10.0], [0.0, 15.0]]), 51 | ), 52 | ], 53 | ids=["along line", "complex polygon", "not uniform distance"], 54 | ) 55 | def test_interpolate_contour_points( 56 | algorithm: ContourInterpolation, 57 | mock_polygon: np.ndarray, 58 | mock_distance_between_points: float, 59 | expected_result: np.ndarray, 60 | ) -> None: 61 | result = algorithm._interpolate_polygon_points( 62 | polygon=mock_polygon, max_distance_between_points_px=mock_distance_between_points 63 | ) 64 | 65 | for point in result: 66 | assert point in expected_result 67 | 68 | for point in expected_result: 69 | assert point in result 70 | -------------------------------------------------------------------------------- /tests/unit_tests/nodes/geometry_refinement/test_contour_point_filter.py: 
-------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | from pydantic import ValidationError 4 | 5 | from iris.io.errors import GeometryRefinementError 6 | from iris.nodes.geometry_refinement.contour_points_filter import ContourPointNoiseEyeballDistanceFilter 7 | 8 | 9 | @pytest.fixture 10 | def algorithm() -> ContourPointNoiseEyeballDistanceFilter: 11 | return ContourPointNoiseEyeballDistanceFilter(min_distance_to_noise_and_eyeball=0.025) 12 | 13 | 14 | def test_constructor() -> None: 15 | mock_min_distance_to_noise_and_eyeball = 0.01 16 | 17 | _ = ContourPointNoiseEyeballDistanceFilter(mock_min_distance_to_noise_and_eyeball) 18 | 19 | 20 | @pytest.mark.parametrize( 21 | "min_distance_to_noise_and_eyeball", 22 | [(-1.0), (0.0)], 23 | ids=[ 24 | "wrong min_distance_to_noise_and_eyeball < 0", 25 | "wrong min_distance_to_noise_and_eyeball = 0", 26 | ], 27 | ) 28 | def test_constructor_raises_an_exception(min_distance_to_noise_and_eyeball: float) -> None: 29 | with pytest.raises(ValidationError): 30 | _ = ContourPointNoiseEyeballDistanceFilter(min_distance_to_noise_and_eyeball) 31 | 32 | 33 | def test_filter_polygon_points(algorithm: ContourPointNoiseEyeballDistanceFilter) -> None: 34 | mock_forbidden_touch_map = np.ones((3, 3)) * np.array([1.0, 0.0, 0.0]) 35 | mock_polygon_points = np.array( 36 | [ 37 | [0, 0], 38 | [0, 1], 39 | [0, 2], 40 | [1, 0], 41 | [1, 1], 42 | [1, 2], 43 | [2, 0], 44 | [2, 1], 45 | [2, 2], 46 | ] 47 | ) 48 | 49 | expected_result = np.array( 50 | [ 51 | [1, 0], 52 | [1, 1], 53 | [1, 2], 54 | [2, 0], 55 | [2, 1], 56 | [2, 2], 57 | ] 58 | ) 59 | 60 | result = algorithm._filter_polygon_points(mock_forbidden_touch_map, mock_polygon_points) 61 | 62 | np.testing.assert_equal(result, expected_result) 63 | 64 | 65 | def test_filter_polygon_points_raises_an_exception(algorithm: ContourPointNoiseEyeballDistanceFilter) -> None: 66 | mock_forbidden_touch_map = np.ones((3, 3)) * 
np.array([1.0, 0.0, 0.0]) 67 | mock_polygon_points = np.array( 68 | [ 69 | [0, 0], 70 | [0, 1], 71 | [0, 2], 72 | ] 73 | ) 74 | 75 | with pytest.raises(GeometryRefinementError): 76 | _ = algorithm._filter_polygon_points(mock_forbidden_touch_map, mock_polygon_points) 77 | -------------------------------------------------------------------------------- /tests/unit_tests/nodes/iris_response/probe_schemas/test_probe_schema_interface.py: -------------------------------------------------------------------------------- 1 | from typing import Tuple 2 | 3 | import numpy as np 4 | import pytest 5 | from pydantic import Field 6 | 7 | from iris.io.errors import ProbeSchemaError 8 | from iris.nodes.iris_response.probe_schemas.probe_schema_interface import ProbeSchema 9 | 10 | 11 | # Mock ProbeSchema implementation for testing mechanism purposes 12 | class MockProbeSchema(ProbeSchema): 13 | class MockProbeSchemaParameters(ProbeSchema.ProbeSchemaParameters): 14 | """Default MockProbeSchema parameters.""" 15 | 16 | n_rows: int = Field(..., gt=0) 17 | n_cols: int = Field(..., gt=0) 18 | 19 | params: MockProbeSchemaParameters 20 | __parameters_type__ = MockProbeSchemaParameters 21 | 22 | def __init__(self, n_rows: int, n_cols: int) -> None: 23 | super().__init__(n_rows=n_rows, n_cols=n_cols) 24 | 25 | def generate_schema(self) -> Tuple[np.ndarray, np.ndarray]: 26 | return np.linspace(0, 1, self.params.n_rows), np.linspace(0, 1, self.params.n_cols) 27 | 28 | 29 | def test_parameters_assignment() -> None: 30 | expected_param_class_name = "MockProbeSchemaParameters" 31 | expected_num_params = 2 32 | expected_params_names = sorted(["n_rows", "n_cols"]) 33 | expected_params_type = int 34 | expected_params_value = 10 35 | 36 | mock_filter = MockProbeSchema(n_rows=10, n_cols=10) 37 | filter_params = mock_filter.params 38 | 39 | assert filter_params.__class__.__name__ == expected_param_class_name 40 | assert len(filter_params.__dict__) == expected_num_params 41 | assert 
sorted(list(filter_params.__dict__.keys())) == expected_params_names 42 | assert type(filter_params.n_rows) == expected_params_type 43 | assert type(filter_params.n_cols) == expected_params_type 44 | assert filter_params.n_rows == filter_params.n_cols == expected_params_value 45 | 46 | 47 | def test_setting_rhos_values_raise_an_error() -> None: 48 | mock_schema = MockProbeSchema(n_rows=10, n_cols=10) 49 | expected_err_msg = "ProbeSchema rhos values are immutable." 50 | 51 | with pytest.raises(ProbeSchemaError) as e: 52 | mock_schema.rhos = np.arange(10) 53 | 54 | assert str(e.value) == expected_err_msg 55 | 56 | 57 | def test_setting_phis_values_raise_an_error() -> None: 58 | mock_schema = MockProbeSchema(n_rows=10, n_cols=10) 59 | expected_err_msg = "ProbeSchema phis values are immutable." 60 | 61 | with pytest.raises(ProbeSchemaError) as e: 62 | mock_schema.phis = np.arange(10) 63 | 64 | assert str(e.value) == expected_err_msg 65 | -------------------------------------------------------------------------------- /src/iris/io/errors.py: -------------------------------------------------------------------------------- 1 | class EncoderError(Exception): 2 | """Encoder module Error class.""" 3 | 4 | pass 5 | 6 | 7 | class EyeCentersEstimationError(Exception): 8 | """EyeOrientationEstimation module Error class.""" 9 | 10 | pass 11 | 12 | 13 | class EyeOrientationEstimationError(Exception): 14 | """EyeOrientationEstimation module Error class.""" 15 | 16 | pass 17 | 18 | 19 | class OffgazeEstimationError(Exception): 20 | """OffgazeEstimation module Error class.""" 21 | 22 | pass 23 | 24 | 25 | class BoundingBoxEstimationError(Exception): 26 | """BoundingBoxEstimationError module Error class.""" 27 | 28 | pass 29 | 30 | 31 | class LandmarkEstimationError(Exception): 32 | """LandmarkEstimationError module Error class.""" 33 | 34 | pass 35 | 36 | 37 | class OcclusionError(Exception): 38 | """EyeOrientationEstimation module Error class.""" 39 | 40 | pass 41 | 42 | 43 | class 
class GeometryEstimationError(Exception):
    """Raised by the GeometryEstimation module."""


class GeometryRefinementError(Exception):
    """Raised by the GeometryRefinement module."""


class ImageFilterError(Exception):
    """Raised by the ImageFilter base class and its subclasses."""


class ProbeSchemaError(Exception):
    """Raised by the ProbeSchema base class and its subclasses."""


class NormalizationError(Exception):
    """Raised by the Normalization module."""


class EyeCentersInsideImageValidatorError(Exception):
    """Raised by the EyeCentersInsideImageValidator."""


class ExtrapolatedPolygonsInsideImageValidatorError(Exception):
    """Raised by the ExtrapolatedPolygonsInsideImageValidator."""


class IsPupilInsideIrisValidatorError(Exception):
    """Raised by the IsPupilInsideIrisValidator."""


class VectorizationError(Exception):
    """Raised by the Vectorization module."""


class MatcherError(Exception):
    """Raised by the Matcher module."""


class IRISPipelineError(Exception):
    """Raised by the IRIS Pipeline module."""
downloads/ 21 | eggs/ 22 | .eggs/ 23 | lib/ 24 | lib64/ 25 | parts/ 26 | sdist/ 27 | var/ 28 | wheels/ 29 | pip-wheel-metadata/ 30 | share/python-wheels/ 31 | *.egg-info/ 32 | .installed.cfg 33 | *.egg 34 | MANIFEST 35 | 36 | # PyInstaller 37 | # Usually these files are written by a python script from a template 38 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 39 | *.manifest 40 | *.spec 41 | 42 | # Installer logs 43 | pip-log.txt 44 | pip-delete-this-directory.txt 45 | 46 | # Unit test / coverage reports 47 | htmlcov/ 48 | .tox/ 49 | .nox/ 50 | .coverage 51 | .coverage.* 52 | .cache 53 | nosetests.xml 54 | coverage.xml 55 | *.cover 56 | *.py,cover 57 | .hypothesis/ 58 | .pytest_cache/ 59 | 60 | # Translations 61 | *.mo 62 | *.pot 63 | 64 | # Django stuff: 65 | *.log 66 | local_settings.py 67 | db.sqlite3 68 | db.sqlite3-journal 69 | 70 | # Flask stuff: 71 | instance/ 72 | .webassets-cache 73 | 74 | # Scrapy stuff: 75 | .scrapy 76 | 77 | # Sphinx documentation 78 | docs/_build/ 79 | 80 | # PyBuilder 81 | target/ 82 | 83 | # Jupyter Notebook 84 | .ipynb_checkpoints 85 | 86 | # IPython 87 | profile_default/ 88 | ipython_config.py 89 | 90 | # pyenv 91 | .python-version 92 | 93 | # pipenv 94 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 95 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 96 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 97 | # install all needed dependencies. 98 | #Pipfile.lock 99 | 100 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 101 | __pypackages__/ 102 | 103 | # Celery stuff 104 | celerybeat-schedule 105 | celerybeat.pid 106 | 107 | # SageMath parsed files 108 | *.sage.py 109 | 110 | # Environments 111 | .env 112 | .venv 113 | env/ 114 | venv/ 115 | ENV/ 116 | env.bak/ 117 | venv.bak/ 118 | 119 | # Spyder project settings 120 | .spyderproject 121 | .spyproject 122 | 123 | # Rope project settings 124 | .ropeproject 125 | 126 | # mkdocs documentation 127 | /site 128 | 129 | # mypy 130 | .mypy_cache/ 131 | .dmypy.json 132 | dmypy.json 133 | 134 | # Pyre type checker 135 | .pyre/ 136 | 137 | -------------------------------------------------------------------------------- /src/iris/nodes/matcher/hamming_distance_matcher_interface.py: -------------------------------------------------------------------------------- 1 | import abc 2 | from typing import Any, List 3 | 4 | from iris.io.class_configs import ImmutableModel 5 | from iris.io.dataclasses import IrisTemplate 6 | from pydantic import conint 7 | 8 | 9 | class Matcher(abc.ABC): 10 | """Parent Abstract class for 1-to-1 matchers.""" 11 | 12 | class Parameters(ImmutableModel): 13 | """IrisMatcherParameters parameters.""" 14 | 15 | rotation_shift: conint(ge=0, strict=True) 16 | 17 | __parameters_type__ = Parameters 18 | 19 | def __init__(self, **kwargs) -> None: 20 | """Assign parameters. 21 | 22 | Args: 23 | rotation_shift (int = 15): rotation allowed in matching, converted to columns. Defaults to 15. 24 | """ 25 | self.params = self.__parameters_type__(**kwargs) 26 | 27 | @abc.abstractmethod 28 | def run(self, template_probe: IrisTemplate, template_gallery: IrisTemplate) -> float: 29 | """Match iris templates using Hamming distance. 30 | 31 | Args: 32 | template_probe (IrisTemplate): Iris template from probe. 33 | template_gallery (IrisTemplate): Iris template from gallery. 34 | 35 | Returns: 36 | float: matching distance. 
37 | """ 38 | pass 39 | 40 | 41 | class BatchMatcher(abc.ABC): 42 | """Parent Abstract class for 1-to-N matchers.""" 43 | 44 | class Parameters(ImmutableModel): 45 | """IrisMatcherParameters parameters.""" 46 | 47 | rotation_shift: conint(ge=0, strict=True) 48 | 49 | __parameters_type__ = Parameters 50 | 51 | def __init__(self, **kwargs: Any) -> None: 52 | """Assign parameters. 53 | 54 | Args: 55 | rotation_shift (int = 15): rotation allowed in matching, converted to columns. Defaults to 15. 56 | """ 57 | self.params = self.__parameters_type__(**kwargs) 58 | 59 | @abc.abstractmethod 60 | def intra_gallery(self, template_gallery: List[IrisTemplate]) -> List[List[float]]: 61 | """Match iris templates using Hamming distance. 62 | 63 | Args: 64 | template_gallery (List[IrisTemplate]): Iris template gallery. 65 | 66 | Returns: 67 | List[List[float]]: matching distances. 68 | """ 69 | pass 70 | 71 | @abc.abstractmethod 72 | def gallery_to_gallery( 73 | self, template_gallery_1: List[IrisTemplate], template_gallery_2: List[IrisTemplate] 74 | ) -> List[List[float]]: 75 | """Match iris templates using Hamming distance. 76 | 77 | Args: 78 | template_gallery_1 (List[IrisTemplate]): Iris template gallery. 79 | template_gallery_2 (List[IrisTemplate]): Iris template gallery. 80 | 81 | Returns: 82 | List[List[float]]: matching distances. 83 | """ 84 | pass 85 | -------------------------------------------------------------------------------- /docs/source/examples/matching_entities.rst: -------------------------------------------------------------------------------- 1 | *Matching entities* tutorial 2 | ================================ 3 | 4 | This subpage will walk you through the basics of how to use matchers available in the ``iris`` package. From it you will learn how to: 5 | - Use the ``HammingDistanceMatcher`` matcher to compute distance between two eyes. 6 | 7 | 1. Use the ``HammingDistanceMatcher`` matcher to compute distance between two eyes. 
nm_dist (Optional[confloat(ge=0, le = 1, strict=True)]): nonmatch distance used for normalized HD. Optional parameter for normalized HD. Defaults to None. 52 | weights (Optional[List[np.ndarray]]): list of weights table. Optional parameter for weighted HD. Defaults to None.
class SimpleHammingDistanceMatcher(Matcher):
    """Plain Hamming-distance matcher, without the bells and whistles.

    Algorithm steps:
        1) Count the non-matching iris bits (IB_Counts) and the common mask bits (MB_Counts) within the jointly unmasked region.
        2) Derive the Hamming distance (HD) from IB_Counts and MB_Counts.
        3) If parameter `normalise` is True, normalize the HD using parameters `norm_mean` and `norm_nb_bits`.
        4) If parameter `rotation_shift` is > 0, repeat the above for each additional rotation of the iris code.
        5) Return the minimum distance over all evaluated rotations.
    """

    class Parameters(Matcher.Parameters):
        """SimpleHammingDistanceMatcher parameters."""

        normalise: bool
        norm_mean: confloat(ge=0, le=1)
        norm_nb_bits: conint(gt=0)

    __parameters_type__ = Parameters

    def __init__(
        self,
        rotation_shift: int = 15,
        normalise: bool = False,
        norm_mean: float = 0.45,
        norm_nb_bits: float = 12288,
    ) -> None:
        """Assign parameters.

        Args:
            rotation_shift (int = 15): rotation allowed in matching, converted to columns. Defaults to 15.
            normalise (bool = False): Flag to normalize HD. Defaults to False.
            norm_mean (float = 0.45): Peak of the non-match distribution. Defaults to 0.45.
            norm_nb_bits (float = 12288): Average number of bits visible in 2 randomly sampled iris codes. Defaults to 12288 (3/4 * total_bits_number for the iris code format v0.1).
        """
        super().__init__(
            rotation_shift=rotation_shift,
            normalise=normalise,
            norm_mean=norm_mean,
            norm_nb_bits=norm_nb_bits,
        )

    def run(self, template_probe: IrisTemplate, template_gallery: IrisTemplate) -> float:
        """Match iris templates using Hamming distance.

        Args:
            template_probe (IrisTemplate): Iris template from probe.
            template_gallery (IrisTemplate): Iris template from gallery.

        Returns:
            float: matching distance.
        """
        params = self.params
        score, _rotation = simple_hamming_distance(
            template_probe=template_probe,
            template_gallery=template_gallery,
            rotation_shift=params.rotation_shift,
            normalise=params.normalise,
            norm_mean=params.norm_mean,
            norm_nb_bits=params.norm_nb_bits,
        )
        return score
It comes useful for determining the 14 | partial eye occlusion (e.g. occlusion at the horizontal middle third of the iris). 15 | 16 | References: 17 | [1] https://t1.daumcdn.net/cfile/tistory/15425F4150F4EBFC19 18 | [2] https://en.wikipedia.org/wiki/Image_moment 19 | """ 20 | 21 | class Parameters(Algorithm.Parameters): 22 | """MomentOfArea parameters. 23 | 24 | eccentricity_threshold: float in [0, 1]. 25 | The threshold below which a shape is considered not linear enough to reliably estimate its orientation. 26 | """ 27 | 28 | eccentricity_threshold: float = Field(ge=0.0, le=1.0) 29 | 30 | __parameters_type__ = Parameters 31 | 32 | def __init__(self, eccentricity_threshold: float = 0.1) -> None: 33 | """Assign parameters. 34 | 35 | Args: 36 | eccentricity_threshold: float in [0, 1]. The threshold below which a shape is considered not linear enough to reliably estimate its orientation. Defaults to 0.1. 37 | """ 38 | super().__init__(eccentricity_threshold=eccentricity_threshold) 39 | 40 | def run(self, geometries: GeometryPolygons) -> EyeOrientation: 41 | """Compute the eye orientation using the second order moments or the eyeball. 42 | 43 | WARNING: cv2.moments MUST only receive np.float32 arrays. Otherwise, the array will be interpreted as a sparse 44 | matrix instead of a list of points. See https://github.com/opencv/opencv/issues/6643#issuecomment-224204774. 45 | 46 | Args: 47 | geometries (GeometryPolygons): segmentation map used for eye orientation estimation. 48 | 49 | Raises: 50 | EyeOrientationEstimationError if the eyeball's eccentricity is below `eccentricity_threshold` i.e. if the eyeball shape is not circular enough to reliably estimate the orientation. 51 | 52 | Returns: 53 | EyeOrientation: eye orientation object. 
54 | """ 55 | moments = cv2.moments(geometries.eyeball_array) 56 | 57 | eccentricity = math_utils.eccentricity(moments) 58 | if eccentricity < self.params.eccentricity_threshold: 59 | raise EyeOrientationEstimationError( 60 | "The eyeball is too circular to reliably determine its orientation. " 61 | f"Computed eccentricity: {eccentricity}. Threshold: {self.params.eccentricity_threshold}" 62 | ) 63 | 64 | orientation = math_utils.orientation(moments) 65 | return EyeOrientation(angle=orientation) 66 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. image:: /images/logos/wld.png 2 | :alt: Worldcoin AI logo 3 | :align: center 4 | :scale: 65% 5 | 6 | `Iris Recognition Inference System` 7 | ------------------------------------------- 8 | 9 | Welcome to Worldcoin's Iris Recognition Inference System (IRIS) project, an advanced iris recognition pipeline designed for robust and secure biometric verification. This project leverages state-of-the-art computer vision and machine learning techniques to provide accurate and efficient iris recognition system. 10 | 11 | Iris recognition is a powerful biometric technology that identifies individuals based on the unique patterns within the iris of the eye. IRIS package aims to make iris recognition accessible and enable further advancement in the field. 12 | 13 | Project features highlights are: 14 | 15 | - **Large-Scale Verification**: Capable of verifying uniqueness among billions of users. 16 | - **High-Performance Iris Segmentation**: Accurate segmentation of iris regions for precise feature extraction. 17 | - **Scalable Matching Algorithm**: Robust matching algorithm designed for scalability without compromising accuracy. 18 | - **User-Friendly Integration**: Simple integration into applications that demand seamless biometric verification. 
19 | 20 | High-level iris recognition pipeline steps overview: 21 | 22 | #. **Iris Image Input**: Provide an iris image for verification. 23 | #. **Iris Segmentation**: Identify and isolate the iris region within the image. 24 | #. **Feature Extraction**: Extract unique features from the iris to create a template. 25 | #. **Scalable Matching**: Efficiently compare extracted features for large-scale uniqueness verification. 26 | #. **Result**: Receive the verification result with a confidence score, enabling secure and scalable authentication. 27 | 28 | The Worldcoin system utilizes iris recognition algorithm for verifying uniqueness in a challenging environment, involving billions of individuals. This entails a detailed exploration of the Worldcoin biometric pipeline, a system that confirms uniqueness through the encoding of iris texture into an iris code. 29 | 30 | More detailed pipeline overview can be found in our `blog post `_ dedicated to IRIS project. 31 | 32 | **Disclaimer** 33 | 34 | *The Iris Recognition Inference System (IRIS) software repository is owned and maintained by the Worldcoin Foundation, the steward of the Worldcoin protocol; the repository is not affiliated with any other project or service provider* 35 | 36 | .. toctree:: 37 | :hidden: 38 | :caption: Quickstart 39 | 40 | quickstart/installation 41 | quickstart/setup_for_development 42 | quickstart/running_inference 43 | 44 | .. toctree:: 45 | :hidden: 46 | :caption: Examples 47 | 48 | examples/getting_started 49 | examples/custom_pipeline 50 | examples/matching_entities 51 | 52 | .. toctree:: 53 | :hidden: 54 | :caption: Issues, pull requests and feature requests 55 | 56 | issues_note 57 | 58 | .. 
toctree:: 59 | :maxdepth: 2 60 | :hidden: 61 | :caption: API Reference 62 | 63 | _code_subpages/modules 64 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | 13 | import os 14 | import sys 15 | from typing import List 16 | 17 | import iris 18 | 19 | sys.path.insert(0, os.path.abspath("../..")) 20 | 21 | 22 | # -- Project information ----------------------------------------------------- 23 | 24 | project = "IRIS: Iris Recognition Inference System" 25 | copyright = "2023, Worldcoin AI" 26 | author = "Worldcoin AI" 27 | 28 | # The full version, including alpha/beta/rc tags 29 | release = iris.__version__ 30 | 31 | 32 | # -- General configuration --------------------------------------------------- 33 | 34 | # Add any Sphinx extension module names here, as strings. They can be 35 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 36 | # ones. 37 | extensions = ["sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", "sphinx.ext.autodoc"] 38 | 39 | # Add any paths that contain templates here, relative to this directory. 40 | templates_path = ["_templates"] 41 | 42 | # List of patterns, relative to source directory, that match files and 43 | # directories to ignore when looking for source files. 
44 | # This pattern also affects html_static_path and html_extra_path. 45 | exclude_patterns: List[str] = [] 46 | 47 | 48 | # -- Options for HTML output ------------------------------------------------- 49 | 50 | # The theme to use for HTML and HTML Help pages. See the documentation for 51 | # a list of builtin themes. 52 | 53 | html_theme = "furo" 54 | 55 | # Add any paths that contain custom static files (such as style sheets) here, 56 | # relative to this directory. They are copied after the builtin static files, 57 | # so a file named "default.css" will overwrite the builtin "default.css". 58 | html_static_path = ["_static"] 59 | 60 | html_favicon = "./images/logos/wld.png" 61 | html_title = f"iris v{iris.__version__}" 62 | html_theme_options = { 63 | "dark_css_variables": { 64 | "color-brand-primary": "#FFFFFF", 65 | "color-brand-content": "#1AA7EC", 66 | }, 67 | } 68 | 69 | # LaTeX 70 | 71 | latex_engine = "xelatex" 72 | latex_elements = { 73 | "fontpkg": r""" 74 | \setmainfont{DejaVu Serif} 75 | \setsansfont{DejaVu Sans} 76 | \setmonofont{DejaVu Sans Mono} 77 | """, 78 | "preamble": r""" 79 | \usepackage[titles]{tocloft} 80 | \cftsetpnumwidth {1.25cm}\cftsetrmarg{1.5cm} 81 | \setlength{\cftchapnumwidth}{0.75cm} 82 | \setlength{\cftsecindent}{\cftchapnumwidth} 83 | \setlength{\cftsecnumwidth}{1.25cm} 84 | """, 85 | "fncychap": r"\usepackage[Bjornstrup]{fncychap}", 86 | "printindex": r"\footnotesize\raggedright\printindex", 87 | } 88 | latex_show_urls = "footnote" 89 | -------------------------------------------------------------------------------- /tests/e2e_tests/orchestration/test_e2e_output_builder.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pickle 3 | from typing import Any, Dict, Tuple 4 | 5 | import numpy as np 6 | import pytest 7 | 8 | from iris.callbacks.pipeline_trace import PipelineCallTraceStorage 9 | from iris.io.dataclasses import Landmarks 10 | from 
@pytest.fixture
def expected_simple_iris_pipeline_output() -> Tuple[Tuple[np.ndarray, np.ndarray], Landmarks, Dict[str, Any]]:
    """Expected output of the simple ORB output builder, loaded from a mock pickle."""
    expected_output_path = os.path.join(
        os.path.dirname(__file__), "mocks", "expected_iris_pipeline_simple_output.pickle"
    )
    # Use a context manager so the file handle is closed deterministically
    # (the original `pickle.load(open(...))` leaked the handle).
    with open(expected_output_path, "rb") as pickle_file:
        return pickle.load(pickle_file)


@pytest.fixture
def expected_orb_iris_pipeline_output() -> Tuple[Tuple[np.ndarray, np.ndarray], Landmarks, Dict[str, Any]]:
    """Expected output of the ORB output builder, loaded from a mock pickle."""
    expected_output_path = os.path.join(os.path.dirname(__file__), "mocks", "expected_iris_pipeline_orb_output.pickle")
    with open(expected_output_path, "rb") as pickle_file:
        return pickle.load(pickle_file)


@pytest.fixture
def expected_debug_iris_pipeline_output() -> Any:
    """Expected output of the debugging output builder, loaded from a mock pickle."""
    expected_output_path = os.path.join(
        os.path.dirname(__file__), "mocks", "expected_iris_pipeline_debug_output.pickle"
    )
    with open(expected_output_path, "rb") as pickle_file:
        return pickle.load(pickle_file)


@pytest.fixture
def mock_iris_pipeline_call_trace() -> PipelineCallTraceStorage:
    """Mock pipeline call trace used as input for the output builders."""
    expected_call_trace_path = os.path.join(os.path.dirname(__file__), "mocks", "mock_iris_pipeline_call_trace.pickle")
    with open(expected_call_trace_path, "rb") as pickle_file:
        return pickle.load(pickle_file)


def test_e2e_build_simple_output(
    mock_iris_pipeline_call_trace: PipelineCallTraceStorage,
    expected_simple_iris_pipeline_output: Tuple[Tuple[np.ndarray, np.ndarray], Landmarks, Dict[str, Any]],
) -> None:
    """Check the simple ORB output builder reproduces the expected pickled output."""
    built_output = build_simple_orb_output(mock_iris_pipeline_call_trace)

    compare_simple_pipeline_outputs(expected_simple_iris_pipeline_output, built_output)
mock_iris_pipeline_call_trace: PipelineCallTraceStorage, 58 | expected_orb_iris_pipeline_output: Tuple[Tuple[np.ndarray, np.ndarray], Landmarks, Dict[str, Any]], 59 | ) -> None: 60 | build_orb_iris_pipeline_output = build_orb_output(mock_iris_pipeline_call_trace) 61 | 62 | compare_iris_pipeline_outputs(expected_orb_iris_pipeline_output, build_orb_iris_pipeline_output) 63 | 64 | 65 | def test_e2e_build_debug_output( 66 | mock_iris_pipeline_call_trace: PipelineCallTraceStorage, 67 | expected_debug_iris_pipeline_output: Tuple[Tuple[np.ndarray, np.ndarray], Landmarks, Dict[str, Any]], 68 | ) -> None: 69 | build_debug_iris_pipeline_output = build_simple_debugging_output(mock_iris_pipeline_call_trace) 70 | 71 | compare_debug_pipeline_outputs(expected_debug_iris_pipeline_output, build_debug_iris_pipeline_output) 72 | -------------------------------------------------------------------------------- /src/iris/io/class_configs.py: -------------------------------------------------------------------------------- 1 | import abc 2 | from copy import deepcopy 3 | from typing import Any, List 4 | 5 | import pydantic 6 | from pydantic import Extra 7 | 8 | from iris.callbacks.callback_interface import Callback 9 | 10 | 11 | class ImmutableModel(pydantic.BaseModel): 12 | """Specifies configurations for validating classes which objects should be immutable.""" 13 | 14 | class Config: 15 | """Configuration options for classes which objects are meant to be immutable.""" 16 | 17 | arbitrary_types_allowed = True 18 | allow_mutation = False 19 | validate_all = True 20 | smart_union = True 21 | extra = Extra.forbid 22 | 23 | def serialize(self) -> Any: 24 | """Serialize the object. By defaults, this method raises a RuntimeError to notify the user that the method wasn't implemented. 25 | 26 | Raises: 27 | RuntimeError: Always. 
28 | """ 29 | raise RuntimeError(f"{self.__class__.__name__}.serialize not implemented!") 30 | 31 | @staticmethod 32 | def deserialize(self) -> Any: 33 | """Deserialize the object. By defaults, this method raises a RuntimeError to notify the user that the method wasn't implemented. 34 | 35 | Raises: 36 | RuntimeError: Always. 37 | """ 38 | raise RuntimeError(f"{self.__class__.__name__}.deserialize not implemented!") 39 | 40 | 41 | class Algorithm(abc.ABC): 42 | """Base class of every node of the iris recognition pipeline.""" 43 | 44 | class Parameters(ImmutableModel): 45 | """Default parameters.""" 46 | 47 | pass 48 | 49 | __parameters_type__ = Parameters 50 | 51 | def __init__(self, **kwargs: Any) -> None: 52 | """Init function.""" 53 | self._callbacks: List[Callback] = [] 54 | 55 | if "callbacks" in kwargs.keys(): 56 | self._callbacks = deepcopy(kwargs["callbacks"]) 57 | del kwargs["callbacks"] 58 | 59 | self.params = self.__parameters_type__(**kwargs) 60 | 61 | def __call__(self, *args: Any, **kwargs: Any) -> Any: 62 | """Make an object a functor. 63 | 64 | Returns: 65 | Any: Object specified by an interface. 66 | """ 67 | return self.execute(*args, **kwargs) 68 | 69 | def execute(self, *args: Any, **kwargs: Any) -> Any: 70 | """Execute method and wrapped with hooks if such are specified. 71 | 72 | Returns: 73 | Any: Object specified by an interface. 74 | """ 75 | for callback_func in self._callbacks: 76 | callback_func.on_execute_start(*args, **kwargs) 77 | 78 | result = self.run(*args, **kwargs) 79 | 80 | for callback_func in self._callbacks: 81 | callback_func.on_execute_end(result) 82 | 83 | return result 84 | 85 | def run(self, *args: Any, **kwargs: Any) -> Any: 86 | """Implement method design pattern. Not overwritten by subclass will raise an error. 87 | 88 | Raises: 89 | NotImplementedError: Raised if subclass doesn't implement `run` method. 90 | 91 | Returns: 92 | Any: Return value by concrate implementation of the `run` method. 
93 | """ 94 | raise NotImplementedError(f"{self.__class__.__name__}.run method not implemented!") 95 | -------------------------------------------------------------------------------- /tests/unit_tests/nodes/segmentation/test_onnx_multilabel_model_segmentation.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from iris.io.dataclasses import IRImage, SegmentationMap 7 | from iris.nodes.segmentation.multilabel_segmentation_interface import MultilabelSemanticSegmentationInterface 8 | from iris.nodes.segmentation.onnx_multilabel_segmentation import ONNXMultilabelSegmentation 9 | 10 | 11 | @pytest.fixture 12 | def multilabel_model() -> ONNXMultilabelSegmentation: 13 | return ONNXMultilabelSegmentation.create_from_hugging_face() 14 | 15 | 16 | def test_forward(multilabel_model: ONNXMultilabelSegmentation) -> None: 17 | mock_grayscale_image_data = np.ones((1440, 1080), dtype=np.uint) * 255 18 | mock_irimage = IRImage(img_data=mock_grayscale_image_data, eye_side="left") 19 | 20 | preprocessed_input = multilabel_model._preprocess(image=mock_irimage.img_data) 21 | predictions = multilabel_model._forward(preprocessed_input) 22 | 23 | assert len(predictions) == 1 24 | 25 | geometry_pred = predictions[0] 26 | assert geometry_pred.shape == ( 27 | 1, 28 | len(MultilabelSemanticSegmentationInterface.CLASSES_MAPPING), 29 | 480, 30 | 640, 31 | ) 32 | 33 | 34 | def test_postprocess_segmap(multilabel_model: ONNXMultilabelSegmentation) -> None: 35 | mock_predicted_segmap = np.ones((1, 2, 512, 512)) * 0.5 36 | mock_original_input_image_resolution = (1080, 1440) 37 | 38 | expected_output = np.ones((1440, 1080, 2)) * 0.5 39 | 40 | postprocess_segmap = multilabel_model.postprocess_segmap( 41 | mock_predicted_segmap, mock_original_input_image_resolution 42 | ) 43 | 44 | np.testing.assert_allclose(postprocess_segmap, expected_output) 45 | 46 | 47 | def 
# --- tests/unit_tests/nodes/segmentation/test_onnx_multilabel_model_segmentation.py (tail) ---
def test_run(multilabel_model: ONNXMultilabelSegmentation) -> None:
    """With `_forward` mocked out, the functor call must assemble the expected SegmentationMap."""
    image_data = np.full((1440, 1080), 255, dtype=np.uint)
    ir_image = IRImage(img_data=image_data, eye_side="left")

    # Three geometry heads plus one noise head, mirroring the real model's output layout.
    mocked_forward_output = [np.full((1, channels, 512, 512), 0.5) for channels in (4, 4, 4, 2)]

    expected = SegmentationMap(
        predictions=np.full((1440, 1080, 4), 0.5),
        index2class=MultilabelSemanticSegmentationInterface.CLASSES_MAPPING,
    )

    with mock.patch(
        "iris.nodes.segmentation.onnx_multilabel_segmentation.ONNXMultilabelSegmentation._forward"
    ) as forward_mock:
        forward_mock.return_value = mocked_forward_output

        assert multilabel_model(ir_image) == expected
"eye_orientation,pupil_points,iris_points,expected_pupil_points,expected_iris_points", 12 | [ 13 | ( 14 | -1.0, 15 | generate_arc(10.0, 0.0, 0.0, 0.0, 2 * np.pi, 360), 16 | generate_arc(10.0, 0.0, 0.0, 0.0, 2 * np.pi, 360), 17 | np.roll(generate_arc(10.0, 0.0, 0.0, 0.0, 2 * np.pi, 360), 1, axis=0), 18 | np.roll(generate_arc(10.0, 0.0, 0.0, 0.0, 2 * np.pi, 360), 1, axis=0), 19 | ), 20 | ( 21 | -1.0, 22 | generate_arc(10.0, 0.0, 0.0, 0.0, 2 * np.pi, 720), 23 | generate_arc(10.0, 0.0, 0.0, 0.0, 2 * np.pi, 720), 24 | np.roll(generate_arc(10.0, 0.0, 0.0, 0.0, 2 * np.pi, 720), 2, axis=0), 25 | np.roll(generate_arc(10.0, 0.0, 0.0, 0.0, 2 * np.pi, 720), 2, axis=0), 26 | ), 27 | ], 28 | ids=["1 point rotation", "2 points rotation"], 29 | ) 30 | def test_correct_orientation( 31 | eye_orientation: float, 32 | pupil_points: np.ndarray, 33 | iris_points: np.ndarray, 34 | expected_pupil_points: np.ndarray, 35 | expected_iris_points: np.ndarray, 36 | ) -> None: 37 | result_pupil_points, result_iris_points = correct_orientation( 38 | pupil_points=pupil_points, 39 | iris_points=iris_points, 40 | eye_orientation=np.radians(eye_orientation), 41 | ) 42 | 43 | assert np.all(result_pupil_points == expected_pupil_points) 44 | assert np.all(result_iris_points == expected_iris_points) 45 | 46 | 47 | @pytest.mark.parametrize( 48 | "pixel_coords,expected_intensity", 49 | [ 50 | # Corners 51 | ((0.0, 0.0), 0.0), 52 | ((0.0, 1.0), 3.0), 53 | ((0.0, 2.0), 6.0), 54 | ((1.0, 0.0), 1.0), 55 | ((1.0, 1.0), 4.0), 56 | ((1.0, 2.0), 7.0), 57 | ((2.0, 0.0), 2.0), 58 | ((2.0, 1.0), 5.0), 59 | ((2.0, 2.0), 8.0), 60 | # Inside 61 | ((0.5, 0.5), 2), 62 | ((0.5, 1.5), 5), 63 | ((1.5, 0.5), 3), 64 | # Outside 65 | ((10.0, 0.5), 0.0), 66 | ((0.5, 10.0), 0.0), 67 | ((10.0, 10.0), 0.0), 68 | ], 69 | ) 70 | def test_interpolate_pixel_intensity(pixel_coords: Tuple[float, float], expected_intensity: float) -> None: 71 | # fmt: off 72 | test_image = np.array( 73 | [ 74 | [0, 1, 2], 75 | [3, 4, 5], 76 | [6, 7, 8], 
# --- src/iris/nodes/binarization/multilabel_binarization.py ---
class MultilabelSegmentationBinarization(Algorithm):
    """Binarization algorithm for multilabel segmentation.

    Every prediction channel is thresholded independently with its user-specified class
    threshold to produce boolean rasters."""

    class Parameters(Algorithm.Parameters):
        """Parameters class for MultilabelSegmentationBinarization objects."""

        eyeball_threshold: float = Field(..., ge=0.0, le=1.0)
        iris_threshold: float = Field(..., ge=0.0, le=1.0)
        pupil_threshold: float = Field(..., ge=0.0, le=1.0)
        eyelashes_threshold: float = Field(..., ge=0.0, le=1.0)

    __parameters_type__ = Parameters

    def __init__(
        self,
        eyeball_threshold: float = 0.5,
        iris_threshold: float = 0.5,
        pupil_threshold: float = 0.5,
        eyelashes_threshold: float = 0.5,
        callbacks: List[Callback] = [],
    ) -> None:
        """Assign parameters.

        Args:
            eyeball_threshold (float, optional): Eyeball class threshold. Defaults to 0.5.
            iris_threshold (float, optional): Iris class threshold. Defaults to 0.5.
            pupil_threshold (float, optional): Pupil class threshold. Defaults to 0.5.
            eyelashes_threshold (float, optional): Eyelashes class threshold. Defaults to 0.5.
            callbacks (List[Callback], optional): List of algorithm callbacks. Defaults to [].
                (The mutable default is safe here: Algorithm.__init__ deep-copies it.)
        """
        super().__init__(
            eyeball_threshold=eyeball_threshold,
            iris_threshold=iris_threshold,
            pupil_threshold=pupil_threshold,
            eyelashes_threshold=eyelashes_threshold,
            callbacks=callbacks,
        )

    def run(self, segmentation_map: SegmentationMap) -> Tuple[GeometryMask, NoiseMask]:
        """Perform segmentation binarization.

        Args:
            segmentation_map (SegmentationMap): Predictions.

        Returns:
            Tuple[GeometryMask, NoiseMask]: Binarized geometry mask and noise mask.
        """
        # Threshold each class channel independently; `>=` keeps borderline pixels.
        binarized = {
            class_name: segmentation_map.predictions[..., segmentation_map.index_of(class_name)] >= threshold
            for class_name, threshold in (
                ("eyeball", self.params.eyeball_threshold),
                ("iris", self.params.iris_threshold),
                ("pupil", self.params.pupil_threshold),
                ("eyelashes", self.params.eyelashes_threshold),
            )
        }

        geometry_mask = GeometryMask(
            pupil_mask=binarized["pupil"], iris_mask=binarized["iris"], eyeball_mask=binarized["eyeball"]
        )
        noise_mask = NoiseMask(mask=binarized["eyelashes"])

        return geometry_mask, noise_mask
"forbidden"}), 36 | ], 37 | ids=["pydantic checks", "extra parameter forbidden"], 38 | ) 39 | def test_immutable_model_constructor_raises_exception(parameters: Dict) -> None: 40 | with pytest.raises((ValidationError, TypeError)): 41 | _ = ConcreteImmutableModel(**parameters) 42 | 43 | 44 | @pytest.mark.parametrize( 45 | "parameters,new_parameters", 46 | [ 47 | pytest.param( 48 | {"my_param_1": 3, "my_param_2": "toto"}, 49 | {"my_param_1": 6, "my_param_2": "not toto"}, 50 | ), 51 | ], 52 | ids=["regular"], 53 | ) 54 | def test_immutability_of_immutable_model(parameters: Dict[str, Any], new_parameters: Dict[str, Any]) -> None: 55 | immutable_obj = ConcreteImmutableModel(**parameters) 56 | 57 | with pytest.raises(TypeError): 58 | for key, value in new_parameters.items(): 59 | setattr(immutable_obj, key, value) 60 | 61 | 62 | class MockDummyValidationAlgorithm(Callback): 63 | CORRECT_MSG = "Worldcoin AI is the best" 64 | ERROR_MSG = "Incorrect msg returned!" 65 | 66 | def on_execute_end(self, result: str) -> None: 67 | if result != self.CORRECT_MSG: 68 | raise RuntimeError(MockDummyValidationAlgorithm.ERROR_MSG) 69 | 70 | 71 | class MockParametrizedModelWithCallback(Algorithm): 72 | class Parameters(Algorithm.Parameters): 73 | ret_msg: str 74 | 75 | __parameters_type__ = Parameters 76 | 77 | def __init__(self, ret_msg: str = "Worldcoin AI is the best") -> None: 78 | super().__init__(ret_msg=ret_msg, callbacks=[MockDummyValidationAlgorithm()]) 79 | 80 | def run(self) -> str: 81 | return self.params.ret_msg 82 | 83 | 84 | def test_parametrized_model_validation_hook_not_raising_an_error() -> None: 85 | mock_model = MockParametrizedModelWithCallback() 86 | 87 | result = mock_model.execute() 88 | 89 | assert result == mock_model.params.ret_msg 90 | 91 | 92 | def test_parametrized_model_validation_hook_raising_an_error() -> None: 93 | mock_model = MockParametrizedModelWithCallback(ret_msg="Worldcoin AI isn't the best") 94 | 95 | with pytest.raises(RuntimeError) as err: 96 | 
# --- src/iris/nodes/segmentation/multilabel_segmentation_interface.py ---
class MultilabelSemanticSegmentationInterface(Algorithm):
    """Interface of a model semantic segmentation prediction trained with multilabel labels."""

    HUGGING_FACE_REPO_ID = "Worldcoin/iris-semantic-segmentation"
    MODEL_CACHE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets")

    CLASSES_MAPPING = {
        0: "eyeball",
        1: "iris",
        2: "pupil",
        3: "eyelashes",
    }

    @classmethod
    def create_from_hugging_face(cls) -> MultilabelSemanticSegmentationInterface:
        """Abstract factory placeholder; every concrete subclass must override it.

        Raises:
            RuntimeError: Raised if subclass doesn't implement that class method.

        Returns:
            MultilabelSemanticSegmentationInterface: MultilabelSemanticSegmentationInterface subclass object.
        """
        raise RuntimeError(f"`create_from_hugging_face` function hasn't been implemented for {cls.__name__} subclass.")

    def __init__(self, **kwargs: Any) -> None:
        """Assign parameters."""
        super().__init__(**kwargs)

    def preprocess(self, image: np.ndarray, input_resolution: Tuple[int, int], nn_input_channels: int) -> np.ndarray:
        """Preprocess image before running a model inference.

        Args:
            image (np.ndarray): Image to preprocess.
            input_resolution (Tuple[int, int]): A model input resolution.
            nn_input_channels (int): A model input channels.

        Returns:
            np.ndarray: Preprocessed image, batched NCHW layout.
        """
        # Resize and scale to [0, 1]; replicates torchvision's ToTensor.
        scaled = cv2.resize(image.astype(float), input_resolution) / 255

        # Stack the single-channel image into the requested channel count (HWC layout).
        stacked = np.tile(scaled[..., np.newaxis], (1, 1, nn_input_channels))

        # Replicates torchvision's Normalize; ImageNet statistics for 3-channel inputs.
        if nn_input_channels == 3:
            means, stds = np.array([0.485, 0.456, 0.406]), np.array([0.229, 0.224, 0.225])
        else:
            means, stds = 0.5, 0.5

        stacked -= means
        stacked /= stds

        # HWC -> CHW, then prepend the batch axis expected by the network.
        return np.expand_dims(stacked.transpose(2, 0, 1), axis=0)

    def postprocess_segmap(
        self,
        segmap: np.ndarray,
        original_image_resolution: Tuple[int, int],
    ) -> np.ndarray:
        """Postprocess segmentation map.

        Args:
            segmap (np.ndarray): Predicted segmentation map, batched NCHW layout.
            original_image_resolution (Tuple[int, int]): Original input image resolution (width, height).

        Returns:
            np.ndarray: Postprocessed segmentation map (HWC, resized back to the source resolution).
        """
        # Drop the batch axis and go CHW -> HWC before resizing.
        channels_last = np.transpose(np.squeeze(segmap, axis=0), (1, 2, 0))

        return cv2.resize(channels_last, original_image_resolution, interpolation=cv2.INTER_NEAREST)
# --- src/iris/nodes/matcher/hamming_distance_matcher.py ---
class HammingDistanceMatcher(Matcher):
    """Hamming distance Matcher.

    Algorithm steps:
       1) Calculate counts of nonmatch irisbits (IB_Counts) in common unmasked region and the counts of common maskbits (MB_Counts) in common unmasked region for both upper and lower half of iris, respectively.
       2) If parameter nm_dist is defined, calculate normalized Hamming distance (NHD) based on IB_Counts, MB_Counts and nm_dist.
       3) If parameter weights is defined, calculate weighted Hamming distance (WHD) based on IB_Counts, MB_Counts and weights.
       4) If parameters nm_dist and weights are both defined, calculate weighted normalized Hamming distance (WNHD) based on IB_Counts, MB_Counts, nm_dist and weights.
       5) Otherwise, calculate Hamming distance (HD) based on IB_Counts and MB_Counts.
       6) If parameter rotation_shift is > 0, repeat the above steps for additional rotations of the iriscode.
       7) Return the minimum distance from above calculations.
    """

    class Parameters(Matcher.Parameters):
        """HammingDistanceMatcher parameters."""

        normalise: bool
        nm_dist: confloat(ge=0, le=1, strict=True)
        nm_type: Literal["linear", "sqrt"]
        weights: Optional[List[np.ndarray]]

    __parameters_type__ = Parameters

    def __init__(
        self,
        rotation_shift: int = 15,
        normalise: bool = False,
        nm_dist: confloat(ge=0, le=1, strict=True) = 0.45,
        nm_type: Literal["linear", "sqrt"] = "sqrt",
        weights: Optional[List[np.ndarray]] = None,
    ) -> None:
        """Assign parameters.

        Args:
            rotation_shift (int): Rotations allowed in matching, expressed in iris code columns. Defaults to 15.
            normalise (bool): Whether to compute the normalized Hamming distance. Defaults to False.
            nm_dist (confloat(ge=0, le=1, strict=True)): Nonmatch distance used for normalized HD. Defaults to 0.45.
            nm_type (Literal["linear", "sqrt"]): Profile used by the normalized HD. Defaults to "sqrt".
            weights (Optional[List[np.ndarray]]): List of weights tables. Optional parameter for weighted HD. Defaults to None.
        """
        super().__init__(
            rotation_shift=rotation_shift, normalise=normalise, nm_dist=nm_dist, nm_type=nm_type, weights=weights
        )

    def run(self, template_probe: IrisTemplate, template_gallery: IrisTemplate) -> float:
        """Match iris templates using Hamming distance.

        Args:
            template_probe (IrisTemplate): Iris template from probe.
            template_gallery (IrisTemplate): Iris template from gallery.

        Returns:
            float: Matching distance.
        """
        # `hamming_distance` also returns the best rotation; only the score is needed here.
        score, _ = hamming_distance(
            template_probe,
            template_gallery,
            self.params.rotation_shift,
            self.params.normalise,
            self.params.nm_dist,
            self.params.nm_type,
            self.params.weights,
        )

        return score
31 | 32 | Args: 33 | value_threshold (Tuple[confloat(ge=0), confloat(ge=0)]): Thresholding iris response values. 34 | fragile_type (Literal["cartesian", "polar"], optional): The Fragile bits can be either 35 | calculated in cartesian or polar coordinates. In the first, the values 36 | of value_threshold denote to x and y axis, in the case of polar coordinates, 37 | the values denote to radius and angle. Defaults to "polar". 38 | """ 39 | super().__init__(value_threshold=value_threshold, fragile_type=fragile_type) 40 | 41 | def run(self, iris_filter_response: IrisFilterResponse) -> IrisFilterResponse: 42 | """Generate refined IrisFilterResponse. 43 | 44 | Args: 45 | iris_filter_response (IrisFilterResponse): Filter bank response. 46 | 47 | Returns: 48 | IrisFilterResponse: Filter bank response. 49 | """ 50 | fragile_masks = [] 51 | for iris_response, iris_mask in zip(iris_filter_response.iris_responses, iris_filter_response.mask_responses): 52 | if self.params.fragile_type == "cartesian": 53 | mask_value_real = np.abs(np.real(iris_response)) >= self.params.value_threshold[0] 54 | mask_value_imaginary = np.abs(np.imag(iris_response)) >= self.params.value_threshold[1] 55 | mask_value = mask_value_real * mask_value_imaginary 56 | 57 | if self.params.fragile_type == "polar": 58 | iris_response_r = np.abs(iris_response) 59 | iris_response_phi = np.angle(iris_response) 60 | 61 | mask_value_r = iris_response_r >= self.params.value_threshold[0] 62 | 63 | cos_mask = np.abs(np.cos(iris_response_phi)) <= np.abs(np.cos(self.params.value_threshold[1])) 64 | sine_mask = np.abs(np.sin(iris_response_phi)) <= np.abs(np.cos(self.params.value_threshold[1])) 65 | mask_value_phi = cos_mask * sine_mask 66 | mask_value = mask_value_r * mask_value_phi 67 | 68 | mask_value = mask_value * iris_mask 69 | fragile_masks.append(mask_value) 70 | 71 | return IrisFilterResponse( 72 | iris_responses=iris_filter_response.iris_responses, 73 | mask_responses=fragile_masks, 74 | 
iris_code_version=iris_filter_response.iris_code_version, 75 | ) 76 | -------------------------------------------------------------------------------- /tests/unit_tests/nodes/iris_response/probe_schemas/test_regular_probe_schema.py: -------------------------------------------------------------------------------- 1 | from typing import List, Literal, Optional, Union 2 | 3 | import pytest 4 | from pydantic import PositiveInt, ValidationError 5 | 6 | import iris.nodes.iris_response.probe_schemas.regular_probe_schema as rps 7 | from iris.io.errors import ProbeSchemaError 8 | 9 | 10 | @pytest.mark.parametrize( 11 | "n_rows,n_cols,boundary_rho,boundary_phi,image_shape", 12 | [ 13 | (-3, 12, [0, 0], "periodic-symmetric", None), 14 | (6, 6, [0, 0], "periodic-symmetric", [10, 10]), 15 | (6, 6, [0, 0], [-1, 0], None), 16 | (6, 6, [0, 0], [0.6, 0.6], None), 17 | (6, 6, [-1, 0], 0, None), 18 | (6, 6, [0.6, 0.7], 0, None), 19 | (6, 6, [0, 0], "periodic-symmetric", [-5, 0.5]), 20 | (-3, 12, "some string", "some string", None), 21 | (5, 5, [0, 0], "periodic-symmetric", [10, 10]), 22 | ], 23 | ids=[ 24 | "negative n_rows", 25 | "aliasing effects", 26 | "offset negative phi", 27 | "offset overlapping phi", 28 | "offset negative rho", 29 | "offset overlapping rho", 30 | "negative image_shape and float values", 31 | "wrong boundary option", 32 | "aliasing effects rho", 33 | ], 34 | ) 35 | def test_regular_probe_schema_constructor_fails( 36 | n_rows: int, 37 | n_cols: int, 38 | boundary_rho: List[float], 39 | boundary_phi: Union[Literal["periodic-symmetric", "periodic-left"], List[float]], 40 | image_shape: Optional[List[PositiveInt]], 41 | ) -> None: 42 | with pytest.raises((ProbeSchemaError, ValidationError)): 43 | _ = rps.RegularProbeSchema( 44 | n_rows=n_rows, n_cols=n_cols, boundary_rho=boundary_rho, boundary_phi=boundary_phi, image_shape=image_shape 45 | ) 46 | 47 | 48 | @pytest.mark.parametrize( 49 | "n_rows,n_cols,boundary_rho,boundary_phi,image_shape", 50 | [ 51 | (3, 6, [0, 
0], "periodic-symmetric", None), 52 | (3, 5, [0, 0], "periodic-symmetric", [10, 10]), 53 | (3, 6, [0, 0], "periodic-left", None), 54 | (3, 6, [0.2, 0.5], [0.1, 0.3], None), 55 | ], 56 | ids=[ 57 | "regular 1", 58 | "regular 2", 59 | "regular 3", 60 | "regular 4", 61 | ], 62 | ) 63 | def test_regular_probe_schema_constructor( 64 | n_rows: int, 65 | n_cols: int, 66 | boundary_rho: List[float], 67 | boundary_phi: Union[Literal["periodic-symmetric", "periodic-left"], List[float]], 68 | image_shape: Optional[List[PositiveInt]], 69 | ) -> None: 70 | schema = rps.RegularProbeSchema( 71 | n_rows=n_rows, n_cols=n_cols, boundary_rho=boundary_rho, boundary_phi=boundary_phi, image_shape=image_shape 72 | ) 73 | 74 | assert schema.rhos.shape == schema.phis.shape == (n_rows * n_cols,) 75 | 76 | 77 | @pytest.mark.parametrize( 78 | "row_min,row_max,length,boundary_condition", 79 | [ 80 | (2, 11, 10, "periodic-symmetric"), 81 | (2, 11, 10, "periodic-left"), 82 | (2, 11, 10, [0.2, 0.4]), 83 | ], 84 | ids=[ 85 | "regular 1", 86 | "regular 2", 87 | "regular 3", 88 | ], 89 | ) 90 | def test_find_suitable_n_rows( 91 | row_min: int, 92 | row_max: int, 93 | length: int, 94 | boundary_condition: Union[Literal["periodic-symmetric", "periodic-left"], List[float]], 95 | ) -> None: 96 | _ = rps.RegularProbeSchema.find_suitable_n_rows( 97 | row_min=row_min, row_max=row_max, length=length, boundary_condition=boundary_condition 98 | ) 99 | -------------------------------------------------------------------------------- /src/iris/nodes/eye_properties_estimation/pupil_iris_property_calculator.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from pydantic import Field 4 | 5 | from iris.callbacks.callback_interface import Callback 6 | from iris.io.class_configs import Algorithm 7 | from iris.io.dataclasses import EyeCenters, GeometryPolygons, PupilToIrisProperty 8 | from iris.io.errors import PupilIrisPropertyEstimationError 9 | 
# --- src/iris/nodes/eye_properties_estimation/pupil_iris_property_calculator.py ---
class PupilIrisPropertyCalculator(Algorithm):
    """Computes pupil-to-iris properties.

    Algorithm steps:
        (1) Calculate pupil diameter to iris diameter ratio, i.e. pupil dilation.
        (2) Calculate the ratio of the pupil-center-to-iris-center distance over the iris diameter.
    """

    class Parameters(Algorithm.Parameters):
        """PupilIrisPropertyCalculator parameters.

        min_pupil_diameter (float): threshold of pupil diameter, below which the pupil is too small. Must be higher than 0.
        min_iris_diameter (float): threshold of iris diameter, below which the iris is too small. Must be higher than 0.
        """

        min_pupil_diameter: float = Field(..., gt=0.0)
        min_iris_diameter: float = Field(..., gt=0.0)

    __parameters_type__ = Parameters

    def __init__(
        self,
        min_pupil_diameter: float = 1.0,
        min_iris_diameter: float = 150.0,
        callbacks: List[Callback] = [],
    ) -> None:
        """Assign parameters.

        Args:
            min_pupil_diameter (float): minimum pupil diameter. Defaults to 1.0.
            min_iris_diameter (float): minimum iris diameter. Defaults to 150.0.
            callbacks (List[Callback]): callbacks list. Defaults to [].
                (Mutable default is safe: Algorithm.__init__ deep-copies it.)
        """
        super().__init__(
            min_pupil_diameter=min_pupil_diameter,
            min_iris_diameter=min_iris_diameter,
            callbacks=callbacks,
        )

    def run(self, geometries: GeometryPolygons, eye_centers: EyeCenters) -> PupilToIrisProperty:
        """Calculate pupil-to-iris property.

        Args:
            geometries (GeometryPolygons): polygons used for calculating the pupil-to-iris property.
            eye_centers (EyeCenters): eye centers used for calculating the pupil-to-iris property.

        Raises:
            PupilIrisPropertyEstimationError: Raised if 1) the pupil or iris diameter is too small,
                2) pupil diameter is larger than or equal to iris diameter, 3) pupil center is outside iris.

        Returns:
            PupilToIrisProperty: pupil-to-iris property object.
        """
        iris_diameter = geometries.iris_diameter
        pupil_diameter = geometries.pupil_diameter
        center_shift = eye_centers.center_distance * 2

        # Validation gates, evaluated in a fixed order so the first failure wins.
        sanity_checks = (
            (pupil_diameter < self.params.min_pupil_diameter, "Pupil diameter is too small!"),
            (iris_diameter < self.params.min_iris_diameter, "Iris diameter is too small!"),
            (pupil_diameter >= iris_diameter, "Pupil diameter is larger than/equal to Iris diameter!"),
            (center_shift >= iris_diameter, "Pupil center is outside iris!"),
        )
        for failed, message in sanity_checks:
            if failed:
                raise PupilIrisPropertyEstimationError(message)

        return PupilToIrisProperty(
            pupil_to_iris_diameter_ratio=pupil_diameter / iris_diameter,
            pupil_to_iris_center_dist_ratio=center_shift / iris_diameter,
        )
# --- src/iris/nodes/geometry_estimation/fusion_extrapolation.py ---
class FusionExtrapolation(Algorithm):
    """Fusion extrapolation composed of two concrete extrapolation algorithms:
    1) circle extrapolation algorithm - linear extrapolation algorithm,
    2) ellipse extrapolation algorithm - least square ellipse fit with iris polygon refinement.

    By default the linear extrapolation result is used, but if the standard deviation of the iris
    radii is greater than the given threshold, the ellipse fit replaces the iris boundary because
    the eye is then very likely more elliptical than circular.
    """

    class Parameters(Algorithm.Parameters):
        """Parameters of fusion extrapolation algorithm."""

        circle_extrapolation: Algorithm
        ellipse_fit: Algorithm
        algorithm_switch_std_threshold: float

    __parameters_type__ = Parameters

    def __init__(
        self,
        circle_extrapolation: Algorithm = LinearExtrapolation(dphi=360 / 512),
        ellipse_fit: Algorithm = LSQEllipseFitWithRefinement(dphi=360 / 512),
        algorithm_switch_std_threshold: float = 3.5,
        callbacks: List[Callback] = [],
    ) -> None:
        """Assign parameters.

        Args:
            circle_extrapolation (Algorithm, optional): More circular shape estimation algorithm.
                Defaults to LinearExtrapolation(dphi=360 / 512, degrees / width of normalized image).
            ellipse_fit (Algorithm, optional): More elliptical shape estimation algorithm.
                Defaults to LSQEllipseFitWithRefinement(dphi=360 / 512, degrees / width of normalized image).
            algorithm_switch_std_threshold (float, optional): Radius-std threshold that switches the
                iris boundary to the ellipse fit. Defaults to 3.5.
            callbacks (List[Callback], optional): List of algorithm callbacks. Defaults to [].
        """
        super().__init__(
            ellipse_fit=ellipse_fit,
            circle_extrapolation=circle_extrapolation,
            algorithm_switch_std_threshold=algorithm_switch_std_threshold,
            callbacks=callbacks,
        )

    def run(self, input_polygons: GeometryPolygons, eye_center: EyeCenters) -> GeometryPolygons:
        """Perform extrapolation algorithm.

        Args:
            input_polygons (GeometryPolygons): Smoothed polygons.
            eye_center (EyeCenters): Computed eye centers.

        Returns:
            GeometryPolygons: Extrapolated polygons.
        """
        iris_xs = input_polygons.iris_array[:, 0]
        iris_ys = input_polygons.iris_array[:, 1]
        radii, _ = cartesian2polar(iris_xs, iris_ys, eye_center.iris_x, eye_center.iris_y)

        circle_poly = self.params.circle_extrapolation(input_polygons, eye_center)

        # Nearly circular iris: the linear-extrapolation result is used as-is.
        if radii.std() <= self.params.algorithm_switch_std_threshold:
            return circle_poly

        # High radial spread: replace only the iris boundary with the ellipse fit.
        ellipse_poly = self.params.ellipse_fit(input_polygons)

        return GeometryPolygons(
            pupil_array=circle_poly.pupil_array,
            iris_array=ellipse_poly.iris_array,
            eyeball_array=input_polygons.eyeball_array,
        )
23 | """ 24 | 25 | class Parameters(Algorithm.Parameters): 26 | """Parameters of linear extrapolation algorithm.""" 27 | 28 | dphi: float = Field(..., gt=0.0, lt=360.0) 29 | 30 | __parameters_type__ = Parameters 31 | 32 | def __init__(self, dphi: float = 0.9, callbacks: List[Callback] = []) -> None: 33 | """Assign parameters. 34 | 35 | Args: 36 | dphi (float, optional): phi angle delta used to sample points while doing smoothing by interpolation. Defaults to 0.9. 37 | callbacks (List[Callback]): callbacks list. Defaults to []. 38 | """ 39 | super().__init__(dphi=dphi, callbacks=callbacks) 40 | 41 | def run(self, input_polygons: GeometryPolygons, eye_center: EyeCenters) -> GeometryPolygons: 42 | """Estimate contours. 43 | 44 | Args: 45 | input_polygons (GeometryPolygons): Input contours. 46 | eye_center (EyeCenters): Eye's centers. 47 | 48 | Returns: 49 | GeometryPolygons: Extrapolated contours. 50 | """ 51 | estimated_pupil = self._estimate(input_polygons.pupil_array, (eye_center.pupil_x, eye_center.pupil_y)) 52 | estimated_iris = self._estimate(input_polygons.iris_array, (eye_center.iris_x, eye_center.iris_y)) 53 | 54 | return GeometryPolygons( 55 | pupil_array=estimated_pupil, iris_array=estimated_iris, eyeball_array=input_polygons.eyeball_array 56 | ) 57 | 58 | def _estimate(self, vertices: np.ndarray, center_xy: Tuple[float, float]) -> np.ndarray: 59 | """Estimate a circle fit for a single contour. 60 | 61 | Args: 62 | vertices (np.ndarray): Contour's vertices. 63 | center_xy (Tuple[float, float]): Contour's center position. 64 | 65 | Returns: 66 | np.ndarray: Estimated polygon. 
67 | """ 68 | rhos, phis = math.cartesian2polar(vertices[:, 0], vertices[:, 1], *center_xy) 69 | 70 | padded_rhos = np.concatenate([rhos, rhos, rhos]) 71 | padded_phis = np.concatenate([phis - 2 * np.pi, phis, phis + 2 * np.pi]) 72 | 73 | interpolated_phis = np.arange(padded_phis.min(), padded_phis.max(), np.radians(self.params.dphi)) 74 | interpolated_rhos = np.interp(interpolated_phis, xp=padded_phis, fp=padded_rhos, period=2 * np.pi) 75 | 76 | mask = (interpolated_phis >= 0) & (interpolated_phis < 2 * np.pi) 77 | interpolated_phis, interpolated_rhos = interpolated_phis[mask], interpolated_rhos[mask] 78 | 79 | xs, ys = math.polar2cartesian(interpolated_rhos, interpolated_phis, *center_xy) 80 | estimated_vertices = np.column_stack([xs, ys]) 81 | 82 | return estimated_vertices 83 | -------------------------------------------------------------------------------- /src/iris/nodes/geometry_refinement/contour_interpolation.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | import numpy as np 4 | from pydantic import Field 5 | 6 | from iris.io.class_configs import Algorithm 7 | from iris.io.dataclasses import GeometryPolygons 8 | 9 | 10 | class ContourInterpolation(Algorithm): 11 | """Implementation of contour interpolation algorithm conditioned by given NoiseMask. 12 | 13 | Algorithm performs linar interpolation of points between vectorized, predicted points such that maximum distance between two consecutive points in a polygon isn't greater than 14 | a fraction of an iris diameter length specified as `max_distance_between_boundary_points` parameter. 
15 | """ 16 | 17 | class Parameters(Algorithm.Parameters): 18 | """Parameters class for ContourInterpolation objects.""" 19 | 20 | max_distance_between_boundary_points: float = Field(..., gt=0.0, lt=1.0) 21 | 22 | __parameters_type__ = Parameters 23 | 24 | def __init__(self, max_distance_between_boundary_points: float = 0.01) -> None: 25 | """Assign parameters. 26 | 27 | Args: 28 | max_distance_between_boundary_points (float, optional): Maximum distance between boundary contour points expressed as a fraction of a iris diameter length. Defaults to 0.01. 29 | """ 30 | super().__init__(max_distance_between_boundary_points=max_distance_between_boundary_points) 31 | 32 | def run(self, polygons: GeometryPolygons) -> GeometryPolygons: 33 | """Refine polygons by interpolating contour points. 34 | 35 | Args: 36 | polygons (GeometryPolygons): Polygons to refine. 37 | 38 | Returns: 39 | GeometryPolygons: Refined polygons. 40 | """ 41 | max_boundary_dist_in_px = self.params.max_distance_between_boundary_points * polygons.iris_diameter 42 | 43 | refined_pupil_array = self._interpolate_polygon_points(polygons.pupil_array, max_boundary_dist_in_px) 44 | refined_iris_array = self._interpolate_polygon_points(polygons.iris_array, max_boundary_dist_in_px) 45 | refined_eyeball_array = self._interpolate_polygon_points(polygons.eyeball_array, max_boundary_dist_in_px) 46 | 47 | return GeometryPolygons( 48 | pupil_array=refined_pupil_array, 49 | iris_array=refined_iris_array, 50 | eyeball_array=refined_eyeball_array, 51 | ) 52 | 53 | def _interpolate_polygon_points(self, polygon: np.ndarray, max_distance_between_points_px: float) -> np.ndarray: 54 | """Interpolate contours points, so that the distance between two is no greater than `self.params.max_distance_between_boundary_points` in pixel space. 55 | 56 | Args: 57 | polygon (np.ndarray): Contour polygons. 
58 | max_distance_between_points_px (float): `self.params.max_distance_between_boundary_points` expressed in pixel length relative to iris diameter. 59 | 60 | Returns: 61 | np.ndarray: Interpolated polygon points. 62 | """ 63 | previous_boundary = np.roll(polygon, shift=1, axis=0) 64 | distances = np.linalg.norm(polygon - previous_boundary, axis=1) 65 | num_points = np.ceil(distances / max_distance_between_points_px).astype(int) 66 | 67 | x: List[np.ndarray] = [] 68 | y: List[np.ndarray] = [] 69 | for (x1, y1), (x2, y2), num_point in zip(previous_boundary, polygon, num_points): 70 | x.append(np.linspace(x1, x2, num=num_point, endpoint=False)) 71 | y.append(np.linspace(y1, y2, num=num_point, endpoint=False)) 72 | 73 | new_boundary = np.stack([np.concatenate(x), np.concatenate(y)], axis=1) 74 | _, indices = np.unique(new_boundary, axis=0, return_index=True) 75 | new_boundary = new_boundary[np.sort(indices)] 76 | 77 | return new_boundary 78 | -------------------------------------------------------------------------------- /src/iris/nodes/geometry_refinement/contour_points_filter.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import numpy as np 3 | from pydantic import Field 4 | 5 | from iris.io.class_configs import Algorithm 6 | from iris.io.dataclasses import GeometryPolygons, NoiseMask 7 | from iris.io.errors import GeometryRefinementError 8 | 9 | 10 | class ContourPointNoiseEyeballDistanceFilter(Algorithm): 11 | """Implementation of point filtering algorithm that removes points which are to close to eyeball or noise. 12 | 13 | The role of this algorithm is to create a buffer around the pupil and iris polygons. This accounts for 14 | potential segmentation imprecisions, making the overall pipeline more robust against edge cases and out-of-distribution images. 
15 | 16 | The buffer width is computed relatively to the iris diameter: `min_distance_to_noise_and_eyeball * iris_diameter` 17 | The trigger for this buffer are the eyeball boundary and the noise (e.g. eyelashes, specular reflection, etc.). 18 | """ 19 | 20 | class Parameters(Algorithm.Parameters): 21 | """Default ContourPointToNoiseEyeballDistanceFilter parameters.""" 22 | 23 | min_distance_to_noise_and_eyeball: float = Field(..., gt=0.0, lt=1.0) 24 | 25 | __parameters_type__ = Parameters 26 | 27 | def __init__(self, min_distance_to_noise_and_eyeball: float = 0.005) -> None: 28 | """Assign parameters. 29 | 30 | Args: 31 | min_distance_to_noise_and_eyeball (float, optional): Minimum distance to eyeball or noise expressed as a fraction of iris diameter length. Defaults to 0.025. 32 | """ 33 | super().__init__(min_distance_to_noise_and_eyeball=min_distance_to_noise_and_eyeball) 34 | 35 | def run(self, polygons: GeometryPolygons, geometry_mask: NoiseMask) -> GeometryPolygons: 36 | """Perform polygon refinement by filtering out those iris/pupil polygons points which are to close to eyeball or noise. 37 | 38 | Args: 39 | polygons (GeometryPolygons): Polygons to refine. 40 | geometry_mask (NoiseMask): Geometry noise mask. 41 | 42 | Returns: 43 | GeometryPolygons: Refined geometry polygons. 
44 | """ 45 | noise_and_eyeball_polygon_points_mask = geometry_mask.mask.copy() 46 | 47 | for eyeball_point in np.round(polygons.eyeball_array).astype(int): 48 | x, y = eyeball_point 49 | noise_and_eyeball_polygon_points_mask[y, x] = True 50 | 51 | min_dist_to_noise_and_eyeball_in_px = round( 52 | self.params.min_distance_to_noise_and_eyeball * polygons.iris_diameter 53 | ) 54 | 55 | forbidden_touch_map = cv2.blur( 56 | noise_and_eyeball_polygon_points_mask.astype(float), 57 | ksize=( 58 | 2 * min_dist_to_noise_and_eyeball_in_px + 1, 59 | 2 * min_dist_to_noise_and_eyeball_in_px + 1, 60 | ), 61 | ) 62 | forbidden_touch_map = forbidden_touch_map.astype(bool) 63 | 64 | return GeometryPolygons( 65 | pupil_array=self._filter_polygon_points(forbidden_touch_map, polygons.pupil_array), 66 | iris_array=self._filter_polygon_points(forbidden_touch_map, polygons.iris_array), 67 | eyeball_array=polygons.eyeball_array, 68 | ) 69 | 70 | def _filter_polygon_points(self, forbidden_touch_map: np.ndarray, polygon_points: np.ndarray) -> np.ndarray: 71 | """Filter polygon's points. 72 | 73 | Args: 74 | forbidden_touch_map (np.ndarray): Forbidden touch map. If value of an element is greater then 0 then it means that point is to close to noise or eyeball. 75 | polygon_points (np.ndarray): Polygon's points. 76 | 77 | Returns: 78 | np.ndarray: Filtered polygon's points. 79 | """ 80 | valid_points = [not forbidden_touch_map[y, x] for x, y in np.round(polygon_points).astype(int)] 81 | if not any(valid_points): 82 | raise GeometryRefinementError("No valid points after filtering polygon points!") 83 | 84 | return polygon_points[valid_points] 85 | --------------------------------------------------------------------------------