├── .cargo
└── config.toml
├── .github
├── actions
│ └── test
│ │ ├── action.yml
│ │ └── test.sh
├── dependabot.yml
├── pull_request_template.md
└── workflows
│ ├── ci.yml
│ ├── clear-cache.yml
│ ├── containers.yml
│ ├── devcontainer.yml
│ └── lint-pr-title.yml
├── .gitignore
├── .scripts
└── update-expression-deps.rs
├── .sqlfluff
├── CHANGELOG.md
├── CODESTYLE.md
├── CONTRIBUTING.md
├── Cargo.lock
├── Cargo.toml
├── LICENSE
├── README.md
├── Settings-default.toml
├── Settings-test.toml
├── clippy.toml
├── datatypes
├── .gitignore
├── Cargo.toml
├── README.md
├── benches
│ ├── grid_mapping.rs
│ ├── grid_updates.rs
│ ├── masked_grid_mapping.rs
│ └── multi_point_collection.rs
├── src
│ ├── collections
│ │ ├── batch_builder.rs
│ │ ├── data_collection.rs
│ │ ├── data_types.rs
│ │ ├── error.rs
│ │ ├── feature_collection.rs
│ │ ├── feature_collection_builder.rs
│ │ ├── geo_feature_collection.rs
│ │ ├── ipc.rs
│ │ ├── mod.rs
│ │ ├── multi_line_string_collection.rs
│ │ ├── multi_point_collection.rs
│ │ └── multi_polygon_collection.rs
│ ├── dataset.rs
│ ├── error.rs
│ ├── lib.rs
│ ├── machine_learning.rs
│ ├── operations
│ │ ├── image
│ │ │ ├── colorizer.rs
│ │ │ ├── into_lossy.rs
│ │ │ ├── mod.rs
│ │ │ ├── rgba_transmutable.rs
│ │ │ └── to_png.rs
│ │ ├── mod.rs
│ │ ├── reproject.rs
│ │ └── spatial_relation.rs
│ ├── plots
│ │ ├── area_line_plot.rs
│ │ ├── bar_chart.rs
│ │ ├── box_plot.rs
│ │ ├── histogram.rs
│ │ ├── histogram2d.rs
│ │ ├── mod.rs
│ │ ├── multi_line_plot.rs
│ │ ├── pie_chart.rs
│ │ └── scatter_plot.rs
│ ├── primitives
│ │ ├── bounding_box.rs
│ │ ├── circle.rs
│ │ ├── coordinate.rs
│ │ ├── datetime.rs
│ │ ├── db_types.rs
│ │ ├── error.rs
│ │ ├── feature_data.rs
│ │ ├── geometry.rs
│ │ ├── line.rs
│ │ ├── measurement.rs
│ │ ├── mod.rs
│ │ ├── multi_line_string.rs
│ │ ├── multi_point.rs
│ │ ├── multi_polygon.rs
│ │ ├── no_geometry.rs
│ │ ├── query_rectangle.rs
│ │ ├── spatial_partition.rs
│ │ ├── spatial_resolution.rs
│ │ ├── spatio_temporal_bounded.rs
│ │ ├── time_instance.rs
│ │ ├── time_interval.rs
│ │ ├── time_step.rs
│ │ └── ttl.rs
│ ├── raster
│ │ ├── arrow_conversion.rs
│ │ ├── band_names.rs
│ │ ├── data_type.rs
│ │ ├── empty_grid.rs
│ │ ├── geo_transform.rs
│ │ ├── grid.rs
│ │ ├── grid_bounds.rs
│ │ ├── grid_index.rs
│ │ ├── grid_or_empty.rs
│ │ ├── grid_traits.rs
│ │ ├── grid_typed.rs
│ │ ├── macros_raster.rs
│ │ ├── macros_raster_tile.rs
│ │ ├── masked_grid.rs
│ │ ├── mod.rs
│ │ ├── no_data_value_grid.rs
│ │ ├── operations
│ │ │ ├── blit.rs
│ │ │ ├── checked_scaling.rs
│ │ │ ├── convert_data_type.rs
│ │ │ ├── from_index_fn.rs
│ │ │ ├── grid_blit.rs
│ │ │ ├── interpolation.rs
│ │ │ ├── map_elements.rs
│ │ │ ├── map_indexed_elements.rs
│ │ │ ├── mod.rs
│ │ │ ├── update_elements.rs
│ │ │ └── update_indexed_elements.rs
│ │ ├── raster_properties.rs
│ │ ├── raster_tile.rs
│ │ ├── raster_traits.rs
│ │ ├── tiling.rs
│ │ ├── typed_raster_conversion.rs
│ │ └── typed_raster_tile.rs
│ ├── spatial_reference.rs
│ └── util
│ │ ├── any.rs
│ │ ├── arrow.rs
│ │ ├── byte_size.rs
│ │ ├── db_types.rs
│ │ ├── gdal.rs
│ │ ├── helpers.rs
│ │ ├── identifiers.rs
│ │ ├── image.rs
│ │ ├── mod.rs
│ │ ├── ranges.rs
│ │ ├── result.rs
│ │ ├── test.rs
│ │ └── well_known_data.rs
└── tests
│ └── example-arrow.rs
├── expression
├── Cargo.toml
├── deps-workspace
│ ├── Cargo.lock
│ ├── Cargo.toml
│ └── lib.rs
├── src
│ ├── codegen.rs
│ ├── compiled.rs
│ ├── dependencies.rs
│ ├── error.rs
│ ├── expression.pest
│ ├── functions.rs
│ ├── lib.rs
│ ├── parser.rs
│ └── util.rs
└── tests
│ └── check-expression-deps.rs
├── macros
├── Cargo.toml
└── src
│ ├── lib.rs
│ ├── testing.rs
│ ├── typetag.rs
│ └── util.rs
├── openapi.json
├── operators
├── .gitignore
├── Cargo.toml
├── README.md
├── benches
│ ├── bands.rs
│ ├── cache.rs
│ ├── cache_concurrent.rs
│ ├── expression.rs
│ ├── pip.rs
│ ├── query_chunks.rs
│ ├── sources.rs
│ ├── thread_pool.rs
│ └── workflows.rs
├── src
│ ├── adapters
│ │ ├── band_extractor.rs
│ │ ├── feature_collection_merger.rs
│ │ ├── mod.rs
│ │ ├── raster_stacker.rs
│ │ ├── raster_subquery
│ │ │ ├── mod.rs
│ │ │ ├── raster_subquery_adapter.rs
│ │ │ └── raster_subquery_reprojection.rs
│ │ ├── raster_time.rs
│ │ ├── raster_time_substream.rs
│ │ ├── simple_raster_stacker.rs
│ │ ├── sparse_tiles_fill_adapter.rs
│ │ └── stream_statistics_adapter.rs
│ ├── cache
│ │ ├── cache_chunks.rs
│ │ ├── cache_operator.rs
│ │ ├── cache_stream.rs
│ │ ├── cache_tiles.rs
│ │ ├── error.rs
│ │ ├── mod.rs
│ │ ├── shared_cache.rs
│ │ └── util.rs
│ ├── engine
│ │ ├── clonable_operator.rs
│ │ ├── execution_context.rs
│ │ ├── initialized_sources.rs
│ │ ├── mod.rs
│ │ ├── operator.rs
│ │ ├── operator_impl.rs
│ │ ├── query.rs
│ │ ├── query_processor.rs
│ │ ├── result_descriptor.rs
│ │ └── workflow_path.rs
│ ├── error.rs
│ ├── lib.rs
│ ├── machine_learning
│ │ ├── mod.rs
│ │ ├── onnx.rs
│ │ └── onnx_util.rs
│ ├── meta
│ │ ├── mod.rs
│ │ ├── quota.rs
│ │ └── wrapper.rs
│ ├── mock
│ │ ├── mock_dataset_data_source.rs
│ │ ├── mock_feature_collection_source.rs
│ │ ├── mock_point_source.rs
│ │ ├── mock_raster_source.rs
│ │ └── mod.rs
│ ├── plot
│ │ ├── box_plot.rs
│ │ ├── class_histogram.rs
│ │ ├── histogram.rs
│ │ ├── mod.rs
│ │ ├── pie_chart.rs
│ │ ├── scatter_plot.rs
│ │ ├── statistics.rs
│ │ ├── temporal_raster_mean_plot.rs
│ │ └── temporal_vector_line_plot.rs
│ ├── processing
│ │ ├── band_neighborhood_aggregate
│ │ │ └── mod.rs
│ │ ├── bandwise_expression
│ │ │ └── mod.rs
│ │ ├── circle_merging_quadtree.rs
│ │ ├── circle_merging_quadtree
│ │ │ ├── aggregates.rs
│ │ │ ├── circle_of_points.rs
│ │ │ ├── circle_radius_model.rs
│ │ │ ├── grid.rs
│ │ │ ├── hash_map.rs
│ │ │ ├── node.rs
│ │ │ ├── operator.rs
│ │ │ └── quadtree.rs
│ │ ├── column_range_filter.rs
│ │ ├── expression
│ │ │ ├── error.rs
│ │ │ ├── mod.rs
│ │ │ ├── raster_operator.rs
│ │ │ ├── raster_query_processor.rs
│ │ │ └── vector_operator.rs
│ │ ├── interpolation
│ │ │ └── mod.rs
│ │ ├── line_simplification.rs
│ │ ├── map_query.rs
│ │ ├── meteosat
│ │ │ ├── mod.rs
│ │ │ ├── radiance.rs
│ │ │ ├── reflectance.rs
│ │ │ ├── satellite.rs
│ │ │ └── temperature.rs
│ │ ├── mod.rs
│ │ ├── neighborhood_aggregate
│ │ │ ├── aggregate.rs
│ │ │ ├── mod.rs
│ │ │ └── tile_sub_query.rs
│ │ ├── point_in_polygon.rs
│ │ ├── point_in_polygon
│ │ │ ├── tester.rs
│ │ │ └── wrapper.rs
│ │ ├── raster_scaling.rs
│ │ ├── raster_stacker.rs
│ │ ├── raster_type_conversion.rs
│ │ ├── raster_vector_join
│ │ │ ├── aggregated.rs
│ │ │ ├── aggregator.rs
│ │ │ ├── mod.rs
│ │ │ ├── non_aggregated.rs
│ │ │ └── util.rs
│ │ ├── rasterization
│ │ │ └── mod.rs
│ │ ├── reprojection.rs
│ │ ├── temporal_raster_aggregation
│ │ │ ├── aggregators.rs
│ │ │ ├── first_last_subquery.rs
│ │ │ ├── mod.rs
│ │ │ ├── subquery.rs
│ │ │ └── temporal_aggregation_operator.rs
│ │ ├── time_projection
│ │ │ └── mod.rs
│ │ ├── time_shift.rs
│ │ └── vector_join
│ │ │ ├── equi_data_join.rs
│ │ │ ├── mod.rs
│ │ │ └── util.rs
│ ├── source
│ │ ├── csv.rs
│ │ ├── gdal_source
│ │ │ ├── db_types.rs
│ │ │ ├── error.rs
│ │ │ ├── loading_info.rs
│ │ │ └── mod.rs
│ │ ├── mod.rs
│ │ └── ogr_source
│ │ │ ├── dataset_iterator.rs
│ │ │ └── mod.rs
│ └── util
│ │ ├── async_util.rs
│ │ ├── gdal.rs
│ │ ├── input
│ │ ├── float_with_nan_serde.rs
│ │ ├── mod.rs
│ │ ├── multi_raster_or_vector.rs
│ │ ├── raster_or_vector.rs
│ │ ├── string_or_number.rs
│ │ └── string_or_number_range.rs
│ │ ├── math.rs
│ │ ├── mod.rs
│ │ ├── number_statistics.rs
│ │ ├── raster_stream_to_geotiff.rs
│ │ ├── raster_stream_to_png.rs
│ │ ├── rayon.rs
│ │ ├── retry.rs
│ │ ├── statistics.rs
│ │ ├── stream_zip
│ │ ├── mod.rs
│ │ ├── tuple_zip.rs
│ │ └── vec_zip.rs
│ │ ├── string_token.rs
│ │ ├── sunpos.rs
│ │ └── temporary_gdal_thread_local_config_options.rs
└── tests
│ └── streams.rs
├── rust-toolchain.toml
├── services
├── Cargo.toml
├── README.md
├── benches
│ └── quota_check.rs
├── build.rs
├── src
│ ├── api
│ │ ├── apidoc.rs
│ │ ├── handlers
│ │ │ ├── datasets.rs
│ │ │ ├── ebv.rs
│ │ │ ├── layers.rs
│ │ │ ├── machine_learning.rs
│ │ │ ├── mod.rs
│ │ │ ├── permissions.rs
│ │ │ ├── plots.rs
│ │ │ ├── projects.rs
│ │ │ ├── spatial_references.rs
│ │ │ ├── tasks.rs
│ │ │ ├── upload.rs
│ │ │ ├── users.rs
│ │ │ ├── wcs.rs
│ │ │ ├── wfs.rs
│ │ │ ├── wms.rs
│ │ │ └── workflows.rs
│ │ ├── mod.rs
│ │ ├── model
│ │ │ ├── datatypes.rs
│ │ │ ├── mod.rs
│ │ │ ├── operators.rs
│ │ │ ├── responses
│ │ │ │ ├── datasets
│ │ │ │ │ ├── errors.rs
│ │ │ │ │ └── mod.rs
│ │ │ │ ├── ml_models
│ │ │ │ │ └── mod.rs
│ │ │ │ └── mod.rs
│ │ │ └── services.rs
│ │ └── ogc
│ │ │ ├── mod.rs
│ │ │ ├── util.rs
│ │ │ ├── wcs
│ │ │ ├── mod.rs
│ │ │ └── request.rs
│ │ │ ├── wfs
│ │ │ ├── mod.rs
│ │ │ └── request.rs
│ │ │ └── wms
│ │ │ ├── mod.rs
│ │ │ └── request.rs
│ ├── bin
│ │ ├── geoengine-cli.rs
│ │ └── geoengine-server.rs
│ ├── cli
│ │ ├── check_successful_startup.rs
│ │ ├── heartbeat.rs
│ │ ├── mod.rs
│ │ └── openapi.rs
│ ├── config.rs
│ ├── contexts
│ │ ├── db_types.rs
│ │ ├── migrations
│ │ │ ├── current_schema.rs
│ │ │ ├── current_schema.sql
│ │ │ ├── database_migration.rs
│ │ │ ├── migration_0015_log_quota.rs
│ │ │ ├── migration_0015_snapshot.sql
│ │ │ ├── migration_0016_merge_providers.rs
│ │ │ ├── migration_0016_merge_providers.sql
│ │ │ ├── migration_0016_test_data.sql
│ │ │ ├── migration_0017_ml_model_tensor_shape.rs
│ │ │ ├── migration_0018_wildlive_connector.rs
│ │ │ ├── migration_0018_wildlive_connector.sql
│ │ │ ├── mod.rs
│ │ │ └── schema_info.rs
│ │ ├── mod.rs
│ │ ├── postgres.rs
│ │ └── session.rs
│ ├── datasets
│ │ ├── create_from_workflow.rs
│ │ ├── dataset_listing_provider.rs
│ │ ├── external
│ │ │ ├── aruna
│ │ │ │ ├── error.rs
│ │ │ │ ├── metadata.rs
│ │ │ │ ├── mock_grpc_server.rs
│ │ │ │ └── mod.rs
│ │ │ ├── copernicus_dataspace
│ │ │ │ ├── ids.rs
│ │ │ │ ├── mod.rs
│ │ │ │ ├── provider.rs
│ │ │ │ ├── sentinel2.rs
│ │ │ │ └── stac.rs
│ │ │ ├── edr.rs
│ │ │ ├── gbif.rs
│ │ │ ├── gfbio_abcd.rs
│ │ │ ├── gfbio_collections.rs
│ │ │ ├── mod.rs
│ │ │ ├── netcdfcf
│ │ │ │ ├── database.rs
│ │ │ │ ├── ebvportal_api.rs
│ │ │ │ ├── ebvportal_provider.rs
│ │ │ │ ├── error.rs
│ │ │ │ ├── loading.rs
│ │ │ │ ├── metadata.rs
│ │ │ │ ├── mod.rs
│ │ │ │ └── overviews.rs
│ │ │ ├── pangaea
│ │ │ │ ├── meta.rs
│ │ │ │ └── mod.rs
│ │ │ ├── sentinel_s2_l2a_cogs.rs
│ │ │ └── wildlive
│ │ │ │ ├── cache.rs
│ │ │ │ ├── datasets.rs
│ │ │ │ ├── error.rs
│ │ │ │ ├── mod.rs
│ │ │ │ └── wildlive.http
│ │ ├── listing.rs
│ │ ├── mod.rs
│ │ ├── name.rs
│ │ ├── postgres.rs
│ │ ├── storage.rs
│ │ └── upload.rs
│ ├── error.rs
│ ├── layers
│ │ ├── add_from_directory.rs
│ │ ├── error.rs
│ │ ├── external.rs
│ │ ├── layer.rs
│ │ ├── listing.rs
│ │ ├── mod.rs
│ │ ├── postgres_layer_db.rs
│ │ └── storage.rs
│ ├── lib.rs
│ ├── machine_learning
│ │ ├── error.rs
│ │ ├── mod.rs
│ │ ├── name.rs
│ │ └── postgres.rs
│ ├── permissions
│ │ ├── mod.rs
│ │ └── postgres_permissiondb.rs
│ ├── projects
│ │ ├── error.rs
│ │ ├── mod.rs
│ │ ├── postgres_projectdb.rs
│ │ ├── project.rs
│ │ └── projectdb.rs
│ ├── quota
│ │ └── mod.rs
│ ├── server.rs
│ ├── stac
│ │ └── mod.rs
│ ├── tasks
│ │ ├── error.rs
│ │ ├── in_memory.rs
│ │ ├── mod.rs
│ │ ├── time_estimation.rs
│ │ ├── users.rs
│ │ └── util.rs
│ ├── users
│ │ ├── mod.rs
│ │ ├── oidc.rs
│ │ ├── postgres_userdb.rs
│ │ ├── session.rs
│ │ ├── user.rs
│ │ └── userdb.rs
│ ├── util
│ │ ├── apidoc.rs
│ │ ├── encryption.rs
│ │ ├── extractors.rs
│ │ ├── identifiers.rs
│ │ ├── middleware.rs
│ │ ├── mod.rs
│ │ ├── openapi_examples.rs
│ │ ├── openapi_visitor.rs
│ │ ├── openapi_visitors.rs
│ │ ├── operators.rs
│ │ ├── parsing.rs
│ │ ├── postgres.rs
│ │ ├── server.rs
│ │ ├── tests.rs
│ │ └── workflows.rs
│ └── workflows
│ │ ├── mod.rs
│ │ ├── postgres_workflow_registry.rs
│ │ ├── raster_stream.rs
│ │ ├── registry.rs
│ │ ├── vector_stream.rs
│ │ └── workflow.rs
├── test.http
└── tests
│ ├── drivers.rs
│ ├── openapi.rs
│ └── startup.rs
└── test_data
├── .gitignore
├── api_calls
├── copernicus_provider.http
├── ebv_api.http
├── log_test.http
├── quota.http
├── wcs.http
└── wms.http
├── colorizer
├── empty.png
├── linear_gradient.png
├── logarithmic_gradient.png
├── no_data.png
├── palette.png
└── rgba.png
├── copernicus_dataspace
├── eodata
│ └── Sentinel-2
│ │ └── MSI
│ │ └── L2A_N0500
│ │ └── 2020
│ │ └── 07
│ │ └── 03
│ │ └── S2A_MSIL2A_20200703T103031_N0500_R108_T32UMB_20230321T201840.SAFE
│ │ ├── GRANULE
│ │ └── L2A_T32UMB_A026274_20200703T103027
│ │ │ ├── IMG_DATA
│ │ │ └── R10m
│ │ │ │ ├── T32UMB_20200703T103031_B02_10m.jp2.head
│ │ │ │ ├── T32UMB_20200703T103031_B03_10m.jp2.head
│ │ │ │ ├── T32UMB_20200703T103031_B04_10m.jp2.head
│ │ │ │ └── T32UMB_20200703T103031_B08_10m.jp2.head
│ │ │ └── MTD_TL.xml
│ │ └── MTD_MSIL2A.xml
├── license.txt
└── stac_responses
│ └── stac_response_1.json
├── dataset_defs
├── germany_polygon.json
├── landcover.json
├── mock.json
├── natural_earth_2_blue.json
├── natural_earth_2_green.json
├── natural_earth_2_red.json
├── ndvi (3587).json
├── ndvi.json
├── ndvi_flipped_y_axis.json
├── ndvi_list.json
├── ne_10m_ports (3857).json
├── ne_10m_ports.json
└── points_with_time.json
├── drone_mapping
├── drone_images
│ ├── drone_image_0.jpg
│ └── drone_image_1.jpg
└── odm_result
│ └── odm_orthophoto
│ └── odm_orthophoto.tif
├── edr
├── edr_GFS_isobaric.json
├── edr_GFS_single-level.json
├── edr_PointsInFrance.json
├── edr_PointsInGermany.json
├── edr_collections.json
└── edr_raster.tif
├── gbif
├── init_test_data.sql
└── test_data.sql
├── gfbio
├── collections_api_response.json
├── init_test_data.sql
└── test_data.sql
├── layer_collection_defs
├── empty_collection.json
├── no_parent_collection.json
├── root_collection.json
└── test_collection.json
├── layer_defs
├── ports_in_germany.json
├── raster_stack.json
└── rgb.json
├── migrations
└── test_data.sql
├── ml
├── b764bf81-e21d-4eb8-bf01-fac9af13faee
│ └── mock_model.json
├── onnx
│ ├── test_a_plus_b.onnx
│ ├── test_classification.onnx
│ ├── test_regression.onnx
│ └── training_scripts
│ │ ├── build_test_a_plus_b.py
│ │ ├── train_test_classification.py
│ │ └── train_test_regression.py
└── xgboost
│ ├── b764bf81-e21d-4eb8-bf01-fac9af13faee
│ └── model.json
│ ├── reference_test_model.json
│ └── s2_10m_de_marburg
│ └── model.json
├── netcdf4d
├── Biodiversity
│ ├── dataset_daily.nc
│ └── dataset_monthly.nc
├── dataset_esri.nc
├── dataset_irr_ts.nc
├── dataset_m.nc
├── dataset_sm.colorizer.json
├── dataset_sm.nc
├── license_notes.txt
└── overviews
│ └── .gitkeep
├── pangaea
├── license_notes.txt
├── pangaea_geo_box.tsv
├── pangaea_geo_box_meta.json
├── pangaea_geo_box_meta_invalid_coord.json
├── pangaea_geo_box_meta_missing_coord.json
├── pangaea_geo_lat_lon.tsv
├── pangaea_geo_lat_lon_citation.txt
├── pangaea_geo_lat_lon_meta.json
├── pangaea_geo_none.tsv
├── pangaea_geo_none_meta.json
├── pangaea_geo_point.tsv
├── pangaea_geo_point_meta.json
└── pangaea_single_creator_meta.json
├── provider_defs
├── aruna.json
├── copernicus_dataspace.json
├── ebv.json
├── gbif.json
├── gfbio_abcd.json
├── gfbio_collections.json
├── netcdfcf.json
├── open_weather.json
├── pangaea.json
├── sentinel_s2_l2a_cogs.json
├── user_datasets.json
└── wildlive.json
├── raster
├── cloud_optimized_geotiff_big_tiff_from_stream_compressed.tiff
├── cloud_optimized_geotiff_from_stream_compressed.tiff
├── cloud_optimized_geotiff_timestep_0_from_stream_compressed.tiff
├── cloud_optimized_geotiff_timestep_1_from_stream_compressed.tiff
├── cloud_optimized_geotiff_timestep_2_from_stream_compressed.tiff
├── geotiff_big_tiff_from_stream_compressed.tiff
├── geotiff_from_stream_compressed.tiff
├── geotiff_with_mask_from_stream_compressed.tiff
├── landcover
│ ├── landcover.tif
│ └── readme.txt
├── modis_ndvi
│ ├── MOD13A2_M_NDVI_2014-01-01.TIFF
│ ├── MOD13A2_M_NDVI_2014-02-01.TIFF
│ ├── MOD13A2_M_NDVI_2014-03-01.TIFF
│ ├── MOD13A2_M_NDVI_2014-04-01.TIFF
│ ├── MOD13A2_M_NDVI_2014-05-01.TIFF
│ ├── MOD13A2_M_NDVI_2014-06-01.TIFF
│ ├── flipped_axis_y
│ │ └── MOD13A2_M_NDVI_2014-01-01_flipped_y.tiff
│ ├── projected_3857
│ │ ├── MOD13A2_M_NDVI_2014-04-01.TIFF
│ │ ├── MOD13A2_M_NDVI_2014-04-01_tile-20.pgw
│ │ ├── MOD13A2_M_NDVI_2014-04-01_tile-20.png
│ │ ├── MOD13A2_M_NDVI_2014-04-01_tile-20.rdc
│ │ ├── MOD13A2_M_NDVI_2014-04-01_tile-20.ref
│ │ └── MOD13A2_M_NDVI_2014-04-01_tile-20.rst
│ └── with_offset_scale
│ │ └── MOD13A2_M_NDVI_2014-01-01.TIFF
├── natural_earth_2
│ └── NE2_150M_SR_W.tif
├── png
│ └── png_from_stream.png
└── simple_raster
│ └── wikipedia_esri_asci.asc
├── stac_responses
├── cog-header.bin
├── cog-tile.bin
└── items_page_1_limit_500.json
├── vector
├── data
│ ├── empty.json
│ ├── germany_polygon.gpkg
│ ├── lonlat.csv
│ ├── lonlat_date.csv
│ ├── lonlat_date_time.csv
│ ├── lonlat_date_time_tz.csv
│ ├── lonlat_unix_date.csv
│ ├── missing_geo.json
│ ├── ne_10m_ports
│ │ ├── license.txt
│ │ ├── ne_10m_ports.README.html
│ │ ├── ne_10m_ports.VERSION.txt
│ │ ├── ne_10m_ports.cpg
│ │ ├── ne_10m_ports.dbf
│ │ ├── ne_10m_ports.prj
│ │ ├── ne_10m_ports.shp
│ │ ├── ne_10m_ports.shx
│ │ ├── projected_3857
│ │ │ ├── ne_10m_ports.dbf
│ │ │ ├── ne_10m_ports.prj
│ │ │ ├── ne_10m_ports.shp
│ │ │ └── ne_10m_ports.shx
│ │ ├── source.txt
│ │ └── with_spatial_index
│ │ │ └── ne_10m_ports.gpkg
│ ├── plain_data.csv
│ ├── points.csv
│ ├── points.fgb
│ ├── points_with_bool.csv
│ ├── points_with_date.cpg
│ ├── points_with_date.dbf
│ ├── points_with_date.prj
│ ├── points_with_date.shp
│ ├── points_with_date.shx
│ ├── points_with_iso_start_duration.json
│ ├── points_with_iso_time.json
│ ├── points_with_time.gpkg
│ ├── points_with_time.json
│ └── two_layers.gpkg
└── germany_polygon.json
├── wildlive
└── responses
│ ├── annotations.json
│ ├── image_objects.json
│ ├── project.json
│ ├── projects.json
│ ├── station_coordinates.json
│ └── station_setups.json
└── wms
├── gaussian_blur.png
├── get_map.png
├── get_map_colorizer.png
├── get_map_ndvi.png
├── ne2_rgb_colorizer.png
├── ne2_rgb_colorizer_gray.png
├── partial_derivative.png
├── raster.png
├── raster_colorizer.png
└── raster_small.png

/.cargo/config.toml:
--------------------------------------------------------------------------------
[target.x86_64-unknown-linux-gnu]
linker = "clang"
rustflags = ["-C", "link-arg=-fuse-ld=lld"]

--------------------------------------------------------------------------------
/.github/actions/test/action.yml:
--------------------------------------------------------------------------------
name: "Run Tests & Generate Coverage"
description: "Run Geo Engine tests with coverage"
runs:
  using: "docker"
  image: "docker://quay.io/geoengine/devcontainer:latest"
  entrypoint: "./.github/actions/test/test.sh"

--------------------------------------------------------------------------------
/.github/actions/test/test.sh:
--------------------------------------------------------------------------------
#!/bin/bash

function print_headline() {
    local BOLD_WHITE_ON_CYAN="\e[1;46;37m"
    local BOLD_CYAN="\e[1;49;36m"
    local RESET_COLOR="\e[0m"
    printf "${BOLD_WHITE_ON_CYAN} ▶ ${BOLD_CYAN} $1 ${RESET_COLOR}\n" >&2
}

print_headline "Install cargo-llvm-cov"
cargo install --locked cargo-llvm-cov

print_headline "Run Tests & Generate Code Coverage"
service postgresql start
cargo llvm-cov \
    --locked \
    --all-features \
    --profile ci \
    --lcov \
    --output-path lcov.info \
    || exit 1

print_headline "Run Doctests"
# cf. https://github.com/taiki-e/cargo-llvm-cov/issues/2
cargo test --doc --all-features --profile ci --locked || exit 1

--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
version: 2
updates:
  - package-ecosystem: "cargo"
    # Look for `Cargo.toml` and `lock` files in the `root` directory
    directory: "/"
    # Check crates.io for updates once a week
    schedule:
      interval: "weekly"
      day: "sunday" # to be checked before Monday

--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
- [ ] I added an entry to [`CHANGELOG.md`](CHANGELOG.md) if knowledge of this change could be valuable to users.

---

Here is a brief summary of what I did:


--------------------------------------------------------------------------------
/.github/workflows/clear-cache.yml:
--------------------------------------------------------------------------------
name: Clear Cache

on:
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

env:
  CARGO_TERM_COLOR: always

jobs:
  clear-cache:
    runs-on: ubuntu-24.04

    steps:
      - name: Clear all caches
        uses: actions/github-script@v6
        with:
          script: |
            const {GITHUB_TOKEN, GITHUB_REPOSITORY} = process.env

            const [owner, repo] = GITHUB_REPOSITORY.split('/')

            console.log(`Clearing all caches for ${owner}/${repo}`)

            let caches = [];

            do {
              const result = await github.request('GET /repos/{owner}/{repo}/actions/caches', {
                owner,
                repo,
              });

              caches = result.data.actions_caches;
              console.log(`Found ${caches.length} caches`);

              for (const cache of caches) {
                console.log(`Deleting cache ${cache.id}...`);

                await github.request('DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}', {
                  owner,
                  repo,
                  cache_id: cache.id
                })
              }
            } while (caches.length > 0);

--------------------------------------------------------------------------------
/.github/workflows/lint-pr-title.yml:
--------------------------------------------------------------------------------
name: "Lint PR"
on:
  pull_request:
    types:
      - opened
      - edited

jobs:
  title:
    name: Title
    if: github.event.action == 'opened' || github.event.changes.title.from
    runs-on: ubuntu-latest
    steps:
      - uses: geo-engine/conventional-pr-title@v1
        with:
          types: |-
            build
            ci
            docs
            feat
            fix
            perf
            refactor
            test
          scopes: |-
            datatypes
            expression
            macros
            operators
            services

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Compile output
/target
**/*.rs.bk

# Build artifacts
*.profraw
lcov.info

# IDE files
/.idea
/.vscode
Settings.toml
.env

# Venvs
/.venv

# Data
upload/
test_upload/
services/test_upload/
**/*.gpkg-shm
**/*.gpkg-wal
geo_engine_*.log
**/**.aux.xml
test_data/netcdf4d/overviews/*

--------------------------------------------------------------------------------
/.scripts/update-expression-deps.rs:
--------------------------------------------------------------------------------
#!/usr/bin/env -S cargo +nightly -Zscript

---cargo
[package]
edition = "2024"

[dependencies]
tempfile = "3.15"
---
//! This script updates the dependencies for the `expression/deps-workspace` by performing the following steps:
//!
//! 1. Creates a temporary directory.
//! 2. Copies all files from the `expression/deps-workspace` directory to the temporary directory.
//! 3. Runs `cargo update` in the temporary directory to update the dependencies.
//! 4. Copies the updated `Cargo.lock` file back to the `expression/deps-workspace` directory.
//!
//! If any step fails, the script will print an error message and exit with a non-zero status code.
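//!
//! A usage sketch (assuming a nightly toolchain with `-Zscript` support, run from the repository root):
//!
//! ```bash
//! ./.scripts/update-expression-deps.rs
//! ```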

use std::fs;
use std::path::Path;

fn main() {
    let temp_dir = tempfile::tempdir().unwrap();

    let deps_workspace = Path::new("expression/deps-workspace");

    if !deps_workspace.exists() || !deps_workspace.is_dir() {
        eprintln!("Dependencies workspace does not exist at {:?}", deps_workspace);
        std::process::exit(1);
    }

    for entry in fs::read_dir(deps_workspace).unwrap() {
        let entry = entry.unwrap();
        let path = entry.path();
        if path.is_file() {
            fs::copy(&path, temp_dir.path().join(path.file_name().unwrap())).unwrap();
        }
    }

    eprintln!("Copied dependencies workspace to {:?}", temp_dir);

    // Run `cargo update` in the temporary directory

    let status = std::process::Command::new("cargo")
        .arg("update")
        .current_dir(temp_dir.path())
        .status()
        .unwrap();

    if !status.success() {
        eprintln!("Failed to update dependencies");
        std::process::exit(1);
    }

    eprintln!("Updated dependencies successfully");

    // Copy the updated lockfile back to the workspace

    fs::copy(temp_dir.path().join("Cargo.lock"), deps_workspace.join("Cargo.lock")).unwrap();

    eprintln!("Copied updated lockfile back to {:?}", deps_workspace);
}

--------------------------------------------------------------------------------
/.sqlfluff:
--------------------------------------------------------------------------------
[sqlfluff]
dialect = postgres
templater = jinja
sql_file_exts = .sql
large_file_skip_byte_limit = 40000

--------------------------------------------------------------------------------
/CODESTYLE.md:
--------------------------------------------------------------------------------
# Code style

This document collects some best practices we use in our code.

## Expect messages

If you unwrap a `Result` without handling the error, you should use `expect` and adhere to the [common message styles from the Rust docs](https://doc.rust-lang.org/std/error/index.html#common-message-styles):

> describe the reason we expect the Result should be Ok. With this style we would prefer to write:
> `let path = std::env::var("IMPORTANT_PATH").expect("env variable IMPORTANT_PATH should be set by wrapper_script.sh");`

--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
# Contributing to Geo Engine

We are grateful for any contributions.
This guide gives you an overview of how to add value to this repository.

## 📝 Opening an Issue

Feel free to open an issue.
There are three main types of issues.

### 🐞 Bug report

If you file a bug report, please tell us what the expected outcome would be and how to reproduce the bug.
It would be nice if you created, for instance, a failing test with the expected outcome.

### 📣 Discussion

If you want to discuss new features before implementing them, feel free to start a discussion on [GitHub](https://github.com/geo-engine/geoengine/discussions) and get feedback on your idea.

### ❓ Get in touch

In case you have difficulties getting started, feel free to ask for help!
You can either start a Q&A on [GitHub](https://github.com/geo-engine/geoengine/discussions) or write an email to [dev@geoengine.de](mailto:dev@geoengine.de).

## 🛠️ Pull requests

We are happy to receive pull requests for bug fixes, improvements, and new features.
All PRs run through some steps.

### CLA

We require you to sign a contributor license agreement before we can accept your code contribution.
You can visit [www.geoengine.de/cla](https://www.geoengine.de/cla/) to get more information.

### Code style

Please have a look at our [code style](CODESTYLE.md) where we collect some best practices your code should adhere to.

### Code reviews

The core team usually performs a code review on each pull request.
We address all issues and try to give helpful feedback.
You can either rebut a comment or provide a fix.
Please reference the commit that addresses a comment in a reply to that comment, so that we can easily check it and keep the process fast.
Moreover, please do not mark the issues as resolved yourself – it will always be the one that commented on the code that resolves it.

### Tests

Our CI runs several tests regarding code formatting, static lints, and also our test suite of unit and integration tests.
Please provide tests that accompany your code.
Our coverage tool will automatically tell you how much of your contribution is covered by tests.

### Your PR is merged!

Congratulations! The Geo Engine community thanks you for your contribution. ✨

--------------------------------------------------------------------------------
/Settings-test.toml:
--------------------------------------------------------------------------------
[postgres]
host = "localhost"
port = 5432
database = "geoengine"
schema = "pg_temp" # we need the right to create new schemata for tests
user = "geoengine"
password = "geoengine"

[operators.gdal_source]
raster_data_root_path = "../test_data/raster" # relative to sub crate directory for tests

[raster.tiling_specification]
origin_coordinate_x = 0.0
origin_coordinate_y = 0.0
tile_shape_pixels_x = 512
tile_shape_pixels_y = 512

[upload]
path = "test_upload"

[oidc]
enabled = true
issuer = ""
client_id = ""
client_secret = ""
redirect_uri = ""
scopes = []

[user]
admin_email = "admin@localhost"
admin_password = "admin"

[quota]
mode = "check"
initial_credits = 9999
increment_quota_buffer_size = 0 # number of quota updates to buffer before sending them to the database
increment_quota_buffer_timeout_seconds = 60 # number of seconds after which the quota updates are sent to the database

[cache]
enabled = false
# storage limit for the cache
size_in_mb = 1_000 # 1 GB
# storage limit for collecting query results before insertion into the cache
landing_zone_ratio = 0.1 # 10% of total cache size

--------------------------------------------------------------------------------
/clippy.toml:
--------------------------------------------------------------------------------
avoid-breaking-exported-api = false

# Adding "OpenAPI" is necessary because the word is used in a doc string in geoengine_services::api.
doc-valid-idents = ["OpenAPI", "PostgreSQL", ".."]

# It is okay to `unwrap` in tests, to be able to easily write tests.
allow-unwrap-in-tests = true

--------------------------------------------------------------------------------
/datatypes/.gitignore:
--------------------------------------------------------------------------------
# Compile output
/target
**/*.rs.bk

# Rust artifacts
Cargo.lock

# IDE files
/.idea

--------------------------------------------------------------------------------
/datatypes/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "geoengine-datatypes"
version.workspace = true
authors.workspace = true
edition.workspace = true
publish.workspace = true
license-file.workspace = true
documentation.workspace = true
repository.workspace = true

[features]

[dependencies]
arrow = { workspace = true }
arrow-array = { workspace = true }
arrow-ord = { workspace = true }
arrow-schema = { workspace = true }
bytes = { workspace = true }
chrono = { workspace = true }
fallible-iterator = { workspace = true }
float-cmp = { workspace = true }
gdal = { workspace = true }
geo = { workspace = true }
geojson = { workspace = true }
image = { workspace = true }
num = { workspace = true }
num-traits = { workspace = true }
ordered-float = { workspace = true }
paste = { workspace = true }
postgres-protocol = { workspace = true }
postgres-types = { workspace = true }
proj = { workspace = true }
rayon = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
serde_with = { workspace = true }
snafu = { workspace = true }
uuid = { workspace = true }
wkt = { workspace = true }
strum = { workspace = true }

[dev-dependencies]
criterion = { workspace = true }
tempfile = { workspace = true }

[[bench]]
name = "multi_point_collection"
harness = false

[[bench]]
name = "grid_mapping"
harness = false

[[bench]]
name = "grid_updates"
harness = false

[[bench]]
name = "masked_grid_mapping"
harness = false

[lints]
workspace = true

--------------------------------------------------------------------------------
/datatypes/README.md:
--------------------------------------------------------------------------------
# geo engine datatypes

This crate provides all datatypes, such as primitives and collections, for Geo Engine.
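
A minimal usage sketch (a hypothetical snippet; `Coordinate2D` is re-exported from this crate's `primitives` module and has public `x`/`y` fields):

```rust
use geoengine_datatypes::primitives::Coordinate2D;

// a simple primitive: a 2D coordinate in EPSG:4326
let marburg = Coordinate2D {
    x: 8.766_793_3,  // longitude
    y: 50.802_172_8, // latitude
};
assert!(marburg.x < marburg.y);
```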

--------------------------------------------------------------------------------
/datatypes/src/collections/error.rs:
--------------------------------------------------------------------------------
use arrow::error::ArrowError;
use snafu::prelude::*;

use crate::error::Error;
use crate::primitives::PrimitivesError;

#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
#[snafu(context(suffix(false)))] // disables default `Snafu` suffix
pub enum FeatureCollectionError {
    #[snafu(display("Arrow internal error: {:?}", source))]
    ArrowInternal {
        source: ArrowError,
    },

    CannotAccessReservedColumn {
        name: String,
    },

    ColumnDoesNotExist {
        name: String,
    },

    ColumnAlreadyExists {
        name: String,
    },

    ColumnDuplicate {
        name: String,
    },

    EmptyPredicate,

    Primitives {
        source: PrimitivesError,
    },

    UnmatchedLength {
        a: usize,
        b: usize,
    },

    UnmatchedSchema {
        a: Vec<String>,
        b: Vec<String>,
    },

    WrongDataType,

    MissingColumnArray,

    MissingTime,
    MissingGeo,
}

impl From<FeatureCollectionError> for Error {
    fn from(error: FeatureCollectionError) -> Self {
        Error::FeatureCollection { source: error }
    }
}

impl From<ArrowError> for FeatureCollectionError {
    fn from(source: ArrowError) -> Self {
        FeatureCollectionError::ArrowInternal { source }
    }
}

impl From<PrimitivesError> for FeatureCollectionError {
    fn from(source: PrimitivesError) -> Self {
        FeatureCollectionError::Primitives { source }
    }
}

--------------------------------------------------------------------------------
/datatypes/src/collections/mod.rs:
--------------------------------------------------------------------------------
mod batch_builder;
mod data_types;
mod error;
mod feature_collection;
#[macro_use]
mod feature_collection_builder;
#[macro_use]
mod geo_feature_collection;
mod ipc;

mod data_collection;
mod multi_line_string_collection;
mod multi_point_collection;
mod multi_polygon_collection;

pub(crate) use error::FeatureCollectionError;
use feature_collection::FilterArray;
pub use feature_collection::{
    ChunksEqualIgnoringCacheHint, ColumnNamesIter, FeatureCollection, FeatureCollectionInfos,
    FeatureCollectionInternals, FeatureCollectionIterator, FeatureCollectionModifications,
    FeatureCollectionRow, FilteredColumnNameIter, ToGeoJson,
};
pub use feature_collection_builder::{
    BuilderProvider, FeatureCollectionBuilder, FeatureCollectionRowBuilder,
    GeoFeatureCollectionRowBuilder,
};
pub use geo_feature_collection::{
    GeoFeatureCollectionModifications, GeometryCollection, GeometryRandomAccess,
    IntoGeometryIterator, IntoGeometryOptionsIterator,
};

pub use data_collection::DataCollection;
pub use data_types::{
    GeoVectorDataType, TypedFeatureCollection, TypedFeatureCollectionRef, VectorDataType,
    VectorDataTyped,
};
pub use multi_line_string_collection::MultiLineStringCollection;
pub use multi_point_collection::MultiPointCollection;
pub use multi_polygon_collection::MultiPolygonCollection;

pub use batch_builder::RawFeatureCollectionBuilder;
pub use ipc::FeatureCollectionIpc;

/// Calls a function on a `TypedFeatureCollection` by calling it on its variant.
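/// The call expands to a `match` over the `Data`, `MultiPoint`, `MultiLineString`,
/// and `MultiPolygon` variants, binding the inner collection to the given identifier.
/// A hypothetical call (assuming a `len` method, e.g. via `FeatureCollectionInfos`):
/// `let n = call_generic_features!(typed_collection, features => features.len());`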
/// Call via `call_generic_features!(input, features => function)`.
#[macro_export]
macro_rules! call_generic_features {
    ($input_features:expr, $features:ident => $function_call:expr) => {
        call_generic_features!(
            @variants $input_features, $features => $function_call,
            Data, MultiPoint, MultiLineString, MultiPolygon
        )
    };

    (@variants $input_features:expr, $features:ident => $function_call:expr, $($variant:tt),+) => {
        match $input_features {
            $(
                $crate::collections::TypedFeatureCollection::$variant($features) => $function_call,
            )+
        }
    };
}

--------------------------------------------------------------------------------
/datatypes/src/lib.rs:
--------------------------------------------------------------------------------
pub mod collections;
pub mod dataset;
pub mod error;
pub mod machine_learning;
pub mod operations;
pub mod plots;
pub mod primitives;
pub mod raster;
pub mod spatial_reference;
pub mod util;

/// This macro resolves paths of files or folders in the `test_data` folder.
/// The `test_data` folder is located in the workspace root and has the same name as this macro.
/// To address data from the `test_data` folder you can use the macro like this:
///
/// Assuming a file "test.tiff" in `test_data` with the path `test_data/test.tiff`, call the macro with `test_data!("test.tiff")`.
/// Assuming a file "more-data.csv" in `test_data/vector/` with the path `test_data/vector/more-data.csv`, call the macro with `test_data!("vector/more-data.csv")`.
///
/// # Panics
/// * if the path of the parent folder of `env!("CARGO_MANIFEST_DIR")` is unresolvable.
///
#[macro_export]
macro_rules! test_data {
    ($name:expr) => {
        std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
            .canonicalize() // get a full path
            .expect("should be available during testing")
            .parent()
            .expect("should be available during testing")
            .join("test_data/")
            .join($name)
            .as_path()
    };
}

--------------------------------------------------------------------------------
/datatypes/src/operations/image/into_lossy.rs:
--------------------------------------------------------------------------------
/// This trait ensures a conversion into a type `T`.
/// Unlike `Into`, it accepts a loss in precision.
pub trait LossyInto<T> {
    /// Convert into `T` and accept a loss in precision for types with larger value ranges
    fn lossy_into(self) -> T;
}

/// Implement `LossyInto` for types that are `Into`
macro_rules! non_lossy_into_impl {
    ($from:ty, $into:ty) => {
        impl LossyInto<$into> for $from {
            fn lossy_into(self) -> $into {
                self.into()
            }
        }
    };
}

/// Implement `LossyInto` for types that can be cast with `as T`
macro_rules! type_cast_lossy_into_impl {
    ($from:ty, $into:ty) => {
        impl LossyInto<$into> for $from {
            fn lossy_into(self) -> $into {
                self as $into
            }
        }
    };
}

non_lossy_into_impl!(f64, f64);
non_lossy_into_impl!(f32, f64);

non_lossy_into_impl!(u32, f64);
non_lossy_into_impl!(i32, f64);
non_lossy_into_impl!(u16, f64);
non_lossy_into_impl!(i16, f64);
non_lossy_into_impl!(u8, f64);
non_lossy_into_impl!(i8, f64);

type_cast_lossy_into_impl!(u64, f64);
type_cast_lossy_into_impl!(i64, f64);

impl LossyInto<f64> for bool {
    /// This function allows transforming booleans to 0/1 `f64`s
    fn lossy_into(self) -> f64 {
        if self { 1. } else { 0. }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    #[allow(clippy::float_cmp)]
    fn conversions() {
        assert_eq!(42.0_f64.lossy_into(), 42.0_f64);
        assert_eq!(42.0_f32.lossy_into(), 42.0_f64);

        assert_eq!(42_u32.lossy_into(), 42.0_f64);
        assert_eq!(42_i32.lossy_into(), 42.0_f64);
        assert_eq!(42_u16.lossy_into(), 42.0_f64);
        assert_eq!(42_i16.lossy_into(), 42.0_f64);
        assert_eq!(42_u8.lossy_into(), 42.0_f64);
        assert_eq!(42_i8.lossy_into(), 42.0_f64);

        assert_eq!(42_u64.lossy_into(), 42.0_f64);
        assert_eq!(42_i64.lossy_into(), 42.0_f64);

        assert_eq!(true.lossy_into(), 1.0_f64);
        assert_eq!(false.lossy_into(), 0.0_f64);
    }
}

--------------------------------------------------------------------------------
/datatypes/src/operations/image/mod.rs:
--------------------------------------------------------------------------------
mod colorizer;
mod into_lossy;
mod rgba_transmutable;
mod to_png;

pub use colorizer::{
    Breakpoint, Breakpoints, ColorMapper, Colorizer, Palette, RasterColorizer, RgbParams, RgbaColor,
};
pub use into_lossy::LossyInto;
pub use rgba_transmutable::RgbaTransmutable;
pub use to_png::ToPng;

--------------------------------------------------------------------------------
/datatypes/src/operations/mod.rs:
--------------------------------------------------------------------------------
pub mod image;
pub mod reproject;
mod spatial_relation;

pub use spatial_relation::{Contains, Intersects};

--------------------------------------------------------------------------------
/datatypes/src/operations/spatial_relation.rs:
--------------------------------------------------------------------------------
/// Returns `true` iff `Self` contains the input `T`.
/// The relation also holds if `T` merely touches `Self`'s borders.
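/// For example (a sketch, assuming the respective implementations exist): a
/// `BoundingBox2D` contains a `Coordinate2D` that lies exactly on one of its edges.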
pub trait Contains<T> {
    fn contains(&self, other: &T) -> bool;
}

/// Returns `true` iff `Self` intersects with the input `T`.
pub trait Intersects<T> {
    fn intersects(&self, other: &T) -> bool;
}

--------------------------------------------------------------------------------
/datatypes/src/plots/mod.rs:
--------------------------------------------------------------------------------
mod area_line_plot;
mod bar_chart;
mod box_plot;
mod histogram;
mod histogram2d;
mod multi_line_plot;
mod pie_chart;
mod scatter_plot;

pub use area_line_plot::AreaLineChart;
pub use bar_chart::BarChart;
pub use box_plot::{BoxPlot, BoxPlotAttribute};
pub use histogram::{Histogram, HistogramBuilder};
pub use histogram2d::{Histogram2D, HistogramDimension};
pub use multi_line_plot::{DataPoint, MultiLineChart};
pub use pie_chart::PieChart;
pub use scatter_plot::ScatterPlot;

use crate::util::Result;
use serde::{Deserialize, Serialize};
use std::fmt::Debug;

pub trait Plot {
    /// Creates a Vega string for embedding it into a Html page
    ///
    /// # Errors
    ///
    /// This method fails on internal errors of the plot.
    ///
    fn to_vega_embeddable(&self, allow_interactions: bool) -> Result<PlotData>;

    // TODO: create some PNG output, cf. https://github.com/procyon-rs/vega_lite_3.rs/issues/18
    // fn to_png(&self, width_px: u16, height_px: u16) -> Vec<u8>;
}

#[derive(Debug, Clone, Deserialize, Eq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct PlotData {
    pub vega_string: String,
    pub metadata: PlotMetaData,
}

impl PartialEq for PlotData {
    fn eq(&self, other: &Self) -> bool {
        let vega_equals = match (
            serde_json::from_str::<serde_json::Value>(&self.vega_string),
            serde_json::from_str::<serde_json::Value>(&other.vega_string),
        ) {
            (Ok(v1), Ok(v2)) => v1 == v2, // if the vega_string is valid JSON, compare the JSON values to avoid formatting differences
            _ => self.vega_string == other.vega_string,
        };

        vega_equals && self.metadata == other.metadata
    }
}

#[derive(Debug, Clone, Deserialize, PartialEq, Eq, Serialize, Default)]
#[serde(untagged)]
pub enum PlotMetaData {
    #[default]
    None,
    #[serde(rename_all = "camelCase")]
    Selection { selection_name: String },
}

#[derive(Debug, Clone, Deserialize, PartialEq, Eq, Serialize)]
pub enum PlotOutputFormat {
    JsonPlain,
    JsonVega,
    ImagePng,
}

--------------------------------------------------------------------------------
/datatypes/src/primitives/error.rs:
--------------------------------------------------------------------------------
use crate::error::Error;
use arrow::error::ArrowError;
use snafu::prelude::*;

use super::TimeInstance;

#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
#[snafu(context(suffix(false)))] // disables default `Snafu` suffix
pub enum PrimitivesError {
    UnallowedEmpty,
    UnclosedPolygonRing,
    InvalidSpatialResolution {
        value: f64,
    },
    #[snafu(display("Arrow internal error: {:?}", source))]
    ArrowInternal {
        source: ArrowError,
    },
    InvalidConversion,

    #[snafu(display("Time instance must be between {} and {}, but is {}", min.inner(), max.inner(), is))]
    InvalidTimeInstance {
        min: TimeInstance,
        max: TimeInstance,
        is: i64,
    },
}

impl From<PrimitivesError> for Error {
    fn from(error: PrimitivesError) -> Self {
        Error::Primitives { source: error }
    }
}

impl From<ArrowError> for PrimitivesError {
    fn from(source: ArrowError) -> Self {
        PrimitivesError::ArrowInternal { source }
    }
}

--------------------------------------------------------------------------------
/datatypes/src/primitives/mod.rs:
--------------------------------------------------------------------------------
mod bounding_box;
mod circle;
mod coordinate;
mod datetime;
mod db_types;
mod error;
mod feature_data;
mod geometry;
mod line;
mod measurement;
mod multi_line_string;
mod multi_point;
mod multi_polygon;
mod no_geometry;
mod query_rectangle;
mod spatial_partition;
mod spatial_resolution;
mod spatio_temporal_bounded;
mod time_instance;
mod time_interval;
mod time_step;
mod ttl;

pub use bounding_box::{BoundingBox2D, bboxes_extent};
pub use circle::Circle;
pub use coordinate::Coordinate2D;
pub use datetime::{DateTime, DateTimeError, DateTimeParseFormat, Duration};
pub(crate) use error::PrimitivesError;
pub use feature_data::{
    BoolDataRef, CategoryDataRef, DataRef, DateTimeDataRef, FeatureData, FeatureDataRef,
    FeatureDataType, FeatureDataValue, FloatDataRef, FloatOptionsIter, FloatOptionsParIter,
    IntDataRef, TextDataRef,
};
pub use geometry::{AsGeo, AsGeoOption, Geometry, GeometryRef, TypedGeometry};
pub use line::Line;
pub use measurement::{ClassificationMeasurement, ContinuousMeasurement, Measurement};
pub use multi_line_string::{MultiLineString, MultiLineStringAccess, MultiLineStringRef};
pub use multi_point::{MultiPoint, MultiPointAccess, MultiPointRef};
pub use multi_polygon::{MultiPolygon, MultiPolygonAccess, MultiPolygonRef};
pub use no_geometry::NoGeometry;
pub use query_rectangle::{
    BandSelection, ColumnSelection, PlotQueryRectangle, PlotSeriesSelection,
    QueryAttributeSelection, QueryRectangle, RasterQueryRectangle, VectorQueryRectangle,
};
pub use spatial_partition::{
    AxisAlignedRectangle, SpatialPartition2D, SpatialPartitioned, partitions_extent,
};
pub use spatial_resolution::SpatialResolution;
pub use spatio_temporal_bounded::{SpatialBounded, TemporalBounded};
pub use time_instance::TimeInstance;
pub use time_interval::{TimeInterval, time_interval_extent};
pub use time_step::{TimeGranularity, TimeStep, TimeStepIter};
pub use ttl::{CacheExpiration, CacheHint, CacheTtlSeconds};

--------------------------------------------------------------------------------
/datatypes/src/primitives/spatio_temporal_bounded.rs:
--------------------------------------------------------------------------------
use crate::primitives::{BoundingBox2D, TimeInterval};

pub trait SpatialBounded {
    fn spatial_bounds(&self) -> BoundingBox2D;
}

pub trait TemporalBounded {
    fn temporal_bounds(&self) -> TimeInterval;
}

--------------------------------------------------------------------------------
/datatypes/src/raster/macros_raster.rs:
--------------------------------------------------------------------------------
/// Maps a `TypedGrid2D` to another `TypedGrid2D` by calling a function on its variant.
/// Call via `map_generic_grid_2d!(input, grid => function)`.
#[macro_export]
macro_rules! map_generic_grid_2d {
    ($input_grid:expr, $grid:ident => $function_call:expr) => {
        map_generic_grid_2d!(
            @variants $input_grid, $grid => $function_call,
            U8, U16, U32, U64, I8, I16, I32, I64, F32, F64
        )
    };

    (@variants $input_grid:expr, $grid:ident => $function_call:expr, $($variant:tt),+) => {
        match $input_grid {
            $(
                $crate::raster::TypedGrid2D::$variant($grid) => {
                    $crate::raster::TypedGrid2D::$variant($function_call)
                }
            )+
        }
    };
}

/// Calls a function on a `TypedGrid2D` by calling it on its variant.
/// Call via `call_generic_grid_2d!(input, grid => function)`.
#[macro_export]
macro_rules! call_generic_grid_2d {
    ($input_grid:expr, $grid:ident => $function_call:expr) => {
        call_generic_grid_2d!(
            @variants $input_grid, $grid => $function_call,
            U8, U16, U32, U64, I8, I16, I32, I64, F32, F64
        )
    };

    (@variants $input_grid:expr, $grid:ident => $function_call:expr, $($variant:tt),+) => {
        match $input_grid {
            $(
                $crate::raster::TypedGrid2D::$variant($grid) => $function_call,
            )+
        }
    };
}

/// Calls a function on a `TypedGrid2D` and some `RasterDataType`-like enum, effectively matching
/// the raster with the corresponding enum value of the other enum.
/// Call via `call_generic_grid_2d_ext!(input, (raster, e) => function)`.
#[macro_export]
macro_rules! call_generic_grid_2d_ext {
    ($input_grid:expr, $other_enum:ty, ($grid:ident, $enum:ident) => $func:expr) => {
        call_generic_grid_2d_ext!(
            @variants $input_grid, $other_enum, ($grid, $enum) => $func,
            U8, U16, U32, U64, I8, I16, I32, I64, F32, F64
        )
    };

    (@variants $input_grid:expr, $other_enum:ty, ($grid:ident, $enum:ident) => $func:expr, $($variant:tt),+) => {
        match $input_grid {
            $(
                $crate::raster::TypedGrid2D::$variant($grid) => {
                    let $enum = <$other_enum>::$variant;
                    $func
                }
            )+
        }
    };
}

--------------------------------------------------------------------------------
/datatypes/src/raster/operations/mod.rs:
--------------------------------------------------------------------------------
pub mod blit;
pub mod checked_scaling;
pub mod convert_data_type;
pub mod from_index_fn;
pub mod grid_blit;
pub mod interpolation;
pub mod map_elements;
pub mod map_indexed_elements;
pub mod update_elements;
pub mod update_indexed_elements;

--------------------------------------------------------------------------------
/datatypes/src/util/any.rs:
--------------------------------------------------------------------------------
use std::{any::Any, sync::Arc};

/// Easy `Any`-casting by propagating the call to the underlying implementor
pub trait AsAny: Any {
    fn as_any(&self) -> &dyn std::any::Any;

    fn into_box_any(self: Box<Self>) -> Box<dyn Any>;
}

impl<T> AsAny for T
where
    T: Any,
{
    fn as_any(&self) -> &dyn std::any::Any {
        self
    }

    fn into_box_any(self: Box<Self>) -> Box<dyn Any> {
        Box::new(*self)
    }
}

/// Easy `Any`-casting for `Arc`s by propagating the call to the underlying implementor
pub trait AsAnyArc {
    /// Returns the required Arc type for calling `Arc::downcast`
    fn as_any_arc(self: Arc<Self>) -> Arc<(dyn Any + Send + Sync)>;
}

impl<T> AsAnyArc for T
where
    T: Any + Send + Sync,
{
    fn as_any_arc(self: Arc<Self>) -> Arc<(dyn Any + Send + Sync)> {
        self
    }
}

// TODO: test that everything works as expected

--------------------------------------------------------------------------------
/datatypes/src/util/gdal.rs:
--------------------------------------------------------------------------------
use serde::{Deserialize, Serialize};
use std::fmt::Display;

pub fn hide_gdal_errors() {
    gdal::config::set_error_handler(|_, _, _| {});
}

// TODO: push to `rust-gdal`
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "UPPERCASE")]
pub enum ResamplingMethod {
    Nearest,
    Average,
    Bilinear,
    Cubic,
    CubicSpline,
    Lanczos,
}

impl Display for ResamplingMethod {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            ResamplingMethod::Nearest => write!(f, "NEAREST"),
            ResamplingMethod::Average => write!(f, "AVERAGE"),
            ResamplingMethod::Bilinear => write!(f, "BILINEAR"),
            ResamplingMethod::Cubic => write!(f, "CUBIC"),
            ResamplingMethod::CubicSpline => write!(f, "CUBICSPLINE"),
            ResamplingMethod::Lanczos => write!(f, "LANCZOS"),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_deserialize_resampling_method() {
        let input = "\"NEAREST\"";
        let method = serde_json::from_str::<ResamplingMethod>(input).unwrap();

        assert_eq!(method, ResamplingMethod::Nearest);
        assert_eq!(method.to_string(), "NEAREST");
    }
}

--------------------------------------------------------------------------------
/datatypes/src/util/mod.rs:
--------------------------------------------------------------------------------
mod any;
pub mod arrow;
mod byte_size;
mod db_types;
pub mod gdal;
pub mod helpers;
pub mod identifiers;
mod image;
pub mod ranges;
mod result;
pub mod test;
pub mod well_known_data;

pub use self::identifiers::Identifier;
pub use any::{AsAny, AsAnyArc};
pub use byte_size::ByteSize;
pub use db_types::{HashMapTextTextDbType, NotNanF64, StringPair, TextTextKeyValue};
pub use image::{ImageFormat, assert_image_equals, assert_image_equals_with_format};
pub use result::Result;
use std::path::{Path, PathBuf};

/// Canonicalize `base`/`sub_path` and ensure the `sub_path` doesn't escape the `base`.
/// Returns an error if the `sub_path` escapes the `base`.
///
/// This only works if the `Path` you are referring to actually exists.
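/// For example (mirroring the test below): for the base `foo/bar`, the sub-path
/// `foobar` resolves to `foo/bar/foobar`, while `../barfoo` yields an error.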
///
pub fn canonicalize_subpath(base: &Path, sub_path: &Path) -> Result<PathBuf> {
    let base = base.canonicalize()?;
    let path = base.join(sub_path).canonicalize()?;

    if path.starts_with(&base) {
        Ok(path)
    } else {
        Err(crate::error::Error::SubPathMustNotEscapeBasePath {
            base,
            sub_path: sub_path.into(),
        })
    }
}

#[cfg(test)]
mod mod_tests {
    use super::*;
    #[test]
    fn it_doesnt_escape_base_path() {
        let tmp_dir = tempfile::tempdir().unwrap();
        let tmp_path = tmp_dir.path();
        std::fs::create_dir_all(tmp_path.join("foo/bar/foobar")).unwrap();
        std::fs::create_dir_all(tmp_path.join("foo/barfoo")).unwrap();

        assert_eq!(
            canonicalize_subpath(&tmp_path.join("foo/bar"), Path::new("foobar"))
                .unwrap()
                .to_string_lossy(),
            tmp_path.join("foo/bar/foobar").to_string_lossy()
        );

        assert!(canonicalize_subpath(&tmp_path.join("foo/bar"), Path::new("../barfoo")).is_err());
    }
}

--------------------------------------------------------------------------------
/datatypes/src/util/result.rs:
--------------------------------------------------------------------------------
use crate::error::Error;

pub type Result<T, E = Error> = std::result::Result<T, E>;

--------------------------------------------------------------------------------
/datatypes/src/util/well_known_data.rs:
--------------------------------------------------------------------------------
use crate::primitives::Coordinate2D;

// coordinates used for the tests in EPSG:4326
// and reprojected with proj cs2cs to EPSG:900913
//
// cs2cs -d 10 EPSG:4326 EPSG:900913
// 50.8021728 8.7667933
// 975914.9660458824 6586374.7028446598 0.0000000000
// 50.937531 6.9602786
// 774814.6695313191 6610251.1099264193 0.0000000000
// 53.565278 10.001389
// 1113349.5307054475 7088251.2962248782 0.0000000000

pub const MARBURG_EPSG_4326: Coordinate2D = Coordinate2D {
    x: 8.766_793_3,
    y: 50.802_172_8,
};

pub const MARBURG_EPSG_900_913: Coordinate2D = Coordinate2D {
    x: 975_914.966_045_882_4,
    y: 6_586_374.702_844_66,
};

pub const COLOGNE_EPSG_4326: Coordinate2D = Coordinate2D {
    x: 6.960_278_6,
    y: 50.937_531,
};

pub const COLOGNE_EPSG_900_913: Coordinate2D = Coordinate2D {
    x: 774_814.669_531_319,
    y: 6_610_251.109_926_419,
};

pub const HAMBURG_EPSG_4326: Coordinate2D = Coordinate2D {
    x: 10.001_389,
    y: 53.565_278,
};

pub const HAMBURG_EPSG_900_913: Coordinate2D = Coordinate2D {
    x: 1_113_349.530_705_447_5,
    y: 7_088_251.296_224_878,
};

--------------------------------------------------------------------------------
/expression/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "geoengine-expression"
version.workspace = true
authors.workspace = true
edition.workspace = true
publish.workspace = true
license-file.workspace = true
documentation.workspace = true
repository.workspace = true

[dependencies]
geoengine-expression-deps = { path = "deps-workspace" }
libloading = { workspace = true }
log = { workspace = true }
pest = { workspace = true }
pest_derive = { workspace = true }
prettyplease = { workspace = true }
proc-macro2 = { workspace = true }
quote = { workspace = true }
workspace = true } 21 | syn = { workspace = true } 22 | tempfile = { workspace = true } 23 | 24 | [dev-dependencies] 25 | geo = { workspace = true } 26 | pretty_assertions = { workspace = true } 27 | toml = { workspace = true } 28 | 29 | [lints] 30 | workspace = true 31 | -------------------------------------------------------------------------------- /expression/deps-workspace/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "geoengine-expression-deps" 3 | version = "0.0.0" 4 | edition = "2024" 5 | 6 | [lib] 7 | path = "lib.rs" 8 | crate-type = ["rlib"] 9 | test = false 10 | doctest = false 11 | bench = false 12 | doc = false 13 | proc-macro = false 14 | harness = false 15 | 16 | [dependencies] 17 | geo = "0.30.0" 18 | geo-types = "0.7.16" # important for compatibility when linking 19 | -------------------------------------------------------------------------------- /expression/deps-workspace/lib.rs: -------------------------------------------------------------------------------- 1 | use geo::{Area, Centroid}; 2 | 3 | #[derive(Debug, Clone, PartialEq)] 4 | pub struct MultiPoint(geo::MultiPoint); 5 | #[derive(Debug, Clone, PartialEq)] 6 | pub struct MultiLineString(geo::MultiLineString); 7 | #[derive(Debug, Clone, PartialEq)] 8 | pub struct MultiPolygon(geo::MultiPolygon); 9 | 10 | /// Implement `From` and `Into` for a geometry type. 11 | macro_rules! impl_geo_from_to { 12 | ( $geom_type:ty, $geo_multi_type:ty, $geo_single_type:ty ) => { 13 | impl From<$geo_multi_type> for $geom_type { 14 | fn from(geom: $geo_multi_type) -> Self { 15 | Self(geom) 16 | } 17 | } 18 | 19 | impl From<$geo_single_type> for $geom_type { 20 | fn from(geom: $geo_single_type) -> Self { 21 | Self(<$geo_multi_type>::new(vec![geom])) 22 | } 23 | } 24 | 25 | impl From<$geom_type> for $geo_multi_type { 26 | fn from(geom: $geom_type) -> Self { 27 | geom.0 28 | } 29 | } 30 | }; 31 | } 32 | 33 | impl_geo_from_to!(MultiPoint, geo::MultiPoint, geo::Point); 34 | impl_geo_from_to!(MultiLineString, geo::MultiLineString, geo::LineString); 35 | impl_geo_from_to!(MultiPolygon, geo::MultiPolygon, geo::Polygon); 36 | 37 | /// Common operations for all geometry types. 
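/// # Example
///
/// Illustrative sketch, not part of the original file: a 4x4 square yields an
/// unsigned area of `16.0`, and calling the trait through an `Option` forwards
/// `None` for a missing geometry.
///
/// ```ignore
/// let square: MultiPolygon = geo::Polygon::new(
///     geo::LineString::from(vec![(0.0, 0.0), (4.0, 0.0), (4.0, 4.0), (0.0, 4.0)]),
///     vec![],
/// )
/// .into();
///
/// assert_eq!(square.area(), Some(16.0));
/// assert_eq!(None::<MultiPolygon>.area(), None);
/// ```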
38 | pub trait GeoOptionOperations { 39 | fn area(&self) -> Option<f64>; 40 | 41 | fn centroid(&self) -> Option<MultiPoint>; 42 | } 43 | 44 | impl GeoOptionOperations for MultiPoint { 45 | fn area(&self) -> Option<f64> { 46 | Some(self.0.unsigned_area()) 47 | } 48 | 49 | fn centroid(&self) -> Option<MultiPoint> { 50 | Some(MultiPoint(self.0.centroid()?.into())) 51 | } 52 | } 53 | 54 | impl GeoOptionOperations for MultiLineString { 55 | fn area(&self) -> Option<f64> { 56 | Some(self.0.unsigned_area()) 57 | } 58 | 59 | fn centroid(&self) -> Option<MultiPoint> { 60 | Some(MultiPoint(self.0.centroid()?.into())) 61 | } 62 | } 63 | 64 | impl GeoOptionOperations for MultiPolygon { 65 | fn area(&self) -> Option<f64> { 66 | Some(self.0.unsigned_area()) 67 | } 68 | 69 | fn centroid(&self) -> Option<MultiPoint> { 70 | Some(MultiPoint(self.0.centroid()?.into())) 71 | } 72 | } 73 | 74 | impl<T> GeoOptionOperations for Option<T> 75 | where 76 | T: GeoOptionOperations, 77 | { 78 | fn area(&self) -> Option<f64> { 79 | self.as_ref()?.area() 80 | } 81 | 82 | fn centroid(&self) -> Option<MultiPoint> { 83 | self.as_ref()?.centroid() 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /expression/src/expression.pest: -------------------------------------------------------------------------------- 1 | WHITESPACE = _{ WHITE_SPACE } 2 | 3 | number = @{ 4 | decimal | integer 5 | } 6 | integer = @{ "-"? ~ ASCII_DIGIT+ } 7 | decimal = @{ "-"? ~ ASCII_DIGIT+ ~ "." ~ ASCII_DIGIT+ } 8 | 9 | identifier = @{ 10 | ASCII_ALPHA ~ (ASCII_ALPHANUMERIC | "_")* 11 | } 12 | 13 | // special identifier 14 | nodata = { ^"nodata" } 15 | 16 | function = { 17 | identifier ~ "(" ~ ")" | 18 | identifier ~ "(" ~ expression ~ ("," ~ expression)* ~ ")" 19 | } 20 | 21 | operator = _{ 22 | power | add | subtract | multiply | divide 23 | } 24 | add = { "+" } 25 | subtract = { "-" } 26 | multiply = { "*" } 27 | divide = { "/" } 28 | power = { "**" } 29 | 30 | 31 | expression = { term ~ (operator ~ term)* } 32 | term = _{ branch | number | function | nodata | identifier | "(" ~ expression ~ ")" } 33 | 34 | boolean_comparator = _{ 35 | equals | not_equals | smaller_equals | smaller | larger_equals | larger 36 | } 37 | equals = { "==" } 38 | not_equals = { "!=" } 39 | smaller = { "<" } 40 | smaller_equals = { "<=" } 41 | larger = { ">" } 42 | larger_equals = { ">=" } 43 | 44 | // TODO: allow negation?
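// Illustrative inputs accepted by the full grammar (examples added for
// clarity, not part of the original file):
//
//   let mean = (a + b) / 2; if mean > 100 { mean } else { nodata }
//   max(a, 2) ** 2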
45 | boolean_operator = _{ and | or } 46 | and = { "&&" } 47 | or = { "||" } 48 | 49 | boolean_expression = { boolean_term ~ (boolean_operator ~ boolean_term)* } 50 | boolean_term = _{ boolean_true | boolean_false | boolean_comparison | identifier_is_nodata | "(" ~ boolean_expression ~ ")" } 51 | boolean_true = { ^"true" } 52 | boolean_false = { ^"false" } 53 | boolean_comparison = { expression ~ boolean_comparator ~ expression } 54 | 55 | identifier_is_nodata = { identifier ~ ^"is" ~ ^"nodata" } 56 | 57 | branch = { 58 | "if" ~ boolean_expression ~ "{" ~ expression ~ "}" 59 | ~ (^"else" ~ ^"if" ~ boolean_expression ~ "{" ~ expression ~ "}")* 60 | ~ ^"else" ~ "{" ~ expression ~ "}" 61 | } 62 | 63 | assignment = { 64 | "let" ~ identifier ~ "=" ~ expression ~ ";" 65 | } 66 | 67 | assignments_and_expression = { 68 | assignment* ~ expression 69 | } 70 | 71 | main = _{ 72 | SOI ~ assignments_and_expression ~ EOI 73 | } 74 | 75 | 76 | -------------------------------------------------------------------------------- /expression/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod codegen; 2 | mod compiled; 3 | mod dependencies; 4 | pub mod error; 5 | mod functions; 6 | mod parser; 7 | mod util; 8 | 9 | pub use codegen::{DataType, ExpressionAst, Parameter}; 10 | pub use compiled::LinkedExpression; 11 | pub use dependencies::ExpressionDependencies; 12 | pub use functions::FUNCTION_PREFIX; 13 | pub use parser::ExpressionParser; 14 | 15 | pub use geoengine_expression_deps::*; 16 | 17 | /// Checks if a string is a valid variable name 18 | pub fn is_allowed_variable_name(name: &str) -> bool { 19 | name.chars().all(|c| c.is_ascii_alphanumeric() || c == '_') 20 | && !name.starts_with(FUNCTION_PREFIX) 21 | } 22 | -------------------------------------------------------------------------------- /expression/src/util.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashSet; 2 | 3 | #[derive(Debug, Clone, PartialEq, Eq)] 4 | pub enum DuplicateOrEmpty { 5 | Ok, 6 | Duplicate(String), 7 | Empty, 8 | } 9 | 10 | /// Checks if a string is empty or duplicated within a slice 11 | pub fn duplicate_or_empty_str_slice<S: AsRef<str>>(strings: &[S]) -> DuplicateOrEmpty { 12 | let mut set = HashSet::new(); 13 | 14 | for string in strings { 15 | let string = string.as_ref(); 16 | 17 | if string.is_empty() { 18 | return DuplicateOrEmpty::Empty; 19 | } 20 | 21 | if !set.insert(string) { 22 | return DuplicateOrEmpty::Duplicate(string.to_string()); 23 | } 24 | } 25 | 26 | DuplicateOrEmpty::Ok 27 | } 28 | -------------------------------------------------------------------------------- /macros/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "geoengine-macros" 3 | version.workspace = true 4 | authors.workspace = true 5 | edition.workspace = true 6 | publish.workspace = true 7 | license-file.workspace = true 8 | documentation.workspace = true 9 | repository.workspace = true 10 | 11 | [lib] 12 | proc-macro = true 13 | 14 | [dependencies] 15 | proc-macro2 = { workspace = true } 16 | quote = { workspace = true } 17 | syn = { workspace = true } 18 | 19 | [dev-dependencies] 20 | pretty_assertions = { workspace = true } 21 | prettyplease = { workspace = true } 22 | 23 | [lints] 24 | workspace = true 25 | -------------------------------------------------------------------------------- /macros/src/lib.rs:
-------------------------------------------------------------------------------- 1 | use proc_macro2::TokenStream; 2 | 3 | mod testing; 4 | mod typetag; 5 | mod util; 6 | 7 | type Result<T> = std::result::Result<T, syn::Error>; 8 | 9 | /// A macro to generate tests for Geo Engine services. 10 | /// It automatically spins up a database context. 11 | /// 12 | /// # Parameters 13 | /// 14 | /// - `tiling_spec` - a function that returns a [`geoengine_datatypes::raster::TilingSpecification`] to use for the test 15 | /// - `query_ctx_chunk_size` - a function that returns a [`geoengine_operators::engine::ChunkByteSize`] to use for the test 16 | /// - `test_execution` - `parallel` (default) or `serial`, which isolates this test from other tests 17 | /// - `before` - a function that is called before the context is created and the test is executed 18 | /// - `expect_panic` - if the test is expected to panic 19 | /// - `user` - `"admin"` if the created session should be an admin session 20 | /// - `quota_config` - a function that returns a [`crate::config::Quota`] to use for the test 21 | /// - `oidc_db` - a tuple `(handle, f)` with 22 | ///    - `handle` being a handle of an OpenID-Connect endpoint, preventing it from dropping too early, and 23 | ///    - `f` being a function that returns a [`crate::users::OidcManager`] to use for the test 24 | /// 25 | #[proc_macro_attribute] 26 | pub fn test( 27 | attr: proc_macro::TokenStream, 28 | item: proc_macro::TokenStream, 29 | ) -> proc_macro::TokenStream { 30 | match testing::test(attr.into(), &item.clone().into()) { 31 | Ok(ts) => ts.into(), 32 | Err(e) => token_stream_with_error(item.into(), e).into(), 33 | } 34 | } 35 | 36 | /// This macro generates a type tag for a struct as part of the OpenAPI schema. 37 | /// It creates a new enum with a single variant that is used as a type tag. 38 | /// The type tag is used for serialization and deserialization as well as for schema generation. 39 | /// 40 | /// # Parameters 41 | /// - `value` - the value of the type tag 42 | /// - `tag` - (optional) the name of the field that is used as a tag (default: `"type"`) 43 | /// 44 | #[proc_macro_attribute] 45 | pub fn type_tag( 46 | attr: proc_macro::TokenStream, 47 | item: proc_macro::TokenStream, 48 | ) -> proc_macro::TokenStream { 49 | match typetag::type_tag(attr.into(), &item.clone().into()) { 50 | Ok(ts) => ts.into(), 51 | Err(e) => token_stream_with_error(item.into(), e).into(), 52 | } 53 | } 54 | 55 | fn token_stream_with_error(mut tokens: TokenStream, error: syn::Error) -> TokenStream { 56 | tokens.extend(error.into_compile_error()); 57 | tokens 58 | } 59 | -------------------------------------------------------------------------------- /macros/src/util.rs: -------------------------------------------------------------------------------- 1 | use crate::{Result, testing::AttributeArgs}; 2 | use proc_macro2::TokenStream; 3 | use std::collections::HashMap; 4 | use syn::{Lit, parse::Parser}; 5 | 6 | pub fn parse_config_args(attr: TokenStream) -> Result<HashMap<String, Lit>> { 7 | let inputs = AttributeArgs::parse_terminated.parse2(attr)?; 8 | 9 | let mut args = HashMap::new(); 10 | 11 | for input in inputs { 12 | let syn::Meta::NameValue(name_value) = input else { 13 | return Err(syn::Error::new_spanned(input, "expected name-value pair")); 14 | }; 15 | 16 | let ident = name_value 17 | .path 18 | .get_ident() 19 | .ok_or_else(|| { 20 | syn::Error::new_spanned(name_value.clone(), "Must have specified ident") 21 | })?
22 | .to_string() 23 | .to_lowercase(); 24 | let lit = match &name_value.value { 25 | syn::Expr::Lit(syn::ExprLit { lit, .. }) => lit, 26 | expr => return Err(syn::Error::new_spanned(expr, "Must be a literal")), 27 | }; 28 | 29 | args.insert(ident, lit.clone()); 30 | } 31 | 32 | Ok(args) 33 | } 34 | 35 | #[cfg(test)] 36 | #[macro_export] 37 | // This macro is used to compare the pretty printed output of the expression parser. 38 | // We will use a macro instead of a function to get errors in the places where they occur. 39 | macro_rules! assert_eq_pretty { 40 | ( $left:expr, $right:expr ) => {{ 41 | pretty_assertions::assert_str_eq!( 42 | prettyplease::unparse(&syn::parse_file(&$left).unwrap()), 43 | prettyplease::unparse(&syn::parse_file(&$right).unwrap()), 44 | ); 45 | }}; 46 | } 47 | -------------------------------------------------------------------------------- /operators/.gitignore: -------------------------------------------------------------------------------- 1 | # Compile output 2 | /target 3 | **/*.rs.bk 4 | 5 | # Rust artifacts 6 | Cargo.lock 7 | 8 | # IDE files 9 | /.idea 10 | 11 | -------------------------------------------------------------------------------- /operators/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "geoengine-operators" 3 | version.workspace = true 4 | authors.workspace = true 5 | edition.workspace = true 6 | publish.workspace = true 7 | license-file.workspace = true 8 | documentation.workspace = true 9 | repository.workspace = true 10 | 11 | [features] 12 | 13 | [dependencies] 14 | arrow = { workspace = true } 15 | async-trait = { workspace = true } 16 | bb8-postgres = { workspace = true } 17 | bytes = { workspace = true } 18 | chrono = { workspace = true } 19 | csv = { workspace = true } 20 | float-cmp = { workspace = true } 21 | futures = { workspace = true } 22 | gdal = { workspace = true } 23 | gdal-sys = { workspace = true } 24 | geo = { workspace = true } 25 | geoengine-datatypes = { path = "../datatypes" } 26 | geoengine-expression = { path = "../expression" } 27 | itertools = { workspace = true } 28 | libloading = { workspace = true } 29 | log = { workspace = true } 30 | lru = { workspace = true } 31 | lz4_flex = { workspace = true } 32 | ndarray = { workspace = true } 33 | num-traits = { workspace = true } 34 | num = { workspace = true } 35 | ouroboros = { workspace = true } 36 | ordered-float = { workspace = true } 37 | ort = { workspace = true } 38 | paste = { workspace = true } 39 | pin-project = { workspace = true } 40 | postgres-protocol = { workspace = true } 41 | postgres-types = { workspace = true } 42 | rayon = { workspace = true } 43 | rustc-hash = { workspace = true } 44 | serde = { workspace = true } 45 | serde_json = { workspace = true } 46 | snafu = { workspace = true } 47 | stream-cancel = { workspace = true } 48 | tempfile = { workspace = true } 49 | tokio = { workspace = true } 50 | tokio-postgres = { workspace = true } 51 | tracing = { workspace = true } 52 | typetag = { workspace = true } 53 | uuid = { workspace = true } 54 | strum = { workspace = true } 55 | 56 | [dev-dependencies] 57 | async-stream = { workspace = true } 58 | approx = { workspace = true } 59 | geo-rand = { workspace = true } 60 | httptest = { workspace = true } 61 | tracing-subscriber = { workspace = true, features = ["json"] } 62 | rand = { workspace = true, features = ["small_rng"] } 63 | 64 | [[bench]] 65 | name = "bands" 66 | harness = false 67 | 68 | [[bench]] 69 | name = "cache" 70 | 
harness = false 71 | 72 | [[bench]] 73 | name = "cache_concurrent" 74 | harness = false 75 | 76 | [[bench]] 77 | name = "expression" 78 | harness = false 79 | 80 | [[bench]] 81 | name = "workflows" 82 | harness = false 83 | 84 | [[bench]] 85 | name = "thread_pool" 86 | harness = false 87 | 88 | [[bench]] 89 | name = "pip" 90 | harness = false 91 | 92 | [[bench]] 93 | name = "query_chunks" 94 | harness = false 95 | 96 | [lints] 97 | workspace = true 98 | -------------------------------------------------------------------------------- /operators/README.md: -------------------------------------------------------------------------------- 1 | # geo engine operators 2 | This crate contains the processing engine and operators for the geo engine. 3 | -------------------------------------------------------------------------------- /operators/benches/thread_pool.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::unwrap_used, clippy::print_stdout, clippy::print_stderr)] // okay in benchmarks 2 | 3 | use std::{hint::black_box, time::Instant}; 4 | 5 | use geoengine_operators::util::create_rayon_thread_pool; 6 | 7 | fn work(num_threads: usize) { 8 | let thread_pool = create_rayon_thread_pool(num_threads); 9 | 10 | thread_pool.scope(|scope| { 11 | for _ in 0..num_threads { 12 | scope.spawn(move |_| { 13 | for i in 0..100_000_000 / num_threads { 14 | black_box(i + 1); 15 | } 16 | }); 17 | } 18 | }); 19 | } 20 | 21 | fn main() { 22 | println!("num_threads,time"); 23 | for num_threads in [1, 2, 4] { 24 | let start = Instant::now(); 25 | work(num_threads); 26 | println!("{},{:?}", num_threads, start.elapsed()); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /operators/src/adapters/raster_subquery/mod.rs: -------------------------------------------------------------------------------- 1 | mod raster_subquery_adapter; 2 | mod raster_subquery_reprojection; 3 | 4 | pub use raster_subquery_adapter::{ 5 | FoldTileAccu, FoldTileAccuMut, RasterSubQueryAdapter, SubQueryTileAggregator, 6 | }; 7 | 8 | pub use raster_subquery_reprojection::{ 9 | TileReprojectionSubQuery, fold_by_coordinate_lookup_future, 10 | }; 11 | -------------------------------------------------------------------------------- /operators/src/cache/error.rs: -------------------------------------------------------------------------------- 1 | use snafu::Snafu; 2 | 3 | #[derive(Debug, Snafu)] 4 | #[snafu(visibility(pub(crate)))] 5 | #[snafu(context(suffix(false)))] 6 | pub enum CacheError { 7 | LandingZoneRatioMustBeLargerThanZero, 8 | LandingZoneRatioMustBeSmallerThenHalfCacheSize, 9 | ElementAndQueryDoNotIntersect, 10 | NotEnoughSpaceInLandingZone, 11 | NotEnoughSpaceInCache, 12 | QueryNotFoundInLandingZone, 13 | OperatorCacheEntryNotFound, 14 | InvalidTypeForInsertion, 15 | #[snafu(display("The Element inserted into the cache is already expired"))] 16 | TileExpiredBeforeInsertion, 17 | NegativeSizeOfLandingZone, 18 | NegativeSizeOfCache, 19 | QueryIdAlreadyInLandingZone, 20 | CacheEntryIdAlreadyInCache, 21 | CouldNotFilterResults, 22 | #[snafu(display("Compressed element could not be decompressed"))] 23 | CouldNotDecompressElement { 24 | source: lz4_flex::block::DecompressError, 25 | }, 26 | BlockingElementConversion, 27 | #[snafu(display("Could not run decompression task"))] 28 | CouldNotRunDecompressionTask { 29 | source: tokio::task::JoinError, 30 | }, 31 | #[snafu(display("Could not convert Arrow element to bytes"))] 32 | 
CouldNotWriteElementToBytes { 33 | source: arrow::error::ArrowError, 34 | }, 35 | #[snafu(display("Could not convert bytes to Arrow element"))] 36 | CouldNotReadElementFromBytes { 37 | source: arrow::error::ArrowError, 38 | }, 39 | } 40 | -------------------------------------------------------------------------------- /operators/src/cache/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod cache_chunks; 2 | pub mod cache_operator; 3 | pub mod cache_stream; 4 | pub mod cache_tiles; 5 | pub mod error; 6 | pub mod shared_cache; 7 | pub mod util; 8 | -------------------------------------------------------------------------------- /operators/src/engine/clonable_operator.rs: -------------------------------------------------------------------------------- 1 | use super::{ 2 | InitializedPlotOperator, InitializedRasterOperator, InitializedVectorOperator, PlotOperator, 3 | RasterOperator, VectorOperator, 4 | }; 5 | 6 | /// Helper trait for making boxed `RasterOperator`s cloneable 7 | pub trait CloneableRasterOperator { 8 | fn clone_boxed_raster(&self) -> Box<dyn RasterOperator>; 9 | } 10 | 11 | /// Helper trait for making boxed `VectorOperator`s cloneable 12 | pub trait CloneableVectorOperator { 13 | fn clone_boxed_vector(&self) -> Box<dyn VectorOperator>; 14 | } 15 | 16 | /// Helper trait for making boxed `PlotOperator`s cloneable 17 | pub trait CloneablePlotOperator { 18 | fn clone_boxed_plot(&self) -> Box<dyn PlotOperator>; 19 | } 20 | 21 | impl<T> CloneableRasterOperator for T 22 | where 23 | T: 'static + RasterOperator + Clone, 24 | { 25 | fn clone_boxed_raster(&self) -> Box<dyn RasterOperator> { 26 | Box::new(self.clone()) 27 | } 28 | } 29 | 30 | impl<T> CloneableVectorOperator for T 31 | where 32 | T: 'static + VectorOperator + Clone, 33 | { 34 | fn clone_boxed_vector(&self) -> Box<dyn VectorOperator> { 35 | Box::new(self.clone()) 36 | } 37 | } 38 | 39 | impl<T> CloneablePlotOperator for T 40 | where 41 | T: 'static + PlotOperator + Clone, 42 | { 43 | fn clone_boxed_plot(&self) -> Box<dyn PlotOperator> { 44 | Box::new(self.clone()) 45 | } 46 | } 47 | 48 | impl Clone for Box<dyn RasterOperator> { 49 | fn clone(&self) -> Box<dyn RasterOperator> { 50 | self.clone_boxed_raster() 51 | } 52 | } 53 | 54 | impl Clone for Box<dyn VectorOperator> { 55 | fn clone(&self) -> Box<dyn VectorOperator> { 56 | self.clone_boxed_vector() 57 | } 58 | } 59 | 60 | impl Clone for Box<dyn PlotOperator> { 61 | fn clone(&self) -> Box<dyn PlotOperator> { 62 | self.clone_boxed_plot() 63 | } 64 | } 65 | 66 | /// Helper trait for making boxed `InitializedRasterOperator`s cloneable 67 | pub trait CloneableInitializedRasterOperator { 68 | fn clone_boxed_raster(&self) -> Box<dyn InitializedRasterOperator>; 69 | } 70 | 71 | /// Helper trait for making boxed `InitializedVectorOperator`s cloneable 72 | pub trait CloneableInitializedVectorOperator { 73 | fn clone_boxed_vector(&self) -> Box<dyn InitializedVectorOperator>; 74 | } 75 | 76 | /// Helper trait for making boxed `InitializedPlotOperator`s cloneable 77 | pub trait CloneableInitializedPlotOperator { 78 | fn clone_boxed_plot(&self) -> Box<dyn InitializedPlotOperator>; 79 | } 80 | -------------------------------------------------------------------------------- /operators/src/engine/workflow_path.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::{Display, Formatter}; 2 | 3 | /// A path to an operator within an operator graph (workflow). 4 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] 5 | pub struct WorkflowOperatorPath { 6 | id: Vec<u8>, 7 | } 8 | 9 | impl WorkflowOperatorPath { 10 | /// Creates a new root path.
11 | pub fn initialize_root() -> Self { 12 | Self { id: Vec::new() } 13 | } 14 | 15 | /// returns the inner Vec of the path 16 | pub fn inner(self) -> Vec<u8> { 17 | self.id 18 | } 19 | 20 | /// clone the path and extend it with the given suffix 21 | #[must_use] 22 | pub fn clone_and_extend(&self, suffix: &[u8]) -> Self { 23 | let mut id = self.id.clone(); 24 | id.extend(suffix); 25 | Self { id } 26 | } 27 | 28 | /// clone the path and append the given suffix 29 | #[must_use] 30 | pub fn clone_and_append(&self, suffix: u8) -> Self { 31 | let mut id = self.id.clone(); 32 | id.push(suffix); 33 | Self { id } 34 | } 35 | 36 | /// checks if the path starts with the given prefix 37 | pub fn starts_with(&self, prefix: &[u8]) -> bool { 38 | self.id.starts_with(prefix) 39 | } 40 | 41 | /// checks if the path is the root path 42 | pub fn is_root(&self) -> bool { 43 | self.id.is_empty() 44 | } 45 | } 46 | 47 | impl AsRef<[u8]> for WorkflowOperatorPath { 48 | fn as_ref(&self) -> &[u8] { 49 | &self.id 50 | } 51 | } 52 | 53 | impl From<&[u8]> for WorkflowOperatorPath { 54 | fn from(id: &[u8]) -> Self { 55 | Self { id: id.to_vec() } 56 | } 57 | } 58 | 59 | impl Display for WorkflowOperatorPath { 60 | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { 61 | let sep = ", "; 62 | 63 | write!(f, "[")?; 64 | 65 | for (i, id) in self.id.iter().enumerate() { 66 | if i > 0 { 67 | write!(f, "{sep}")?; 68 | } 69 | write!(f, "{id}")?; 70 | } 71 | 72 | write!(f, "]") 73 | } 74 | } 75 | 76 | #[cfg(test)] 77 | mod tests { 78 | use super::*; 79 | 80 | #[test] 81 | fn test() { 82 | let path = WorkflowOperatorPath::initialize_root(); 83 | assert_eq!(path.to_string(), "[]"); 84 | 85 | let path = path.clone_and_append(1); 86 | assert_eq!(path.to_string(), "[1]"); 87 | 88 | let path = path.clone_and_extend(&[2, 3]); 89 | assert_eq!(path.to_string(), "[1, 2, 3]"); 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /operators/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod adapters; 2 | #[macro_use] 3 | pub mod engine; 4 | pub mod cache; 5 | pub mod error; 6 | pub mod machine_learning; 7 | pub mod meta; 8 | pub mod mock; 9 | pub mod plot; 10 | pub mod processing; 11 | pub mod source; 12 | pub mod util; 13 | 14 | use geoengine_datatypes::test_data; 15 | -------------------------------------------------------------------------------- /operators/src/meta/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod quota; 2 | pub mod wrapper; 3 | -------------------------------------------------------------------------------- /operators/src/meta/quota.rs: -------------------------------------------------------------------------------- 1 | use crate::{engine::WorkflowOperatorPath, util::Result}; 2 | use async_trait::async_trait; 3 | use tokio::sync::mpsc::UnboundedSender; 4 | use uuid::Uuid; 5 | 6 | /// An Id for a computation used for quota tracking 7 | #[derive(Debug, Clone, PartialEq, Eq)] 8 | pub struct ComputationUnit { 9 | pub user: Uuid, 10 | pub workflow: Uuid, 11 | pub computation: Uuid, 12 | pub operator_name: &'static str, 13 | pub operator_path: WorkflowOperatorPath, 14 | pub data: Option<String>, 15 | } 16 | 17 | #[derive(Debug, Clone, PartialEq, Eq)] 18 | pub enum QuotaMessage { 19 | ComputationUnit(ComputationUnit), 20 | Flush, 21 | } 22 | 23 | impl From<ComputationUnit> for QuotaMessage { 24 | fn from(value: ComputationUnit) -> Self { 25 | Self::ComputationUnit(value) 26 | } 27 | } 28 | 29 |
/// This type holds a [`Sender`] to a channel that is used to track the computation units. 30 | /// It is passed to the [`StreamStatisticsAdapter`] via the [`QueryContext`]. 31 | #[derive(Clone)] 32 | pub struct QuotaTracking { 33 | quota_sender: UnboundedSender<QuotaMessage>, 34 | user: Uuid, 35 | workflow: Uuid, 36 | computation: Uuid, 37 | } 38 | 39 | impl QuotaTracking { 40 | pub fn new( 41 | quota_sender: UnboundedSender<QuotaMessage>, 42 | user: Uuid, 43 | workflow: Uuid, 44 | computation: Uuid, 45 | ) -> Self { 46 | Self { 47 | quota_sender, 48 | user, 49 | workflow, 50 | computation, 51 | } 52 | } 53 | 54 | pub fn work_unit_done( 55 | &self, 56 | operator_name: &'static str, 57 | operator_path: WorkflowOperatorPath, 58 | data: Option<String>, 59 | ) { 60 | let _ = self 61 | .quota_sender 62 | .send(QuotaMessage::ComputationUnit(ComputationUnit { 63 | user: self.user, 64 | workflow: self.workflow, 65 | computation: self.computation, 66 | operator_name, 67 | operator_path, 68 | data, 69 | })); // ignore the Result because the quota receiver should never close the receiving end of the channel 70 | } 71 | } 72 | 73 | #[async_trait] 74 | pub trait QuotaCheck { 75 | /// checks if the quota is available and if not, returns an error 76 | async fn ensure_quota_available(&self) -> Result<()>; 77 | } 78 | 79 | pub type QuotaChecker = Box<dyn QuotaCheck + Send + Sync>; 80 | -------------------------------------------------------------------------------- /operators/src/mock/mod.rs: -------------------------------------------------------------------------------- 1 | mod mock_dataset_data_source; 2 | mod mock_feature_collection_source; 3 | mod mock_point_source; 4 | mod mock_raster_source; 5 | 6 | pub use mock_dataset_data_source::*; 7 | pub use mock_feature_collection_source::*; 8 | pub use mock_point_source::*; 9 | pub use mock_raster_source::*; 10 | -------------------------------------------------------------------------------- /operators/src/plot/mod.rs: -------------------------------------------------------------------------------- 1 | mod box_plot; 2 | mod class_histogram; 3 | mod histogram; 4 | mod pie_chart; 5 | mod scatter_plot; 6 | mod statistics; 7 | mod temporal_raster_mean_plot; 8 | mod temporal_vector_line_plot; 9 | 10 | pub use self::class_histogram::{ 11 | ClassHistogram, ClassHistogramParams, ClassHistogramRasterQueryProcessor, 12 | ClassHistogramVectorQueryProcessor, InitializedClassHistogram, 13 | }; 14 | pub use self::histogram::{ 15 | Histogram, HistogramBounds, HistogramBuckets, HistogramParams, HistogramRasterQueryProcessor, 16 | HistogramVectorQueryProcessor, InitializedHistogram, 17 | }; 18 | pub use self::pie_chart::{ 19 | CountPieChartVectorQueryProcessor, InitializedCountPieChart, PieChart, PieChartError, 20 | PieChartParams, 21 | }; 22 | pub use self::statistics::{ 23 | InitializedStatistics, Statistics, StatisticsParams, StatisticsRasterQueryProcessor, 24 | StatisticsVectorQueryProcessor, 25 | }; 26 | pub use self::temporal_raster_mean_plot::{ 27 | InitializedMeanRasterPixelValuesOverTime, MeanRasterPixelValuesOverTime, 28 | MeanRasterPixelValuesOverTimeParams, MeanRasterPixelValuesOverTimePosition, 29 | MeanRasterPixelValuesOverTimeQueryProcessor, 30 | }; 31 | -------------------------------------------------------------------------------- /operators/src/processing/circle_merging_quadtree.rs: -------------------------------------------------------------------------------- 1 | mod aggregates; 2 | mod circle_of_points; 3 | mod circle_radius_model; 4 | mod grid; 5 | mod hash_map; 6 | mod node; 7 | mod operator; 8 | mod quadtree; 9
| 10 | pub use operator::{ 11 | InitializedVisualPointClustering, VisualPointClustering, VisualPointClusteringParams, 12 | }; 13 | -------------------------------------------------------------------------------- /operators/src/processing/mod.rs: -------------------------------------------------------------------------------- 1 | mod band_neighborhood_aggregate; 2 | mod bandwise_expression; 3 | mod circle_merging_quadtree; 4 | mod column_range_filter; 5 | mod expression; 6 | mod interpolation; 7 | mod line_simplification; 8 | mod map_query; 9 | mod meteosat; 10 | mod neighborhood_aggregate; 11 | mod point_in_polygon; 12 | mod raster_scaling; 13 | mod raster_stacker; 14 | mod raster_type_conversion; 15 | mod raster_vector_join; 16 | mod rasterization; 17 | mod reprojection; 18 | mod temporal_raster_aggregation; 19 | mod time_projection; 20 | mod time_shift; 21 | mod vector_join; 22 | 23 | pub use band_neighborhood_aggregate::{ 24 | BandNeighborhoodAggregate, BandNeighborhoodAggregateError, BandNeighborhoodAggregateParams, 25 | }; 26 | pub use circle_merging_quadtree::{ 27 | InitializedVisualPointClustering, VisualPointClustering, VisualPointClusteringParams, 28 | }; 29 | pub use expression::{ 30 | Expression, ExpressionParams, RasterExpressionError, VectorExpression, VectorExpressionError, 31 | VectorExpressionParams, initialize_expression_dependencies, 32 | }; 33 | pub use interpolation::{Interpolation, InterpolationError, InterpolationParams}; 34 | pub use line_simplification::{ 35 | LineSimplification, LineSimplificationError, LineSimplificationParams, 36 | }; 37 | pub use neighborhood_aggregate::{ 38 | AggregateFunctionParams, NeighborhoodAggregate, NeighborhoodAggregateError, 39 | NeighborhoodAggregateParams, NeighborhoodParams, 40 | }; 41 | pub use point_in_polygon::{ 42 | PointInPolygonFilter, PointInPolygonFilterParams, PointInPolygonFilterSource, 43 | PointInPolygonTester, 44 | }; 45 | pub use raster_stacker::{RasterStacker, RasterStackerParams}; 46 | pub use raster_type_conversion::{ 47 | RasterTypeConversion, RasterTypeConversionParams, RasterTypeConversionQueryProcessor, 48 | }; 49 | pub use raster_vector_join::{ 50 | ColumnNames, FeatureAggregationMethod, RasterVectorJoin, RasterVectorJoinParams, 51 | TemporalAggregationMethod, 52 | }; 53 | pub use reprojection::{ 54 | InitializedRasterReprojection, InitializedVectorReprojection, Reprojection, ReprojectionParams, 55 | }; 56 | pub use temporal_raster_aggregation::{ 57 | Aggregation, TemporalRasterAggregation, TemporalRasterAggregationParameters, 58 | }; 59 | pub use time_projection::{TimeProjection, TimeProjectionError, TimeProjectionParams}; 60 | pub use time_shift::{TimeShift, TimeShiftError, TimeShiftParams}; 61 | -------------------------------------------------------------------------------- /operators/src/processing/temporal_raster_aggregation/mod.rs: -------------------------------------------------------------------------------- 1 | mod aggregators; 2 | mod first_last_subquery; 3 | mod subquery; 4 | mod temporal_aggregation_operator; 5 | 6 | pub use temporal_aggregation_operator::{ 7 | Aggregation, TemporalRasterAggregation, TemporalRasterAggregationParameters, 8 | }; 9 | -------------------------------------------------------------------------------- /operators/src/source/gdal_source/error.rs: -------------------------------------------------------------------------------- 1 | use geoengine_datatypes::raster::RasterDataType; 2 | use snafu::Snafu; 3 | 4 | #[derive(Debug, Snafu)] 5 | #[snafu(visibility(pub(crate)))] 6 | 
#[snafu(context(suffix(false)))] // disables default `Snafu` suffix 7 | pub enum GdalSourceError { 8 | #[snafu(display("Unsupported raster type: {raster_type:?}"))] 9 | UnsupportedRasterType { raster_type: RasterDataType }, 10 | } 11 | -------------------------------------------------------------------------------- /operators/src/source/mod.rs: -------------------------------------------------------------------------------- 1 | mod csv; 2 | mod gdal_source; 3 | mod ogr_source; 4 | 5 | pub use self::csv::{ 6 | CsvGeometrySpecification, CsvSource, CsvSourceParameters, CsvSourceStream, CsvTimeSpecification, 7 | }; 8 | pub use self::gdal_source::{ 9 | FileNotFoundHandling, GdalDatasetGeoTransform, GdalDatasetParameters, GdalLoadingInfo, 10 | GdalLoadingInfoTemporalSlice, GdalLoadingInfoTemporalSliceIterator, GdalMetaDataList, 11 | GdalMetaDataRegular, GdalMetaDataStatic, GdalMetadataMapping, GdalMetadataNetCdfCf, 12 | GdalRetryOptions, GdalSource, GdalSourceError, GdalSourceParameters, GdalSourceProcessor, 13 | GdalSourceTimePlaceholder, TimeReference, 14 | }; 15 | pub use self::ogr_source::{ 16 | AttributeFilter, CsvHeader, FormatSpecifics, OgrSource, OgrSourceColumnSpec, OgrSourceDataset, 17 | OgrSourceDatasetTimeType, OgrSourceDurationSpec, OgrSourceErrorSpec, OgrSourceParameters, 18 | OgrSourceProcessor, OgrSourceTimeFormat, UnixTimeStampType, 19 | }; 20 | -------------------------------------------------------------------------------- /operators/src/util/input/mod.rs: -------------------------------------------------------------------------------- 1 | mod float_with_nan_serde; 2 | mod multi_raster_or_vector; 3 | mod raster_or_vector; 4 | mod string_or_number; 5 | mod string_or_number_range; 6 | 7 | pub use float_with_nan_serde::{float as float_with_nan, float_option as float_option_with_nan}; 8 | pub use multi_raster_or_vector::MultiRasterOrVectorOperator; 9 | pub use raster_or_vector::RasterOrVectorOperator; 10 | pub use string_or_number::StringOrNumber; 11 | pub use string_or_number_range::StringOrNumberRange; 12 | -------------------------------------------------------------------------------- /operators/src/util/math.rs: -------------------------------------------------------------------------------- 1 | use std::ops::{Add, BitAnd, BitOr, BitXor, Shr}; 2 | 3 | /// From `num_integer`. 4 | /// Returns the floor value of the average of `a` and `b` without overflow problems. 
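///
/// Added explanation (not from the original file): the identity
/// `a + b = ((a & b) << 1) + (a ^ b)` splits the sum into shared bits (counted
/// twice) and differing bits (counted once), so `(a & b) + ((a ^ b) >> 1)`
/// halves the sum without ever materializing the possibly overflowing `a + b`.
/// For example, `average_floor(6, 3) = (6 & 3) + ((6 ^ 3) >> 1) = 2 + 2 = 4`.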
5 | #[inline] 6 | pub fn average_floor<I>(a: I, b: I) -> I 7 | where 8 | I: Copy 9 | + Add<Output = I> 10 | + Shr<usize, Output = I> 11 | + BitAnd<Output = I> 12 | + BitOr<Output = I> 13 | + BitXor<Output = I>, 14 | { 15 | (a & b) + ((a ^ b) >> 1) 16 | } 17 | 18 | #[cfg(test)] 19 | mod tests { 20 | use super::*; 21 | 22 | #[test] 23 | fn average_floor_checks() { 24 | assert_eq!( 25 | average_floor(631_152_000_000_i64, 946_684_800_001_i64), 26 | 788_918_400_000_i64 27 | ); 28 | 29 | assert_eq!(average_floor(i64::MIN, i64::MAX), -1); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /operators/src/util/mod.rs: -------------------------------------------------------------------------------- 1 | mod async_util; 2 | pub mod gdal; 3 | pub mod input; 4 | pub mod math; 5 | pub mod number_statistics; 6 | pub mod raster_stream_to_geotiff; 7 | pub mod raster_stream_to_png; 8 | mod rayon; 9 | pub mod retry; 10 | pub mod statistics; 11 | pub mod stream_zip; 12 | pub mod string_token; 13 | pub mod sunpos; 14 | mod temporary_gdal_thread_local_config_options; 15 | 16 | use crate::error::Error; 17 | use std::collections::HashSet; 18 | use std::ops::Deref; 19 | use std::sync::{Mutex, MutexGuard}; 20 | 21 | pub use self::async_util::{ 22 | abortable_query_execution, spawn, spawn_blocking, spawn_blocking_with_thread_pool, 23 | }; 24 | pub use self::rayon::create_rayon_thread_pool; 25 | pub use self::temporary_gdal_thread_local_config_options::TemporaryGdalThreadLocalConfigOptions; 26 | 27 | pub type Result<T, E = Error> = std::result::Result<T, E>; 28 | 29 | /// Get a lock for mutex and recover from poisoning 30 | /// TODO: proper poisoning handling 31 | pub fn safe_lock_mutex<M, T>(lock: &M) -> MutexGuard<T> 32 | where 33 | M: Deref<Target = Mutex<T>>, 34 | { 35 | match lock.deref().lock() { 36 | Ok(guard) => guard, 37 | Err(poisoned) => poisoned.into_inner(), 38 | } 39 | } 40 | 41 | #[derive(Debug, Clone, PartialEq, Eq)] 42 | pub enum DuplicateOrEmpty { 43 | Ok, 44 | Duplicate(String), 45 | Empty, 46 | } 47 | 48 | /// Checks if a string is empty or duplicated within a slice 49 | pub fn duplicate_or_empty_str_slice<S: AsRef<str>>(strings: &[S]) -> DuplicateOrEmpty { 50 | let mut set = HashSet::new(); 51 | 52 | for string in strings { 53 | let string = string.as_ref(); 54 | 55 | if string.is_empty() { 56 | return DuplicateOrEmpty::Empty; 57 | } 58 | 59 | if !set.insert(string) { 60 | return DuplicateOrEmpty::Duplicate(string.to_string()); 61 | } 62 | } 63 | 64 | DuplicateOrEmpty::Ok 65 | } 66 | 67 | #[cfg(test)] 68 | mod tests { 69 | use super::*; 70 | 71 | #[test] 72 | fn test_duplicate_or_empty_str_slice() { 73 | assert_eq!( 74 | duplicate_or_empty_str_slice(&["a", "b", "c"]), 75 | DuplicateOrEmpty::Ok 76 | ); 77 | 78 | assert_eq!( 79 | duplicate_or_empty_str_slice(&["a", "", "c"]), 80 | DuplicateOrEmpty::Empty 81 | ); 82 | 83 | assert_eq!( 84 | duplicate_or_empty_str_slice(&["a", "a", "c"]), 85 | DuplicateOrEmpty::Duplicate("a".to_string()) 86 | ); 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /operators/src/util/rayon.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use rayon::{ThreadPool, ThreadPoolBuilder}; 4 | 5 | /// Tries to create a global thread pool that does not spawn any threads. 6 | /// This prevents accidentally using it. 7 | /// 8 | /// Hopefully, rayon either provides a real method for achieving this in the future 9 | /// or does not fix this behavior. 10 | /// 11 | /// Panics if building the global thread pool does not fail.
12 | /// 13 | fn rayon_destroy_global_thread_pool() { 14 | assert!( 15 | rayon::ThreadPoolBuilder::new() 16 | .num_threads(1) 17 | .spawn_handler(|_thread| { 18 | Err(std::io::Error::new( 19 | std::io::ErrorKind::Other, 20 | "Do not spawn rayon global pool on purpose", 21 | )) 22 | }) 23 | .build_global() 24 | .is_err() 25 | ); 26 | } 27 | 28 | /// Create a rayon thread pool with the given number of threads. 29 | /// Use `num_threads = 0` for auto number of threads. 30 | #[allow(clippy::missing_panics_doc)] 31 | pub fn create_rayon_thread_pool(num_threads: usize) -> Arc<ThreadPool> { 32 | rayon_destroy_global_thread_pool(); 33 | 34 | let thread_pool = ThreadPoolBuilder::new() 35 | .num_threads(num_threads) 36 | .build() 37 | .expect("Thread Pool must be initializable"); 38 | 39 | Arc::new(thread_pool) 40 | } 41 | 42 | #[cfg(test)] 43 | mod tests { 44 | use super::*; 45 | 46 | #[test] 47 | #[should_panic( 48 | expected = "The global thread pool has not been initialized.: ThreadPoolBuildError { kind: GlobalPoolAlreadyInitialized }" 49 | )] 50 | fn global_rayon_fail() { 51 | create_rayon_thread_pool(0); 52 | 53 | rayon::current_num_threads(); 54 | } 55 | 56 | #[test] 57 | fn num_threads() { 58 | let pool = create_rayon_thread_pool(11); 59 | assert_eq!(11, pool.current_num_threads()); 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /operators/src/util/retry.rs: -------------------------------------------------------------------------------- 1 | use std::time::Duration; 2 | 3 | use futures::Future; 4 | 5 | /// A method wrapper for calling a method that may fail spuriously until it succeeds. 6 | /// The method is called at most `max_retries + 1` times. 7 | /// If it still fails after `max_retries` times, the error is returned. 8 | /// 9 | /// Uses exponential backoff: the delay starts at `initial_delay_ms` and is multiplied by `exponential_backoff_factor` after each attempt. 10 | /// 11 | /// # Panics 12 | /// Panics if `max_retries` is 0.
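///
/// # Example
///
/// Illustrative sketch (not from the original docs; `flaky_request` stands in
/// for any `FnMut` returning a future of `Result`):
///
/// ```ignore
/// // at most 4 calls in total, with delays of 10 ms, 20 ms, and 40 ms between them
/// let value = retry(3, 10, 2.0, Some(100), || flaky_request()).await?;
/// ```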
13 | /// 14 | pub async fn retry<F, Fut, T, E>( 15 | mut max_retries: usize, 16 | initial_delay_ms: u64, 17 | exponential_backoff_factor: f64, 18 | max_delay_ms: Option<u64>, 19 | mut f: F, 20 | ) -> Result<T, E> 21 | where 22 | F: FnMut() -> Fut, 23 | Fut: Future<Output = Result<T, E>>, 24 | { 25 | let mut result = (f)().await; 26 | 27 | let mut sleep_delay = initial_delay_ms as f64; 28 | 29 | while result.is_err() && max_retries > 0 { 30 | tokio::time::sleep(Duration::from_millis(sleep_delay as u64)).await; 31 | 32 | result = (f)().await; 33 | 34 | max_retries -= 1; 35 | sleep_delay *= exponential_backoff_factor; 36 | 37 | if let Some(max_delay_ms) = max_delay_ms { 38 | sleep_delay = sleep_delay.min(max_delay_ms as f64); 39 | } 40 | } 41 | 42 | result 43 | } 44 | 45 | #[cfg(test)] 46 | mod tests { 47 | use std::sync::Arc; 48 | use std::sync::atomic::{AtomicUsize, Ordering}; 49 | use std::task::Poll; 50 | 51 | use futures::future::{err, ok, poll_fn}; 52 | 53 | use super::*; 54 | 55 | #[tokio::test] 56 | async fn test_immediate_success() { 57 | let result: Result<(), ()> = retry(3, 0, 1., None, || ok(())).await; 58 | 59 | assert!(result.is_ok()); 60 | } 61 | 62 | #[tokio::test] 63 | async fn test_retry_success_after_tries() { 64 | let i = Arc::new(AtomicUsize::new(0)); 65 | 66 | let result = retry(3, 0, 1., None, || { 67 | let i = i.clone(); 68 | poll_fn(move |_ctx| { 69 | Poll::Ready(match i.fetch_add(1, Ordering::Relaxed) { 70 | 0..=2 => Err(()), 71 | _ => Ok(()), 72 | }) 73 | }) 74 | }) 75 | .await; 76 | 77 | assert!(result.is_ok()); 78 | } 79 | 80 | #[tokio::test] 81 | async fn test_failure() { 82 | let result: Result<(), ()> = retry(3, 0, 1., None, || err(())).await; 83 | 84 | assert!(result.is_err()); 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /operators/src/util/stream_zip/mod.rs: -------------------------------------------------------------------------------- 1 | mod tuple_zip; 2 | mod vec_zip; 3 | 4 | pub use tuple_zip::StreamTupleZip; 5 | pub use vec_zip::{StreamArrayZip, StreamVectorZip}; 6 | -------------------------------------------------------------------------------- /operators/src/util/temporary_gdal_thread_local_config_options.rs: -------------------------------------------------------------------------------- 1 | use super::Result; 2 | 3 | /// Set thread local gdal options and revert them on drop 4 | pub struct TemporaryGdalThreadLocalConfigOptions { 5 | original_configs: Vec<(String, Option<String>)>, 6 | } 7 | 8 | impl TemporaryGdalThreadLocalConfigOptions { 9 | /// Set thread local gdal options and revert them on drop 10 | pub fn new(configs: &[(String, String)]) -> Result<Self> { 11 | let mut original_configs = vec![]; 12 | 13 | for (key, value) in configs { 14 | let old = gdal::config::get_thread_local_config_option(key, "") 15 | .map(|value| if value.is_empty() { None } else { Some(value) })?; 16 | 17 | // TODO: check if overriding existing config (local & global) is ok for the given key 18 | gdal::config::set_thread_local_config_option(key, value)?; 19 | log::trace!("set {key}={value}"); 20 | 21 | original_configs.push((key.clone(), old)); 22 | } 23 | 24 | Ok(Self { original_configs }) 25 | } 26 | } 27 | 28 | impl Drop for TemporaryGdalThreadLocalConfigOptions { 29 | fn drop(&mut self) { 30 | for (key, value) in &self.original_configs { 31 | if let Some(value) = value { 32 | let _result = gdal::config::set_thread_local_config_option(key, value); 33 | } else { 34 | let _result = gdal::config::clear_thread_local_config_option(key); 35 | } 36 | } 37 | } 38 | } 39 |
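// Usage sketch (illustrative, not part of the original file): hold the guard
// while calling into GDAL; previous option values are restored on drop.
//
// let _guard = TemporaryGdalThreadLocalConfigOptions::new(&[(
//     "GDAL_NUM_THREADS".to_string(),
//     "ALL_CPUS".to_string(),
// )])?;
// let dataset = gdal::Dataset::open("raster.tif")?; // runs with the option set
// drop(_guard); // options reverted here at the latest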
-------------------------------------------------------------------------------- /operators/tests/streams.rs: -------------------------------------------------------------------------------- 1 | use futures::Stream; 2 | use futures::executor::block_on_stream; 3 | use futures::stream; 4 | use futures::task::Poll; 5 | 6 | pub fn fn_stream() -> impl Stream<Item = usize> { 7 | let mut counter: usize = 2; 8 | 9 | stream::poll_fn(move |_| -> Poll<Option<usize>> { 10 | if counter == 0 { 11 | return Poll::Ready(None); 12 | } 13 | counter -= 1; 14 | Poll::Ready(Some(counter)) 15 | }) 16 | } 17 | 18 | #[test] 19 | fn fn_test() { 20 | let mut stream = block_on_stream(fn_stream()); 21 | 22 | assert_eq!(stream.next(), Some(1)); 23 | assert_eq!(stream.next(), Some(0)); 24 | assert_eq!(stream.next(), None); 25 | } 26 | -------------------------------------------------------------------------------- /rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | channel = "1.86.0" 3 | components = ["cargo", "rustfmt", "rust-src", "clippy", "llvm-tools"] 4 | -------------------------------------------------------------------------------- /services/README.md: -------------------------------------------------------------------------------- 1 | # geo engine services 2 | This crate contains the services for the geo engine. 3 | -------------------------------------------------------------------------------- /services/build.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Context; 2 | use vergen::{BuildBuilder, CargoBuilder, Emitter}; 3 | use vergen_gitcl::GitclBuilder; 4 | 5 | fn main() -> anyhow::Result<()> { 6 | Emitter::default() 7 | .add_instructions( 8 | // `VERGEN_BUILD_DATE` 9 | &BuildBuilder::default().build_date(true).build()?, 10 | )? 11 | .add_instructions( 12 | // `VERGEN_CARGO_FEATURES` 13 | &CargoBuilder::all_cargo()?, 14 | )? 15 | .add_instructions( 16 | // `VERGEN_GIT_SHA` 17 | &GitclBuilder::default().sha(true).build()?, 18 | )?
19 | .emit_and_set() 20 | .context("Unable to generate version info") 21 | } 22 | -------------------------------------------------------------------------------- /services/src/api/handlers/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::contexts::SessionId; 2 | use crate::error::{Error, Result}; 3 | use actix_web::HttpRequest; 4 | use actix_web::http::header; 5 | use actix_web_httpauth::headers::authorization::{Bearer, Scheme}; 6 | use std::str::FromStr; 7 | 8 | pub mod datasets; 9 | pub mod ebv; 10 | pub mod layers; 11 | pub mod machine_learning; 12 | pub mod permissions; 13 | pub mod plots; 14 | pub mod projects; 15 | pub mod spatial_references; 16 | pub mod tasks; 17 | pub mod upload; 18 | pub mod users; 19 | pub mod wcs; 20 | pub mod wfs; 21 | pub mod wms; 22 | pub mod workflows; 23 | 24 | pub fn get_token(req: &HttpRequest) -> Result<SessionId> { 25 | let header = req 26 | .headers() 27 | .get(header::AUTHORIZATION) 28 | .ok_or(Error::Unauthorized { 29 | source: Box::new(Error::MissingAuthorizationHeader), 30 | })?; 31 | let scheme = Bearer::parse(header).map_err(|_| Error::Unauthorized { 32 | source: Box::new(Error::InvalidAuthorizationScheme), 33 | })?; 34 | SessionId::from_str(scheme.token()).map_err(|_err| Error::Unauthorized { 35 | source: Box::new(Error::InvalidUuid), 36 | }) 37 | } 38 | -------------------------------------------------------------------------------- /services/src/api/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod apidoc; 2 | pub mod handlers; 3 | pub mod model; 4 | pub mod ogc; 5 | -------------------------------------------------------------------------------- /services/src/api/model/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod datatypes; 2 | pub mod operators; 3 | pub mod responses; 4 | pub mod services; 5 | -------------------------------------------------------------------------------- /services/src/api/model/responses/datasets/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::datasets::DatasetName; 2 | use serde::{Deserialize, Serialize}; 3 | use utoipa::ToSchema; 4 | 5 | pub mod errors; 6 | 7 | #[derive(Debug, Serialize, Deserialize, Clone, ToSchema)] 8 | #[serde(rename_all = "camelCase")] 9 | #[schema(title = "Dataset Name Response")] 10 | pub struct DatasetNameResponse { 11 | pub dataset_name: DatasetName, 12 | } 13 | 14 | impl From<DatasetName> for DatasetNameResponse { 15 | fn from(dataset_name: DatasetName) -> Self { 16 | Self { dataset_name } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /services/src/api/model/responses/ml_models/mod.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use utoipa::{ToResponse, ToSchema}; 3 | 4 | use crate::machine_learning::name::MlModelName; 5 | 6 | #[derive(Debug, Serialize, Deserialize, Clone, ToResponse, ToSchema)] 7 | #[serde(rename_all = "camelCase")] 8 | #[response(description = "Name of generated resource", example = json!({ 9 | "name": "ns:name" 10 | }))] 11 | pub struct MlModelNameResponse { 12 | pub ml_model_name: MlModelName, 13 | } 14 | 15 | impl From<MlModelName> for MlModelNameResponse { 16 | fn from(ml_model_name: MlModelName) -> Self { 17 | Self { ml_model_name } 18 | } 19 | } 20 | --------------------------------------------------------------------------------
/services/src/api/ogc/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod util; 2 | pub mod wcs; 3 | pub mod wfs; 4 | pub mod wms; 5 | -------------------------------------------------------------------------------- /services/src/api/ogc/wcs/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod request; 2 | -------------------------------------------------------------------------------- /services/src/api/ogc/wfs/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod request; 2 | -------------------------------------------------------------------------------- /services/src/api/ogc/wms/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod request; 2 | -------------------------------------------------------------------------------- /services/src/bin/geoengine-cli.rs: -------------------------------------------------------------------------------- 1 | use clap::{Parser, Subcommand}; 2 | use geoengine_services::cli::{ 3 | CheckSuccessfulStartup, Heartbeat, OpenAPIGenerate, check_heartbeat, check_successful_startup, 4 | output_openapi_json, 5 | }; 6 | 7 | /// CLI for Geo Engine Utilities 8 | #[derive(Debug, Parser)] 9 | #[command(version, about, long_about = None)] 10 | struct Cli { 11 | #[command(subcommand)] 12 | command: Commands, 13 | } 14 | 15 | #[derive(Debug, Subcommand)] 16 | enum Commands { 17 | /// Checks the program's `STDERR` for successful startup 18 | CheckSuccessfulStartup(CheckSuccessfulStartup), 19 | 20 | /// Checks if the Geo Engine server is alive 21 | Heartbeat(Heartbeat), 22 | 23 | /// Outputs OpenAPI JSON 24 | #[command(name = "openapi")] 25 | OpenAPI(OpenAPIGenerate), 26 | } 27 | 28 | impl Commands { 29 | async fn execute(self) -> Result<(), anyhow::Error> { 30 | match self { 31 | Commands::CheckSuccessfulStartup(params) => check_successful_startup(params).await, 32 | Commands::Heartbeat(params) => check_heartbeat(params).await, 33 | Commands::OpenAPI(params) => output_openapi_json(params).await, 34 | } 35 | } 36 | } 37 | 38 | #[tokio::main] 39 | #[allow(clippy::print_stderr)] 40 | async fn main() { 41 | let cli = Cli::parse(); 42 | 43 | if let Err(err) = cli.command.execute().await { 44 | eprintln!("Error: {err}"); 45 | std::process::exit(1); 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /services/src/cli/check_successful_startup.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | use tokio::io::AsyncBufReadExt; 3 | 4 | /// Checks the program's `STDERR` for successful startup 5 | #[derive(Debug, Parser)] 6 | pub struct CheckSuccessfulStartup { 7 | /// Timeout in seconds 8 | #[arg(long, default_value = "60")] 9 | timeout: u16, 10 | 11 | /// Maximum number of lines to check 12 | #[arg(long, default_value = "1000")] 13 | max_lines: u16, 14 | 15 | /// Fail on warnings 16 | #[arg(long, default_value = "false")] 17 | fail_on_warnings: bool, 18 | 19 | /// Output `STDIN` to `STDERR` 20 | #[arg(long, default_value = "false")] 21 | output_stdin: bool, 22 | } 23 | 24 | /// Checks the program's `STDERR` for successful startup 25 | #[allow(clippy::print_stderr)] 26 | pub async fn check_successful_startup(params: CheckSuccessfulStartup) -> Result<(), anyhow::Error> { 27 | eprintln!( 28 | "Checking for successful startup with timeout of {} seconds and {} lines", 29 | params.timeout, 
params.max_lines 30 | ); 31 | 32 | let success = tokio::time::timeout( 33 | std::time::Duration::from_secs(params.timeout.into()), 34 | check_lines( 35 | params.max_lines, 36 | params.fail_on_warnings, 37 | params.output_stdin, 38 | ), 39 | ) 40 | .await 41 | .map_err(|_| anyhow::anyhow!("Timeout"))??; 42 | 43 | if success { 44 | eprintln!("Server started successfully"); 45 | Ok(()) 46 | } else { 47 | Err(anyhow::anyhow!("Server did not start successfully")) 48 | } 49 | } 50 | 51 | #[allow(clippy::print_stderr)] 52 | async fn check_lines( 53 | max_lines: u16, 54 | fail_on_warnings: bool, 55 | output_stdin: bool, 56 | ) -> Result<bool, anyhow::Error> { 57 | let mut line_reader = tokio::io::BufReader::new(tokio::io::stdin()).lines(); 58 | let mut lines_left = max_lines; 59 | 60 | while let Some(line) = line_reader.next_line().await? { 61 | if output_stdin { 62 | eprintln!("{line}"); 63 | } 64 | 65 | if line.contains("Tokio runtime found") { 66 | return Ok(true); 67 | } 68 | 69 | if fail_on_warnings && line.contains("WARN") { 70 | return Err(anyhow::anyhow!("Warning in log output: {line}")); 71 | } 72 | 73 | lines_left -= 1; 74 | if lines_left == 0 { 75 | break; 76 | } 77 | } 78 | 79 | Ok(false) 80 | } 81 | -------------------------------------------------------------------------------- /services/src/cli/heartbeat.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | use url::Url; 3 | 4 | /// Checks if the Geo Engine server is alive 5 | #[derive(Debug, Parser)] 6 | pub struct Heartbeat { 7 | /// Server URL 8 | #[arg(long)] 9 | server_url: Url, 10 | } 11 | 12 | const API_ENDPOINT: &str = "info"; 13 | 14 | /// Checks if the Geo Engine server is alive 15 | #[allow(clippy::print_stderr)] 16 | pub async fn check_heartbeat(params: Heartbeat) -> Result<(), anyhow::Error> { 17 | let server_path = canonicalize_url(params.server_url).join(API_ENDPOINT)?; 18 | let server_response = reqwest::get(server_path).await?; 19 | 20 | if server_response.status().is_success() { 21 | eprintln!("Server is alive"); 22 | Ok(()) 23 | } else { 24 | Err(anyhow::anyhow!( 25 | "Server {url} is not alive.
Status: {status}", 26 | url = server_response.url(), 27 | status = server_response.status() 28 | )) 29 | } 30 | } 31 | 32 | /// Canonicalizes a URL by ensuring it ends with a slash 33 | fn canonicalize_url(mut url: Url) -> Url { 34 | if !url.path().ends_with('/') { 35 | url.set_path(&format!("{}/", url.path())); 36 | } 37 | url 38 | } 39 | -------------------------------------------------------------------------------- /services/src/cli/mod.rs: -------------------------------------------------------------------------------- 1 | mod check_successful_startup; 2 | mod heartbeat; 3 | mod openapi; 4 | 5 | pub use check_successful_startup::{CheckSuccessfulStartup, check_successful_startup}; 6 | pub use heartbeat::{Heartbeat, check_heartbeat}; 7 | pub use openapi::{OpenAPIGenerate, output_openapi_json}; 8 | -------------------------------------------------------------------------------- /services/src/cli/openapi.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::print_stderr, clippy::print_stdout)] // okay in CLI 2 | 3 | use crate::api::apidoc::ApiDoc; 4 | use clap::Parser; 5 | use geoengine_operators::util::spawn_blocking; 6 | use utoipa::OpenApi; 7 | 8 | /// Checks if the Geo Engine server is alive 9 | #[derive(Debug, Parser)] 10 | pub struct OpenAPIGenerate; 11 | 12 | /// Outputs OpenAPI JSON to `STDOUT` 13 | pub async fn output_openapi_json(_params: OpenAPIGenerate) -> Result<(), anyhow::Error> { 14 | spawn_blocking(_output_openapi_json).await? 15 | } 16 | 17 | fn _output_openapi_json() -> Result<(), anyhow::Error> { 18 | let mut spec = ApiDoc::openapi(); 19 | 20 | // make server a wildcard 21 | spec.servers = Some(vec![ 22 | utoipa::openapi::ServerBuilder::new() 23 | .url("{server}/api") 24 | .parameter( 25 | "server", 26 | utoipa::openapi::ServerVariableBuilder::new() 27 | .default_value("https://geoengine.io") 28 | .build(), 29 | ) 30 | .build(), 31 | ]); 32 | 33 | println!("{}", serde_json::to_string_pretty(&spec)?); 34 | 35 | Ok(()) 36 | } 37 | 38 | #[cfg(test)] 39 | mod tests { 40 | use super::*; 41 | use assert_cmd::cargo::CommandCargoExt; 42 | use std::{ 43 | path::{Path, PathBuf}, 44 | process::{Command, Stdio}, 45 | }; 46 | 47 | #[test] 48 | fn it_generates_json() { 49 | let cli_result = Command::cargo_bin("geoengine-cli") 50 | .unwrap() 51 | .arg("openapi") 52 | .current_dir(workspace_dir()) 53 | .stdout(Stdio::piped()) 54 | .output() 55 | .unwrap(); 56 | 57 | assert!( 58 | cli_result.status.success(), 59 | "failed to run CLI: {cli_result:?}", 60 | ); 61 | 62 | let _openapi_spec: serde_json::Value = serde_json::from_slice(&cli_result.stdout).unwrap(); 63 | } 64 | 65 | fn workspace_dir() -> PathBuf { 66 | let output = Command::new(env!("CARGO")) 67 | .arg("locate-project") 68 | .arg("--workspace") 69 | .arg("--message-format=plain") 70 | .output() 71 | .unwrap() 72 | .stdout; 73 | let cargo_path = Path::new(std::str::from_utf8(&output).unwrap().trim()); 74 | cargo_path.parent().unwrap().to_path_buf() 75 | } 76 | 77 | #[tokio::test] 78 | async fn it_runs_successfully() { 79 | output_openapi_json(OpenAPIGenerate).await.unwrap(); 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /services/src/contexts/migrations/migration_0015_log_quota.rs: -------------------------------------------------------------------------------- 1 | use super::database_migration::{DatabaseVersion, Migration}; 2 | use crate::error::Result; 3 | use async_trait::async_trait; 4 | use tokio_postgres::Transaction; 
5 | 6 | /// This migration introduces quota logging; it starts from a snapshot of the full database schema (see `prev_version`) 7 | pub struct Migration0015LogQuota; 8 | 9 | #[async_trait] 10 | impl Migration for Migration0015LogQuota { 11 | fn prev_version(&self) -> Option<DatabaseVersion> { 12 | // Upon migration `0015_log_quota`, we did a major refactoring and removed the deprecated pro migrations. 13 | // Hence, we added a snapshot of the database schema to this migration instead of just the migration itself. 14 | // This is the state of the database schema at commit `071ba4e636a709f05ecb18b6f01bd19f313b0c94`. 15 | // Furthermore, we deleted all prior migrations, so we can't determine the previous version here. 16 | // 17 | // If you have a database version prior to `0015_log_quota`, you will need to migrate to `0015_log_quota` first. 18 | // Use commit `071ba4e636a709f05ecb18b6f01bd19f313b0c94` as a reference. 19 | // Then, you can migrate to the latest version. 20 | // 21 | None 22 | } 23 | 24 | fn version(&self) -> DatabaseVersion { 25 | "0015_log_quota".into() 26 | } 27 | 28 | async fn migrate(&self, tx: &Transaction<'_>) -> Result<()> { 29 | let config = crate::config::get_config_element::<crate::config::Postgres>()?; 30 | 31 | let schema_name = &config.schema; 32 | 33 | if schema_name != "pg_temp" { 34 | tx.batch_execute(&format!("CREATE SCHEMA IF NOT EXISTS {schema_name};",)) 35 | .await?; 36 | } 37 | 38 | tx.batch_execute(include_str!("migration_0015_snapshot.sql")) 39 | .await?; 40 | 41 | tx 42 | .execute( 43 | "INSERT INTO geoengine (clear_database_on_start, database_version) VALUES ($1, '0015_log_quota');", 44 | &[&config.clear_database_on_start]) 45 | .await?; 46 | 47 | Ok(()) 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /services/src/contexts/migrations/migration_0016_merge_providers.rs: -------------------------------------------------------------------------------- 1 | use super::{ 2 | Migration0015LogQuota, 3 | database_migration::{DatabaseVersion, Migration}, 4 | }; 5 | use crate::error::Result; 6 | use async_trait::async_trait; 7 | use tokio_postgres::Transaction; 8 | 9 | /// This migration merges the two providers tables into one 10 | pub struct Migration0016MergeProviders; 11 | 12 | #[async_trait] 13 | impl Migration for Migration0016MergeProviders { 14 | fn prev_version(&self) -> Option<DatabaseVersion> { 15 | Some(Migration0015LogQuota.version()) 16 | } 17 | 18 | fn version(&self) -> DatabaseVersion { 19 | "0016_merge_providers".into() 20 | } 21 | 22 | async fn migrate(&self, tx: &Transaction<'_>) -> Result<()> { 23 | tx.batch_execute(include_str!("migration_0016_merge_providers.sql")) 24 | .await?; 25 | 26 | Ok(()) 27 | } 28 | } 29 | #[cfg(test)] 30 | mod tests { 31 | use crate::contexts::migrations::all_migrations; 32 | use crate::util::postgres::DatabaseConnectionConfig; 33 | use crate::{config::get_config_element, contexts::migrate_database}; 34 | use bb8_postgres::{PostgresConnectionManager, bb8::Pool}; 35 | use tokio_postgres::NoTls; 36 | 37 | #[tokio::test(flavor = "multi_thread", worker_threads = 1)] 38 | async fn it_merges_the_pro_layer_providers_table() { 39 | let postgres_config = get_config_element::<crate::config::Postgres>().unwrap(); 40 | let db_config = DatabaseConnectionConfig::from(postgres_config); 41 | let pg_mgr = PostgresConnectionManager::new(db_config.pg_config(), NoTls); 42 | 43 | let pool = Pool::builder().max_size(1).build(pg_mgr).await.unwrap(); 44 | 45 | let mut conn = pool.get().await.unwrap(); 46 | 47 | // initial schema 48 | migrate_database(&mut conn, &all_migrations()[0..1]) 49 | .await 50 | .unwrap(); 51 | 52 | // insert
test data on initial schema 53 | assert_eq!( 54 | conn.execute(include_str!("migration_0016_test_data.sql"), &[]) 55 | .await 56 | .unwrap(), 57 | 2 58 | ); 59 | 60 | // perform this migration 61 | migrate_database(&mut conn, &all_migrations()[1..=1]) 62 | .await 63 | .unwrap(); 64 | 65 | // verify that entries are in the new table 66 | assert_eq!( 67 | conn.query_one("SELECT COUNT(*) FROM layer_providers", &[]) 68 | .await 69 | .unwrap() 70 | .get::<usize, i64>(0), 71 | 2 72 | ); 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /services/src/contexts/migrations/migration_0016_merge_providers.sql: -------------------------------------------------------------------------------- 1 | ALTER TYPE "DataProviderDefinition" ADD ATTRIBUTE 2 | sentinel_s2_l2_a_cogs_provider_definition 3 | "SentinelS2L2ACogsProviderDefinition"; 4 | 5 | ALTER TYPE "DataProviderDefinition" ADD ATTRIBUTE 6 | copernicus_dataspace_provider_definition 7 | "CopernicusDataspaceDataProviderDefinition"; 8 | 9 | INSERT INTO layer_providers ( 10 | id, 11 | type_name, 12 | name, 13 | definition 14 | ) SELECT 15 | pro.id, 16 | pro.type_name, 17 | pro.name, 18 | ( 19 | NULL, 20 | NULL, 21 | NULL, 22 | NULL, 23 | NULL, 24 | NULL, 25 | NULL, 26 | NULL, 27 | NULL, -- noqa: PRS 28 | (pro.definition).sentinel_s2_l2_a_cogs_provider_definition, 29 | NULL 30 | )::"DataProviderDefinition" 31 | FROM pro_layer_providers AS pro 32 | WHERE pro.type_name = 'SentinelS2L2ACogsProviderDefinition'; 33 | 34 | INSERT INTO layer_providers ( 35 | id, 36 | type_name, 37 | name, 38 | definition 39 | ) SELECT 40 | pro.id, 41 | pro.type_name, 42 | pro.name, 43 | ( 44 | NULL, 45 | NULL, 46 | NULL, 47 | NULL, 48 | NULL, 49 | NULL, 50 | NULL, 51 | NULL, 52 | NULL, 53 | NULL, -- noqa: PRS 54 | (pro.definition).copernicus_dataspace_provider_definition 55 | )::"DataProviderDefinition" 56 | FROM pro_layer_providers AS pro 57 | WHERE pro.type_name = 'CopernicusDataspaceDataProviderDefinition'; 58 | 59 | DROP TABLE pro_layer_providers; 60 | DROP TYPE "ProDataProviderDefinition"; 61 | 62 | -- user_sessions 63 | 64 | ALTER TABLE sessions ADD COLUMN 65 | user_id uuid REFERENCES users (id) ON DELETE CASCADE; 66 | ALTER TABLE sessions ADD COLUMN created timestamp with time zone; 67 | ALTER TABLE sessions ADD COLUMN valid_until timestamp with time zone; 68 | 69 | UPDATE sessions SET 70 | user_id = us.user_id, 71 | created = us.created, 72 | valid_until = us.valid_until 73 | FROM user_sessions AS us 74 | WHERE sessions.id = us.session_id; 75 | 76 | ALTER TABLE sessions ALTER COLUMN user_id SET NOT NULL; 77 | ALTER TABLE sessions ALTER COLUMN created SET NOT NULL; 78 | ALTER TABLE sessions ALTER COLUMN valid_until SET NOT NULL; 79 | 80 | DROP TABLE user_sessions; 81 | -------------------------------------------------------------------------------- /services/src/contexts/migrations/migration_0016_test_data.sql: -------------------------------------------------------------------------------- 1 | INSERT INTO pro_layer_providers ( 2 | id, 3 | type_name, 4 | name, 5 | definition, 6 | priority 7 | ) 8 | VALUES ( 9 | '409add03-2bfa-43da-86d1-6de18cbd1e50', 10 | 'SentinelS2L2ACogsProviderDefinition', 11 | 'SentinelS2L2ACogsProviderDefinition', 12 | ( 13 | ( 14 | 'Element 84 AWS STAC', 15 | '409add03-2bfa-43da-86d1-6de18cbd1e50', 16 | '/v0/collections/sentinel-s2-l2a-cogs/items', 17 | ARRAY[]::"StacBand" [], -- noqa: PRS 18 | ARRAY[]::"StacZone" [], 19 | (1, 100, 2.0)::"StacApiRetries", 20 | '(999)'::"GdalRetries", 21 | 0, 22 | 'Access to
Sentinel 2 L2A COGs on AWS', 23 | 10, 24 | (1, 10)::"StacQueryBuffer" 25 | )::"SentinelS2L2ACogsProviderDefinition", -- noqa: PRS 26 | NULL 27 | )::"ProDataProviderDefinition", 28 | 10 29 | ), 30 | ( 31 | 'd3cd1013-c41f-4ac7-938b-3a50e1b9ae5e', 32 | 'CopernicusDataspaceDataProviderDefinition', 33 | 'CopernicusDataspaceDataProviderDefinition', 34 | ( 35 | NULL, -- noqa: PRS 36 | ( 37 | 'Copernicus Dataspace Data', 38 | 'd3cd1013-c41f-4ac7-938b-3a50e1b9ae5e', 39 | 'https://catalogue.dataspace.copernicus.eu/stac', 40 | 'dataspace.copernicus.eu', 41 | 'XYZ', 42 | 'XYZ', 43 | 'Access to Copernicus Dataspace Data', 44 | 10, 45 | ARRAY[ARRAY['key', 'VALUE']::"StringPair"]::"StringPair" [] 46 | )::"CopernicusDataspaceDataProviderDefinition" 47 | )::"ProDataProviderDefinition", 48 | 10 49 | ); 50 | -------------------------------------------------------------------------------- /services/src/contexts/migrations/migration_0017_ml_model_tensor_shape.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use tokio_postgres::Transaction; 3 | 4 | use crate::error::Result; 5 | 6 | use super::database_migration::{DatabaseVersion, Migration}; 7 | 8 | /// This migration adds tensor shape to `MlModel` input and output 9 | pub struct Migration0017MlModelTensorShape; 10 | 11 | #[async_trait] 12 | impl Migration for Migration0017MlModelTensorShape { 13 | fn prev_version(&self) -> Option<DatabaseVersion> { 14 | Some("0016_merge_providers".into()) 15 | } 16 | 17 | fn version(&self) -> DatabaseVersion { 18 | "0017_ml_model_tensor_shape".into() 19 | } 20 | 21 | async fn migrate(&self, tx: &Transaction<'_>) -> Result<()> { 22 | tx.batch_execute( 23 | r#" 24 | CREATE TYPE "MlTensorShape3D" AS ( 25 | x OID, 26 | y OID, 27 | bands OID 28 | ); 29 | 30 | ALTER TYPE "MlModelMetadata" ADD ATTRIBUTE input_shape "MlTensorShape3D"; 31 | ALTER TYPE "MlModelMetadata" ADD ATTRIBUTE output_shape "MlTensorShape3D"; 32 | 33 | WITH models_with_bands AS ( 34 | SELECT 35 | id, 36 | metadata 37 | FROM ml_models 38 | WHERE (metadata).num_input_bands IS NOT NULL 39 | ) 40 | UPDATE ml_models 41 | SET 42 | metadata.input_shape = (1, 1, (models_with_bands.metadata).num_input_bands)::"MlTensorShape3D", 43 | metadata.output_shape = (1, 1, 1)::"MlTensorShape3D" 44 | FROM models_with_bands 45 | WHERE ml_models.id = models_with_bands.id; 46 | 47 | ALTER TYPE "MlModelMetadata" DROP ATTRIBUTE num_input_bands; 48 | "#, 49 | ) 50 | .await?; 51 | Ok(()) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /services/src/contexts/migrations/migration_0018_wildlive_connector.sql: -------------------------------------------------------------------------------- 1 | CREATE TYPE "WildliveDataConnectorDefinition" AS ( 2 | id uuid, 3 | "name" text, 4 | description text, 5 | api_key text, 6 | priority smallint 7 | ); 8 | 9 | ALTER TYPE "DataProviderDefinition" ADD ATTRIBUTE 10 | wildlive_data_connector_definition "WildliveDataConnectorDefinition"; 11 | 12 | CREATE TABLE wildlive_projects ( 13 | provider_id uuid NOT NULL, 14 | cache_date date NOT NULL, 15 | project_id text NOT NULL, 16 | name text NOT NULL, 17 | description text NOT NULL, 18 | geom public.GEOMETRY (POLYGON) NOT NULL, 19 | 20 | -- TODO: check if we need it 21 | PRIMARY KEY (provider_id, cache_date, project_id) DEFERRABLE 22 | ); 23 | 24 | CREATE TABLE wildlive_stations ( 25 | provider_id uuid NOT NULL, 26 | cache_date date NOT NULL, 27 | station_id text NOT NULL, 28 | project_id text NOT NULL, 29 | name text NOT NULL, 30 | description text NOT NULL,
31 | location text NOT NULL, 32 | geom public.GEOMETRY (POINT) NOT NULL, 33 | 34 | -- TODO: check if we need it 35 | PRIMARY KEY (provider_id, cache_date, project_id, station_id) DEFERRABLE 36 | ); 37 | 38 | CREATE TABLE wildlive_captures ( 39 | provider_id uuid NOT NULL, 40 | cache_date date NOT NULL, 41 | image_object_id text NOT NULL, 42 | project_id text NOT NULL, 43 | station_setup_id text NOT NULL, 44 | capture_time_stamp timestamp with time zone NOT NULL, 45 | accepted_name_usage_id text NOT NULL, 46 | vernacular_name text NOT NULL, 47 | scientific_name text NOT NULL, 48 | content_url text NOT NULL, 49 | geom public.GEOMETRY (POINT) NOT NULL, 50 | 51 | -- TODO: check if we need it 52 | PRIMARY KEY ( 53 | provider_id, cache_date, project_id, image_object_id 54 | ) DEFERRABLE 55 | ); 56 | -------------------------------------------------------------------------------- /services/src/contexts/session.rs: -------------------------------------------------------------------------------- 1 | use crate::identifier; 2 | use crate::projects::ProjectId; 3 | use crate::projects::STRectangle; 4 | use geoengine_datatypes::primitives::DateTime; 5 | use serde::Serialize; 6 | 7 | identifier!(SessionId); 8 | 9 | pub trait Session: Send + Sync + Serialize { 10 | fn id(&self) -> SessionId; 11 | fn created(&self) -> &DateTime; 12 | fn valid_until(&self) -> &DateTime; 13 | fn project(&self) -> Option<ProjectId>; 14 | fn view(&self) -> Option<&STRectangle>; 15 | } 16 | -------------------------------------------------------------------------------- /services/src/datasets/external/aruna/error.rs: -------------------------------------------------------------------------------- 1 | use snafu::prelude::*; 2 | use tonic::metadata::errors::InvalidMetadataValue; 3 | 4 | #[derive(Debug, Snafu)] 5 | #[snafu(visibility(pub(crate)))] 6 | #[snafu(context(suffix(false)))] // disables default `Snafu` suffix 7 | pub enum ArunaProviderError { 8 | InvalidAPIToken { source: InvalidMetadataValue }, 9 | InvalidDataId, 10 | InvalidUri { uri_string: String }, 11 | InvalidMetaObject, 12 | MissingProject, 13 | MissingDataset, 14 | MissingObject, 15 | MissingDataObject, 16 | MissingMetaObject, 17 | MissingArunaMetaData, 18 | MissingRelation, 19 | MissingURL, 20 | MissingLabel { resource_id: String }, 21 | ResourceNotAvailable { resource_id: String }, 22 | Reqwest { source: reqwest::Error }, 23 | UnexpectedObjectHierarchy, 24 | TonicStatus { source: tonic::Status }, 25 | TonicTransport { source: tonic::transport::Error }, 26 | } 27 | 28 | impl From<tonic::Status> for ArunaProviderError { 29 | fn from(source: tonic::Status) -> Self { 30 | Self::TonicStatus { source } 31 | } 32 | } 33 | 34 | impl From<tonic::transport::Error> for ArunaProviderError { 35 | fn from(source: tonic::transport::Error) -> Self { 36 | Self::TonicTransport { source } 37 | } 38 | } 39 | 40 | impl From<reqwest::Error> for ArunaProviderError { 41 | fn from(source: reqwest::Error) -> Self { 42 | Self::Reqwest { source } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /services/src/datasets/external/copernicus_dataspace/mod.rs: -------------------------------------------------------------------------------- 1 | mod ids; 2 | mod provider; 3 | mod sentinel2; 4 | mod stac; 5 | 6 | pub use provider::CopernicusDataspaceDataProviderDefinition; 7 | -------------------------------------------------------------------------------- /services/src/datasets/external/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod aruna; 2 | mod
copernicus_dataspace; 3 | pub mod edr; 4 | pub mod gbif; 5 | pub mod gfbio_abcd; 6 | pub mod gfbio_collections; 7 | pub mod netcdfcf; 8 | pub mod pangaea; 9 | mod sentinel_s2_l2a_cogs; 10 | mod wildlive; 11 | 12 | pub use copernicus_dataspace::CopernicusDataspaceDataProviderDefinition; 13 | pub use sentinel_s2_l2a_cogs::{ 14 | GdalRetries, SentinelS2L2ACogsProviderDefinition, StacApiRetries, StacBand, StacQueryBuffer, 15 | StacZone, 16 | }; 17 | pub use wildlive::{WildliveDataConnectorDefinition, WildliveDbCache, WildliveError}; 18 | -------------------------------------------------------------------------------- /services/src/datasets/external/wildlive/error.rs: -------------------------------------------------------------------------------- 1 | use geoengine_datatypes::error::ErrorSource; 2 | use snafu::Snafu; 3 | 4 | #[derive(Debug, Snafu)] 5 | #[snafu(visibility(pub(crate)), context(suffix(false)))] 6 | pub enum WildliveError { 7 | #[snafu(display("Unable to parse collection id"))] 8 | UnableToSerializeCollectionId { 9 | source: Box<dyn ErrorSource>, 10 | }, 11 | #[snafu(display("Unable to parse JSON: {source}"), context(false))] 12 | InvalidJSON { 13 | source: serde_json::Error, 14 | }, 15 | #[snafu(display("Unable to parse URL: {source}"), context(false))] 16 | InvalidUrl { 17 | source: url::ParseError, 18 | }, 19 | #[snafu(display("Unable to make web request: {source}"), context(false))] 20 | InvalidRequest { 21 | source: reqwest::Error, 22 | }, 23 | #[snafu(display("Unable to get bounds for project: {source}"))] 24 | InvalidProjectBounds { 25 | source: geoengine_datatypes::error::Error, 26 | }, 27 | #[snafu(display("Unable to get bounds for project: {project}"))] 28 | EmptyProjectBounds { 29 | project: String, 30 | }, 31 | #[snafu(display("Unexpected execution error: Please contact the system administrator"))] 32 | UnexpectedExecution { 33 | source: Box<dyn ErrorSource>, 34 | }, 35 | #[snafu(display("Unable to create temporary directory: {source}"))] 36 | TempDirCreation { 37 | source: std::io::Error, 38 | }, 39 | UnableToCreateDatasetFilename { 40 | source: std::fmt::Error, 41 | }, 42 | UnableToWriteDataset { 43 | source: std::io::Error, 44 | }, 45 | 46 | InvalidCaptureTimeStamp { 47 | source: Box<dyn ErrorSource>, 48 | }, 49 | 50 | UnableToLookupStation, 51 | } 52 | -------------------------------------------------------------------------------- /services/src/datasets/mod.rs: -------------------------------------------------------------------------------- 1 | mod create_from_workflow; 2 | pub(crate) mod dataset_listing_provider; 3 | pub mod external; // TODO: move to layers/external 4 | pub mod listing; 5 | mod name; 6 | pub mod postgres; 7 | pub mod storage; 8 | pub mod upload; 9 | 10 | pub(crate) use create_from_workflow::{ 11 | RasterDatasetFromWorkflow, RasterDatasetFromWorkflowResult, 12 | schedule_raster_dataset_from_workflow_task, 13 | }; 14 | pub use name::{DatasetIdAndName, DatasetName, DatasetNameError}; 15 | pub use storage::AddDataset; 16 | -------------------------------------------------------------------------------- /services/src/layers/error.rs: -------------------------------------------------------------------------------- 1 | use super::listing::LayerCollectionId; 2 | use geoengine_datatypes::dataset::LayerId; 3 | use snafu::Snafu; 4 | 5 | #[derive(Debug, Snafu)] 6 | #[snafu( 7 | visibility(pub(crate)), 8 | context(suffix(false)) /* disables default `Snafu` suffix */, 9 | )] 10 | pub enum LayerDbError { 11 | #[snafu(display("There is no layer with the given id {id}"))] 12 | NoLayerForGivenId { id: LayerId },
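// Note on the snafu attributes used across these error modules: on the enum,
// `context(suffix(false))` only drops the generated selector suffix, while a
// per-variant `context(false)` (as on `WildliveError::InvalidJSON` above) makes
// snafu derive `From<Source>`, so call sites can use `?` directly. A hedged
// sketch, assuming a `&str` input `s`:
//     let parsed: serde_json::Value = serde_json::from_str(s)?; // -> InvalidJSON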
13 | 14 | #[snafu(display("There is no layer collection with the given id {id}"))] 15 | NoLayerCollectionForGivenId { id: LayerCollectionId }, 16 | 17 | #[snafu(display("There is no layer {layer} in collection {collection}"))] 18 | NoLayerForGivenIdInCollection { 19 | collection: LayerCollectionId, 20 | layer: LayerId, 21 | }, 22 | 23 | #[snafu(display("There is no collection {collection} in collection {parent}"))] 24 | NoCollectionForGivenIdInCollection { 25 | collection: LayerCollectionId, 26 | parent: LayerCollectionId, 27 | }, 28 | 29 | #[snafu(display("You must not remove the root collection"))] 30 | CannotRemoveRootCollection, 31 | } 32 | -------------------------------------------------------------------------------- /services/src/layers/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod add_from_directory; 2 | pub mod error; // pub to export all Snafu-generated errors 3 | pub mod external; 4 | pub mod layer; 5 | pub mod listing; 6 | mod postgres_layer_db; 7 | pub mod storage; 8 | 9 | pub use error::LayerDbError; 10 | -------------------------------------------------------------------------------- /services/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod api; 2 | pub mod cli; 3 | pub mod config; 4 | pub mod contexts; 5 | pub mod datasets; 6 | pub mod error; 7 | pub mod layers; 8 | pub mod machine_learning; 9 | pub mod permissions; 10 | pub mod projects; 11 | pub mod quota; 12 | pub mod server; 13 | pub mod stac; 14 | pub mod tasks; 15 | pub mod users; 16 | #[macro_use] 17 | pub mod util; 18 | pub mod workflows; 19 | 20 | pub use geoengine_datatypes::test_data; 21 | 22 | // re-export test macro 23 | pub mod ge_context { 24 | pub use geoengine_macros::test; 25 | } 26 | -------------------------------------------------------------------------------- /services/src/machine_learning/error.rs: -------------------------------------------------------------------------------- 1 | use snafu::Snafu; 2 | use strum::IntoStaticStr; 3 | 4 | use super::MlModelName; 5 | 6 | #[derive(Debug, Snafu, IntoStaticStr)] 7 | #[snafu(visibility(pub(crate)))] 8 | #[snafu(context(suffix(MachineLearningError)), module(error))] // replaces the default `Snafu` suffix and puts selectors into an `error` module 9 | pub enum MachineLearningError { 10 | CouldNotFindMlModelFile { 11 | source: crate::error::Error, 12 | }, 13 | ModelNotFound { 14 | name: MlModelName, 15 | }, 16 | DuplicateMlModelName { 17 | name: MlModelName, 18 | }, 19 | InvalidModelNamespace { 20 | name: MlModelName, 21 | }, 22 | #[snafu(display("An unexpected database error occurred."))] 23 | Postgres { 24 | source: tokio_postgres::Error, 25 | }, 26 | #[snafu(display("An unexpected database error occurred."))] 27 | Bb8 { 28 | source: bb8_postgres::bb8::RunError<tokio_postgres::Error>, 29 | }, 30 | #[snafu(display("An underlying MachineLearningError occurred: {source}"))] 31 | MachineLearning { 32 | source: geoengine_operators::machine_learning::MachineLearningError, 33 | }, 34 | } 35 | 36 | impl From<bb8_postgres::tokio_postgres::error::Error> for MachineLearningError { 37 | fn from(e: bb8_postgres::tokio_postgres::error::Error) -> Self { 38 | Self::Postgres { source: e } 39 | } 40 | } 41 | 42 | impl From<geoengine_operators::machine_learning::MachineLearningError> for MachineLearningError { 43 | fn from(e: geoengine_operators::machine_learning::MachineLearningError) -> Self { 44 | Self::MachineLearning { source: e } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /services/src/projects/error.rs:
-------------------------------------------------------------------------------- 1 | use geoengine_datatypes::error::ErrorSource; 2 | use snafu::prelude::*; 3 | 4 | use super::{ProjectId, ProjectVersionId}; 5 | 6 | #[derive(Debug, Snafu)] 7 | #[snafu(visibility(pub(crate)), context(suffix(ProjectDbError)))] 8 | pub enum ProjectDbError { 9 | #[snafu(display("Project {project} does not exist"))] 10 | ProjectNotFound { project: ProjectId }, 11 | #[snafu(display("Version {version} of project {project} does not exist"))] 12 | ProjectVersionNotFound { 13 | project: ProjectId, 14 | version: ProjectVersionId, 15 | }, 16 | #[snafu(display("Updating project {project} failed"))] 17 | ProjectUpdateFailed { project: ProjectId }, 18 | #[snafu(display("Accessing project {project} failed: {source}"))] 19 | AccessFailed { 20 | project: ProjectId, 21 | source: Box<dyn ErrorSource>, 22 | }, 23 | #[snafu(display("An unexpected database error occurred."))] 24 | Postgres { source: tokio_postgres::Error }, 25 | #[snafu(display("An unexpected database error occurred."))] 26 | Bb8 { 27 | source: bb8_postgres::bb8::RunError<tokio_postgres::Error>, 28 | }, 29 | } 30 | -------------------------------------------------------------------------------- /services/src/projects/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod error; 2 | pub mod postgres_projectdb; 3 | mod project; 4 | mod projectdb; 5 | 6 | pub use project::{ 7 | ColorParam, CreateProject, Delete as ProjectUpdateToken, DerivedColor, DerivedNumber, 8 | LayerType, LayerUpdate, LayerVisibility, LineSymbology, LoadVersion, NumberParam, OrderBy, 9 | Plot, PlotUpdate, PointSymbology, PolygonSymbology, Project, ProjectId, ProjectLayer, 10 | ProjectListOptions, ProjectListing, ProjectVersion, ProjectVersionId, RasterSymbology, 11 | STRectangle, StaticColor, StaticNumber, StrokeParam, Symbology, TextSymbology, UpdateProject, 12 | }; 13 | pub use projectdb::ProjectDb; 14 | -------------------------------------------------------------------------------- /services/src/projects/projectdb.rs: -------------------------------------------------------------------------------- 1 | use crate::projects::project::{ 2 | CreateProject, Project, ProjectId, ProjectListOptions, ProjectListing, UpdateProject, 3 | }; 4 | 5 | use async_trait::async_trait; 6 | 7 | use super::{LoadVersion, ProjectVersion, error::ProjectDbError}; 8 | 9 | /// Storage of user projects 10 | #[async_trait] 11 | pub trait ProjectDb: Send + Sync { 12 | /// List all projects accessible to `user` that match the `options` 13 | async fn list_projects( 14 | &self, 15 | options: ProjectListOptions, 16 | ) -> Result<Vec<ProjectListing>, ProjectDbError>; 17 | 18 | /// Load the latest version of the `project` for the `user` 19 | async fn load_project(&self, project: ProjectId) -> Result<Project, ProjectDbError>; 20 | 21 | /// Create a new `project` for the `user` 22 | async fn create_project(&self, project: CreateProject) -> Result<ProjectId, ProjectDbError>; 23 | 24 | /// Update a `project` for the `user`.
A new version is created. 25 | async fn update_project(&self, project: UpdateProject) -> Result<(), ProjectDbError>; 26 | 27 | /// Delete the `project` if `user` is an owner 28 | async fn delete_project(&self, project: ProjectId) -> Result<(), ProjectDbError>; 29 | 30 | /// Load the `version` of the `project` for the `user` 31 | async fn load_project_version( 32 | &self, 33 | project: ProjectId, 34 | version: LoadVersion, 35 | ) -> Result<Project, ProjectDbError>; 36 | 37 | /// List all versions of the `project` if given `user` has at least read permission 38 | async fn list_project_versions( 39 | &self, 40 | project: ProjectId, 41 | ) -> Result<Vec<ProjectVersion>, ProjectDbError>; 42 | } 43 | -------------------------------------------------------------------------------- /services/src/tasks/error.rs: -------------------------------------------------------------------------------- 1 | use snafu::Snafu; 2 | 3 | use super::TaskId; 4 | 5 | #[derive(Debug, Snafu)] 6 | #[snafu(visibility(pub(crate)))] 7 | #[snafu(context(suffix(false)))] // disables default `Snafu` suffix 8 | pub enum TaskError { 9 | #[snafu(display("Task not found with id: {task_id}"))] 10 | TaskNotFound { task_id: TaskId }, 11 | 12 | #[snafu(display("Task was aborted by the user: {task_id}"))] 13 | TaskAborted { task_id: TaskId }, 14 | 15 | #[snafu(display("Task was already aborted by the user: {task_id}"))] 16 | TaskAlreadyAborted { task_id: TaskId }, 17 | 18 | #[snafu(display("Task was already finished: {task_id}"))] 19 | TaskAlreadyFinished { task_id: TaskId }, 20 | 21 | #[snafu(display("Task is duplicate. Type: {task_type}, Unique ID: {task_unique_id}"))] 22 | DuplicateTask { 23 | task_type: &'static str, 24 | task_unique_id: String, 25 | }, 26 | 27 | TaskManagerOperationFailed { 28 | source: Box<dyn geoengine_datatypes::error::ErrorSource>, 29 | }, 30 | } 31 | -------------------------------------------------------------------------------- /services/src/tasks/util.rs: -------------------------------------------------------------------------------- 1 | use super::{TaskContext, TaskId, TaskManager, TaskStatusInfo}; 2 | 3 | pub mod test { 4 | use super::{TaskContext, TaskId, TaskManager}; 5 | use std::sync::Arc; 6 | 7 | /// Test helper for waiting for a task to finish 8 | /// 9 | /// # Panics 10 | /// Panics if task does not finish within some time.
11 | /// 12 | pub async fn wait_for_task_to_finish<C: TaskContext>( 13 | task_manager: Arc<impl TaskManager<C>>, 14 | task_id: TaskId, 15 | ) { 16 | geoengine_operators::util::retry::retry(10, 100, 2., None, move || { 17 | let task_manager = task_manager.clone(); 18 | async move { 19 | let status = task_manager 20 | .get_task_status(task_id) 21 | .await 22 | .expect("it should only be used in tests"); 23 | status.is_finished().then_some(()).ok_or(()) 24 | } 25 | }) 26 | .await 27 | .expect("it should only be used in tests"); 28 | } 29 | } 30 | 31 | /// A task context for testing that does nothing 32 | pub struct NopTaskContext; 33 | 34 | #[async_trait::async_trait] 35 | impl TaskContext for NopTaskContext { 36 | async fn set_completion(&self, _pct_complete: f64, _status: Box<dyn TaskStatusInfo>) {} 37 | } 38 | -------------------------------------------------------------------------------- /services/src/users/mod.rs: -------------------------------------------------------------------------------- 1 | mod oidc; 2 | mod postgres_userdb; 3 | mod session; 4 | mod user; 5 | mod userdb; 6 | 7 | pub(crate) use oidc::OidcError; 8 | pub(crate) use oidc::{AuthCodeRequestURL, AuthCodeResponse, OidcDisabled, OidcManager}; 9 | #[cfg(test)] 10 | pub(super) use oidc::{DefaultJsonWebKeySet, DefaultProviderMetadata, OidcTokens, UserClaims}; 11 | pub use session::{UserInfo, UserSession}; 12 | pub use user::{User, UserCredentials, UserId, UserRegistration}; 13 | pub use userdb::{RoleDb, SessionTokenStore, StoredOidcTokens, UserAuth, UserDb}; 14 | -------------------------------------------------------------------------------- /services/src/users/user.rs: -------------------------------------------------------------------------------- 1 | use pwhash::bcrypt; 2 | use serde::{Deserialize, Serialize}; 3 | use utoipa::ToSchema; 4 | use validator::Validate; 5 | 6 | use crate::error::Result; 7 | use crate::identifier; 8 | use crate::permissions::{Role, RoleId}; 9 | use geoengine_datatypes::util::Identifier; 10 | 11 | #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone, Hash, ToSchema, Validate)] 12 | #[serde(rename_all = "camelCase")] 13 | #[schema(example = json!({ 14 | "email": "foo@example.com", 15 | "password": "secret123", 16 | "realName": "Foo Bar" 17 | }))] 18 | pub struct UserRegistration { 19 | #[validate(email)] 20 | pub email: String, 21 | #[validate(length(min = 8))] 22 | pub password: String, 23 | #[validate(length(min = 1))] 24 | pub real_name: String, 25 | } 26 | 27 | #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone, Hash, ToSchema, Validate)] 28 | #[schema(example = json!({ 29 | "email": "foo@example.com", 30 | "password": "secret123", 31 | }))] 32 | pub struct UserCredentials { 33 | #[validate(email)] 34 | pub email: String, 35 | #[validate(length(min = 8))] 36 | pub password: String, 37 | } 38 | 39 | identifier!(UserId); 40 | 41 | #[derive(Clone)] 42 | pub struct User { 43 | pub id: UserId, 44 | pub email: String, 45 | pub password_hash: String, 46 | pub real_name: String, 47 | pub active: bool, 48 | pub roles: Vec<RoleId>, 49 | } 50 | 51 | impl From<UserRegistration> for User { 52 | fn from(user_registration: UserRegistration) -> Self { 53 | let id = UserId::new(); 54 | Self { 55 | id, 56 | email: user_registration.email, 57 | 58 | password_hash: bcrypt::hash(&user_registration.password) 59 | // TODO: use error instead to be sure 60 | .expect("the random number generator should always be accessible"), 61 | real_name: user_registration.real_name, 62 | active: true, 63 | roles: vec![id.into(), Role::registered_user_role_id()], 64 | } 65 | } 66 | } 67 |
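The registration path above stores only a bcrypt hash. A minimal sketch of the matching login-time check, assuming `pwhash`'s `bcrypt::verify` (the counterpart of the `bcrypt::hash` call in `From<UserRegistration>`); the real credential lookup lives in the `UserDb` implementations:

use pwhash::bcrypt;

/// Illustrative only: returns whether `password` matches the stored hash.
fn password_matches(user: &User, password: &str) -> bool {
    bcrypt::verify(password, &user.password_hash)
}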
-------------------------------------------------------------------------------- /services/src/util/operators.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Result; 2 | use geoengine_datatypes::dataset::NamedData; 3 | use geoengine_operators::{ 4 | engine::{OperatorName, RasterOperator, TypedOperator, VectorOperator}, 5 | mock::{MockDatasetDataSource, MockDatasetDataSourceParams}, 6 | source::{GdalSource, GdalSourceParameters, OgrSource, OgrSourceParameters}, 7 | }; 8 | 9 | pub fn source_operator_from_dataset( 10 | source_operator_name: &str, 11 | name: &NamedData, 12 | ) -> Result<TypedOperator> { 13 | Ok(match source_operator_name { 14 | OgrSource::TYPE_NAME => TypedOperator::Vector( 15 | OgrSource { 16 | params: OgrSourceParameters { 17 | data: name.clone(), 18 | attribute_projection: None, 19 | attribute_filters: None, 20 | }, 21 | } 22 | .boxed(), 23 | ), 24 | GdalSource::TYPE_NAME => TypedOperator::Raster( 25 | GdalSource { 26 | params: GdalSourceParameters { data: name.clone() }, 27 | } 28 | .boxed(), 29 | ), 30 | MockDatasetDataSource::TYPE_NAME => TypedOperator::Vector( 31 | MockDatasetDataSource { 32 | params: MockDatasetDataSourceParams { data: name.clone() }, 33 | } 34 | .boxed(), 35 | ), 36 | s => { 37 | return Err(crate::error::Error::UnknownOperator { 38 | operator: s.to_owned(), 39 | }); 40 | } 41 | }) 42 | } 43 | -------------------------------------------------------------------------------- /services/src/util/workflows.rs: -------------------------------------------------------------------------------- 1 | use crate::{error::Result, workflows::workflow::Workflow}; 2 | use geoengine_operators::engine::{ExecutionContext, TypedOperator, WorkflowOperatorPath}; 3 | 4 | /// Ensures the workflow is valid by initializing it 5 | pub async fn validate_workflow<E: ExecutionContext>( 6 | workflow: &Workflow, 7 | execution_context: &E, 8 | ) -> Result<()> { 9 | let workflow_operator_path_root = WorkflowOperatorPath::initialize_root(); 10 | 11 | match workflow.clone().operator { 12 | TypedOperator::Vector(o) => { 13 | o.initialize(workflow_operator_path_root, execution_context) 14 | .await?; 15 | } 16 | TypedOperator::Raster(o) => { 17 | o.initialize(workflow_operator_path_root, execution_context) 18 | .await?; 19 | } 20 | TypedOperator::Plot(o) => { 21 | o.initialize(workflow_operator_path_root, execution_context) 22 | .await?; 23 | } 24 | } 25 | 26 | Ok(()) 27 | } 28 | -------------------------------------------------------------------------------- /services/src/workflows/mod.rs: -------------------------------------------------------------------------------- 1 | mod postgres_workflow_registry; 2 | mod raster_stream; 3 | pub mod registry; 4 | mod vector_stream; 5 | pub mod workflow; 6 | 7 | pub use raster_stream::RasterWebsocketStreamHandler; 8 | pub use vector_stream::VectorWebsocketStreamHandler; 9 | -------------------------------------------------------------------------------- /services/src/workflows/registry.rs: -------------------------------------------------------------------------------- 1 | use super::workflow::{Workflow, WorkflowId}; 2 | 3 | use crate::error::Result; 4 | use async_trait::async_trait; 5 | 6 | #[async_trait] 7 | pub trait TxWorkflowRegistry: Send + Sync { 8 | async fn register_workflow_in_tx( 9 | &self, 10 | workflow: Workflow, 11 | tx: &tokio_postgres::Transaction<'_>, 12 | ) -> Result<WorkflowId>; 13 | } 14 | 15 | #[async_trait] 16 | pub trait WorkflowRegistry: Send + Sync { 17 | async fn register_workflow(&self, workflow: Workflow) -> Result<WorkflowId>;
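// Usage sketch (illustrative): registering a workflow yields its `WorkflowId`,
// which round-trips through `load_workflow` below:
//     let id = registry.register_workflow(workflow).await?;
//     let loaded = registry.load_workflow(&id).await?;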
18 | async fn load_workflow(&self, id: &WorkflowId) -> Result<Workflow>; 19 | } 20 | -------------------------------------------------------------------------------- /services/tests/drivers.rs: -------------------------------------------------------------------------------- 1 | //! We need to run this in a separate process since it changes the global state of the GDAL driver 2 | 3 | use gdal::{Dataset, DriverManager}; 4 | use geoengine_datatypes::test_data; 5 | use geoengine_operators::util::gdal::register_gdal_drivers_from_list; 6 | use std::collections::HashSet; 7 | 8 | #[test] 9 | fn test_gdal_driver_restriction() { 10 | register_gdal_drivers_from_list(HashSet::new()); 11 | 12 | let dataset_path = test_data!("raster/geotiff_from_stream_compressed.tiff").to_path_buf(); 13 | 14 | assert!(Dataset::open(&dataset_path).is_err()); 15 | 16 | DriverManager::register_all(); 17 | 18 | register_gdal_drivers_from_list(HashSet::from([ 19 | "GTiff".to_string(), 20 | "CSV".to_string(), 21 | "GPKG".to_string(), 22 | ])); 23 | 24 | assert!(Dataset::open(&dataset_path).is_ok()); 25 | 26 | // reset for other tests 27 | 28 | DriverManager::register_all(); 29 | 30 | assert!(Dataset::open(&dataset_path).is_ok()); 31 | } 32 | -------------------------------------------------------------------------------- /services/tests/openapi.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::unwrap_used, clippy::print_stdout, clippy::print_stderr)] // okay in tests 2 | 3 | use assert_cmd::cargo::CommandCargoExt; 4 | use geoengine_services::test_data; 5 | use pretty_assertions::assert_eq; 6 | use std::process::{Command, Stdio}; 7 | 8 | #[tokio::test] 9 | async fn it_has_the_latest_openapi_schema_stored_in_the_repository() { 10 | // change cwd s.t.
the config file can be found 11 | std::env::set_current_dir(test_data!("..")).expect("failed to set current directory"); 12 | 13 | let startup_result = Command::cargo_bin("geoengine-cli") 14 | .unwrap() 15 | .args(["openapi"]) 16 | .stdout(Stdio::piped()) 17 | .output() 18 | .unwrap(); 19 | 20 | assert!( 21 | startup_result.status.success(), 22 | "failed to output openapi schema from CLI: {startup_result:?}", 23 | ); 24 | 25 | let spec_from_cli = String::from_utf8(startup_result.stdout).unwrap(); 26 | let spec_from_file = include_str!("../../openapi.json"); 27 | 28 | assert_eq!(spec_from_cli, spec_from_file); 29 | } 30 | -------------------------------------------------------------------------------- /test_data/.gitignore: -------------------------------------------------------------------------------- 1 | # Compile output 2 | /target 3 | **/*.rs.bk 4 | 5 | # Rust artifacts 6 | Cargo.lock 7 | 8 | # IDE files 9 | /.idea 10 | -------------------------------------------------------------------------------- /test_data/api_calls/copernicus_provider.http: -------------------------------------------------------------------------------- 1 | ### 2 | 3 | # @name anonymousSession 4 | POST http://localhost:3030/api/anonymous 5 | Content-Type: application/json 6 | 7 | ### 8 | 9 | GET http://localhost:3030/api/layers/bf888259-e4dd-4872-ac4a-f6d1f05a5014/datasets%2FSENTINEL-2%2FL2A%2FUTM32N%2FB02 10 | Authorization: Bearer {{anonymousSession.response.body.$.id}} 11 | 12 | 13 | ### 14 | 15 | # @name workflow 16 | POST http://localhost:3030/api/layers/bf888259-e4dd-4872-ac4a-f6d1f05a5014/datasets%2FSENTINEL-2%2FL2A%2FUTM32N%2FB02/workflowId 17 | Authorization: Bearer {{anonymousSession.response.body.$.id}} 18 | 19 | ### 20 | 21 | GET http://localhost:3030/api/workflow/{{workflow.response.body.$.id}}/metadata 22 | Authorization: Bearer {{anonymousSession.response.body.$.id}} 23 | 24 | 25 | ### 26 | 27 | GET http://localhost:3030/api/wms/{{workflow.response.body.$.id}}?REQUEST=GetMap&SERVICE=WMS&VERSION=1.3.0&FORMAT=image%2Fpng&STYLES=custom%3A%7B%22type%22%3A%22singleBand%22%2C%22band%22%3A0%2C%22bandColorizer%22%3A%7B%22type%22%3A%22linearGradient%22%2C%22breakpoints%22%3A%5B%7B%22value%22%3A543%2C%22color%22%3A%5B0%2C0%2C0%2C255%5D%7D%2C%7B%22value%22%3A18336%2C%22color%22%3A%5B255%2C255%2C255%2C255%5D%7D%5D%2C%22noDataColor%22%3A%5B0%2C0%2C0%2C0%5D%2C%22overColor%22%3A%5B0%2C0%2C0%2C255%5D%2C%22underColor%22%3A%5B255%2C255%2C255%2C255%5D%7D%7D&TRANSPARENT=true&layers={{workflow.response.body.$.id}}&time=2020-07-01T12%3A00%3A00.000Z/2020-07-03T12%3A00%3A00.000Z&EXCEPTIONS=application%2Fjson&WIDTH=256&HEIGHT=256&CRS=EPSG%3A32632&BBOX=482500%2C5627500%2C483500%2C5628500 28 | Authorization: Bearer {{anonymousSession.response.body.$.id}} 29 | -------------------------------------------------------------------------------- /test_data/api_calls/ebv_api.http: -------------------------------------------------------------------------------- 1 | # @name session 2 | POST http://localhost:3030/api/login 3 | Content-Type: application/json 4 | 5 | { 6 | "email": "admin@localhost", 7 | "password": "adminadmin" 8 | } 9 | 10 | ### 11 | 12 | # @name lastTask 13 | PUT http://localhost:3030/api/ebv/overviews/dataset_sm.nc 14 | Authorization: Bearer {{session.response.body.$.id}} 15 | Content-Type: application/json 16 | Accept: application/json 17 | 18 | ### 19 | 20 | GET http://localhost:3030/api/tasks/{{lastTask.response.body.$.taskId}}/status 21 | Authorization: Bearer {{session.response.body.$.id}} 22 | 23 | ### 24 | 25 | 
# @name lastTask 26 | DELETE http://localhost:3030/api/ebv/overviews/dataset_sm.nc 27 | Authorization: Bearer {{session.response.body.$.id}} 28 | Content-Type: application/json 29 | Accept: application/json 30 | 31 | ### 32 | 33 | # @name lastTask 34 | PUT http://localhost:3030/api/ebv/overviews/dataset_sm.nc/refresh 35 | Authorization: Bearer {{session.response.body.$.id}} 36 | Content-Type: application/json 37 | Accept: application/json 38 | 39 | ### -------------------------------------------------------------------------------- /test_data/api_calls/quota.http: -------------------------------------------------------------------------------- 1 | # @name anonymousSession 2 | POST http://localhost:3030/api/anonymous 3 | Content-Type: application/json 4 | 5 | ### 6 | 7 | # @name workflow 8 | POST http://localhost:3030/api/workflow 9 | Authorization: Bearer {{anonymousSession.response.body.$.id}} 10 | Content-Type: application/json 11 | 12 | { 13 | "type": "Raster", 14 | "operator": { 15 | "type": "GdalSource", 16 | "params": { 17 | "data": "land_cover" 18 | } 19 | } 20 | } 21 | 22 | ### 23 | 24 | GET http://localhost:3030/api/wms/{{workflow.response.body.$.id}}?REQUEST=GetMap&SERVICE=WMS&VERSION=1.3.0&FORMAT=image%2Fpng&STYLES=custom%3A%7B%22type%22%3A%22singleBand%22%2C%22band%22%3A0%2C%22bandColorizer%22%3A%7B%22type%22%3A%22linearGradient%22%2C%22breakpoints%22%3A%5B%7B%22value%22%3A543%2C%22color%22%3A%5B0%2C0%2C0%2C255%5D%7D%2C%7B%22value%22%3A18336%2C%22color%22%3A%5B255%2C255%2C255%2C255%5D%7D%5D%2C%22noDataColor%22%3A%5B0%2C0%2C0%2C0%5D%2C%22overColor%22%3A%5B0%2C0%2C0%2C255%5D%2C%22underColor%22%3A%5B255%2C255%2C255%2C255%5D%7D%7D&TRANSPARENT=true&layers={{workflow.response.body.$.id}}&time=2020-07-01T12%3A00%3A00.000Z/2020-07-03T12%3A00%3A00.000Z&EXCEPTIONS=application%2Fjson&WIDTH=256&HEIGHT=256&CRS=EPSG%3A32632&BBOX=482500%2C5627500%2C483500%2C5628500 25 | Authorization: Bearer {{anonymousSession.response.body.$.id}} 26 | 27 | 28 | ### 29 | 30 | # @name adminSession 31 | POST http://localhost:3030/api/login 32 | Content-Type: application/json 33 | 34 | { 35 | "email": "admin@localhost", 36 | "password": "adminadmin" 37 | } 38 | 39 | ### 40 | 41 | # @name lastTask 42 | GET http://localhost:3030/api/quota/dataUsage?offset=0&limit=10 43 | Authorization: Bearer {{adminSession.response.body.$.id}} 44 | Content-Type: application/json 45 | Accept: application/json 46 | 47 | ### 48 | 49 | # @name lastTask 50 | GET http://localhost:3030/api/quota/dataUsage/summary 51 | Authorization: Bearer {{adminSession.response.body.$.id}} 52 | Content-Type: application/json 53 | Accept: application/json 54 | 55 | ### -------------------------------------------------------------------------------- /test_data/api_calls/wcs.http: -------------------------------------------------------------------------------- 1 | ### 2 | 3 | # @name anonymousSession 4 | POST http://localhost:3030/api/anonymous 5 | Content-Type: application/json 6 | 7 | ### 8 | 9 | # @name workflow 10 | POST http://localhost:3030/api/workflow 11 | Authorization: Bearer {{anonymousSession.response.body.$.id}} 12 | Content-Type: application/json 13 | 14 | { 15 | "type": "Raster", 16 | "operator": { 17 | "type": "GdalSource", 18 | "params": { 19 | "data": "ndvi" 20 | } 21 | } 22 | } 23 | 24 | ### 25 | 26 | 27 | # @name workflow 28 | POST http://localhost:3030/api/workflow 29 | Authorization: Bearer {{anonymousSession.response.body.$.id}} 30 | Content-Type: application/json 31 | 32 | { 33 | "type": "Raster", 34 | "operator": { 35 | "type": 
"GdalSource", 36 | "params": { 37 | "data": "ndvi_3857" 38 | } 39 | } 40 | } 41 | 42 | ### 43 | 44 | GET http://localhost:4200/api/wcs/{{workflow.response.body.$.id}}?SERVICE=WCS&REQUEST=DescribeCoverage&VERSION=1.1.1&IDENTIFIERS={{workflow.response.body.$.id}}&FORMAT=text/xml&crs=urn:ogc:def:crs:EPSG::4326 45 | Authorization: Bearer {{anonymousSession.response.body.$.id}} -------------------------------------------------------------------------------- /test_data/api_calls/wms.http: -------------------------------------------------------------------------------- 1 | 2 | ### 3 | 4 | # @name anonymousSession 5 | POST http://localhost:3030/api/anonymous 6 | Content-Type: application/json 7 | 8 | ### 9 | 10 | # @name workflow 11 | POST http://localhost:3030/api/workflow 12 | Authorization: Bearer {{anonymousSession.response.body.$.id}} 13 | Content-Type: application/json 14 | 15 | { 16 | "type": "Raster", 17 | "operator": { 18 | "type": "GdalSource", 19 | "params": { 20 | "data": "ndvi" 21 | } 22 | } 23 | } 24 | 25 | ### 26 | 27 | # @name workflow 28 | POST http://localhost:3030/api/workflow 29 | Authorization: Bearer {{anonymousSession.response.body.$.id}} 30 | Content-Type: application/json 31 | 32 | { 33 | "type": "Raster", 34 | "operator": { 35 | "type": "GdalSource", 36 | "params": { 37 | "data": "ndvi_3857" 38 | } 39 | } 40 | } 41 | 42 | ### 43 | 44 | 45 | GET http://localhost:4200/api/wms/890d41ec-7e4c-5000-8ab8-7394a758a86f?REQUEST=GetMap&SERVICE=WMS&VERSION=1.3.0&FORMAT=image%2Fpng&STYLES=custom%3A%7B%22type%22%3A%22singleBand%22%2C%22band%22%3A0%2C%22bandColorizer%22%3A%7B%22type%22%3A%22linearGradient%22%2C%22breakpoints%22%3A%5B%7B%22value%22%3A-1%2C%22color%22%3A%5B0%2C0%2C0%2C255%5D%7D%2C%7B%22value%22%3A1%2C%22color%22%3A%5B255%2C255%2C255%2C255%5D%7D%5D%2C%22noDataColor%22%3A%5B0%2C0%2C0%2C0%5D%2C%22overColor%22%3A%5B246%2C250%2C254%2C255%5D%2C%22underColor%22%3A%5B247%2C251%2C255%2C255%5D%7D%7D&TRANSPARENT=true&layers=890d41ec-7e4c-5000-8ab8-7394a758a86f&time=2022-01-01T00%3A00%3A00.000Z&EXCEPTIONS=application%2Fjson&WIDTH=256&HEIGHT=256&CRS=EPSG:32632&BBOX=353060.0%2C5603500.0%2C394020.0%2C5644460.0 46 | Authorization: Bearer {{anonymousSession.response.body.$.id}} -------------------------------------------------------------------------------- /test_data/colorizer/empty.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/colorizer/empty.png -------------------------------------------------------------------------------- /test_data/colorizer/linear_gradient.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/colorizer/linear_gradient.png -------------------------------------------------------------------------------- /test_data/colorizer/logarithmic_gradient.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/colorizer/logarithmic_gradient.png -------------------------------------------------------------------------------- /test_data/colorizer/no_data.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/colorizer/no_data.png 
-------------------------------------------------------------------------------- /test_data/colorizer/palette.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/colorizer/palette.png -------------------------------------------------------------------------------- /test_data/colorizer/rgba.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/colorizer/rgba.png -------------------------------------------------------------------------------- /test_data/copernicus_dataspace/eodata/Sentinel-2/MSI/L2A_N0500/2020/07/03/S2A_MSIL2A_20200703T103031_N0500_R108_T32UMB_20230321T201840.SAFE/GRANULE/L2A_T32UMB_A026274_20200703T103027/IMG_DATA/R10m/T32UMB_20200703T103031_B02_10m.jp2.head: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/copernicus_dataspace/eodata/Sentinel-2/MSI/L2A_N0500/2020/07/03/S2A_MSIL2A_20200703T103031_N0500_R108_T32UMB_20230321T201840.SAFE/GRANULE/L2A_T32UMB_A026274_20200703T103027/IMG_DATA/R10m/T32UMB_20200703T103031_B02_10m.jp2.head -------------------------------------------------------------------------------- /test_data/copernicus_dataspace/eodata/Sentinel-2/MSI/L2A_N0500/2020/07/03/S2A_MSIL2A_20200703T103031_N0500_R108_T32UMB_20230321T201840.SAFE/GRANULE/L2A_T32UMB_A026274_20200703T103027/IMG_DATA/R10m/T32UMB_20200703T103031_B03_10m.jp2.head: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/copernicus_dataspace/eodata/Sentinel-2/MSI/L2A_N0500/2020/07/03/S2A_MSIL2A_20200703T103031_N0500_R108_T32UMB_20230321T201840.SAFE/GRANULE/L2A_T32UMB_A026274_20200703T103027/IMG_DATA/R10m/T32UMB_20200703T103031_B03_10m.jp2.head -------------------------------------------------------------------------------- /test_data/copernicus_dataspace/eodata/Sentinel-2/MSI/L2A_N0500/2020/07/03/S2A_MSIL2A_20200703T103031_N0500_R108_T32UMB_20230321T201840.SAFE/GRANULE/L2A_T32UMB_A026274_20200703T103027/IMG_DATA/R10m/T32UMB_20200703T103031_B04_10m.jp2.head: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/copernicus_dataspace/eodata/Sentinel-2/MSI/L2A_N0500/2020/07/03/S2A_MSIL2A_20200703T103031_N0500_R108_T32UMB_20230321T201840.SAFE/GRANULE/L2A_T32UMB_A026274_20200703T103027/IMG_DATA/R10m/T32UMB_20200703T103031_B04_10m.jp2.head -------------------------------------------------------------------------------- /test_data/copernicus_dataspace/eodata/Sentinel-2/MSI/L2A_N0500/2020/07/03/S2A_MSIL2A_20200703T103031_N0500_R108_T32UMB_20230321T201840.SAFE/GRANULE/L2A_T32UMB_A026274_20200703T103027/IMG_DATA/R10m/T32UMB_20200703T103031_B08_10m.jp2.head: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/copernicus_dataspace/eodata/Sentinel-2/MSI/L2A_N0500/2020/07/03/S2A_MSIL2A_20200703T103031_N0500_R108_T32UMB_20230321T201840.SAFE/GRANULE/L2A_T32UMB_A026274_20200703T103027/IMG_DATA/R10m/T32UMB_20200703T103031_B08_10m.jp2.head -------------------------------------------------------------------------------- /test_data/copernicus_dataspace/license.txt: -------------------------------------------------------------------------------- 1 | Credit: European Union, contains modified Copernicus Sentinel data 2024 2 | -------------------------------------------------------------------------------- /test_data/dataset_defs/germany_polygon.json: -------------------------------------------------------------------------------- 1 | { 2 | "properties": { 3 | "name": "germany_outline", 4 | "displayName": "Germany", 5 | "description": "Boundaries of Germany", 6 | "sourceOperator": "OgrSource" 7 | }, 8 | "metaData": { 9 | "type": "OgrMetaData", 10 | "loadingInfo": { 11 | "fileName": "test_data/vector/data/germany_polygon.gpkg", 12 | "layerName": "test_germany", 13 | "dataType": "MultiPolygon", 14 | "time": { 15 | "type": "none" 16 | }, 17 | "columns": { 18 | "x": "", 19 | "float": [], 20 | "int": [], 21 | "text": [] 22 | }, 23 | "onError": "abort", 24 | "cacheTtl": 0 25 | }, 26 | "resultDescriptor": { 27 | "type": "vector", 28 | "dataType": "MultiPolygon", 29 | "spatialReference": "EPSG:4326", 30 | "columns": {}, 31 | "time": { 32 | "start": "-262143-01-01T00:00:00+00:00", 33 | "end": "+262142-12-31T23:59:59.999+00:00" 34 | }, 35 | "bbox": { 36 | "lowerLeftCoordinate": [5.8524898680001103, 47.2711209110000965], 37 | "upperRightCoordinate": [15.0220593670001001, 55.0653343770000987] 38 | } 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /test_data/dataset_defs/mock.json: -------------------------------------------------------------------------------- 1 | { 2 | "properties": { 3 | "name": "Mock", 4 | "displayName": "Mock", 5 | "description": "A mock dataset", 6 | "sourceOperator": "MockDatasetDataSource" 7 | }, 8 | "metaData": { 9 | "type": "MockMetaData", 10 | "loadingInfo": { 11 | "points": [ 12 | { 13 | "x": 1.0, 14 | "y": 2.0 15 | } 16 | ] 17 | }, 18 | "resultDescriptor": { 19 | "type": "vector", 20 | "dataType": "MultiPoint", 21 | "spatialReference": "EPSG:4326", 22 | "columns": {}, 23 | "time": { 24 | "start": "-262143-01-01T00:00:00+00:00", 25 | "end": "+262142-12-31T23:59:59.999+00:00" 26 | }, 27 | "bbox": { 28 | "lowerLeftCoordinate": [1.0, 2.0], 29 | "upperRightCoordinate": [1.0, 2.0] 30 | } 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /test_data/dataset_defs/ndvi (3587).json: -------------------------------------------------------------------------------- 1 | { 2 | "properties": { 3 | "name": "ndvi_3857", 4 | "displayName": "NDVI3857", 5 | "description": "NDVI data from MODIS reprojected to EPSG:3857", 6 | "sourceOperator": "GdalSource", 7 | "provenance": [ 8 | { 9 | "citation": "Nasa Earth Observations, MODIS Vegetation Index Products", 10 | "license": "https://earthdata.nasa.gov/collaborate/open-data-services-and-software/data-information-policy", 11 | "uri": "https://modis.gsfc.nasa.gov/data/dataprod/mod13.php" 12 | } 13 | ] 14 | }, 15 | "metaData": { 16 | "type": "GdalMetaDataRegular", 17 | "resultDescriptor": { 18 | "dataType": "U8", 19 | "spatialReference": "EPSG:3857", 
20 | "time": { 21 | "start": "2014-01-01T00:00:00.000Z", 22 | "end": "2014-07-01T00:00:00.000Z" 23 | }, 24 | "bbox": { 25 | "upperLeftCoordinate": [ 26 | -20037508.3427892439067364, 19971868.8804085627198219 27 | ], 28 | "lowerRightCoordinate": [ 29 | 20027452.8429077081382275, -19966571.3752283006906509 30 | ] 31 | }, 32 | "resolution": { 33 | "x": 14052.95025804873876, 34 | "y": 14057.88111778840539 35 | }, 36 | "bands": [ 37 | { 38 | "name": "ndvi", 39 | "measurement": { 40 | "type": "unitless" 41 | } 42 | } 43 | ] 44 | }, 45 | "params": { 46 | "filePath": "test_data/raster/modis_ndvi/projected_3857/MOD13A2_M_NDVI_%_START_TIME_%.TIFF", 47 | "rasterbandChannel": 1, 48 | "geoTransform": { 49 | "originCoordinate": { 50 | "x": -20037508.342789243906736, 51 | "y": 19971868.880408562719822 52 | }, 53 | "xPixelSize": 14052.95025804873876, 54 | "yPixelSize": -14057.88111778840539 55 | }, 56 | "width": 2851, 57 | "height": 2841, 58 | "fileNotFoundHandling": "NoData", 59 | "noDataValue": 0.0 60 | }, 61 | "timePlaceholders": { 62 | "%_START_TIME_%": { 63 | "format": "%Y-%m-%d", 64 | "reference": "start" 65 | } 66 | }, 67 | "dataTime": { 68 | "start": "2014-01-01T00:00:00.000Z", 69 | "end": "2014-07-01T00:00:00.000Z" 70 | }, 71 | "step": { 72 | "granularity": "months", 73 | "step": 1 74 | }, 75 | "cacheTtl": 0 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /test_data/dataset_defs/ne_10m_ports (3857).json: -------------------------------------------------------------------------------- 1 | { 2 | "properties": { 3 | "name": "ne_10_ports_3857", 4 | "displayName": "Natural Earth 10m Ports 3857", 5 | "description": "Ports from Natural Earth reprojected to EPSG:3857", 6 | "sourceOperator": "OgrSource", 7 | "provenance": [ 8 | { 9 | "citation": "Natural Earth, Cultural Vectors 10m Ports", 10 | "license": "Public domain by Natural Earth http://www.naturalearthdata.com/about/terms-of-use/", 11 | "uri": "https://www.naturalearthdata.com/downloads/10m-cultural-vectors/ports/" 12 | } 13 | ] 14 | }, 15 | "metaData": { 16 | "type": "OgrMetaData", 17 | "loadingInfo": { 18 | "fileName": "test_data/vector/data/ne_10m_ports/projected_3857/ne_10m_ports.shp", 19 | "layerName": "ne_10m_ports", 20 | "dataType": "MultiPoint", 21 | "time": { 22 | "type": "none" 23 | }, 24 | "columns": { 25 | "x": "", 26 | "y": null, 27 | "float": ["natlscale"], 28 | "int": ["scalerank"], 29 | "text": ["featurecla", "name", "website"] 30 | }, 31 | "forceOgrTimeFilter": false, 32 | "onError": "ignore", 33 | "provenance": null, 34 | "cacheTtl": 0 35 | }, 36 | "resultDescriptor": { 37 | "dataType": "MultiPoint", 38 | "spatialReference": "EPSG:3857", 39 | "columns": { 40 | "website": { 41 | "dataType": "text", 42 | "measurement": { 43 | "type": "unitless" 44 | } 45 | }, 46 | "name": { 47 | "dataType": "text", 48 | "measurement": { 49 | "type": "unitless" 50 | } 51 | }, 52 | "natlscale": { 53 | "dataType": "float", 54 | "measurement": { 55 | "type": "unitless" 56 | } 57 | }, 58 | "scalerank": { 59 | "dataType": "int", 60 | "measurement": { 61 | "type": "unitless" 62 | } 63 | }, 64 | "featurecla": { 65 | "dataType": "text", 66 | "measurement": { 67 | "type": "unitless" 68 | } 69 | } 70 | }, 71 | "time": { 72 | "start": "-262143-01-01T00:00:00+00:00", 73 | "end": "+262142-12-31T23:59:59.999+00:00" 74 | }, 75 | "bbox": { 76 | "lowerLeftCoordinate": [-19120007.589356, -7324970.649647], 77 | "upperRightCoordinate": [19960627.094946, 14490886.335637] 78 | } 79 | } 80 | } 81 | } 82 | 
-------------------------------------------------------------------------------- /test_data/dataset_defs/ne_10m_ports.json: -------------------------------------------------------------------------------- 1 | { 2 | "properties": { 3 | "name": "ne_10m_ports", 4 | "displayName": "Natural Earth 10m Ports", 5 | "description": "Ports from Natural Earth", 6 | "sourceOperator": "OgrSource", 7 | "provenance": [ 8 | { 9 | "citation": "Natural Earth, Cultural Vectors 10m Ports", 10 | "license": "Public domain by Natural Earth http://www.naturalearthdata.com/about/terms-of-use/", 11 | "uri": "https://www.naturalearthdata.com/downloads/10m-cultural-vectors/ports/" 12 | } 13 | ] 14 | }, 15 | "metaData": { 16 | "type": "OgrMetaData", 17 | "loadingInfo": { 18 | "fileName": "test_data/vector/data/ne_10m_ports/ne_10m_ports.shp", 19 | "layerName": "ne_10m_ports", 20 | "dataType": "MultiPoint", 21 | "time": { 22 | "type": "none" 23 | }, 24 | "columns": { 25 | "x": "", 26 | "y": null, 27 | "float": ["natlscale"], 28 | "int": ["scalerank"], 29 | "text": ["featurecla", "name", "website"] 30 | }, 31 | "forceOgrTimeFilter": false, 32 | "onError": "ignore", 33 | "provenance": null, 34 | "cacheTtl": 0 35 | }, 36 | "resultDescriptor": { 37 | "dataType": "MultiPoint", 38 | "spatialReference": "EPSG:4326", 39 | "columns": { 40 | "website": { 41 | "dataType": "text", 42 | "measurement": { 43 | "type": "unitless" 44 | } 45 | }, 46 | "name": { 47 | "dataType": "text", 48 | "measurement": { 49 | "type": "unitless" 50 | } 51 | }, 52 | "natlscale": { 53 | "dataType": "float", 54 | "measurement": { 55 | "type": "unitless" 56 | } 57 | }, 58 | "scalerank": { 59 | "dataType": "int", 60 | "measurement": { 61 | "type": "unitless" 62 | } 63 | }, 64 | "featurecla": { 65 | "dataType": "text", 66 | "measurement": { 67 | "type": "unitless" 68 | } 69 | } 70 | }, 71 | "time": { 72 | "start": "-262143-01-01T00:00:00+00:00", 73 | "end": "+262142-12-31T23:59:59.999+00:00" 74 | }, 75 | "bbox": { 76 | "lowerLeftCoordinate": [-171.75795, -54.809444], 77 | "upperRightCoordinate": [179.309364, 78.226111] 78 | } 79 | } 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /test_data/dataset_defs/points_with_time.json: -------------------------------------------------------------------------------- 1 | { 2 | "properties": { 3 | "name": "points_with_time", 4 | "displayName": "Points with Time", 5 | "description": "Some points in Europe in 2014", 6 | "sourceOperator": "OgrSource" 7 | }, 8 | "metaData": { 9 | "type": "OgrMetaData", 10 | "loadingInfo": { 11 | "fileName": "test_data/vector/data/points_with_time.json", 12 | "layerName": "points_with_time", 13 | "dataType": "MultiPoint", 14 | "time": { 15 | "type": "start+end", 16 | "startField": "time_start", 17 | "startFormat": { 18 | "format": "custom", 19 | "customFormat": "+%Y-%m-%d %H:%M:%S %z" 20 | }, 21 | "endField": "time_end", 22 | "endFormat": { 23 | "format": "custom", 24 | "customFormat": "+%Y-%m-%d %H:%M:%S %z" 25 | } 26 | }, 27 | "forceOgrTimeFilter": false, 28 | "onError": "abort", 29 | "provenance": null, 30 | "cacheTtl": 0 31 | }, 32 | "resultDescriptor": { 33 | "dataType": "MultiPoint", 34 | "spatialReference": "EPSG:4326", 35 | "columns": {}, 36 | "time": { 37 | "start": "2014-04-01T00:00:00.000Z", 38 | "end": "2014-07-01T00:00:00.000Z" 39 | }, 40 | "bbox": { 41 | "lowerLeftCoordinate": [3.9662060000000001, 45.9030360000000002], 42 | "upperRightCoordinate": [19.171284, 51.8473430000000022] 43 | } 44 | } 45 | } 46 | } 47 | 
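The `custom` time format in `points_with_time.json` above starts with a literal `+`, i.e. the stored time strings begin with one. An illustrative re-parse with `chrono` (an assumption for this sketch; OGR applies its own strptime-style parsing when Geo Engine reads the file):

use chrono::{DateTime, FixedOffset};

fn parse_custom_time(s: &str) -> chrono::ParseResult<DateTime<FixedOffset>> {
    // e.g. s = "+2014-04-01 12:00:00 +0000"
    DateTime::parse_from_str(s, "+%Y-%m-%d %H:%M:%S %z")
}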
-------------------------------------------------------------------------------- /test_data/drone_mapping/drone_images/drone_image_0.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/drone_mapping/drone_images/drone_image_0.jpg -------------------------------------------------------------------------------- /test_data/drone_mapping/drone_images/drone_image_1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/drone_mapping/drone_images/drone_image_1.jpg -------------------------------------------------------------------------------- /test_data/drone_mapping/odm_result/odm_orthophoto/odm_orthophoto.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/drone_mapping/odm_result/odm_orthophoto/odm_orthophoto.tif -------------------------------------------------------------------------------- /test_data/edr/edr_raster.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/edr/edr_raster.tif -------------------------------------------------------------------------------- /test_data/gbif/init_test_data.sql: -------------------------------------------------------------------------------- 1 | CREATE EXTENSION IF NOT EXISTS postgis WITH SCHEMA public; 2 | 3 | SET ROLE 'geoengine'; -- noqa: PRS 4 | 5 | CREATE SCHEMA IF NOT EXISTS gbif; 6 | SET SEARCH_PATH TO gbif, public; 7 | -------------------------------------------------------------------------------- /test_data/gfbio/init_test_data.sql: -------------------------------------------------------------------------------- 1 | CREATE EXTENSION IF NOT EXISTS postgis WITH SCHEMA public; 2 | 3 | SET ROLE 'geoengine'; -- noqa: PRS 4 | 5 | CREATE SCHEMA IF NOT EXISTS abcd; 6 | SET SEARCH_PATH TO abcd, public; 7 | -------------------------------------------------------------------------------- /test_data/layer_collection_defs/empty_collection.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "a29f77cc-51ce-466b-86ef-d0ab2170bc0a", 3 | "name": "An empty collection", 4 | "description": "There is nothing here", 5 | "collections": [], 6 | "layers": [] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/layer_collection_defs/no_parent_collection.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "12555d50-194e-450f-acb0-a2f571987e32", 3 | "name": "A collection without a parent", 4 | "description": "There is nothing here", 5 | "collections": [], 6 | "layers": [] 7 | } 8 | -------------------------------------------------------------------------------- /test_data/layer_collection_defs/root_collection.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "05102bb3-a855-4a37-8a8a-30026a91fef1", 3 | "name": "LayerDB", 4 | "description": "Root collection for LayerDB", 5 | "collections": [ 6 | "272bf675-2e27-4412-824c-287c1e6841ac", 7 | "a29f77cc-51ce-466b-86ef-d0ab2170bc0a" 8 | ], 9 | "layers": [] 10 | } 11 | 
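The layer collection definitions above reference each other by UUID: the root collection lists the test collection (next file) and the empty collection as children, while no_parent_collection.json is deliberately reachable from nowhere. An illustrative consistency check over these files, not part of the repo's test suite:

import glob
import json

# Load every collection definition and verify that each referenced child id
# resolves to another definition in the same folder.
defs = {}
for path in glob.glob("test_data/layer_collection_defs/*.json"):
    with open(path) as file:
        collection = json.load(file)
    defs[collection["id"]] = collection

for collection in defs.values():
    for child_id in collection["collections"]:
        assert child_id in defs, f"dangling collection reference: {child_id}"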
-------------------------------------------------------------------------------- /test_data/layer_collection_defs/test_collection.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "272bf675-2e27-4412-824c-287c1e6841ac", 3 | "name": "A test collection", 4 | "description": "Some layers for testing and an empty subcollection", 5 | "collections": ["a29f77cc-51ce-466b-86ef-d0ab2170bc0a"], 6 | "layers": [ 7 | "b75db46e-2b9a-4a86-b33f-bc06a73cd711", 8 | "c078db52-2dc6-4838-ad75-340cefeab476", 9 | "83866f7b-dcee-47b8-9242-e5636ceaf402" 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /test_data/layer_defs/ports_in_germany.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "b75db46e-2b9a-4a86-b33f-bc06a73cd711", 3 | "name": "Ports in Germany", 4 | "description": "Natural Earth Ports point filtered with Germany polygon", 5 | "workflow": { 6 | "type": "Vector", 7 | "operator": { 8 | "type": "PointInPolygonFilter", 9 | "params": {}, 10 | "sources": { 11 | "points": { 12 | "type": "OgrSource", 13 | "params": { 14 | "data": "ne_10m_ports", 15 | "attributeProjection": null, 16 | "attributeFilters": null 17 | } 18 | }, 19 | "polygons": { 20 | "type": "OgrSource", 21 | "params": { 22 | "data": "germany_outline", 23 | "attributeProjection": null, 24 | "attributeFilters": null 25 | } 26 | } 27 | } 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /test_data/layer_defs/raster_stack.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "c078db52-2dc6-4838-ad75-340cefeab476", 3 | "name": "Stacked Raster", 4 | "description": "A raster with two bands for testing", 5 | "workflow": { 6 | "type": "Raster", 7 | "operator": { 8 | "type": "RasterStacker", 9 | "params": { 10 | "renameBands": { 11 | "type": "rename", 12 | "values": ["ndvi", "ndvi_masked"] 13 | } 14 | }, 15 | "sources": { 16 | "rasters": [ 17 | { 18 | "type": "GdalSource", 19 | "params": { 20 | "data": "ndvi" 21 | } 22 | }, 23 | { 24 | "type": "Expression", 25 | "params": { 26 | "expression": "if A > 100 { A } else { 0 }", 27 | "outputType": "U8", 28 | "outputBand": { 29 | "name": "ndvi_masked", 30 | "measurement": { 31 | "type": "continuous", 32 | "measurement": "NDVI" 33 | } 34 | }, 35 | "mapNoData": false 36 | }, 37 | "sources": { 38 | "raster": { 39 | "type": "GdalSource", 40 | "params": { 41 | "data": "ndvi" 42 | } 43 | } 44 | } 45 | } 46 | ] 47 | } 48 | } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /test_data/layer_defs/rgb.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "83866f7b-dcee-47b8-9242-e5636ceaf402", 3 | "name": "Natural Earth II – RGB", 4 | "description": "A raster with three bands for RGB visualization", 5 | "workflow": { 6 | "type": "Raster", 7 | "operator": { 8 | "type": "RasterStacker", 9 | "params": { 10 | "renameBands": { 11 | "type": "rename", 12 | "values": ["blue", "green", "red"] 13 | } 14 | }, 15 | "sources": { 16 | "rasters": [ 17 | { 18 | "type": "GdalSource", 19 | "params": { 20 | "data": "ne2_raster_blue" 21 | } 22 | }, 23 | { 24 | "type": "GdalSource", 25 | "params": { 26 | "data": "ne2_raster_green" 27 | } 28 | }, 29 | { 30 | "type": "GdalSource", 31 | "params": { 32 | "data": "ne2_raster_red" 33 | } 34 | } 35 | ] 36 | } 37 | } 38 | }, 39 | "symbology": { 
40 | "type": "raster", 41 | "rasterColorizer": { 42 | "type": "multiBand", 43 | "redBand": 2, 44 | "greenBand": 1, 45 | "blueBand": 0, 46 | "redMin": 0, 47 | "redMax": 255, 48 | "redScale": 1, 49 | "greenMin": 0, 50 | "greenMax": 255, 51 | "greenScale": 1, 52 | "blueMin": 0, 53 | "blueMax": 255, 54 | "blueScale": 1, 55 | "noDataColor": [0, 0, 0, 0] 56 | }, 57 | "opacity": 1.0 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /test_data/ml/onnx/test_a_plus_b.onnx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/ml/onnx/test_a_plus_b.onnx -------------------------------------------------------------------------------- /test_data/ml/onnx/test_classification.onnx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/ml/onnx/test_classification.onnx -------------------------------------------------------------------------------- /test_data/ml/onnx/test_regression.onnx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/ml/onnx/test_regression.onnx -------------------------------------------------------------------------------- /test_data/ml/onnx/training_scripts/build_test_a_plus_b.py: -------------------------------------------------------------------------------- 1 | from onnx import TensorProto, OperatorSetIdProto 2 | from onnx.helper import ( 3 | make_model, make_node, make_graph, 4 | make_tensor_value_info) 5 | from onnx.checker import check_model 6 | 7 | 8 | # Inputs, 'X' is the name, TensorProto.FLOAT the type, [None, 512, 512, 2] the shape 9 | X = make_tensor_value_info('X', TensorProto.FLOAT, [None, 512, 512, 2]) 10 | 11 | # outputs, the shape is [None, 512, 512] which shuld be the same as [None, 512, 512, 1] 12 | Y = make_tensor_value_info('Y', TensorProto.FLOAT, [None, 512, 512]) 13 | 14 | # operators 15 | 16 | # we need to split the input which consists of tuples [band 0, band 1] 17 | split = make_node( 18 | "Split", 19 | inputs=["X"], 20 | outputs=["A_2", "A_1"], 21 | axis=-1, 22 | num_outputs=2 23 | ) 24 | 25 | # now we can use the splitted inputs to calculate something 26 | 27 | add = make_node('Add', ['A_1', 'A_2'], ['Y']) 28 | 29 | # create a graph from the operators 30 | graph = make_graph([split, add], 'a_plus_b', [X], [Y]) 31 | # generate the model from the graph for a specific opset 32 | opset = OperatorSetIdProto(version=21) 33 | model = make_model(graph, opset_imports=[opset]) 34 | 35 | # check the model 36 | check_model(model) 37 | 38 | with open("a_plus_b.onnx", "wb") as text_file: 39 | text_file.write(model.SerializeToString()) 40 | -------------------------------------------------------------------------------- /test_data/ml/onnx/training_scripts/train_test_classification.py: -------------------------------------------------------------------------------- 1 | from sklearn.tree import DecisionTreeClassifier 2 | import numpy as np 3 | 4 | np.random.seed(0) 5 | X = np.random.rand(100, 2).astype(np.float32) # 100 instances, 2 features 6 | y = np.where(X[:, 0] > X[:, 1], 42, 33) # 1 if feature 0 > feature 42, else 33 7 | 8 | clf = DecisionTreeClassifier() 9 | clf.fit(X, y) 10 | 11 | test_samples = np.array([[0.1, 0.2], 
[0.2, 0.1]]) 12 | predictions = clf.predict(test_samples) 13 | print("Predictions:", predictions) 14 | 15 | # Convert into ONNX format. 16 | from skl2onnx import to_onnx 17 | 18 | onx = to_onnx(clf, X[:1], target_opset=9) # target_opset is the ONNX opset version to use 19 | with open("test_classification.onnx", "wb") as f: 20 | f.write(onx.SerializeToString()) 21 | -------------------------------------------------------------------------------- /test_data/ml/onnx/training_scripts/train_test_regression.py: -------------------------------------------------------------------------------- 1 | from sklearn.linear_model import LinearRegression 2 | import numpy as np 3 | from skl2onnx import to_onnx 4 | 5 | np.random.seed(0) 6 | X = np.random.rand(100000, 3).astype(np.float32) # 100000 instances, 3 features 7 | y = X.sum(axis=1) # y is the sum of features 8 | 9 | reg = LinearRegression() 10 | reg.fit(X, y) 11 | 12 | test_samples = np.array([[0.1, 0.1, 0.1], [0.1, 0.2, 0.2], [0.2, 0.2, 0.2]]) 13 | predictions = reg.predict(test_samples) 14 | print("Predictions:", predictions) 15 | 16 | # Convert into ONNX format 17 | onx = to_onnx(reg, X[:1], target_opset=9) # target_opset is the ONNX opset version to use 18 | with open("test_regression.onnx", "wb") as f: 19 | f.write(onx.SerializeToString()) 20 | -------------------------------------------------------------------------------- /test_data/netcdf4d/Biodiversity/dataset_daily.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/netcdf4d/Biodiversity/dataset_daily.nc -------------------------------------------------------------------------------- /test_data/netcdf4d/Biodiversity/dataset_monthly.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/netcdf4d/Biodiversity/dataset_monthly.nc -------------------------------------------------------------------------------- /test_data/netcdf4d/dataset_esri.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/netcdf4d/dataset_esri.nc -------------------------------------------------------------------------------- /test_data/netcdf4d/dataset_irr_ts.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/netcdf4d/dataset_irr_ts.nc -------------------------------------------------------------------------------- /test_data/netcdf4d/dataset_m.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/netcdf4d/dataset_m.nc -------------------------------------------------------------------------------- /test_data/netcdf4d/dataset_sm.colorizer.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "linearGradient", 3 | "breakpoints": [ 4 | { "value": 0, "color": [68, 1, 84, 255] }, 5 | { "value": 50, "color": [33, 145, 140, 255] }, 6 | { "value": 100, "color": [253, 231, 37, 255] } 7 | ], 8 | "noDataColor": [0, 0, 0, 0], 9 | "overColor": [0, 0, 0, 0], 10 | "underColor": [0, 0, 0, 0] 11 | } 12 | 
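dataset_sm.colorizer.json above defines a "linearGradient" colorizer: a value between two breakpoints is mapped by linearly interpolating their RGBA colors, while values outside the breakpoint range fall back to "underColor" or "overColor". A sketch of that mapping in Python (illustrative only; the engine implements colorizers in Rust):

# Linearly interpolate RGBA breakpoint colors for a value, as a
# "linearGradient" colorizer does conceptually.
def colorize(value, breakpoints, under_color, over_color):
    if value < breakpoints[0]["value"]:
        return under_color
    if value > breakpoints[-1]["value"]:
        return over_color
    for low, high in zip(breakpoints, breakpoints[1:]):
        if low["value"] <= value <= high["value"]:
            t = (value - low["value"]) / (high["value"] - low["value"])
            return [round(a + t * (b - a)) for a, b in zip(low["color"], high["color"])]

breakpoints = [
    {"value": 0, "color": [68, 1, 84, 255]},
    {"value": 50, "color": [33, 145, 140, 255]},
    {"value": 100, "color": [253, 231, 37, 255]},
]
print(colorize(75, breakpoints, [0, 0, 0, 0], [0, 0, 0, 0]))
# -> [143, 188, 88, 255], halfway between the breakpoints at 50 and 100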
-------------------------------------------------------------------------------- /test_data/netcdf4d/dataset_sm.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/netcdf4d/dataset_sm.nc -------------------------------------------------------------------------------- /test_data/netcdf4d/license_notes.txt: -------------------------------------------------------------------------------- 1 | All test data within this folder was obtained from the German Centre for Integrative Biodiversity Research (iDiv) and is licensed 2 | under a Creative Commons Attribution 4.0 License (http://creativecommons.org/licenses/by/4.0/) 3 | 4 | In particular, the datasets are: 5 | - "Test dataset metric" 6 | Luise Quoß, German Centre for Integrative Biodiversity Research (iDiv), 2022-01-18 7 | - "Test dataset metric and scenario" 8 | Luise Quoß, German Centre for Integrative Biodiversity Research (iDiv), 2022-01-18 9 | - "Test dataset irregular timesteps" 10 | Luise Quoß, German Centre for Integrative Biodiversity Research (iDiv), 2022-08-02 11 | - "Test dataset metric monthly" & "Test dataset metric" 12 | Luise Quoß, German Centre for Integrative Biodiversity Research (iDiv), 2024-01-16 13 | - "Test dataset esri projection" 14 | Luise Quoß, German Centre for Integrative Biodiversity Research (iDiv), 2024-05-30 15 | -------------------------------------------------------------------------------- /test_data/netcdf4d/overviews/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/netcdf4d/overviews/.gitkeep -------------------------------------------------------------------------------- /test_data/pangaea/pangaea_geo_lat_lon_citation.txt: -------------------------------------------------------------------------------- 1 | Pisternick, Timo; Lilkendey, Julian; Audit-Manna, Anishta; Dumur Neelayya, Danishta; Neehaul, Yashvin; Moosdorf, Nils (2020): Fish abundances for Troux aux Biches lagoon, Mauritius. PANGAEA, https://doi.org/10.1594/PANGAEA.921338, In: Pisternick, T et al. (2020): Environmental data and fish abundances for Troux aux Biches and Flic en Flac lagoon, Mauritius. 
PANGAEA, https://doi.org/10.1594/PANGAEA.921340 -------------------------------------------------------------------------------- /test_data/provider_defs/aruna.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "ArunaDataProviderDefinition", 3 | "id": "86a7f7ce-1bab-4ce9-a32b-172c0f958ee0", 4 | "name": "NFDI", 5 | "description": "Access to NFDI4Bio data stored in Aruna", 6 | "priority": -10, 7 | "apiToken": "", 8 | "apiUrl": "", 9 | "projectId": "", 10 | "filterLabel": "", 11 | "cacheTtl": 0 12 | } 13 | -------------------------------------------------------------------------------- /test_data/provider_defs/copernicus_dataspace.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "CopernicusDataspaceDataProviderDefinition", 3 | "id": "2e44e5ab-0f93-467b-8599-9b5b6339d5cd", 4 | "name": "Copernicus Dataspace", 5 | "description": "", 6 | "stacUrl": "https://catalogue.dataspace.copernicus.eu/stac", 7 | "s3Url": "eodata.dataspace.copernicus.eu", 8 | "s3AccessKey": "XYZ", 9 | "s3SecretKey": "XYZ", 10 | "gdalConfig": [["CPL_VSIL_CURL_CHUNK_SIZE", "2097152"]] 11 | } 12 | -------------------------------------------------------------------------------- /test_data/provider_defs/ebv.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "EbvPortalDataProviderDefinition", 3 | "name": "EBV Portal", 4 | "description": "Access to EBV Portal data", 5 | "priority": -20, 6 | "cacheTtl": 0, 7 | "baseUrl": "https://portal.geobon.org/api/v1", 8 | "data": "test_data/netcdf4d/", 9 | "overviews": "test_data/netcdf4d/overviews/", 10 | "metadataDbConfig": { 11 | "user": "geoengine", 12 | "password": "geoengine", 13 | "host": "localhost", 14 | "port": 5432, 15 | "database": "geoengine", 16 | "schema": "ebv" 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /test_data/provider_defs/gbif.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "GbifDataProviderDefinition", 3 | "name": "GBIF", 4 | "description": "Access to GBIF occurrence data", 5 | "priority": -30, 6 | "dbConfig": { 7 | "user": "geoengine", 8 | "password": "geoengine", 9 | "host": "localhost", 10 | "port": 5432, 11 | "database": "geoengine", 12 | "schema": "gbif" 13 | }, 14 | "cacheTtl": 0, 15 | "autocompleteTimeout": 5, 16 | "columns": ["gbifid", "basisofrecord", "scientificname"] 17 | } 18 | -------------------------------------------------------------------------------- /test_data/provider_defs/gfbio_abcd.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "GfbioAbcdDataProviderDefinition", 3 | "name": "GFBio ABCD Datasets", 4 | "description": "Access to GFBio ABCD datasets", 5 | "priority": -40, 6 | "dbConfig": { 7 | "user": "geoengine", 8 | "password": "geoengine", 9 | "host": "localhost", 10 | "port": 5432, 11 | "database": "geoengine", 12 | "schema": "abcd" 13 | }, 14 | "cacheTtl": 0 15 | } 16 | -------------------------------------------------------------------------------- /test_data/provider_defs/gfbio_collections.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "GfbioCollectionsDataProviderDefinition", 3 | "name": "GFBio Collections", 4 | "description": "Access to GFBio collections", 5 | "priority": -50, 6 | "collectionApiUrl": "https://collections.gfbio.dev/api/", 7 | 
"collectionApiAuthToken": "Token 6bc06a951394f222eeb576c6f86a4ad73ab805f6", 8 | "abcdDbConfig": { 9 | "user": "geoengine", 10 | "password": "geoengine", 11 | "host": "localhost", 12 | "port": 5432, 13 | "database": "geoengine", 14 | "schema": "abcd" 15 | }, 16 | "pangaeaUrl": "https://doi.pangaea.de", 17 | "cacheTtl": 0 18 | } 19 | -------------------------------------------------------------------------------- /test_data/provider_defs/netcdfcf.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "NetCdfCfDataProviderDefinition", 3 | "name": "terranova", 4 | "description": "terranova", 5 | "priority": -1000, 6 | "cacheTtl": 0, 7 | "data": "test_data/netcdf4d/", 8 | "overviews": "test_data/netcdf4d/overviews/", 9 | "metadataDbConfig": { 10 | "user": "geoengine", 11 | "password": "geoengine", 12 | "host": "localhost", 13 | "port": 5432, 14 | "database": "geoengine", 15 | "schema": "ebv" 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /test_data/provider_defs/open_weather.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "EdrDataProviderDefinition", 3 | "id": "0668d980-6c82-47c4-b1d9-1096f6770666", 4 | "name": "Open Weather", 5 | "description": "Access to Open Weather data", 6 | "priority": -60, 7 | "baseUrl": "https://ogcie.iblsoft.com/edr", 8 | "discreteVrs": ["ibl#between-depth"], 9 | "provenance": [ 10 | { 11 | "citation": "For GFS: U.S. National Centers for Environmental Information. Scale not given. \"Global Forecast System\" For others look at source website.", 12 | "license": "GFS is public domain. For others look at source website.", 13 | "uri": "https://www.iblsoft.com/products/open-weather" 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /test_data/provider_defs/pangaea.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "PangaeaDataProviderDefinition", 3 | "name": "Pangaea", 4 | "priority": -70, 5 | "description": "Access to data stored in Pangaea", 6 | "baseUrl": "https://doi.pangaea.de", 7 | "cacheTtl": 0 8 | } 9 | -------------------------------------------------------------------------------- /test_data/provider_defs/sentinel_s2_l2a_cogs.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "SentinelS2L2ACogsProviderDefinition", 3 | "id": "5779494c-f3a2-48b3-8a2d-5fbba8c5b6c5", 4 | "name": "Element 84 AWS STAC", 5 | "description": "Sentinel-2 L2A COGs hosted on AWS by Element 84", 6 | "priority": 50, 7 | "apiUrl": "https://earth-search.aws.element84.com/v0/collections/sentinel-s2-l2a-cogs/items", 8 | "cacheTtl": 86400, 9 | "bands": [ 10 | { 11 | "name": "B01", 12 | "noDataValue": 0, 13 | "dataType": "U16" 14 | }, 15 | { 16 | "name": "B02", 17 | "noDataValue": 0, 18 | "dataType": "U16" 19 | }, 20 | { 21 | "name": "B03", 22 | "noDataValue": 0, 23 | "dataType": "U16" 24 | }, 25 | { 26 | "name": "B04", 27 | "noDataValue": 0, 28 | "dataType": "U16" 29 | }, 30 | { 31 | "name": "B08", 32 | "noDataValue": 0, 33 | "dataType": "U16" 34 | }, 35 | { 36 | "name": "SCL", 37 | "noDataValue": 0, 38 | "dataType": "U8" 39 | } 40 | ], 41 | "zones": [ 42 | { 43 | "name": "UTM32N", 44 | "epsg": 32632 45 | }, 46 | { 47 | "name": "UTM36N", 48 | "epsg": 32636 49 | }, 50 | { 51 | "name": "UTM36S", 52 | "epsg": 32736 53 | }, 54 | { 55 | "name": "UTM37N", 56 | "epsg": 32637 57 | }, 58 | 
{ 59 | "name": "UTM37S", 60 | "epsg": 32737 61 | } 62 | ], 63 | "queryBuffer": { 64 | "startSeconds": 60, 65 | "endSeconds": 60 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /test_data/provider_defs/user_datasets.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "DatasetLayerListingProviderDefinition", 3 | "id": "cbb21ee3-d15d-45c5-a175-66964adf4e85", 4 | "name": "Personal Data Catalog", 5 | "description": "Catalog of your personal data and workflows.", 6 | "priority": 100, 7 | "collections": [ 8 | { 9 | "name": "User Uploads", 10 | "description": "Datasets uploaded by the user.", 11 | "tags": ["upload"] 12 | }, 13 | { 14 | "name": "Workflows", 15 | "description": "Datasets created from workflows.", 16 | "tags": ["workflow"] 17 | }, 18 | { 19 | "name": "All Datasets", 20 | "description": "All datasets", 21 | "tags": ["*"] 22 | } 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /test_data/provider_defs/wildlive.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "WildliveDataConnectorDefinition", 3 | "id": "b9e0e7c2-4f2a-4e3d-9c8a-2e7f5b1a6d3f", 4 | "name": "WildLIVE!", 5 | "description": "WildLIVE! Portal Connector" 6 | } 7 | -------------------------------------------------------------------------------- /test_data/raster/cloud_optimized_geotiff_big_tiff_from_stream_compressed.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/cloud_optimized_geotiff_big_tiff_from_stream_compressed.tiff -------------------------------------------------------------------------------- /test_data/raster/cloud_optimized_geotiff_from_stream_compressed.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/cloud_optimized_geotiff_from_stream_compressed.tiff -------------------------------------------------------------------------------- /test_data/raster/cloud_optimized_geotiff_timestep_0_from_stream_compressed.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/cloud_optimized_geotiff_timestep_0_from_stream_compressed.tiff -------------------------------------------------------------------------------- /test_data/raster/cloud_optimized_geotiff_timestep_1_from_stream_compressed.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/cloud_optimized_geotiff_timestep_1_from_stream_compressed.tiff -------------------------------------------------------------------------------- /test_data/raster/cloud_optimized_geotiff_timestep_2_from_stream_compressed.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/cloud_optimized_geotiff_timestep_2_from_stream_compressed.tiff -------------------------------------------------------------------------------- 
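The "collections" entries in test_data/provider_defs/user_datasets.json above group datasets by tag, and the "*" entry under "All Datasets" reads like a wildcard. A sketch of such tag matching under that assumption (illustrative; the semantics are inferred from the config, not from engine code):

# Assumed semantics: a dataset appears in a collection if it carries one of
# the collection's tags; "*" matches any dataset.
def in_collection(dataset_tags, collection_tags):
    return "*" in collection_tags or any(tag in collection_tags for tag in dataset_tags)

assert in_collection(["upload"], ["upload"])
assert in_collection(["workflow"], ["*"])
assert not in_collection(["workflow"], ["upload"])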
/test_data/raster/geotiff_big_tiff_from_stream_compressed.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/geotiff_big_tiff_from_stream_compressed.tiff -------------------------------------------------------------------------------- /test_data/raster/geotiff_from_stream_compressed.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/geotiff_from_stream_compressed.tiff -------------------------------------------------------------------------------- /test_data/raster/geotiff_with_mask_from_stream_compressed.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/geotiff_with_mask_from_stream_compressed.tiff -------------------------------------------------------------------------------- /test_data/raster/landcover/landcover.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/landcover/landcover.tif -------------------------------------------------------------------------------- /test_data/raster/landcover/readme.txt: -------------------------------------------------------------------------------- 1 | The data was obtained from . The layer Majority_Land_Cover_Type_1 was extracted, downscaled and saved as landcover.tif. 2 | 3 | All data distributed by the LP DAAC contain no restrictions on the data reuse. 
(https://lpdaac.usgs.gov/resources/faqs/#am-i-allowed-to-reuse-lp-daac-data) 4 | -------------------------------------------------------------------------------- /test_data/raster/modis_ndvi/MOD13A2_M_NDVI_2014-01-01.TIFF: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/modis_ndvi/MOD13A2_M_NDVI_2014-01-01.TIFF -------------------------------------------------------------------------------- /test_data/raster/modis_ndvi/MOD13A2_M_NDVI_2014-02-01.TIFF: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/modis_ndvi/MOD13A2_M_NDVI_2014-02-01.TIFF -------------------------------------------------------------------------------- /test_data/raster/modis_ndvi/MOD13A2_M_NDVI_2014-03-01.TIFF: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/modis_ndvi/MOD13A2_M_NDVI_2014-03-01.TIFF -------------------------------------------------------------------------------- /test_data/raster/modis_ndvi/MOD13A2_M_NDVI_2014-04-01.TIFF: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/modis_ndvi/MOD13A2_M_NDVI_2014-04-01.TIFF -------------------------------------------------------------------------------- /test_data/raster/modis_ndvi/MOD13A2_M_NDVI_2014-05-01.TIFF: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/modis_ndvi/MOD13A2_M_NDVI_2014-05-01.TIFF -------------------------------------------------------------------------------- /test_data/raster/modis_ndvi/MOD13A2_M_NDVI_2014-06-01.TIFF: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/modis_ndvi/MOD13A2_M_NDVI_2014-06-01.TIFF -------------------------------------------------------------------------------- /test_data/raster/modis_ndvi/flipped_axis_y/MOD13A2_M_NDVI_2014-01-01_flipped_y.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/modis_ndvi/flipped_axis_y/MOD13A2_M_NDVI_2014-01-01_flipped_y.tiff -------------------------------------------------------------------------------- /test_data/raster/modis_ndvi/projected_3857/MOD13A2_M_NDVI_2014-04-01.TIFF: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/modis_ndvi/projected_3857/MOD13A2_M_NDVI_2014-04-01.TIFF -------------------------------------------------------------------------------- /test_data/raster/modis_ndvi/projected_3857/MOD13A2_M_NDVI_2014-04-01_tile-20.pgw: -------------------------------------------------------------------------------- 1 | 11111.111111111111 2 | 0 3 | 0 4 | -11111.111111111111 5 | 5555.555555555556 6 | 10005555.555555556 
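The .pgw world file above holds an affine geotransform as six plain-text numbers: x pixel size, row rotation, column rotation, y pixel size (negative for north-up rasters), and the x/y map coordinates that, by world-file convention, refer to the center of the upper-left pixel. A small reader and pixel-to-map transform (illustrative, not engine code):

# Read an ESRI world file (expects exactly the six numeric lines) and apply
# its affine transform to a pixel index: x' = a*col + b*row + c, y' = d*col + e*row + f.
def read_world_file(path):
    with open(path) as file:
        a, d, b, e, c, f = (float(line) for line in file)
    return a, d, b, e, c, f

def pixel_to_map(col, row, world):
    a, d, b, e, c, f = world
    return (a * col + b * row + c, d * col + e * row + f)

world = read_world_file(
    "test_data/raster/modis_ndvi/projected_3857/MOD13A2_M_NDVI_2014-04-01_tile-20.pgw"
)
print(pixel_to_map(0, 0, world))  # center of the upper-left pixel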
-------------------------------------------------------------------------------- /test_data/raster/modis_ndvi/projected_3857/MOD13A2_M_NDVI_2014-04-01_tile-20.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/modis_ndvi/projected_3857/MOD13A2_M_NDVI_2014-04-01_tile-20.png -------------------------------------------------------------------------------- /test_data/raster/modis_ndvi/projected_3857/MOD13A2_M_NDVI_2014-04-01_tile-20.rdc: -------------------------------------------------------------------------------- 1 | file format : Idrisi Raster A.1 2 | file title : 3 | data type : byte 4 | file type : binary 5 | columns : 450 6 | rows : 450 7 | ref. system : MOD13A2_M_NDVI_2014-04-01_tile-20 8 | ref. units : m 9 | unit dist. : 1 10 | min. X : 0.0 11 | max. X : 5000000.0 12 | min. Y : 5000000.0 13 | max. Y : 10000000.0 14 | pos'n error : unspecified 15 | resolution : 11111.111111111111 16 | min. value : 1 17 | max. value : 255 18 | display min : 1 19 | display max : 255 20 | value units : unspecified 21 | value error : unspecified 22 | flag value : 0 23 | flag def'n : missing data 24 | legend cats : 0 25 | lineage : 26 | comment : 27 | -------------------------------------------------------------------------------- /test_data/raster/modis_ndvi/projected_3857/MOD13A2_M_NDVI_2014-04-01_tile-20.ref: -------------------------------------------------------------------------------- 1 | ref. system : WGS 84 / Pseudo-Mercator 2 | projection : Mercator 3 | datum : WGS_1984 4 | delta WGS84 : 0 2.08e-322 3.37e-321 5 | ellipsoid : WGS 84 6 | major s-ax : 6378137.000 7 | minor s-ax : 6356752.314 8 | origin long : 0 9 | origin lat : 0 10 | origin X : 0 11 | origin Y : 0 12 | scale fac : 1 13 | units : m 14 | parameters : 0 15 | -------------------------------------------------------------------------------- /test_data/raster/modis_ndvi/projected_3857/MOD13A2_M_NDVI_2014-04-01_tile-20.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/modis_ndvi/projected_3857/MOD13A2_M_NDVI_2014-04-01_tile-20.rst -------------------------------------------------------------------------------- /test_data/raster/modis_ndvi/with_offset_scale/MOD13A2_M_NDVI_2014-01-01.TIFF: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/modis_ndvi/with_offset_scale/MOD13A2_M_NDVI_2014-01-01.TIFF -------------------------------------------------------------------------------- /test_data/raster/natural_earth_2/NE2_150M_SR_W.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/natural_earth_2/NE2_150M_SR_W.tif -------------------------------------------------------------------------------- /test_data/raster/png/png_from_stream.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/raster/png/png_from_stream.png -------------------------------------------------------------------------------- 
/test_data/raster/simple_raster/wikipedia_esri_asci.asc: -------------------------------------------------------------------------------- 1 | ncols 4 2 | nrows 6 3 | xllcorner 0.0 4 | yllcorner 0.0 5 | cellsize 50.0 6 | NODATA_value -9999 7 | -9999 -9999 5 2 8 | -9999 20 100 36 9 | 3 8 35 10 10 | 32 42 50 6 11 | 88 75 27 9 12 | 13 5 1 -9999 -------------------------------------------------------------------------------- /test_data/stac_responses/cog-header.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/stac_responses/cog-header.bin -------------------------------------------------------------------------------- /test_data/stac_responses/cog-tile.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/stac_responses/cog-tile.bin -------------------------------------------------------------------------------- /test_data/vector/data/empty.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [] 4 | } 5 | -------------------------------------------------------------------------------- /test_data/vector/data/germany_polygon.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/vector/data/germany_polygon.gpkg -------------------------------------------------------------------------------- /test_data/vector/data/lonlat.csv: -------------------------------------------------------------------------------- 1 | Longitude,Latitude,Name 2 | 1.1,2.2,foo 3 | -------------------------------------------------------------------------------- /test_data/vector/data/lonlat_date.csv: -------------------------------------------------------------------------------- 1 | Longitude,Latitude,Name,Date 2 | 1.1,2.2,foo,24.12.1995 3 | -------------------------------------------------------------------------------- /test_data/vector/data/lonlat_date_time.csv: -------------------------------------------------------------------------------- 1 | Longitude,Latitude,Name,DateTime 2 | 1.1,2.2,foo,24.12.1995 18:00:00 3 | -------------------------------------------------------------------------------- /test_data/vector/data/lonlat_date_time_tz.csv: -------------------------------------------------------------------------------- 1 | Longitude,Latitude,Name,DateTimeTz 2 | 1.1,2.2,foo,24.12.1995 23:00:00 +0100 3 | -------------------------------------------------------------------------------- /test_data/vector/data/lonlat_unix_date.csv: -------------------------------------------------------------------------------- 1 | Longitude,Latitude,Name,DateTime 2 | 1.1,2.2,foo,819824400.5 3 | -------------------------------------------------------------------------------- /test_data/vector/data/missing_geo.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [{ 4 | "type": "Feature", 5 | "geometry": { 6 | "type": "Point", 7 | "coordinates": [0.0, 0.1] 8 | }, 9 | "properties": { 10 | "bar": "one", 11 | "foo": 0.0 12 | }, 13 | "when": { 14 | "start": "1970-01-01T00:00:00+00:00", 15 | "end": "1970-01-01T00:00:00.001+00:00", 16 | "type": "Interval" 17 | } 
18 | }, { 19 | "type": "Feature", 20 | "geometry": { 21 | "type": "MultiPoint", 22 | "coordinates": [ 23 | [1.0, 1.1], 24 | [2.0, 2.1] 25 | ] 26 | }, 27 | "properties": { 28 | "bar": null, 29 | "foo": 1.0 30 | }, 31 | "when": { 32 | "start": "1970-01-01T00:00:00.001+00:00", 33 | "end": "1970-01-01T00:00:00.002+00:00", 34 | "type": "Interval" 35 | } 36 | }, { 37 | "type": "Feature", 38 | "geometry": null, 39 | "properties": { 40 | "bar": "three", 41 | "foo": 2.0 42 | }, 43 | "when": { 44 | "start": "1970-01-01T00:00:00.003+00:00", 45 | "end": "1970-01-01T00:00:00.004+00:00", 46 | "type": "Interval" 47 | } 48 | }] 49 | } -------------------------------------------------------------------------------- /test_data/vector/data/ne_10m_ports/license.txt: -------------------------------------------------------------------------------- 1 | Public domain by Natural Earth http://www.naturalearthdata.com/about/terms-of-use/ 2 | -------------------------------------------------------------------------------- /test_data/vector/data/ne_10m_ports/ne_10m_ports.VERSION.txt: -------------------------------------------------------------------------------- 1 | 4.0.0 2 | -------------------------------------------------------------------------------- /test_data/vector/data/ne_10m_ports/ne_10m_ports.cpg: -------------------------------------------------------------------------------- 1 | UTF-8 -------------------------------------------------------------------------------- /test_data/vector/data/ne_10m_ports/ne_10m_ports.dbf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/vector/data/ne_10m_ports/ne_10m_ports.dbf -------------------------------------------------------------------------------- /test_data/vector/data/ne_10m_ports/ne_10m_ports.prj: -------------------------------------------------------------------------------- 1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.017453292519943295]] -------------------------------------------------------------------------------- /test_data/vector/data/ne_10m_ports/ne_10m_ports.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/vector/data/ne_10m_ports/ne_10m_ports.shp -------------------------------------------------------------------------------- /test_data/vector/data/ne_10m_ports/ne_10m_ports.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/vector/data/ne_10m_ports/ne_10m_ports.shx -------------------------------------------------------------------------------- /test_data/vector/data/ne_10m_ports/projected_3857/ne_10m_ports.dbf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/vector/data/ne_10m_ports/projected_3857/ne_10m_ports.dbf -------------------------------------------------------------------------------- /test_data/vector/data/ne_10m_ports/projected_3857/ne_10m_ports.prj: -------------------------------------------------------------------------------- 1 | 
PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]] -------------------------------------------------------------------------------- /test_data/vector/data/ne_10m_ports/projected_3857/ne_10m_ports.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/vector/data/ne_10m_ports/projected_3857/ne_10m_ports.shp -------------------------------------------------------------------------------- /test_data/vector/data/ne_10m_ports/projected_3857/ne_10m_ports.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/vector/data/ne_10m_ports/projected_3857/ne_10m_ports.shx -------------------------------------------------------------------------------- /test_data/vector/data/ne_10m_ports/source.txt: -------------------------------------------------------------------------------- 1 | "ne_10m_ports.*" were downloaded from http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/10m/cultural/ne_10m_ports.zip 2 | -------------------------------------------------------------------------------- /test_data/vector/data/ne_10m_ports/with_spatial_index/ne_10m_ports.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/vector/data/ne_10m_ports/with_spatial_index/ne_10m_ports.gpkg -------------------------------------------------------------------------------- /test_data/vector/data/plain_data.csv: -------------------------------------------------------------------------------- 1 | a,b,c 2 | 1,5.4,foo 3 | 2,,bar 4 | -------------------------------------------------------------------------------- /test_data/vector/data/points.csv: -------------------------------------------------------------------------------- 1 | x,y,num,txt 2 | 1.1,2.2,42,foo 3 | 3.3,4.4,0815,bar 4 | -------------------------------------------------------------------------------- /test_data/vector/data/points.fgb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/vector/data/points.fgb -------------------------------------------------------------------------------- /test_data/vector/data/points_with_bool.csv: -------------------------------------------------------------------------------- 1 | x,y,bool 2 | 1.1,2.2,true 3 | 3.3,4.4,false 4 | 7.2,5.9,true 5 | -------------------------------------------------------------------------------- /test_data/vector/data/points_with_date.cpg: -------------------------------------------------------------------------------- 1 | UTF-8 -------------------------------------------------------------------------------- /test_data/vector/data/points_with_date.dbf: -------------------------------------------------------------------------------- 1 | yatime_startDtime_endD 2014060120140601 
2014050120140501 2014070120140701 2014040120140701 2014050120140601 -------------------------------------------------------------------------------- /test_data/vector/data/points_with_date.prj: -------------------------------------------------------------------------------- 1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] -------------------------------------------------------------------------------- /test_data/vector/data/points_with_date.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/vector/data/points_with_date.shp -------------------------------------------------------------------------------- /test_data/vector/data/points_with_date.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/vector/data/points_with_date.shx -------------------------------------------------------------------------------- /test_data/vector/data/points_with_iso_start_duration.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "crs": { 4 | "type": "name", 5 | "properties": { 6 | "name": "EPSG:4326" 7 | } 8 | }, 9 | "features": [ 10 | { 11 | "type": "Feature", 12 | "geometry": { 13 | "type": "Point", 14 | "coordinates": [10.294331, 51.847343] 15 | }, 16 | "properties": { 17 | "time_start": "2014-06-01T00:00:00Z", 18 | "duration": 1 19 | } 20 | }, 21 | { 22 | "type": "Feature", 23 | "geometry": { 24 | "type": "Point", 25 | "coordinates": [3.966206, 49.337321] 26 | }, 27 | "properties": { 28 | "time_start": "2014-05-01T00:00:00Z", 29 | "duration": 2 30 | } 31 | }, 32 | { 33 | "type": "Feature", 34 | "geometry": { 35 | "type": "Point", 36 | "coordinates": [6.163471, 45.903036] 37 | }, 38 | "properties": { 39 | "time_start": "2014-07-01T00:00:00Z", 40 | "duration": 3 41 | } 42 | }, 43 | { 44 | "type": "Feature", 45 | "geometry": { 46 | "type": "Point", 47 | "coordinates": [12.843159, 47.825724] 48 | }, 49 | "properties": { 50 | "time_start": "2014-04-01T00:00:00Z", 51 | "duration": 4 52 | } 53 | }, 54 | { 55 | "type": "Feature", 56 | "geometry": { 57 | "type": "Point", 58 | "coordinates": [19.171284, 50.636947] 59 | }, 60 | "properties": { 61 | "time_start": "2014-05-01T00:00:00Z", 62 | "duration": 5 63 | } 64 | } 65 | ] 66 | } 67 | -------------------------------------------------------------------------------- /test_data/vector/data/points_with_iso_time.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "crs": { 4 | "type": "name", 5 | "properties": { 6 | "name": "EPSG:4326" 7 | } 8 | }, 9 | "features": [ 10 | { 11 | "type": "Feature", 12 | "geometry": { 13 | "type": "Point", 14 | "coordinates": [10.294331, 51.847343] 15 | }, 16 | "properties": { 17 | "time_start": "2014-06-01T00:00:00Z", 18 | "time_end": "2014-06-01T00:00:00Z" 19 | } 20 | }, 21 | { 22 | "type": "Feature", 23 | "geometry": { 24 | "type": "Point", 25 | "coordinates": [3.966206, 49.337321] 26 | }, 27 | "properties": { 28 | "time_start": "2014-05-01T00:00:00Z", 29 | "time_end": "2014-05-01T00:00:00Z" 30 | } 31 | }, 32 | { 33 | "type": "Feature", 34 | "geometry": { 35 | "type": "Point", 36 | "coordinates": [6.163471, 
45.903036]
      },
      "properties": {
        "time_start": "2014-07-01T00:00:00Z",
        "time_end": "2014-07-01T00:00:00Z"
      }
    },
    {
      "type": "Feature",
      "geometry": {
        "type": "Point",
        "coordinates": [12.843159, 47.825724]
      },
      "properties": {
        "time_start": "2014-04-01T00:00:00Z",
        "time_end": "2014-07-01T00:00:00Z"
      }
    },
    {
      "type": "Feature",
      "geometry": {
        "type": "Point",
        "coordinates": [19.171284, 50.636947]
      },
      "properties": {
        "time_start": "2014-05-01T00:00:00Z",
        "time_end": "2014-06-01T00:00:00Z"
      }
    }
  ]
}
--------------------------------------------------------------------------------
/test_data/vector/data/points_with_time.gpkg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/vector/data/points_with_time.gpkg
--------------------------------------------------------------------------------
/test_data/vector/data/points_with_time.json:
--------------------------------------------------------------------------------
{
  "type": "FeatureCollection",
  "crs": {
    "type": "name",
    "properties": {
      "name": "EPSG:4326"
    }
  },
  "features": [{
    "type": "Feature",
    "geometry": {
      "type": "Point",
      "coordinates": [10.294331, 51.847343]
    },
    "properties": {
      "time_start": "+2014-06-01 00:00:00 +0000",
      "time_end": "+2014-06-01 00:00:00 +0000"
    }
  }, {
    "type": "Feature",
    "geometry": {
      "type": "Point",
      "coordinates": [3.966206, 49.337321]
    },
    "properties": {
      "time_start": "+2014-05-01 00:00:00 +0000",
      "time_end": "+2014-05-01 00:00:00 +0000"
    }
  }, {
    "type": "Feature",
    "geometry": {
      "type": "Point",
      "coordinates": [6.163471, 45.903036]
    },
    "properties": {
      "time_start": "+2014-07-01 00:00:00 +0000",
      "time_end": "+2014-07-01 00:00:00 +0000"
    }
  }, {
    "type": "Feature",
    "geometry": {
      "type": "Point",
      "coordinates": [12.843159, 47.825724]
    },
    "properties": {
      "time_start": "+2014-04-01 00:00:00 +0000",
      "time_end": "+2014-07-01 00:00:00 +0000"
    }
  }, {
    "type": "Feature",
    "geometry": {
      "type": "Point",
      "coordinates": [19.171284, 50.636947]
    },
    "properties": {
      "time_start": "+2014-05-01 00:00:00 +0000",
      "time_end": "+2014-06-01 00:00:00 +0000"
    }
  }]
}
--------------------------------------------------------------------------------
/test_data/vector/data/two_layers.gpkg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/vector/data/two_layers.gpkg
--------------------------------------------------------------------------------
/test_data/vector/germany_polygon.json:
--------------------------------------------------------------------------------
{
  "fileName": "test_data/vector/data/germany_polygon.gpkg",
  "layerName": "test_germany",
  "columns": {
    "x": "",
    "float": [],
    "int": [],
    "text": []
  },
  "onError": "abort"
}
--------------------------------------------------------------------------------
/test_data/wildlive/responses/annotations.json:
--------------------------------------------------------------------------------
{
  "pageNum": 0,
  "pageSize": -1,
  "size": 3,
  "facets": [],
  "results": [
    {
      "id": "wildlive/a1eaa469eec33a0d3a39",
      "content": {
        "hasTarget": {
          "hasSelector": {
            "value": "xywh=2671,1043,1808,999"
          },
          "source": "wildlive/75243d4b79e5c91bd3b3"
        },
        "hasBody": {
          "acceptedNameUsageID": "https://www.gbif.org/species/5219426",
          "vernacularName": "Jaguar",
          "scientificName": "Panthera onca (Linnaeus, 1758)"
        },
        "id": "wildlive/a1eaa469eec33a0d3a39"
      }
    },
    {
      "id": "wildlive/ebe8d5f722782b0bee73",
      "content": {
        "hasTarget": {
          "hasSelector": {
            "value": "xywh=2834,1168,2092,1244"
          },
          "source": "wildlive/7f8e06ea4ae305a2822d"
        },
        "hasBody": {
          "acceptedNameUsageID": "https://www.gbif.org/species/5219426",
          "vernacularName": "Jaguar",
          "scientificName": "Panthera onca (Linnaeus, 1758)"
        },
        "id": "wildlive/ebe8d5f722782b0bee73"
      }
    },
    {
      "id": "wildlive/7ef5664c43cf26299b09",
      "content": {
        "hasTarget": {
          "hasSelector": {
            "value": "xywh=2734,1147,1877,833"
          },
          "source": "wildlive/da842858a8fd792ccfbf"
        },
        "hasBody": {
          "acceptedNameUsageID": "https://www.gbif.org/species/5219426",
          "vernacularName": "Jaguar",
          "scientificName": "Panthera onca (Linnaeus, 1758)"
        },
        "id": "wildlive/7ef5664c43cf26299b09"
      }
    }
  ]
}
--------------------------------------------------------------------------------
/test_data/wildlive/responses/image_objects.json:
--------------------------------------------------------------------------------
{
  "pageNum": 0,
  "pageSize": -1,
  "size": 3,
  "facets": [],
  "results": [
    {
      "id": "wildlive/75243d4b79e5c91bd3b3",
      "content": {
        "atStation": "wildlive/ea64f18b8fa1dec31196",
        "captureTimeStamp": "2019-02-26T14:48:27Z",
        "contentUrl": "https://wildlive.senckenberg.de/api/objects/wildlive/75243d4b79e5c91bd3b3?payload=CamTrapImport_2019-03-11_Grid_G-05_105_A_026.JPG",
        "id": "wildlive/75243d4b79e5c91bd3b3",
        "hasAnnotations": ["wildlive/a1eaa469eec33a0d3a39"]
      }
    },
    {
      "id": "wildlive/7f8e06ea4ae305a2822d",
      "content": {
        "atStation": "wildlive/ea64f18b8fa1dec31196",
        "captureTimeStamp": "2019-02-26T14:49:03Z",
        "contentUrl": "https://wildlive.senckenberg.de/api/objects/wildlive/7f8e06ea4ae305a2822d?payload=CamTrapImport_2019-03-11_Grid_G-05_105_A_031.JPG",
        "id": "wildlive/7f8e06ea4ae305a2822d",
        "hasAnnotations": ["wildlive/ebe8d5f722782b0bee73"]
      }
    },
    {
      "id": "wildlive/da842858a8fd792ccfbf",
      "content": {
        "atStation": "wildlive/ea64f18b8fa1dec31196",
        "captureTimeStamp": "2019-02-26T14:48:27Z",
        "contentUrl": "https://wildlive.senckenberg.de/api/objects/wildlive/da842858a8fd792ccfbf?payload=CamTrapImport_2019-03-11_Grid_G-05_105_A_028.JPG",
        "id": "wildlive/da842858a8fd792ccfbf",
        "hasAnnotations": ["wildlive/7ef5664c43cf26299b09"]
      }
    }
  ]
}
--------------------------------------------------------------------------------
/test_data/wildlive/responses/project.json:
--------------------------------------------------------------------------------
{
  "name": "CameraTrapping project in Bolivia",
  "description": "Research project for Jaguar wildlife monitoring",
  "@context": ["https://wildlive.senckenberg.de/wlmo/0.3/context"],
  "projectStatistics": {
    "stationsLayoutsCount": 2,
    "stationSetupCount": 21,
    "captureEventCount": 1702,
    "observationWithImageCount": 2658,
    "annotatedSpeciesCount": 3735,
    "annotatedSpeciesTypes": [
      "https://www.gbif.org/species/2441022",
      "https://www.gbif.org/species/5219426",
      "https://www.gbif.org/species/2440995"
    ],
    "annotatedSpeciesTypesVernacular": [
      "Aurochs",
      "Jaguar",
      "Collared Peccary"
    ],
    "annotatedSpeciesTypesScientific": [
      "Bos taurus Linnaeus, 1758",
      "Panthera onca (Linnaeus, 1758)",
      "Pecari tajacu (Linnaeus, 1758)"
    ]
  },
  "id": "wildlive/ef7833589d61b2d2a905",
  "provObject": "wildlive/23bde451dc80c2bd449c",
  "hasStationsLayouts": [
    "wildlive/667cc39364fd45136c7a",
    "wildlive/151c43fdd5881eba0bd5"
  ]
}
--------------------------------------------------------------------------------
/test_data/wildlive/responses/projects.json:
--------------------------------------------------------------------------------
{
  "pageNum": 0,
  "pageSize": -1,
  "size": 1,
  "facets": [],
  "results": [
    {
      "id": "wildlive/ef7833589d61b2d2a905",
      "type": "Project",
      "content": {
        "name": "CameraTrapping project in Bolivia",
        "description": "Research project for Jaguar wildlife monitoring",
        "@context": ["https://wildlive.senckenberg.de/wlmo/0.3/context"],
        "projectStatistics": {
          "stationsLayoutsCount": 2,
          "stationSetupCount": 21,
          "captureEventCount": 1702,
          "observationWithImageCount": 2658,
          "annotatedSpeciesCount": 3735,
          "annotatedSpeciesTypes": [
            "https://www.gbif.org/species/2441022",
            "https://www.gbif.org/species/5219426",
            "https://www.gbif.org/species/2440995"
          ],
          "annotatedSpeciesTypesVernacular": [
            "Aurochs",
            "Jaguar",
            "Collared Peccary"
          ],
          "annotatedSpeciesTypesScientific": [
            "Bos taurus Linnaeus, 1758",
            "Panthera onca (Linnaeus, 1758)",
            "Pecari tajacu (Linnaeus, 1758)"
          ]
        },
        "id": "wildlive/ef7833589d61b2d2a905",
        "provObject": "wildlive/23bde451dc80c2bd449c",
        "hasStationsLayouts": [
          "wildlive/667cc39364fd45136c7a",
          "wildlive/151c43fdd5881eba0bd5"
        ]
      },
      "metadata": {
        "hashes": {
          "alg": "SHA-256",
          "content": "d102b9e950385afd364c482020dd9be0f8d55d0127632cd00941a39c0ce207a7",
          "full": "79f3f9495387454db94c20f414aab9ce3e5e08e30e5218f3f560a4cde8c71f32"
        },
        "createdOn": 1746721958739,
        "createdBy": "admin",
        "modifiedOn": 1746730126995,
        "modifiedBy": "admin",
        "txnId": 1746730126994929
      }
    }
  ]
}
--------------------------------------------------------------------------------
/test_data/wms/gaussian_blur.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/wms/gaussian_blur.png
--------------------------------------------------------------------------------
/test_data/wms/get_map.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/wms/get_map.png
--------------------------------------------------------------------------------
/test_data/wms/get_map_colorizer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/wms/get_map_colorizer.png
--------------------------------------------------------------------------------
/test_data/wms/get_map_ndvi.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/wms/get_map_ndvi.png
--------------------------------------------------------------------------------
/test_data/wms/ne2_rgb_colorizer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/wms/ne2_rgb_colorizer.png
--------------------------------------------------------------------------------
/test_data/wms/ne2_rgb_colorizer_gray.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/wms/ne2_rgb_colorizer_gray.png
--------------------------------------------------------------------------------
/test_data/wms/partial_derivative.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/wms/partial_derivative.png
--------------------------------------------------------------------------------
/test_data/wms/raster.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/wms/raster.png
--------------------------------------------------------------------------------
/test_data/wms/raster_colorizer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/wms/raster_colorizer.png
--------------------------------------------------------------------------------
/test_data/wms/raster_small.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/geo-engine/geoengine/c33c038659981ffab0a6752e0a0ac94c54b8b604/test_data/wms/raster_small.png
--------------------------------------------------------------------------------