├── frontend ├── .nvmrc ├── LICENSE ├── .gitignore ├── .dockerignore ├── .prettierrc ├── test │ └── example.test.ts ├── tsconfig.json ├── .babelrc ├── .stylelintrc.json ├── app │ ├── vite-env.d.ts │ ├── global.d.ts │ ├── types │ │ └── stac.d.ts │ ├── main.tsx │ ├── hooks │ │ └── useStacCatalog.ts │ ├── context │ │ └── StacContext.tsx │ └── components │ │ ├── map.tsx │ │ └── detail.tsx ├── .editorconfig ├── tsconfig.node.json ├── vite.config.mts ├── vite-plugin-port-scanner.ts ├── tsconfig.app.json ├── Dockerfile ├── index.html ├── eslint.config.mjs ├── package.json ├── README.md └── jest.config.js ├── docs ├── about │ ├── faq.md │ ├── about.md │ └── timeline.md ├── CNAME ├── dev │ ├── frontend.md │ ├── backend │ │ ├── stac-api.md │ │ ├── global-mosaic.md │ │ └── stac-ingester.md │ ├── new-provider.md │ └── backup-prod-pgstac.md ├── images │ ├── hot_logo.png │ ├── docs_badge.svg │ └── favicon.svg ├── index.md ├── css │ ├── extra.css │ └── timeline.css └── decisions │ ├── 0000-hotosm.md │ ├── README.md │ └── 0001-stac.md ├── backend ├── global-tms │ ├── .gitignore │ ├── chart │ │ ├── README.md │ │ ├── Chart.yaml │ │ ├── templates │ │ │ ├── service.yaml │ │ │ ├── serviceaccount.yaml │ │ │ ├── tests │ │ │ │ └── test-connection.yaml │ │ │ ├── NOTES.txt │ │ │ ├── pvc.yaml │ │ │ ├── ingress.yaml │ │ │ ├── hpa.yaml │ │ │ ├── _helpers.tpl │ │ │ ├── deployment.yaml │ │ │ └── configmap.yaml │ │ ├── .helmignore │ │ └── values.yaml │ ├── gen-chart.sh │ ├── tileserver.config │ ├── tileserver.style.config │ ├── README.md │ ├── compose.yaml │ └── nginx.conf ├── stac-api │ ├── .dockerignore │ ├── app │ │ ├── settings.py │ │ └── main.py │ ├── pyproject.toml │ ├── Dockerfile │ ├── README.md │ ├── compose.yaml │ ├── .gitignore │ └── scripts │ │ └── wait-for-it.sh ├── global-mosaic │ ├── .dockerignore │ ├── output │ │ └── README.md │ ├── pyproject.toml │ ├── compose.yaml │ ├── Dockerfile │ ├── README.md │ ├── .gitignore │ └── scripts │ │ └── gen_coverage_vector.py └── stac-ingester │ 
├── .dockerignore │ ├── pyproject.toml │ ├── compose.yaml │ ├── README.md │ ├── Dockerfile │ └── .gitignore ├── .env.example ├── recipes ├── README.md └── prep │ └── Justfile ├── .github ├── workflows │ ├── docs.yml │ ├── backend-stac-api-build-deploy copy.yml │ ├── backend-stac-api-build-deploy.yml │ ├── backend-stac-ingester-build-deploy.yml │ ├── issue_label.yml │ ├── deploy.yml │ └── checks.yml ├── FUNDING.yml ├── pull_request_template.md └── _workflow-samples │ ├── deploy-gh.yml │ ├── README.md │ └── deploy-s3.yml ├── metadata ├── OIN-image-metadata.json └── README.md ├── notebooks └── pyproject.toml ├── .gitignore ├── mkdocs.yml ├── .pre-commit-config.yaml ├── CONTRIBUTING.md ├── Justfile └── README.md /frontend/.nvmrc: -------------------------------------------------------------------------------- 1 | 22 -------------------------------------------------------------------------------- /frontend/LICENSE: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/about/faq.md: -------------------------------------------------------------------------------- 1 | # FAQ 2 | -------------------------------------------------------------------------------- /docs/CNAME: -------------------------------------------------------------------------------- 1 | docs.imagery.hotosm.org 2 | -------------------------------------------------------------------------------- /docs/about/about.md: -------------------------------------------------------------------------------- 1 | # What is OpenAerialMap? 
2 | -------------------------------------------------------------------------------- /docs/dev/frontend.md: -------------------------------------------------------------------------------- 1 | ../../frontend/README.md -------------------------------------------------------------------------------- /docs/dev/backend/stac-api.md: -------------------------------------------------------------------------------- 1 | ../../../backend/stac-api/README.md -------------------------------------------------------------------------------- /backend/global-tms/.gitignore: -------------------------------------------------------------------------------- 1 | # Packaged helm chart 2 | *.tgz 3 | -------------------------------------------------------------------------------- /frontend/.gitignore: -------------------------------------------------------------------------------- 1 | # NPM (PNPM is used) 2 | package-lock.json 3 | -------------------------------------------------------------------------------- /backend/stac-api/.dockerignore: -------------------------------------------------------------------------------- 1 | # don't copy host uv .venv 2 | .venv 3 | -------------------------------------------------------------------------------- /docs/dev/backend/global-mosaic.md: -------------------------------------------------------------------------------- 1 | ../../../backend/global-mosaic/README.md -------------------------------------------------------------------------------- /docs/dev/backend/stac-ingester.md: -------------------------------------------------------------------------------- 1 | ../../../backend/stac-ingester/README.md -------------------------------------------------------------------------------- /backend/global-mosaic/.dockerignore: -------------------------------------------------------------------------------- 1 | # don't copy host uv .venv 2 | .venv 3 | -------------------------------------------------------------------------------- 
/backend/stac-ingester/.dockerignore: -------------------------------------------------------------------------------- 1 | # don't copy host uv .venv 2 | .venv 3 | -------------------------------------------------------------------------------- /frontend/.dockerignore: -------------------------------------------------------------------------------- 1 | **/.pnpm-store 2 | **/node_modules 3 | **/dist 4 | **/*.gitignore 5 | -------------------------------------------------------------------------------- /docs/images/hot_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hotosm/OpenAerialMap/HEAD/docs/images/hot_logo.png -------------------------------------------------------------------------------- /backend/global-mosaic/output/README.md: -------------------------------------------------------------------------------- 1 | # Output Dir 2 | 3 | Output directory for the final PMTiles file. 4 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # OpenAerialMap 2 | 3 | Welcome to the docs! 4 | 5 | Click on a sidebar link to get started. 6 | -------------------------------------------------------------------------------- /backend/global-tms/chart/README.md: -------------------------------------------------------------------------------- 1 | # Global TMS Helm Chart 2 | 3 | Used to deploy as part of eoAPI in k8s cluster. 
4 | -------------------------------------------------------------------------------- /docs/css/extra.css: -------------------------------------------------------------------------------- 1 | :root { 2 | --md-primary-fg-color: #d73f3f; 3 | --md-primary-fg-color--light: #e27575; 4 | --md-primary-fg-color--dark: #c22929; 5 | } 6 | -------------------------------------------------------------------------------- /frontend/.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "semi": true, 3 | "trailingComma": "none", 4 | "singleQuote": true, 5 | "jsxSingleQuote": true, 6 | "printWidth": 80 7 | } 8 | -------------------------------------------------------------------------------- /frontend/test/example.test.ts: -------------------------------------------------------------------------------- 1 | describe('Main test suite', () => { 2 | it('should pass this demo test', () => { 3 | expect(true).toBe(true); 4 | }); 5 | }); 6 | -------------------------------------------------------------------------------- /frontend/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": [], 3 | "references": [ 4 | { "path": "./tsconfig.app.json" }, 5 | { "path": "./tsconfig.node.json" } 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /backend/stac-api/app/settings.py: -------------------------------------------------------------------------------- 1 | from stac_fastapi.pgstac.config import Settings as _Settings 2 | 3 | 4 | class Settings(_Settings): 5 | """Settings specific to this deployment of STAC FastAPI PgSTAC""" 6 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | VITE_STAC_API_URL=${VITE_STAC_API_URL:-https://api.imagery.hotosm.org} 2 | VITE_STAC_API_PATHNAME=${VITE_STAC_API_PATHNAME:-stac} 3 | 
VITE_STAC_TILER_PATHNAME=${VITE_STAC_TILER_PATHNAME:-raster} 4 | VITE_STAC_ITEMS_LIMIT=${VITE_STAC_ITEMS_LIMIT:-40} 5 | -------------------------------------------------------------------------------- /frontend/.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "test": { 4 | "presets": ["@babel/preset-react", "@babel/preset-typescript"], 5 | "plugins": ["@babel/plugin-transform-modules-commonjs"] 6 | } 7 | }, 8 | "plugins": ["babel-plugin-styled-components"] 9 | } 10 | -------------------------------------------------------------------------------- /backend/global-tms/gen-chart.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euo pipefail 4 | 5 | # First go to chart/Chart.yaml and increment version & appVersion 6 | 7 | helm package chart 8 | # Update version to match here 9 | helm push global-tms-0.1.1.tgz oci://ghcr.io/hotosm/openaerialmap 10 | -------------------------------------------------------------------------------- /backend/stac-api/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "hotosm-stac-api" 3 | version = "0.1.0" 4 | description = "STAC API for HOTOSM based on stac-api-pgstac" 5 | readme = "README.md" 6 | requires-python = ">=3.12" 7 | dependencies = [ 8 | "stac-fastapi-pgstac[server]~=5.0.2", 9 | ] 10 | -------------------------------------------------------------------------------- /docs/decisions/0000-hotosm.md: -------------------------------------------------------------------------------- 1 | # Higher Level HOTOSM Decisions 2 | 3 | Many decisions have been made at an organizational level, affecting 4 | all tools that we develop. 5 | 6 | The decisions made in this project should not deviate much from the 7 | choices, [starting here](https://docs.hotosm.org/decisions/0003-react). 
8 | -------------------------------------------------------------------------------- /recipes/README.md: -------------------------------------------------------------------------------- 1 | # Just Submodules 2 | 3 | - This directory contains submodules for the parent Justfile. 4 | - It allows for submodules to be used like: 5 | 6 | ```bash 7 | just build frontend 8 | just build backend 9 | 10 | # Instead of (only top level) 11 | just build-backend 12 | just build-frontend 13 | ``` 14 | -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: 📖 Publish Docs 2 | 3 | on: 4 | push: 5 | paths: 6 | - docs/** 7 | - mkdocs.yml 8 | branches: [main] 9 | # Allow manual trigger (workflow_dispatch) 10 | workflow_dispatch: 11 | 12 | jobs: 13 | publish_docs: 14 | uses: hotosm/gh-workflows/.github/workflows/mkdocs_build.yml@3.3.2 15 | -------------------------------------------------------------------------------- /backend/global-tms/chart/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: global-tms 3 | description: A combined raster TMS of OAM imagery, for legacy clients 4 | type: application 5 | icon: https://raw.githubusercontent.com/hotosm/openaerialmap/refs/heads/main/docs/images/favicon.svg 6 | 7 | # Chart version 8 | version: 0.1.1 9 | 10 | # App version 11 | appVersion: "0.1.1" 12 | -------------------------------------------------------------------------------- /backend/stac-ingester/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "hotosm-stac-ingester" 3 | version = "0.1.0" 4 | description = "Container for running STAC Ingestion for HOTOSM" 5 | readme = "README.md" 6 | requires-python = ">=3.12" 7 | dependencies = [ 8 | "stactools-hotosm[ingest]", 9 | ] 10 | 11 | [tool.uv.sources] 12 | 
stactools-hotosm = { git = "https://github.com/hotosm/stactools-hotosm", rev = "v0.2.1" } 13 | -------------------------------------------------------------------------------- /backend/global-tms/chart/templates/service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: {{ include "chart.fullname" . }}-nginx 5 | labels: 6 | {{- include "chart.labels" . | nindent 4 }} 7 | spec: 8 | type: ClusterIP 9 | ports: 10 | - port: 80 11 | targetPort: 80 12 | protocol: TCP 13 | name: http 14 | selector: 15 | {{- include "chart.selectorLabels" . | nindent 4 }} 16 | -------------------------------------------------------------------------------- /metadata/OIN-image-metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "An scene from some satellite", 3 | "projection": "EPSG:4326", 4 | "bbox": [-180, -90, 180, 90], 5 | "footprint": "POLYGON((-180 -90, -180 90, 180 90, 180 -90, -180 -90))", 6 | "gsd": 0.35, 7 | "file_size": 1024, 8 | "sense_start": "2015-05-03T13:00:00.000", 9 | "sense_end": "2015-05-04T13:00:00.000", 10 | "platform": "satellite", 11 | "contact": "someone@nasa.gov" 12 | } 13 | -------------------------------------------------------------------------------- /docs/decisions/README.md: -------------------------------------------------------------------------------- 1 | # Architectural Decisions 2 | 3 | Markdown Architectural Decision Records documenting the technical decisions 4 | taken in this project. 5 | 6 | This process was started 15/08/2025, so does not necessarily capture all decisions 7 | from the projects inception. 
8 | 9 | ## Decisions 10 | 11 | - [0000 - HOTOSM Context and Alignment](./0000-hotosm.md) 12 | - [0001 - STAC to catalogue all imagery assets](./0001-stac.md) 13 | -------------------------------------------------------------------------------- /frontend/.stylelintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "stylelint-config-standard", 4 | "stylelint-config-styled-components" 5 | ], 6 | "customSyntax": "postcss-styled-syntax", 7 | "rules": { 8 | "font-family-no-missing-generic-family-keyword": null, 9 | "no-descending-specificity": [ 10 | true, 11 | { 12 | "severity": "warning" 13 | } 14 | ] 15 | }, 16 | "ignoreFiles": ["**/*.d.ts"] 17 | } 18 | -------------------------------------------------------------------------------- /frontend/app/vite-env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | 3 | interface ImportMeta { 4 | readonly env: ImportMetaEnv; 5 | } 6 | 7 | interface ImportMetaEnv { 8 | readonly VITE_STAC_API_URL: string; 9 | readonly VITE_STAC_API_PATHNAME: string; 10 | readonly VITE_STAC_TILER_PATHNAME: string; 11 | readonly VITE_STAC_ITEMS_LIMIT: string; 12 | // readonly VITE_STAC_CATALOG_API_URL: string; 13 | // more env vars... 14 | } 15 | -------------------------------------------------------------------------------- /backend/global-tms/tileserver.config: -------------------------------------------------------------------------------- 1 | { 2 | "options": { 3 | "paths": { 4 | "root": "." 
5 | } 6 | }, 7 | "data": { 8 | "global-coverage": { 9 | "pmtiles": "/tiles/global-coverage.pmtiles" 10 | } 11 | }, 12 | "styles": { 13 | "global-coverage": { 14 | "style": "style.json", 15 | "serve_rendered": true, 16 | "tilejson": { 17 | "bounds": [-180, -85, 180, 85] 18 | } 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /frontend/app/global.d.ts: -------------------------------------------------------------------------------- 1 | export {}; 2 | 3 | declare module 'react' { 4 | namespace JSX { 5 | interface IntrinsicElements { 6 | 'wa-drawer': any; 7 | 'wa-button': any; 8 | 'wa-button-group': any; 9 | 'wa-divider': any; 10 | 'wa-spinner': any; 11 | 'wa-select': any; 12 | 'wa-option': any; 13 | 'wa-input': any; 14 | 'wa-dialog': any; 15 | 'wa-copy-button': any; 16 | 'wa-icon': any; 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /backend/global-tms/chart/.helmignore: -------------------------------------------------------------------------------- 1 | # Patterns to ignore when building packages. 2 | # This supports shell glob matching, relative path matching, and 3 | # negation (prefixed with !). Only one pattern per line. 4 | .DS_Store 5 | # Common VCS dirs 6 | .git/ 7 | .gitignore 8 | .bzr/ 9 | .bzrignore 10 | .hg/ 11 | .hgignore 12 | .svn/ 13 | # Common backup files 14 | *.swp 15 | *.bak 16 | *.tmp 17 | *.orig 18 | *~ 19 | # Various IDEs 20 | .project 21 | .idea/ 22 | *.tmproj 23 | .vscode/ 24 | -------------------------------------------------------------------------------- /backend/global-tms/chart/templates/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.serviceAccount.create -}} 2 | apiVersion: v1 3 | kind: ServiceAccount 4 | metadata: 5 | name: {{ include "chart.serviceAccountName" . }} 6 | labels: 7 | {{- include "chart.labels" . 
| nindent 4 }} 8 | {{- with .Values.serviceAccount.annotations }} 9 | annotations: 10 | {{- toYaml . | nindent 4 }} 11 | {{- end }} 12 | automountServiceAccountToken: {{ .Values.serviceAccount.automount }} 13 | {{- end }} 14 | -------------------------------------------------------------------------------- /backend/global-tms/chart/templates/tests/test-connection.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Pod 3 | metadata: 4 | name: "{{ include "chart.fullname" . }}-test-connection" 5 | labels: 6 | {{- include "chart.labels" . | nindent 4 }} 7 | annotations: 8 | "helm.sh/hook": test 9 | spec: 10 | containers: 11 | - name: wget 12 | image: busybox 13 | command: ['wget'] 14 | args: ['{{ include "chart.fullname" . }}:{{ .Values.service.port }}'] 15 | restartPolicy: Never 16 | -------------------------------------------------------------------------------- /backend/global-tms/chart/templates/NOTES.txt: -------------------------------------------------------------------------------- 1 | 1. Tileserver and NGINX sidecar have been deployed. 2 | 2. You can access raster tiles (zoom 0–15) via your Ingress: 3 | 4 | http://{{ .Values.ingress.host }}///.png 5 | 6 | Example: 7 | curl http://{{ .Values.ingress.host }}/10/511/340.png -o tile.png 8 | 9 | Zoom levels >=16 will automatically redirect to the external TiTiler service. 10 | 11 | 3. 
Health check endpoint is available at: 12 | http://{{ .Values.ingress.host }}/health 13 | -------------------------------------------------------------------------------- /backend/global-mosaic/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "hotosm-global-mosaic" 3 | version = "0.1.0" 4 | description = "Generate a global mosaic for OAM on a schedule" 5 | readme = "README.md" 6 | requires-python = ">=3.12" 7 | dependencies = [ 8 | "affine>=2.4.0", 9 | "aiohttp>=3.12.15", 10 | "mercantile>=1.2.1", 11 | "minio>=7.2.16", 12 | "numpy>=2.3.2", 13 | "pmtiles>=3.4.1", 14 | "psycopg>=3.2.9", 15 | "rasterio>=1.4.3", 16 | "rio-tiler>=7.8.1", 17 | "shapely>=2.1.1", 18 | ] 19 | -------------------------------------------------------------------------------- /.github/workflows/backend-stac-api-build-deploy copy.yml: -------------------------------------------------------------------------------- 1 | name: Build and Deploy Global Mosaicker 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - "backend/global-mosaic/**" 9 | workflow_dispatch: 10 | 11 | jobs: 12 | image-build-and-push: 13 | uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.5.1 14 | with: 15 | context: backend/global-mosaic/ 16 | image_name: ghcr.io/${{ github.repository }}/global-mosaic 17 | build_target: prod 18 | dockerfile: Dockerfile 19 | -------------------------------------------------------------------------------- /backend/global-tms/chart/templates/pvc.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.persistence.enabled }} 2 | apiVersion: v1 3 | kind: PersistentVolumeClaim 4 | metadata: 5 | name: {{ include "chart.fullname" . }}-pvc 6 | labels: 7 | {{- include "chart.labels" . 
| nindent 4 }} 8 | spec: 9 | accessModes: 10 | - ReadWriteOnce 11 | resources: 12 | requests: 13 | storage: {{ .Values.persistence.size }} 14 | {{- if .Values.persistence.storageClass }} 15 | storageClassName: {{ .Values.persistence.storageClass }} 16 | {{- end }} 17 | {{- end }} 18 | -------------------------------------------------------------------------------- /.github/workflows/backend-stac-api-build-deploy.yml: -------------------------------------------------------------------------------- 1 | name: Build and Deploy STAC API Backend Service to Container Registry 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - "backend/stac-api/**" 9 | workflow_dispatch: 10 | 11 | jobs: 12 | image-build-and-push: 13 | uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.5.1 14 | with: 15 | context: backend/stac-api/ 16 | image_name: ghcr.io/${{ github.repository }}/stac-api 17 | build_target: prod 18 | dockerfile: Dockerfile 19 | -------------------------------------------------------------------------------- /.github/workflows/backend-stac-ingester-build-deploy.yml: -------------------------------------------------------------------------------- 1 | name: Build and Deploy STAC Ingester Backend Service to Container Registry 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - "backend/stac-ingester/**" 9 | workflow_dispatch: 10 | 11 | jobs: 12 | image-build-and-push: 13 | uses: hotosm/gh-workflows/.github/workflows/image_build.yml@1.5.1 14 | with: 15 | context: backend/stac-ingester/ 16 | image_name: ghcr.io/${{ github.repository }}/stac-ingester 17 | build_target: prod 18 | dockerfile: Dockerfile 19 | -------------------------------------------------------------------------------- /.github/workflows/issue_label.yml: -------------------------------------------------------------------------------- 1 | # We add a label `repo:repo-name` to each new issue, 2 | # for easier tracking in external systems 3 | 4 | name: 🏷️ Issue Label 5 | 6 | on: 7 
| issues: 8 | types: 9 | - opened 10 | 11 | jobs: 12 | issue-label: 13 | runs-on: ubuntu-latest 14 | permissions: 15 | issues: write 16 | 17 | steps: 18 | - run: gh issue edit "$NUMBER" --add-label "$LABELS" 19 | env: 20 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 21 | GH_REPO: ${{ github.repository }} 22 | NUMBER: ${{ github.event.issue.number }} 23 | LABELS: repo:oam 24 | -------------------------------------------------------------------------------- /frontend/.editorconfig: -------------------------------------------------------------------------------- 1 | # EditorConfig is awesome: https://EditorConfig.org 2 | 3 | # top-most EditorConfig file 4 | root = true 5 | 6 | # Unix-style newlines with a newline ending every file 7 | [*] 8 | end_of_line = lf 9 | insert_final_newline = true 10 | 11 | # Matches multiple files with brace expansion notation 12 | # Set default charset 13 | [*.{js}] 14 | charset = utf-8 15 | 16 | # Indentation override for all JS under lib directory 17 | [lib/**.js] 18 | indent_style = space 19 | indent_size = 2 20 | 21 | # Matches the exact files either package.json or .travis.yml 22 | [{package.json,.travis.yml}] 23 | indent_style = space 24 | indent_size = 2 25 | -------------------------------------------------------------------------------- /notebooks/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "openaerialmap" 3 | version = "0.0.0" 4 | description = "Demonstration notebooks for using openaerialmap programmatically" 5 | readme = "README.md" 6 | requires-python = ">=3.13" 7 | dependencies = [ 8 | "contextily>=1.6.2", 9 | "cql2>=0.3.7", 10 | "folium>=0.19.6", 11 | "geopandas>=1.0.1", 12 | "httpx>=0.28.1", 13 | "ipykernel>=6.29.5", 14 | "jinja2>=3.1.6", 15 | "pystac-client>=0.8.6", 16 | ] 17 | 18 | [dependency-groups] 19 | dev = ["ruff>=0.11.11"] 20 | 21 | [tool.codespell] 22 | skip = "notebooks/*,*pnpm-lock.yaml,*package-lock.json,*CHANGELOG.md" 23 | write-changes = 
true 24 | -------------------------------------------------------------------------------- /backend/global-tms/tileserver.style.config: -------------------------------------------------------------------------------- 1 | { 2 | "version": 8, 3 | "sources": { 4 | "global-coverage": { 5 | "type": "vector", 6 | "url": "pmtiles:///tiles/global-coverage.pmtiles" 7 | } 8 | }, 9 | "layers": [ 10 | { 11 | "id": "global-coverage", 12 | "source": "global-coverage", 13 | "source-layer": "globalcoverage", 14 | "minzoom": 0, 15 | "maxzoom": 15, 16 | "type": "fill", 17 | "paint": { 18 | "fill-color": "#ff0000", 19 | "fill-opacity": 0.5 20 | }, 21 | "filter": ["==", ["geometry-type"], "Polygon"] 22 | } 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /frontend/tsconfig.node.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo", 4 | "target": "ES2022", 5 | "lib": ["ES2023"], 6 | "module": "ESNext", 7 | "skipLibCheck": true, 8 | 9 | /* Bundler mode */ 10 | "moduleResolution": "bundler", 11 | "allowImportingTsExtensions": true, 12 | "isolatedModules": true, 13 | "moduleDetection": "force", 14 | "noEmit": true, 15 | 16 | /* Linting */ 17 | "strict": true, 18 | "noUnusedLocals": true, 19 | "noUnusedParameters": true, 20 | "noFallthroughCasesInSwitch": true, 21 | "noUncheckedSideEffectImports": true 22 | }, 23 | "include": ["vite.config.mts"] 24 | } 25 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yml: -------------------------------------------------------------------------------- 1 | name: Deploy OAM UI to S3 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - frontend/** 9 | 10 | # Run every 2 months (approximately 60 days) at midnight UTC on the 1st 11 | schedule: 12 | - cron: "0 0 1 */2 *" 13 | 14 | # Allows you to run this 
workflow manually from the Actions tab 15 | workflow_dispatch: 16 | 17 | # id-token required to get OIDC token for AWS 18 | permissions: 19 | contents: read 20 | id-token: write 21 | 22 | jobs: 23 | deploy: 24 | uses: hotosm/gh-workflows/.github/workflows/just.yml@3.3.2 25 | with: 26 | environment: ${{ github.ref_name }} 27 | command: "deploy-frontend" 28 | secrets: inherit 29 | -------------------------------------------------------------------------------- /frontend/vite.config.mts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vite'; 2 | import react from '@vitejs/plugin-react'; 3 | import path from 'path'; 4 | 5 | import vitePortScanner from './vite-plugin-port-scanner'; 6 | 7 | // https://vite.dev/config/ 8 | export default defineConfig({ 9 | base: '/', 10 | envDir: '../', 11 | server: { 12 | port: 9000 13 | }, 14 | plugins: [react(), vitePortScanner()], 15 | resolve: { 16 | alias: { 17 | $components: path.resolve(__dirname, './app/components'), 18 | $utils: path.resolve(__dirname, './app/utils'), 19 | $styles: path.resolve(__dirname, './app/styles'), 20 | $hooks: path.resolve(__dirname, './app/hooks'), 21 | $pages: path.resolve(__dirname, './app/pages') 22 | } 23 | } 24 | }); 25 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: hotosm 4 | patreon: # Replace with a single Patreon username 5 | open_collective: # Replace with a single Open Collective username 6 | ko_fi: # Replace with a single Ko-fi username 7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 9 | liberapay: # Replace with a single Liberapay username 10 | issuehunt: # Replace with a single IssueHunt 
username 11 | otechie: # Replace with a single Otechie username 12 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry 13 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] 14 | -------------------------------------------------------------------------------- /frontend/app/types/stac.d.ts: -------------------------------------------------------------------------------- 1 | import { FeatureCollection } from 'geojson'; 2 | import { StacItem, StacLink } from 'stac-ts'; 3 | 4 | export interface StacFeatureCollection 5 | extends Omit { 6 | type: 'FeatureCollection'; 7 | features: StacItem[]; 8 | links: StacLink[]; 9 | numberMatched?: number; 10 | numberReturned?: number; 11 | } 12 | 13 | export interface StacFeature extends Feature { 14 | properties: StacItem; 15 | } 16 | export interface StacQueryable { 17 | title: string; 18 | $ref: string; 19 | description: string; 20 | } 21 | 22 | export interface StacQueryables { 23 | $id: string; 24 | $schema: string; 25 | additionalProperties: boolean; 26 | properties: Record; 27 | title: string; 28 | type: string; 29 | } 30 | -------------------------------------------------------------------------------- /backend/global-tms/README.md: -------------------------------------------------------------------------------- 1 | # Global TMS Service 2 | 3 | - For clients that can use the PMTiles global coverage from 4 | generated from the `global-mosaic` directory, this is the 5 | most efficient. 6 | - For clients that can't, e.g. QGIS etc, we should: 7 | - z0-15: use maptiler/tileserver-gl tile server to serve a TMS of rendered 8 | raster tiles (including styling) from the PMTiles. 9 | - z16+: switch to TiTiler for these zooms levels. 10 | - Also offer a Martin tile server, for a vector tiles TMS from the 11 | same PMTiles source (e.g. for QGIS) 12 | 13 | This directory contains test configuration for doing this behind an 14 | Nginx proxy, with URL routing. 
15 | 16 | In production, the set for this would be bundled with eoAPI in the 17 | [Kubernetes cluster](https://github.com/hotosm/k8s-info) 18 | -------------------------------------------------------------------------------- /backend/stac-ingester/compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | ingester: 3 | container_name: hotosm-stac-ingester 4 | build: . 5 | environment: 6 | - PGUSER=username 7 | - PGPASSWORD=password 8 | - PGDATABASE=postgis 9 | - PGHOST=database 10 | - PGPORT=5432 11 | depends_on: 12 | - database 13 | command: bash -c "hotosm --help" 14 | 15 | database: 16 | container_name: stac-db-ingester 17 | image: ghcr.io/stac-utils/pgstac:v0.9.6 18 | environment: 19 | - POSTGRES_USER=username 20 | - POSTGRES_PASSWORD=password 21 | - POSTGRES_DB=postgis 22 | - PGUSER=username 23 | - PGPASSWORD=password 24 | - PGDATABASE=postgis 25 | ports: 26 | - "5439:5432" 27 | command: postgres -N 500 28 | 29 | networks: 30 | default: 31 | name: stac-fastapi-network 32 | -------------------------------------------------------------------------------- /docs/css/timeline.css: -------------------------------------------------------------------------------- 1 | .timeline-container { 2 | border-left: 4px solid gray; 3 | border-radius: 0 4px 4px 0; 4 | background: rgba(200, 200, 200, 0.15); 5 | margin: 2rem auto; 6 | padding: 1rem 2rem; 7 | position: relative; 8 | text-align: center; 9 | margin-left: 7rem; 10 | } 11 | 12 | .timeline-entry { 13 | text-align: left; 14 | position: relative; 15 | padding-bottom: 1rem; 16 | margin-bottom: 1rem; 17 | } 18 | 19 | .timeline-date { 20 | position: absolute; 21 | left: -10rem; 22 | text-align: right; 23 | font-size: 0.9rem; 24 | font-weight: 700; 25 | opacity: 0.7; 26 | min-width: 6rem; 27 | top: 2px; 28 | } 29 | 30 | .timeline-dot { 31 | position: absolute; 32 | box-shadow: 0 0 0 4px gray; 33 | left: -2.5rem; 34 | background: #444; 35 | border-radius: 50%; 36 | height: 
11px; 37 | width: 11px; 38 | top: 5px; 39 | } 40 | -------------------------------------------------------------------------------- /frontend/vite-plugin-port-scanner.ts: -------------------------------------------------------------------------------- 1 | import portscanner from 'portscanner'; 2 | import { PluginOption } from 'vite'; 3 | 4 | export default function vitePortScanner() { 5 | return { 6 | name: 'vite-port-scanner-plugin', // Name of your plugin (required) 7 | 8 | // Vite config hooks 9 | async config(config, { command }) { 10 | if (command === 'serve') { 11 | const startPort = config.server?.port || 5173; 12 | const port = await portscanner.findAPortNotInUse( 13 | startPort, 14 | startPort + 100 15 | ); 16 | if (port !== startPort) { 17 | // eslint-disable-next-line no-console 18 | console.warn( 19 | ` Port ${startPort} is busy. Using port ${port} instead.` 20 | ); 21 | config.server = { ...(config.server || {}), port }; 22 | } 23 | } 24 | } 25 | } as PluginOption; 26 | } 27 | -------------------------------------------------------------------------------- /backend/global-tms/chart/templates/ingress.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.ingress.enabled -}} 2 | apiVersion: networking.k8s.io/v1 3 | kind: Ingress 4 | metadata: 5 | name: {{ include "chart.fullname" . }} 6 | labels: 7 | {{- include "chart.labels" . | nindent 4 }} 8 | annotations: 9 | {{- toYaml .Values.ingress.annotations | nindent 4 }} 10 | spec: 11 | ingressClassName: {{ .Values.ingress.className }} 12 | {{- if .Values.ingress.tls.enabled }} 13 | tls: 14 | - hosts: 15 | - {{ .Values.ingress.host }} 16 | secretName: {{ .Values.ingress.tls.secretName }} 17 | {{- end }} 18 | rules: 19 | - host: {{ .Values.ingress.host }} 20 | http: 21 | paths: 22 | - path: / 23 | pathType: Prefix 24 | backend: 25 | service: 26 | name: {{ include "chart.fullname" . 
}}-nginx 27 | port: 28 | number: 80 29 | {{- end }} 30 | -------------------------------------------------------------------------------- /docs/images/docs_badge.svg: -------------------------------------------------------------------------------- 1 | 📖 Docs📖 Docs -------------------------------------------------------------------------------- /frontend/tsconfig.app.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo", 4 | "target": "ES2020", 5 | "useDefineForClassFields": true, 6 | "lib": ["ES2020", "DOM", "DOM.Iterable"], 7 | "module": "ESNext", 8 | "skipLibCheck": true, 9 | 10 | /* Bundler mode */ 11 | "moduleResolution": "bundler", 12 | "allowImportingTsExtensions": true, 13 | "isolatedModules": true, 14 | "moduleDetection": "force", 15 | "noEmit": true, 16 | "jsx": "react-jsx", 17 | "paths": { 18 | /* Specify a set of entries that re-map imports to additional lookup locations. */ 19 | "$components/*": ["./app/components/*"], 20 | "$utils/*": ["./app/utils/*"], 21 | "$styles/*": ["./app/styles/*"], 22 | "$hooks/*": ["./app/hooks/*"], 23 | "$pages/*": ["./app/pages/*"] 24 | }, 25 | 26 | /* Linting */ 27 | "strict": true, 28 | "noUnusedLocals": true, 29 | "noUnusedParameters": true, 30 | "noFallthroughCasesInSwitch": true, 31 | "noUncheckedSideEffectImports": true 32 | }, 33 | "include": ["app"] 34 | } 35 | -------------------------------------------------------------------------------- /backend/global-tms/chart/templates/hpa.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.autoscaling.enabled }} 2 | apiVersion: autoscaling/v2 3 | kind: HorizontalPodAutoscaler 4 | metadata: 5 | name: {{ include "chart.fullname" . }} 6 | labels: 7 | {{- include "chart.labels" . 
| nindent 4 }} 8 | spec: 9 | scaleTargetRef: 10 | apiVersion: apps/v1 11 | kind: Deployment 12 | name: {{ include "chart.fullname" . }} 13 | minReplicas: {{ .Values.autoscaling.minReplicas }} 14 | maxReplicas: {{ .Values.autoscaling.maxReplicas }} 15 | metrics: 16 | {{- if .Values.autoscaling.targetCPUUtilizationPercentage }} 17 | - type: Resource 18 | resource: 19 | name: cpu 20 | target: 21 | type: Utilization 22 | averageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }} 23 | {{- end }} 24 | {{- if .Values.autoscaling.targetMemoryUtilizationPercentage }} 25 | - type: Resource 26 | resource: 27 | name: memory 28 | target: 29 | type: Utilization 30 | averageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }} 31 | {{- end }} 32 | {{- end }} 33 | -------------------------------------------------------------------------------- /backend/stac-ingester/README.md: -------------------------------------------------------------------------------- 1 | # OpenAerialMap STAC Ingester 2 | 3 | This directory contains a deployment of the STAC ingester for HOTOSM based on the 4 | STAC creation package, [stactools-hotosm](https://github.com/hotosm/stactools-hotosm). 5 | 6 | ## Getting Started 7 | 8 | This project uses [uv](https://docs.astral.sh/uv/getting-started/installation/) 9 | to manage Python dependencies. 10 | 11 | Once `uv` is installed, you can install the dependencies by, 12 | 13 | ```bash 14 | uv sync --all-groups 15 | ``` 16 | 17 | ## Re-ingesting The Catalog 18 | 19 | - The ingestion from the old metadata API runs as a Kubernetes CronJob 20 | on a 30 minute schedule. 
21 | - If anything is missed and ingestion must be run manually, a new Job 22 | can be spawned from the CronJob, overriding the ingestion dates: 23 | 24 | ```bash 25 | # get the original yaml file 26 | kubectl create job stac-ingest-oam-manual --from cronjob/stac-ingest-oam \ 27 | --dry-run=client --output yaml > job.yaml 28 | 29 | # edit the args in job.yaml 30 | # modify the command in the 'args' section 31 | # e.g. --uploaded-after 2024-01-01 32 | 33 | # create job from the final yaml 34 | kubectl create -f job.yaml 35 | ``` 36 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ## What type of PR is this? (check all applicable) 2 | 3 | - [ ] 🍕 Feature 4 | - [ ] 🐛 Bug Fix 5 | - [ ] 📝 Documentation 6 | - [ ] 🧑‍💻 Refactor 7 | - [ ] ✅ Test 8 | - [ ] 🤖 Build or CI 9 | - [ ] ❓ Other (please specify) 10 | 11 | ## Related Issue 12 | 13 | Example: Fixes #123 14 | 15 | ## Describe this PR 16 | 17 | A brief description of how this solves the issue. 18 | 19 | ## Screenshots 20 | 21 | Please provide screenshots of the change. 22 | 23 | ## Alternative Approaches Considered 24 | 25 | Did you attempt any other approaches that are not documented in code? 26 | 27 | ## Review Guide 28 | 29 | Notes for the reviewer. How to test this change? 30 | 31 | ## Checklist before requesting a review 32 | 33 | - 📖 Read the OAM Contributing Guide: 34 | - 📖 Read the HOT Code of Conduct: 35 | - 👷‍♀️ Create small PRs. In most cases, this will be possible. 36 | - ✅ Provide tests for your changes. 37 | - 📝 Use descriptive commit messages. 38 | - 📗 Update any related documentation and include any relevant screenshots. 39 | 40 | ## [optional] What gif best describes this PR or how it makes you feel? 
41 | -------------------------------------------------------------------------------- /docs/dev/new-provider.md: -------------------------------------------------------------------------------- 1 | # Adding a new data provider 2 | 3 | This document walks through the process of adding a new data provider to the HOT 4 | OpenAerialMap (HOT OAM) STAC Catalog. 5 | 6 | ## Creating STAC items 7 | 8 | The code to create STAC items for the OpenAerialMap STAC Catalog lives in 9 | [stactools-hotosm](https://github.com/hotosm/stactools-hotosm/). For an example 10 | of creating HOT OAM STAC item from existing Maxar items, see [this 11 | file](https://github.com/hotosm/stactools-hotosm/blob/main/src/stactools/hotosm/maxar/stac.py). 12 | Create a new branch, create a new directory for your provider, and write the 13 | code. Be sure to include tests. When it's ready, open a pull request (PR) with 14 | your changes. 15 | 16 | See the [stactools-hotosm 17 | README](https://github.com/hotosm/stactools-hotosm/blob/main/README.md) for 18 | more. 19 | 20 | ## Add ingestion 21 | 22 | Create a PR on [hotosm/k8s-infra](https://github.com/hotosm/k8s-infra/pulls) 23 | to add a new 24 | [manifest](https://github.com/hotosm/k8s-infra/tree/main/kubernetes/manifests) 25 | that syncs your data on a schedule. 26 | See 27 | [sync-maxar](https://github.com/hotosm/k8s-infra/blob/main/kubernetes/manifests/sync-maxar.yaml) 28 | for a representative example. 
29 | -------------------------------------------------------------------------------- /backend/stac-api/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG PYTHON_VERSION=3.12 2 | 3 | FROM python:${PYTHON_VERSION}-slim AS builder 4 | COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ 5 | 6 | ENV UV_MANAGED_PYTHON=false 7 | ENV UV_COMPILE_BYTECODE=1 8 | ENV UV_LINK_MODE=copy 9 | 10 | # Any python libraries that require system libraries to be installed will likely 11 | # need the following packages in order to build 12 | RUN apt-get update && \ 13 | apt-get -y upgrade && \ 14 | apt-get install -y build-essential git && \ 15 | apt-get clean && \ 16 | rm -rf /var/lib/apt/lists/* 17 | 18 | ENV CURL_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt 19 | 20 | WORKDIR /app 21 | 22 | # Install dependencies 23 | RUN --mount=type=cache,target=/root/.cache/uv \ 24 | --mount=type=bind,source=uv.lock,target=uv.lock \ 25 | --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ 26 | uv sync --locked --no-install-project --no-editable 27 | 28 | COPY . /app 29 | 30 | # Sync the project 31 | RUN --mount=type=cache,target=/root/.cache/uv \ 32 | uv sync --locked --no-editable 33 | 34 | FROM python:${PYTHON_VERSION}-slim AS prod 35 | 36 | WORKDIR /app 37 | COPY --from=builder --chown=app:app /app /app 38 | 39 | ENV PATH=/app/.venv/bin:$PATH 40 | 41 | CMD ["/app/.venv/bin/uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8080"] 42 | -------------------------------------------------------------------------------- /backend/stac-api/README.md: -------------------------------------------------------------------------------- 1 | # OpenAerialMap STAC API 2 | 3 | This directory contains a customized version of the 4 | [STAC FastAPI PgSTAC](https://github.com/stac-utils/stac-fastapi-pgstac) 5 | for OpenAerialMap. 
The only customization so far is to disable the "transaction" 6 | extension endpoints, but in the future this could enabled after adding in authorization 7 | logic to enable adding, updating, or deleting STAC records for certain users. 8 | 9 | ## Getting Started 10 | 11 | This project uses [uv](https://docs.astral.sh/uv/getting-started/installation/) 12 | to manage Python dependencies. 13 | 14 | Once `uv` is installed, you can install the dependencies by, 15 | 16 | ```bash 17 | uv sync --all-groups 18 | ``` 19 | 20 | You can spin up the STAC FastAPI PgSTAC application using Docker Compose, 21 | 22 | ```bash 23 | docker compose up app 24 | ``` 25 | 26 | Once the API is ready you can visit the OpenAPI documentation on your local 27 | machine by visiting, . 28 | 29 | ## Upgrading 30 | 31 | The original source for `main.py` in this directory is: 32 | 33 | 34 | In order to upgrade, we should diff `main.py` against the 'official' `app.py` to 35 | check for changes to incorporate, then update the version pinned in `pyproject.toml`, 36 | relock, and redeploy. 
37 | -------------------------------------------------------------------------------- /backend/stac-ingester/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG PYTHON_VERSION=3.12 2 | ARG PLATFORM=linux/amd64 3 | 4 | FROM python:${PYTHON_VERSION}-slim AS builder 5 | COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ 6 | 7 | ENV UV_MANAGED_PYTHON=false 8 | ENV UV_COMPILE_BYTECODE=1 9 | ENV UV_LINK_MODE=copy 10 | 11 | # Install dependencies for, 12 | # * Git sources 13 | # * building wheels (if needed) 14 | RUN apt-get update && \ 15 | apt-get -y upgrade && \ 16 | apt-get install -y build-essential git && \ 17 | apt-get clean && \ 18 | rm -rf /var/lib/apt/lists/* 19 | 20 | ENV CURL_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt 21 | 22 | WORKDIR /app 23 | 24 | # Install dependencies 25 | RUN --mount=type=cache,target=/root/.cache/uv \ 26 | --mount=type=bind,source=uv.lock,target=uv.lock \ 27 | --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ 28 | uv sync --locked --no-install-project --no-editable 29 | 30 | COPY . /app 31 | 32 | # Sync the project 33 | RUN --mount=type=cache,target=/root/.cache/uv \ 34 | uv sync --locked --no-editable 35 | 36 | FROM python:${PYTHON_VERSION}-slim AS prod 37 | 38 | # "rasterio" needs libexpat1 39 | RUN apt-get update && \ 40 | apt-get install -y libexpat1 && \ 41 | apt-get clean && \ 42 | rm -rf /var/lib/apt/lists/* 43 | 44 | WORKDIR /app 45 | COPY --from=builder --chown=app:app /app /app 46 | 47 | ENV PATH=/app/.venv/bin:$PATH 48 | 49 | CMD ["/app/.venv/bin/hotosm", "--help"] 50 | -------------------------------------------------------------------------------- /backend/stac-api/compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | app: 3 | container_name: hotosm-stac-fastapi-pgstac 4 | build: . 
5 | environment: 6 | - APP_HOST=0.0.0.0 7 | - APP_PORT=8082 8 | - RELOAD=true 9 | - ENVIRONMENT=local 10 | - POSTGRES_USER=username 11 | - POSTGRES_PASS=password 12 | - POSTGRES_DBNAME=postgis 13 | - POSTGRES_HOST_READER=database 14 | - POSTGRES_HOST_WRITER=database 15 | - POSTGRES_PORT=5432 16 | - WEB_CONCURRENCY=10 17 | - VSI_CACHE=TRUE 18 | - GDAL_HTTP_MERGE_CONSECUTIVE_RANGES=YES 19 | - GDAL_DISABLE_READDIR_ON_OPEN=EMPTY_DIR 20 | - DB_MIN_CONN_SIZE=1 21 | - DB_MAX_CONN_SIZE=1 22 | - USE_API_HYDRATE=${USE_API_HYDRATE:-false} 23 | ports: 24 | - "8082:8082" 25 | volumes: 26 | - ./app:/app/app 27 | - ./scripts:/app/scripts 28 | depends_on: 29 | - database 30 | command: bash -c "./scripts/wait-for-it.sh database:5432 && python -m app.main" 31 | 32 | database: 33 | container_name: api-stac-db 34 | image: ghcr.io/stac-utils/pgstac:v0.9.6 35 | environment: 36 | - POSTGRES_USER=username 37 | - POSTGRES_PASSWORD=password 38 | - POSTGRES_DB=postgis 39 | - PGUSER=username 40 | - PGPASSWORD=password 41 | - PGDATABASE=postgis 42 | ports: 43 | - "5439:5432" 44 | command: postgres -N 500 45 | 46 | networks: 47 | default: 48 | name: stac-fastapi-network 49 | -------------------------------------------------------------------------------- /docs/dev/backup-prod-pgstac.md: -------------------------------------------------------------------------------- 1 | # Backing Up pgSTAC Manually 2 | 3 | - pgSTAC is backed up by the PGO Operator on a schedule. 4 | - However, it might be useful to dump the pgSTAC database locally, 5 | for testing and development purposes. 
6 | 7 | ## Dump 8 | 9 | ```bash 10 | # Connect to cluster 11 | CURRENT_USER=$(whoami) 12 | docker run --rm -it --name aws-cli -v /home/$CURRENT_USER:/root \ 13 | --workdir /root ghcr.io/spwoodcock/awscli-kubectl:latest 14 | aws sso login --profile admin --use-device-code 15 | 16 | # Install postgresql-client-16 (to match current pg version of pgstac) 17 | apt install lsb-release wget gnupg 18 | echo \ 19 | "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" \ 20 | > /etc/apt/sources.list.d/pgdg.list 21 | wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc \ 22 | | apt-key add - 23 | apt update 24 | apt install postgresql-client-16 25 | 26 | # Port forward the db service 27 | kubectl port-forward service/eoapi-pgbouncer 5432:5432 28 | 29 | # Get the connection string 30 | kubectl get secrets eoapi-pguser-eoapi -o go-template='{{.data.uri | base64decode}}' 31 | # Replace eoapi-primary.eoapi.svc with localhost, as we are port-forwarding 32 | PGURL= 33 | 34 | # Dump the db in custom format 35 | pg_dump -Fc "$PGURL" > pgstac-backup.dump.gz 36 | ``` 37 | 38 | ## Restore 39 | 40 | See [global-mosaic compose file](../backend/global-mosaic/compose.yaml) for a 41 | `pg_restore` example. 
42 | -------------------------------------------------------------------------------- /frontend/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/node:22-slim AS base 2 | ARG VITE_STAC_API_URL 3 | ARG VITE_STAC_API_PATHNAME 4 | ARG VITE_STAC_TILER_PATHNAME 5 | ARG VITE_STAC_ITEMS_LIMIT 6 | ENV VITE_STAC_API_URL=${VITE_STAC_API_URL} \ 7 | VITE_STAC_API_PATHNAME=${VITE_STAC_API_PATHNAME} \ 8 | VITE_STAC_TILER_PATHNAME=${VITE_STAC_TILER_PATHNAME} \ 9 | VITE_STAC_ITEMS_LIMIT=${VITE_STAC_ITEMS_LIMIT} \ 10 | PNPM_HOME="/pnpm" \ 11 | PATH="$PATH:/pnpm" 12 | WORKDIR /app 13 | 14 | 15 | FROM base AS build 16 | COPY package.json pnpm-lock.yaml ./ 17 | RUN corepack enable && corepack install 18 | RUN pnpm install 19 | COPY . . 20 | RUN pnpm run build 21 | 22 | 23 | FROM docker.io/rclone/rclone:1 AS prod 24 | ARG APP_VERSION 25 | ARG COMMIT_REF 26 | ARG VITE_STAC_API_URL 27 | ARG VITE_STAC_API_PATHNAME 28 | ARG VITE_STAC_TILER_PATHNAME 29 | ARG VITE_STAC_ITEMS_LIMIT 30 | LABEL org.hotosm.openaerialmap.app-name="frontend" \ 31 | org.hotosm.openaerialmap.app-version="${APP_VERSION}" \ 32 | org.hotosm.openaerialmap.git-commit-ref="${COMMIT_REF:-none}" \ 33 | org.hotosm.openaerialmap.maintainer="sysadmin@hotosm.org" \ 34 | org.hotosm.openaerialmap.stac-api-url="${VITE_STAC_API_URL}" \ 35 | org.hotosm.openaerialmap.stac-api-pathname="${VITE_STAC_API_PATHNAME}" \ 36 | org.hotosm.openaerialmap.stac-tiler-pathname="${VITE_STAC_TILER_PATHNAME}" \ 37 | org.hotosm.openaerialmap.stac-items-limit="${VITE_STAC_ITEMS_LIMIT}" 38 | WORKDIR /app 39 | COPY --from=build /app/dist . 
40 | -------------------------------------------------------------------------------- /frontend/app/main.tsx: -------------------------------------------------------------------------------- 1 | import { allDefined } from '@awesome.me/webawesome/dist/webawesome.js'; 2 | 3 | import Detail from '$components/detail'; 4 | import { 5 | // useQuery, 6 | // useMutation, 7 | // useQueryClient, 8 | QueryClient, 9 | QueryClientProvider 10 | } from '@tanstack/react-query'; 11 | import { useState } from 'react'; 12 | import { createRoot } from 'react-dom/client'; 13 | 14 | import MapComponent from './components/map'; 15 | import Sidebar from './components/sidebar'; 16 | import { StacProvider } from './context/StacContext'; 17 | 18 | // If using a router add the public url to the base path. 19 | // const publicUrl = process.env.BASE_URL || ''; 20 | 21 | // Ensure all WebAwesome components are loaded before rendering 22 | await allDefined(); 23 | 24 | const queryClient = new QueryClient(); 25 | 26 | function Root() { 27 | return ( 28 | 29 | 30 | 31 | ); 32 | } 33 | 34 | function AppContent() { 35 | const [showDetailPane, setShowDetailPane] = useState(false); 36 | return ( 37 | 38 | 42 | 46 | 47 | 48 | ); 49 | } 50 | 51 | const rootNode = document.querySelector('#app-container')!; 52 | const root = createRoot(rootNode); 53 | root.render(); 54 | -------------------------------------------------------------------------------- /recipes/prep/Justfile: -------------------------------------------------------------------------------- 1 | # Copyright (c) Humanitarian OpenStreetMap Team 2 | # 3 | # This file is part of OpenAerialMap. 4 | # 5 | # OpenAerialMap is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 
9 | # 10 | # OpenAerialMap is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with OpenAerialMap. If not, see . 17 | # 18 | 19 | # List available commands 20 | [private] 21 | default: 22 | just --justfile {{justfile()}} --list prep 23 | 24 | [no-cd] 25 | _curl: 26 | #!/usr/bin/env bash 27 | if ! command -v curl &> /dev/null; then 28 | sudo apt-get update 29 | sudo apt-get install -y curl 30 | fi 31 | 32 | # Install envsubst 33 | [no-cd] 34 | [no-exit-message] 35 | _envsubst: 36 | #!/usr/bin/env bash 37 | just prep _curl 38 | 39 | # Get a8m/envsubst (required for default vals syntax ${VAR:-default}) 40 | # Use local version, as envsubst may be installed on system already 41 | if [ -f ./envsubst ]; then 42 | echo "envsubst already exists. Continuing." 
43 | else 44 | echo "Downloading a8m/envsubst" 45 | echo 46 | curl -L "https://github.com/a8m/envsubst/releases/download/v1.4.3/envsubst-$(uname -s)-$(uname -m)" -o envsubst 47 | chmod +x envsubst 48 | fi 49 | -------------------------------------------------------------------------------- /.github/_workflow-samples/deploy-gh.yml: -------------------------------------------------------------------------------- 1 | name: Deploy Github Pages 2 | 3 | on: 4 | push: 5 | branches: 6 | - "main" 7 | 8 | env: 9 | PUBLIC_URL: ${{ vars.PUBLIC_URL }} 10 | 11 | concurrency: 12 | group: ${{ github.workflow }}-${{ github.ref }} 13 | cancel-in-progress: true 14 | 15 | permissions: 16 | contents: write 17 | 18 | jobs: 19 | build: 20 | runs-on: ubuntu-latest 21 | 22 | steps: 23 | - name: Checkout 24 | uses: actions/checkout@v4 25 | 26 | - name: Install pnpm 27 | uses: pnpm/action-setup@v4 28 | 29 | - name: Use Node.js 30 | uses: actions/setup-node@v4 31 | with: 32 | node-version-file: ".nvmrc" 33 | cache: "pnpm" 34 | 35 | - name: Cache node_modules 36 | uses: actions/cache@v4 37 | id: cache-node-modules 38 | with: 39 | path: node_modules 40 | key: ${{ runner.os }}-build-${{ hashFiles('**/package.json') }} 41 | 42 | - name: Cache dist 43 | uses: actions/cache@v4 44 | id: cache-dist 45 | with: 46 | path: dist 47 | key: ${{ runner.os }}-build-${{ github.sha }} 48 | 49 | - name: Install 50 | run: pnpm install 51 | 52 | - name: Build 53 | run: pnpm build 54 | 55 | deploy: 56 | runs-on: ubuntu-latest 57 | needs: build 58 | 59 | steps: 60 | - name: Checkout 61 | uses: actions/checkout@v4 62 | 63 | - name: Restore dist cache 64 | uses: actions/cache@v4 65 | id: cache-dist 66 | with: 67 | path: dist 68 | key: ${{ runner.os }}-build-${{ github.sha }} 69 | 70 | - name: Deploy 🚀 71 | uses: JamesIves/github-pages-deploy-action@v4 72 | with: 73 | branch: gh-pages 74 | clean: true 75 | single-commit: true 76 | folder: /dist 77 | 
-------------------------------------------------------------------------------- /backend/global-mosaic/compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | mosaicker: 3 | image: ghcr.io/hotosm/openaerialmap/global-mosaicker 4 | container_name: mosaic-generator 5 | build: . 6 | volumes: 7 | - ./scripts/gen_coverage_vector.py:/app/gen_coverage_vector.py 8 | - ./output:/app/output 9 | environment: 10 | TEST_MODE: 1 11 | PG_DSN: postgres://eoapi:eoapi@db/eoapi 12 | S3_ACCESS_KEY: ${S3_ACCESS_KEY} 13 | S3_SECRET_KEY: ${S3_SECRET_KEY} 14 | depends_on: 15 | db-restore: 16 | condition: service_completed_successfully 17 | 18 | db-restore: 19 | container_name: pgstac-db-restore 20 | image: "ghcr.io/stac-utils/pgstac:v0.9.6" 21 | depends_on: 22 | db: 23 | condition: service_healthy 24 | volumes: 25 | - db_data:/var/lib/postgresql/data/ 26 | - ./pgstac-backup.dump:/pgstac-backup.dump 27 | entrypoint: /bin/sh -c 28 | command: 29 | - | 30 | PGPASSWORD=eoapi pg_restore \ 31 | --clean \ 32 | --create \ 33 | --no-owner \ 34 | --no-privileges \ 35 | -d postgres \ 36 | -h db \ 37 | -U eoapi \ 38 | /pgstac-backup.dump 39 | 40 | // Ensure success exit code 41 | exit 0 42 | 43 | db: 44 | container_name: pgstac-db 45 | image: "ghcr.io/stac-utils/pgstac:v0.9.6" 46 | environment: 47 | - POSTGRES_USER=eoapi 48 | - POSTGRES_PASSWORD=eoapi 49 | - POSTGRES_DB=eoapi 50 | - PGUSER=eoapi 51 | - PGPASSWORD=eoapi 52 | - PGDATABASE=eoapi 53 | volumes: 54 | - db_data:/var/lib/postgresql/data/ 55 | ports: 56 | - "5439:5432" 57 | healthcheck: 58 | test: pg_isready --user eoapi -d postgres 59 | start_period: 5s 60 | interval: 10s 61 | timeout: 5s 62 | retries: 3 63 | 64 | networks: 65 | default: 66 | name: global-mosaic 67 | 68 | volumes: 69 | db_data: 70 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 
################################################ 2 | ############### .gitignore ################## 3 | ################################################ 4 | # 5 | # This file is only relevant if you are using git. 6 | # 7 | # Files which match the splat patterns below will 8 | # be ignored by git. This keeps random crap and 9 | # and sensitive credentials from being uploaded to 10 | # your repository. It allows you to configure your 11 | # app for your machine without accidentally 12 | # committing settings which will smash the local 13 | # settings of other developers on your team. 14 | # 15 | # Some reasonable defaults are included below, 16 | # but, of course, you should modify/extend/prune 17 | # to fit your needs! 18 | ################################################ 19 | 20 | app/scripts/time.json 21 | 22 | node_modules 23 | bower_components 24 | .sass-cache 25 | test/bower_components 26 | 27 | 28 | ################################################ 29 | # Node.js / NPM 30 | # 31 | # Common files generated by Node, NPM, and the 32 | # related ecosystem. 33 | ################################################ 34 | 35 | lib-cov 36 | *.seed 37 | *.log 38 | *.out 39 | *.pid 40 | npm-debug.log 41 | yarn-error.log 42 | .parcel-cache 43 | 44 | 45 | ################################################ 46 | # Apidocs 47 | # 48 | # Common files generated by apidocs and other docs 49 | ################################################ 50 | 51 | 52 | ################################################ 53 | # Miscellaneous 54 | # 55 | # Common files generated by text editors, 56 | # operating systems, file systems, etc. 
57 | ################################################ 58 | 59 | *~ 60 | *# 61 | .DS_STORE 62 | .DS_Store 63 | .netbeans 64 | nbproject 65 | .idea 66 | .resources 67 | .node_history 68 | temp 69 | tmp 70 | .tmp 71 | dist 72 | parcel-bundle-reports 73 | **/.claude/settings.local.json 74 | .env 75 | **/*.env 76 | .python-version 77 | ./envsubst 78 | -------------------------------------------------------------------------------- /docs/decisions/0001-stac.md: -------------------------------------------------------------------------------- 1 | # Use a STAC to catalogue all imagery assets 2 | 3 | ## Context and Problem Statement 4 | 5 | We need a way to: 6 | 7 | - Store metadata about all the imagery in OpenAerialMap. 8 | - Query the metadata, ideally using filters. 9 | - Use an API for discoverability of assets, allowing various 10 | downstream components to load in imagery easily. 11 | 12 | Historically OpenAerialMap used a MongoDB metadata database to store 13 | imagery info. 14 | 15 | While this worked, it was a custom schema and API that required ongoing 16 | maintenance, and it was difficult for external tools to consume without 17 | writing bespoke integrations. 18 | 19 | The [STAC Spec](https://stacspec.org) (Spatio-Temporal Asset Catalogue) is 20 | now a well established, community-driven approach to cataloguing various 21 | geospatial assets. 22 | 23 | There are numerous tools built around STAC, from pre-built web UIs, Python 24 | modules to discover and read imagery, etc. 25 | 26 | ## Considered Options 27 | 28 | I think we only really considered STAC, as it's the primary 29 | standard built for geospatial metadata cataloguing. 30 | 31 | ## Decision Outcome 32 | 33 | We chose STAC as the only sane option in 2025! 34 | 35 | ### Consequences 36 | 37 | - ✅ Interoperability: Many existing clients and libraries can query and parse STAC, 38 | removing the need for us to maintain bespoke things. 
39 | - ✅ Discoverability: STAC API implementations (e.g., PySTAC, stac-fastapi) allow 40 | easy filtering by time, location, and metadata attributes. 41 | - ✅ Future-proofing: The STAC community continues to evolve the spec, adding 42 | extensions for new metadata needs (e.g., processing levels, cloud storage 43 | locations). Should also reduce maintenance burden as a result. 44 | - ❌ Not many downsides. Possible learning curve with STAC, but lots of resources 45 | available online. It's also bit more complex than our simple metadata schema 46 | previously. 47 | -------------------------------------------------------------------------------- /frontend/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 9 | 10 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | OpenAerialMap 24 | 25 | 29 | 30 | 46 | 47 | 48 | 52 | 56 | 57 | 58 | 59 |
60 | 61 |
62 | 63 | 64 | 65 | -------------------------------------------------------------------------------- /backend/global-tms/chart/templates/_helpers.tpl: -------------------------------------------------------------------------------- 1 | {{/* 2 | Expand the name of the chart. 3 | */}} 4 | {{- define "chart.name" -}} 5 | {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} 6 | {{- end }} 7 | 8 | {{/* 9 | Create a default fully qualified app name. 10 | We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). 11 | If release name contains chart name it will be used as a full name. 12 | */}} 13 | {{- define "chart.fullname" -}} 14 | {{- if .Values.fullnameOverride }} 15 | {{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} 16 | {{- else }} 17 | {{- $name := default .Chart.Name .Values.nameOverride }} 18 | {{- if contains $name .Release.Name }} 19 | {{- .Release.Name | trunc 63 | trimSuffix "-" }} 20 | {{- else }} 21 | {{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} 22 | {{- end }} 23 | {{- end }} 24 | {{- end }} 25 | 26 | {{/* 27 | Create chart name and version as used by the chart label. 28 | */}} 29 | {{- define "chart.chart" -}} 30 | {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} 31 | {{- end }} 32 | 33 | {{/* 34 | Common labels 35 | */}} 36 | {{- define "chart.labels" -}} 37 | helm.sh/chart: {{ include "chart.chart" . }} 38 | {{ include "chart.selectorLabels" . }} 39 | {{- if .Chart.AppVersion }} 40 | app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} 41 | {{- end }} 42 | app.kubernetes.io/managed-by: {{ .Release.Service }} 43 | {{- end }} 44 | 45 | {{/* 46 | Selector labels 47 | */}} 48 | {{- define "chart.selectorLabels" -}} 49 | app.kubernetes.io/name: {{ include "chart.name" . 
}} 50 | app.kubernetes.io/instance: {{ .Release.Name }} 51 | {{- end }} 52 | 53 | {{/* 54 | Create the name of the service account to use 55 | */}} 56 | {{- define "chart.serviceAccountName" -}} 57 | {{- if .Values.serviceAccount.create }} 58 | {{- default (include "chart.fullname" .) .Values.serviceAccount.name }} 59 | {{- else }} 60 | {{- default "default" .Values.serviceAccount.name }} 61 | {{- end }} 62 | {{- end }} 63 | -------------------------------------------------------------------------------- /.github/_workflow-samples/README.md: -------------------------------------------------------------------------------- 1 | # `deploy-s3-yml` 2 | 3 | A workflow that builds the site and deploys it to S3. 4 | 5 | This workflow gets triggered with every push to the main branch, and doesn't verify if the checks were successful. It relies on branch protection to do so. 6 | 7 | ## First-time setup 8 | 9 | - create a bucket on S3 and enable 'Static website hosting' with both the Index and Error document set to `index.html`. To do this programmatically: 10 | ``` 11 | aws s3 mb [BUCKET NAME] 12 | aws s3 website [BUCKET NAME] --index-document index.html --error-document index.html 13 | aws s3api put-bucket-tagging --bucket [BUCKET NAME] --tagging 'TagSet=[{Key=Project,Value=[PROJECT TAG]}]' 14 | ``` 15 | - create an IAM with a policy that provides it with programmatic access to the bucket 16 | - add the AWS Access Key and Secret from the IAM [as encrypted secrets to the project repository](https://docs.github.com/en/actions/reference/encrypted-secrets#creating-encrypted-secrets-for-a-repository). Use `AWS_ACCESS_KEY_ID` & `AWS_SECRET_ACCESS_KEY` 17 | - add the bucket name as an environment variable (`DEPLOY_BUCKET`) to the deploy workflow. Omit `s3://` from the bucket name. 18 | 19 | ## Serving site from sub-path 20 | 21 | This workflow assumes that the site is served from the root of the URL (eg. devseed.com). To support a URL served from a sub-path (eg. 
devseed.com/explorer), add the following step: 22 | 23 | ``` 24 | - name: Serve site from subpath 25 | run: | 26 | cd dist 27 | mkdir explorer 28 | mv assets explorer/assets 29 | cp index.html explorer 30 | ``` 31 | 32 | # `deploy-gh-yml` 33 | 34 | A workflow that builds the site and deploys it to Github pages. 35 | 36 | This workflow gets triggered with every push to the main branch, and doesn't verify if the checks were successful. It relies on branch protection to do so. 37 | 38 | # S3 previews 39 | 40 | Check the [Implementing S3 deploy previews](https://github.com/developmentseed/how/issues/423) guide to set up S3 previews for feature branches. 41 | -------------------------------------------------------------------------------- /backend/global-tms/compose.yaml: -------------------------------------------------------------------------------- 1 | volumes: 2 | pmtiles: 3 | 4 | services: 5 | # Get the latest global coverage tiles 6 | get-tiles: 7 | image: docker.io/alpine/curl:8.14.1 8 | volumes: 9 | - pmtiles:/tiles 10 | command: | 11 | /bin/sh -c " 12 | if [ ! -f /tiles/global-coverage.pmtiles ]; then 13 | echo 'Downloading global-coverage.pmtiles...' 14 | curl -fSL https://s3.amazonaws.com/oin-hotosm-temp/global-coverage.pmtiles -o /tiles/global-coverage.pmtiles 15 | else 16 | echo 'File already exists, skipping download.' 17 | fi 18 | " 19 | 20 | # Route between martin / tileserver / TiTiler eoAPI 21 | nginx: 22 | image: docker.io/nginx:1.29-alpine 23 | ports: 24 | - "8084:80" 25 | volumes: 26 | - ./nginx.conf:/etc/nginx/conf.d/default.conf:ro 27 | depends_on: 28 | martin: 29 | condition: service_healthy 30 | tileserver: 31 | condition: service_healthy 32 | restart: unless-stopped 33 | 34 | # Martin serves .pbf vector tiles as a XYZ/TMS server (e.g. 
for QGIS) 35 | # In future we would switch to this for the pre-rendered raster tiles 36 | # But requires https://github.com/maplibre/martin/issues/978 first 37 | martin: 38 | image: ghcr.io/maplibre/martin:v0.18.1 39 | depends_on: 40 | get-tiles: 41 | condition: service_completed_successfully 42 | volumes: 43 | - pmtiles:/tiles 44 | ports: 45 | - "3004:3000" 46 | command: /tiles/global-coverage.pmtiles 47 | restart: unless-stopped 48 | 49 | # Tileserver renders the vector tiles --> raster tiles, plus styling (legacy clients) 50 | # As above, this will ideally be replaced by martin in future 51 | tileserver: 52 | image: maptiler/tileserver-gl:v5.3.1 53 | ports: 54 | - "8080:8080" 55 | volumes: 56 | - pmtiles:/tiles 57 | # Config picked up automatically as /data/config.json 58 | - ./tileserver.config:/data/config.json:ro 59 | - ./tileserver.style.config:/data/style.json:ro 60 | restart: unless-stopped 61 | -------------------------------------------------------------------------------- /backend/global-mosaic/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG PYTHON_VERSION=3.12 2 | ARG PLATFORM=linux/amd64 3 | 4 | 5 | 6 | FROM debian:bookworm-slim AS tippecanoe 7 | WORKDIR /src 8 | RUN apt-get update \ 9 | && apt-get -y install \ 10 | git make gcc g++ libsqlite3-dev zlib1g-dev 11 | RUN git clone https://github.com/felt/tippecanoe.git 12 | WORKDIR /src/tippecanoe 13 | RUN make -j 14 | RUN make install 15 | 16 | 17 | 18 | FROM python:${PYTHON_VERSION}-slim AS builder 19 | COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ 20 | 21 | ENV UV_MANAGED_PYTHON=false 22 | ENV UV_COMPILE_BYTECODE=1 23 | ENV UV_LINK_MODE=copy 24 | ENV PYTHONUNBUFFERED=1 25 | 26 | # Install dependencies for, 27 | # * Git sources 28 | # * building wheels (if needed) 29 | RUN apt-get update && \ 30 | apt-get -y upgrade && \ 31 | apt-get install -y build-essential git && \ 32 | apt-get clean && \ 33 | rm -rf /var/lib/apt/lists/* 34 | 35 | ENV 
CURL_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt 36 | 37 | WORKDIR /app 38 | 39 | # Install dependencies 40 | RUN --mount=type=cache,target=/root/.cache/uv \ 41 | --mount=type=bind,source=uv.lock,target=uv.lock \ 42 | --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ 43 | uv sync --locked --no-install-project --no-editable 44 | 45 | COPY . /app 46 | 47 | # Sync the project 48 | RUN --mount=type=cache,target=/root/.cache/uv \ 49 | uv sync --locked --no-editable 50 | 51 | FROM python:${PYTHON_VERSION}-slim AS prod 52 | 53 | VOLUME /app/output 54 | 55 | # "psycopg" needs lib-pq 56 | # "rasterio" needs libexpat1 57 | # "tippecanoe" needs libsqlite3-0 58 | RUN apt-get update && \ 59 | apt-get install -y \ 60 | libexpat1 \ 61 | libpq-dev \ 62 | libsqlite3-0 \ 63 | && apt-get clean && \ 64 | rm -rf /var/lib/apt/lists/* \ 65 | && mkdir -p /app/output 66 | 67 | WORKDIR /app 68 | COPY --from=builder --chown=app:app /app /app 69 | COPY --from=tippecanoe /src/tippecanoe/tippecanoe* /usr/local/bin/ 70 | COPY --from=tippecanoe /src/tippecanoe/tile-join /usr/local/bin/ 71 | COPY scripts/gen_coverage_vector.py /app/gen_coverage_vector.py 72 | 73 | ENV PATH=/app/.venv/bin:$PATH 74 | 75 | CMD ["python", "-u", "gen_coverage_vector.py"] 76 | -------------------------------------------------------------------------------- /.github/_workflow-samples/deploy-s3.yml: -------------------------------------------------------------------------------- 1 | # Deploy the site to AWS S3 on a push to the 'main' branch 2 | 3 | name: Deploy S3 4 | 5 | on: 6 | push: 7 | branches: 8 | - "main" 9 | 10 | concurrency: 11 | group: ${{ github.workflow }}-${{ github.ref }} 12 | cancel-in-progress: true 13 | 14 | jobs: 15 | build: 16 | runs-on: ubuntu-latest 17 | env: 18 | AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} 19 | AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} 20 | 21 | steps: 22 | - name: Checkout 23 | uses: actions/checkout@v4 24 | 25 | - name: Install pnpm 26 | 
uses: pnpm/action-setup@v4 27 | 28 | - name: Use Node.js ${{ env.NODE }} 29 | uses: actions/setup-node@v4 30 | with: 31 | node-version-file: ".nvmrc" 32 | cache: "pnpm" 33 | 34 | - name: Cache node_modules 35 | uses: actions/cache@v4 36 | id: cache-node-modules 37 | with: 38 | path: node_modules 39 | key: ${{ runner.os }}-build-${{ hashFiles('**/package.json') }} 40 | 41 | - name: Cache dist 42 | uses: actions/cache@v4 43 | id: cache-dist 44 | with: 45 | path: dist 46 | key: ${{ runner.os }}-build-${{ github.sha }} 47 | 48 | - name: Install 49 | run: pnpm install 50 | 51 | - name: Build 52 | run: pnpm build 53 | 54 | deploy: 55 | runs-on: ubuntu-latest 56 | needs: build 57 | 58 | steps: 59 | - name: Checkout 60 | uses: actions/checkout@v4 61 | 62 | - name: Restore dist cache 63 | uses: actions/cache@v4 64 | id: cache-dist 65 | with: 66 | path: dist 67 | key: ${{ runner.os }}-build-${{ github.sha }} 68 | 69 | # Action: https://github.com/marketplace/actions/s3-deploy 70 | - name: Deploy to S3 71 | uses: reggionick/s3-deploy@v4 72 | with: 73 | folder: dist 74 | bucket: ${{ secrets.S3_BUCKET }} 75 | bucket-region: ${{ secrets.S3_BUCKET_REGION }} 76 | dist-id: ${{ secrets.CLOUDFRONT_DISTRIBUTION_ID }} 77 | invalidation: / 78 | delete-removed: true 79 | no-cache: true 80 | private: true 81 | files-to-include: "{.*/**,**}" 82 | -------------------------------------------------------------------------------- /backend/global-tms/nginx.conf: -------------------------------------------------------------------------------- 1 | server { 2 | listen 80; 3 | listen [::]:80; 4 | server_name localhost; 5 | 6 | # Zoom 0-15, reverse proxy to maptiler/tileserver 7 | location ~ ^/([0-9]|1[0-5])/([0-9]+)/([0-9]+)\.png$ { 8 | set $zoom_level $1; 9 | set $x $2; 10 | set $y $3; 11 | 12 | proxy_pass http://tileserver_backend/styles/global-coverage/256/$zoom_level/$x/$y.png; 13 | proxy_set_header Host $host; 14 | proxy_set_header X-Real-IP $remote_addr; 15 | proxy_set_header X-Forwarded-For 
$proxy_add_x_forwarded_for; 16 | proxy_set_header X-Forwarded-Proto $scheme; 17 | proxy_set_header X-Forwarded-Host $host:$server_port; 18 | proxy_redirect off; 19 | proxy_connect_timeout 5s; 20 | proxy_send_timeout 10s; 21 | proxy_read_timeout 10s; 22 | } 23 | 24 | # Zoom >=16, redirect to external TiTiler (eoAPI) 25 | location ~ ^/(1[6-9]|[2-9][0-9])/([0-9]+)/([0-9]+)\.png$ { 26 | set $zoom_level $1; 27 | set $x $2; 28 | set $y $3; 29 | 30 | return 302 https://api.imagery.hotosm.org/raster/collections/openaerialmap/tiles/WebMercatorQuad/$zoom_level/$x/$y.png?assets=visual; 31 | } 32 | 33 | # Friendly root page with info 34 | location = / { 35 | default_type text/html; 36 | return 200 ' 37 | 38 | 39 | 40 | 41 | Tile Server 42 | 47 | 48 | 49 |

Tile Server

50 |

This server provides map tiles.

51 |

Request tiles using the format: /z/x/y.png

52 |

Example: /0/0/0.png

53 | 54 | '; 55 | } 56 | 57 | location /health { 58 | access_log off; 59 | return 200 "healthy\n"; 60 | add_header Content-Type text/plain; 61 | } 62 | } 63 | 64 | upstream tileserver_backend { 65 | server tileserver:8080; 66 | } 67 | -------------------------------------------------------------------------------- /docs/images/favicon.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /frontend/eslint.config.mjs: -------------------------------------------------------------------------------- 1 | import globals from 'globals'; 2 | import pluginJs from '@eslint/js'; 3 | import tseslint from 'typescript-eslint'; 4 | import pluginReact from 'eslint-plugin-react'; 5 | import reactHooks from 'eslint-plugin-react-hooks'; 6 | import reactRefresh from 'eslint-plugin-react-refresh'; 7 | import eslintPluginPrettierRecommended from 'eslint-plugin-prettier/recommended'; 8 | 9 | /** @type {import('eslint').Linter.Config[]} */ 10 | export default [ 11 | { 12 | files: ['**/*.{js,mjs,cjs,ts,jsx,tsx}'], 13 | settings: { react: { version: 'detect' } }, 14 | languageOptions: { ecmaVersion: 2020, globals: globals.browser }, 15 | plugins: { 'react-hooks': reactHooks, 'react-refresh': reactRefresh } 16 | }, 17 | pluginJs.configs.recommended, 18 | ...tseslint.configs.recommended, 19 | pluginReact.configs.flat.recommended, 20 | eslintPluginPrettierRecommended, 21 | { 22 | name: 'Custom Rules ', 23 | rules: { 24 | 'no-console': 2, 25 | 'prefer-promise-reject-errors': 0, 26 | // 'import/order': 2, 27 | 'react/button-has-type': 2, 28 | 'react/jsx-closing-bracket-location': 2, 29 | 'react/jsx-closing-tag-location': 2, 30 | 'react/jsx-curly-spacing': 2, 31 | 'react/jsx-curly-newline': 2, 32 | 'react/jsx-equals-spacing': 2, 33 | 'react/jsx-max-props-per-line': [2, { maximum: 1, when: 'multiline' }], 34 | 'react/jsx-first-prop-new-line': 2, 35 | 
'react/jsx-curly-brace-presence': [ 36 | 2, 37 | { props: 'never', children: 'never' } 38 | ], 39 | 'react/jsx-pascal-case': 2, 40 | 'react/jsx-props-no-multi-spaces': 2, 41 | 'react/jsx-tag-spacing': [2, { beforeClosing: 'never' }], 42 | 'react/jsx-wrap-multilines': 2, 43 | 'react/no-array-index-key': 2, 44 | 'react/no-typos': 2, 45 | 'react/no-unused-prop-types': 2, 46 | 'react/no-unused-state': 2, 47 | 'react/react-in-jsx-scope': 'off', 48 | 'react/self-closing-comp': 2, 49 | 'react/style-prop-object': 2, 50 | 'react/void-dom-elements-no-children': 2, 51 | 'react/function-component-definition': [ 52 | 2, 53 | { namedComponents: ['function-declaration', 'arrow-function'] } 54 | ], 55 | 'react-hooks/rules-of-hooks': 2, // Checks rules of Hooks 56 | // 'react-hooks/exhaustive-deps': 1, // Checks effect dependencies 57 | // 'fp/no-mutating-methods': 1, 58 | '@typescript-eslint/no-explicit-any': 'warn' 59 | } 60 | } 61 | ]; 62 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: OpenAerialMap 2 | site_description: OpenAerialMap is an open service to provide access to a commons of openly licensed imagery and map layer services. 
3 | # strict: true 4 | site_url: "https://docs.imagery.hotosm.org/" 5 | 6 | repo_name: "hotosm/openaerialmap" 7 | repo_url: "https://github.com/hotosm/openaerialmap/" 8 | edit_uri: "edit/main/docs/" 9 | 10 | extra: 11 | social: 12 | - icon: "fontawesome/brands/github" 13 | link: "https://github.com/hotosm/" 14 | - icon: "fontawesome/brands/twitter" 15 | link: "https://twitter.com/hotosm" 16 | - icon: "fontawesome/solid/globe" 17 | link: "https://www.hotosm.org" 18 | copyright: Copyright © 2010 HOTOSM 19 | generator: false 20 | 21 | theme: 22 | name: material 23 | palette: 24 | primary: custom 25 | language: en 26 | favicon: images/favicon.svg 27 | logo: images/hot_logo.png 28 | 29 | extra_css: 30 | - css/extra.css 31 | - css/timeline.css 32 | 33 | markdown_extensions: 34 | - tables 35 | - toc: 36 | permalink: true 37 | title: Page contents 38 | - admonition 39 | - pymdownx.details 40 | - pymdownx.superfences 41 | - pymdownx.highlight 42 | - pymdownx.extra 43 | - pymdownx.emoji: 44 | - pymdownx.tabbed: 45 | alternate_style: true 46 | - mdx_truly_sane_lists 47 | 48 | plugins: 49 | - search 50 | - git-revision-date-localized 51 | - exclude: 52 | glob: 53 | - plugins/* 54 | - __pycache__/* 55 | - mkdocstrings: 56 | handlers: 57 | python: 58 | paths: [.] 
59 | options: 60 | members_order: source 61 | separate_signature: true 62 | filters: ["!^_"] 63 | docstring_options: 64 | ignore_init_summary: true 65 | merge_init_into_class: true 66 | 67 | nav: 68 | - Home: index.md 69 | - Get Started: 70 | - About: about/about.md 71 | # - Installation: INSTALL.md 72 | # - Contribution Guidelines: CONTRIBUTING.md 73 | - Code of Conduct: https://docs.hotosm.org/code-of-conduct 74 | - FAQ: about/faq.md 75 | - Developer Guide: 76 | - Practices: 77 | - Dev Practices: https://docs.hotosm.org/dev-practices 78 | - Tech Decisions: decisions/README.md 79 | - Pre-Commit: https://docs.hotosm.org/dev-guide/repo-management/pre-commit/ 80 | - Versioning: https://docs.hotosm.org/dev-guide/repo-management/version-control/#creating-releases 81 | - Backend: 82 | - Global Mosaic: dev/backend/global-mosaic.md 83 | - STAC API: dev/backend/stac-api.md 84 | - STAC Ingester: dev/backend/stac-ingester.md 85 | - Frontend: dev/frontend.md 86 | - Adding New Providers: dev/new-provider.md 87 | - Backup Prod STAC: dev/backup-prod-pgstac.md 88 | - Roadmap: https://github.com/hotosm/openaerialmap#roadmap 89 | - Timeline: about/timeline.md 90 | - API Docs: https://hotosm.github.io/swagger/?url=https://api.imagery.hotosm.org/raster/api 91 | -------------------------------------------------------------------------------- /frontend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "openaerialmap", 3 | "description": "OpenAerialMap frontend.", 4 | "version": "0.0.1", 5 | "repository": { 6 | "type": "git", 7 | "url": "" 8 | }, 9 | "author": { 10 | "name": "Development Seed", 11 | "url": "https://developmentseed.org" 12 | }, 13 | "license": "MIT", 14 | "bugs": { 15 | "url": "https://github.com" 16 | }, 17 | "homepage": "", 18 | "scripts": { 19 | "serve": "pnpm clean && NODE_ENV=development vite", 20 | "build": "pnpm clean && NODE_ENV=production tsc -b && vite build", 21 | "stage": "pnpm clean && 
NODE_ENV=staging tsc -b && vite build", 22 | "clean": "rm -rf dist node_modules/.vite", 23 | "lint": "pnpm lint:scripts", 24 | "lint:scripts": "eslint app/", 25 | "ts-check": "npx tsc --noEmit --skipLibCheck", 26 | "test": "jest" 27 | }, 28 | "engines": { 29 | "node": "22.x" 30 | }, 31 | "browserslist": "> 0.5%, last 2 versions, not dead", 32 | "devDependencies": { 33 | "@eslint/js": "^9.30.1", 34 | "@tanstack/eslint-plugin-query": "^5.81.2", 35 | "@testing-library/jest-dom": "^6.6.3", 36 | "@testing-library/react": "^16.3.0", 37 | "@testing-library/user-event": "^14.6.1", 38 | "@types/babel__core": "^7.20.5", 39 | "@types/d3-scale-chromatic": "^3.1.0", 40 | "@types/geojson": "^7946.0.16", 41 | "@types/jest": "^30.0.0", 42 | "@types/node": "^24.0.10", 43 | "@types/portscanner": "^2.1.4", 44 | "@types/react": "^19.1.8", 45 | "@types/react-dom": "^19.1.6", 46 | "@vitejs/plugin-react": "^4.6.0", 47 | "babel-jest": "^30.0.2", 48 | "eslint": "^9.30.1", 49 | "eslint-config-prettier": "^10.1.5", 50 | "eslint-plugin-prettier": "^5.5.1", 51 | "eslint-plugin-react": "^7.37.5", 52 | "eslint-plugin-react-hooks": "^5.2.0", 53 | "eslint-plugin-react-refresh": "^0.4.20", 54 | "globals": "^16.3.0", 55 | "jest": "^30.0.3", 56 | "jest-environment-jsdom": "^30.0.2", 57 | "portscanner": "^2.2.0", 58 | "prettier": "^3.6.2", 59 | "ts-jest": "^29.4.0", 60 | "ts-node": "^10.9.2", 61 | "typescript": "~5.8.3", 62 | "typescript-eslint": "^8.35.1", 63 | "vite": "^7.0.0" 64 | }, 65 | "dependencies": { 66 | "@awesome.me/webawesome": "3.0.0-beta.1", 67 | "@hotosm/ui": "0.3.1-b1", 68 | "@tanstack/react-query": "^5.81.5", 69 | "d3-scale-chromatic": "^3.1.0", 70 | "maplibre-gl": "^5.6.1", 71 | "next-themes": "^0.4.6", 72 | "pmtiles": "^4.3.0", 73 | "react": "^19.1.0", 74 | "react-dom": "^19.1.0", 75 | "react-icons": "^5.5.0", 76 | "stac-ts": "^1.0.4" 77 | }, 78 | "alias": { 79 | "$components": "~/app/components", 80 | "$styles": "~/app/styles", 81 | "$utils": "~/app/utils", 82 | "$hooks": 
"~/app/hooks", 83 | "$pages": "~/app/pages", 84 | "$test": "~/test" 85 | }, 86 | "packageManager": "pnpm@10.6.4+sha512.da3d715bfd22a9a105e6e8088cfc7826699332ded60c423b14ec613a185f1602206702ff0fe4c438cb15c979081ce4cb02568e364b15174503a63c7a8e2a5f6c" 87 | } 88 | -------------------------------------------------------------------------------- /backend/global-mosaic/README.md: -------------------------------------------------------------------------------- 1 | # OpenAerialMap Global Mosaic 2 | 3 | On a 24hr schedule: 4 | 5 | - Generates global mosaic in PMTiles format, serving via S3. 6 | - Also server TMS via a lightweight Martin server, for clients that 7 | don't support PMTiles. 8 | 9 | ## Getting Started 10 | 11 | This project uses [uv](https://docs.astral.sh/uv/getting-started/installation/) 12 | to manage Python dependencies. 13 | 14 | Once `uv` is installed, you can install the dependencies by, 15 | 16 | ```bash 17 | uv sync --all-groups 18 | ``` 19 | 20 | ## Note On Various Scripts 21 | 22 | The following `scripts` share a lot of code, and were developed iteratively: 23 | 24 | - Attempt 1: `gen_mosaic_manual.py` - manually generate mosaics from COGs. 25 | - Attempt 2: `gen_mosaic_hybrid.py` - hybrid coverage for zooms 0-10 + mosaic 26 | for zooms 11-14 (from TiTiler instance). This is similar to the approach from 27 | the original , but uses our eoAPI 28 | pgstac and TiTiler instead. 29 | - Attempt 3: `gen_coverage_raster.py` - simple grey coverage pixels indicating 30 | where we have imagery. 31 | - Attempt 4: `gen_coverage_vector.py` - just use Tippecanoe for vector tiles 🤦‍♂️ 32 | All the approaches above need significant memory optimisation to run on 33 | limited system resources / will require a bit more work. This approach 34 | is much more efficient and simple. 35 | 36 | > [!NOTE] 37 | > For coverage tiles there are two approaches: 38 | > 39 | > 1. 
Colour all pixels in the tile grey, meaning we massively reduce the 40 | > PMTiles size, due to internal tile deduplication. 41 | > 2. Partially colour pixels where appropriate, giving a more accurate 42 | > representation of coverage (more space, but looks nicer). 43 | > The gen_mosaic_raster.py script currently does approach 2. 44 | 45 | As of 2025-08-12 we are using `gen_coverage_vector.py` as the 46 | simplest approach, and is well optimised C++ code 47 | (low memory footprint). **It generates tiles for zooms 0-15**. 48 | 49 | ## Note On S3 Permissions 50 | 51 | - There is an IAM policy `oam-bucket-upload` with permission to upload 52 | to the `oin-hotosm-temp` bucket. 53 | - We must ensure this policy also has `"s3:PutObjectAcl"` set, to allow 54 | setting the global-mosaic.pmtiles file permission to public. 55 | - We have a user `hotosm-oam-global-mosaic-upload` that assigned this 56 | IAM policy, plus access/secret key for uploading to the bucket. 57 | 58 | > [!NOTE] 59 | > There are no doubt better ways to do this from EKS, but using key/secret 60 | > pairs for access is pretty simple and transferable amongst providers, 61 | > rather than being AWS specific. 
62 | 63 | ## Development Testing 64 | 65 | - See [doc for loading prod pgSTAC into development](../../docs/dev/backup-prod-pgstac.md) 66 | - Add a `.env` to this directory, with content: 67 | 68 | ```dotenv 69 | S3_ACCESS_KEY=KEY_FOR_OAM_BUCKET 70 | S3_SECRET_KEY=SECRET_FOR_OAM_BUCKET 71 | ``` 72 | 73 | - Run the script: 74 | 75 | ```bash 76 | docker compose run --rm mosaicker 77 | ``` 78 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | # Versioning: Commit messages & changelog 3 | - repo: https://github.com/commitizen-tools/commitizen 4 | rev: v4.10.0 5 | hooks: 6 | - id: commitizen 7 | stages: [commit-msg] 8 | 9 | # Lint / autoformat: Python code 10 | - repo: https://github.com/astral-sh/ruff-pre-commit 11 | # Ruff version. 12 | rev: "v0.14.9" 13 | hooks: 14 | # Run the linter 15 | - id: ruff 16 | files: ^backend/(?:.*/)*.*$ 17 | args: [--fix, --exit-non-zero-on-fix] 18 | # Run the formatter 19 | - id: ruff-format 20 | files: ^backend/(?:.*/)*.*$ 21 | 22 | # Deps: ensure Python uv lockfile is up to date 23 | - repo: https://github.com/astral-sh/uv-pre-commit 24 | rev: 0.9.17 25 | hooks: 26 | - id: uv-lock 27 | files: notebooks/pyproject.toml 28 | args: [--project, notebooks] 29 | 30 | # Upgrade: upgrade Python syntax 31 | - repo: https://github.com/asottile/pyupgrade 32 | rev: v3.21.2 33 | hooks: 34 | - id: pyupgrade 35 | 36 | # Spelling: Checks for common misspellings in text files. 37 | - repo: https://github.com/codespell-project/codespell 38 | rev: v2.4.1 39 | hooks: 40 | - id: codespell 41 | types: [text] 42 | args: [--toml, notebooks/pyproject.toml] 43 | 44 | # Autoformat: YAML, JSON, Markdown, etc. 
45 | - repo: https://github.com/pycontribs/mirrors-prettier 46 | rev: v3.6.2 47 | hooks: 48 | - id: prettier 49 | args: 50 | [ 51 | --ignore-unknown, 52 | --no-error-on-unmatched-pattern, 53 | "!CHANGELOG.md", 54 | "!frontend/pnpm-lock.yaml", 55 | "!backend/global-tms/chart/**", 56 | ] 57 | 58 | # # Lint: Dockerfile (disabled until binary is bundled) 59 | # - repo: https://github.com/hadolint/hadolint.git 60 | # rev: v2.12.1-beta 61 | # hooks: 62 | # - id: hadolint 63 | # args: 64 | # [ 65 | # "--ignore=DL3008", 66 | # "--ignore=DL3013", 67 | # "--ignore=DL3018", 68 | # "--ignore=DL3059", 69 | # ] 70 | 71 | # Lint: Bash scripts 72 | - repo: https://github.com/openstack-dev/bashate.git 73 | rev: 2.1.1 74 | hooks: 75 | - id: bashate 76 | 77 | # Lint: Shell scripts 78 | - repo: https://github.com/shellcheck-py/shellcheck-py 79 | rev: v0.11.0.1 80 | hooks: 81 | - id: shellcheck 82 | args: ["-x"] 83 | 84 | # Lint: Markdown 85 | - repo: https://github.com/igorshubovych/markdownlint-cli 86 | rev: v0.47.0 87 | hooks: 88 | - id: markdownlint 89 | args: 90 | [ 91 | --fix, 92 | --ignore, 93 | CHANGELOG.md, 94 | --ignore, 95 | .github, 96 | --ignore, 97 | metadata/README.md, 98 | --ignore, 99 | frontend/README.md, 100 | --ignore, 101 | frontend/_README.md, 102 | ] 103 | -------------------------------------------------------------------------------- /frontend/README.md: -------------------------------------------------------------------------------- 1 | # OpenAerialMap 2 | 3 | ## Vite for building 4 | 5 | [Vite](https://vite.dev/) is used to bundle all the needed assets for the application. 6 | There are two commands, both run via `pnpm` 7 | 8 | - `pnpm build` - clean & build everything and put it into dist folder 9 | - `pnpm serve` - serve the pages and utilize live reload on changes to fonts, images, scripts and HTML. 10 | 11 | ### Configurations and environment variables 12 | 13 | This app makes use of the environment variables found in `../.env.example`. 
Copy and paste that file to a new file called `../.env` and populate the values. 14 | 15 | See Vite's documentation on [env variables](https://vite.dev/guide/env-and-mode.html#env-variables-and-modes). 16 | 17 | ## AWS S3 For Deploy 18 | 19 | The deployment is: 20 | 21 | - Handled via Justfile `deploy-frontend` script. 22 | - The dist is pushed to S3 bucket `oam-frontend` under path `/${GIT_BRANCH}/`. 23 | - This way we host the main/stage/dev deployments in the same bucket, 24 | under subpaths. 25 | - In advance a Cloudfront distribution is made in AWS, attached to 26 | `*.hotosm.org` cert. 27 | - The workflow will: 28 | - Build the frontend container image. 29 | - Push the dist to S3. 30 | - Find the Cloudfront distribution matching the S3 URL. 31 | - Invalidate the cache of the cloudfront deployment in all locations. 32 | 33 | ## Linting 34 | 35 | Our [ESLint rules](.eslintrc) are based on `eslint:recommended` rules, with some custom options. To check linting errors run: 36 | 37 | npm run lint 38 | 39 | ## Tests 40 | 41 | Tests are setup using [Jest](https://jestjs.io/), and can be run with 42 | 43 | ``` 44 | npm run test 45 | ``` 46 | 47 | ## Coding style 48 | 49 | File [.editorconfig](.editorconfig) defines basic code styling rules, like indent sizes. 50 | 51 | [Prettier](https://prettier.io) is the recommended code formatter. Atom and VSCode have extensions supporting Prettier-ESLint integration, which will help maintain style consistency while following linting rules. 52 | 53 | ## Path alias 54 | 55 | Path alias allow you to define aliases for commonly used folders and avoid having very long file paths like `../../../component`. This also allows you to more easily move files around without worrying the imports will break. 56 | 57 | Paths are defined in the [package.json](./package.json) under `alias`. They start with a `$` and point to a folder. 
58 | 59 | The following paths are predefined, but feel free to change them to whatever is convenient to your project needs. 60 | 61 | ```json 62 | "alias": { 63 | "$components": "~/app/scripts/components", 64 | "$styles": "~/app/scripts/styles", 65 | "$utils": "~/app/scripts/utils", 66 | "$test": "~/test" 67 | } 68 | ``` 69 | 70 | For example, to import a component from a file called `page-header` in the `"~/app/scripts/components"` folder, you'd just need to do `import Component from '$components/page-header'`. 71 | 72 | ## Pull Request templates 73 | 74 | Project seed comes with pull request templates to simplify and standardize the pull requests in the project. This [issue on the how repo](https://github.com/developmentseed/how/issues/360#issuecomment-1041292591) provides some context to how this works. 75 | 76 | To add more templates create them in the `.github/PULL_REQUEST_TEMPLATE` folder and link them in the [PULL_REQUEST_TEMPLATE.md](./.github/PULL_REQUEST_TEMPLATE.md) file. 77 | -------------------------------------------------------------------------------- /backend/global-tms/chart/values.yaml: -------------------------------------------------------------------------------- 1 | image: 2 | repository: maptiler/tileserver-gl 3 | tag: v5.3.1 4 | pullPolicy: IfNotPresent 5 | imagePullSecrets: [] 6 | # This is to override the chart name 7 | nameOverride: "" 8 | fullnameOverride: "" 9 | 10 | initContainer: 11 | image: alpine/curl:8.14.1 12 | url: "https://s3.amazonaws.com/oin-hotosm-temp/global-coverage.pmtiles" 13 | filename: "global-coverage.pmtiles" 14 | 15 | persistence: 16 | enabled: true 17 | size: 1Gi 18 | storageClass: "gp2" 19 | 20 | service: 21 | type: ClusterIP 22 | port: 8080 23 | 24 | ingress: 25 | enabled: true 26 | className: nginx 27 | host: global.imagery.hotosm.org 28 | annotations: 29 | cert-manager.io/cluster-issuer: "letsencrypt-prod" 30 | external-dns.alpha.kubernetes.io/hostname: global.imagery.hotosm.org 31 | 
external-dns.alpha.kubernetes.io/ttl: "300" 32 | tls: 33 | enabled: true 34 | secretName: oam-global-tms-tls 35 | 36 | replicaCount: 1 37 | 38 | # This section builds out the service account more information can be found here: https://kubernetes.io/docs/concepts/security/service-accounts/ 39 | serviceAccount: 40 | # Specifies whether a service account should be created 41 | create: true 42 | # Automatically mount a ServiceAccount's API credentials? 43 | automount: true 44 | # Annotations to add to the service account 45 | annotations: {} 46 | # The name of the service account to use. 47 | # If not set and create is true, a name is generated using the fullname template 48 | name: "" 49 | 50 | # This is for setting Kubernetes Annotations to a Pod. 51 | # For more information checkout: https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/ 52 | podAnnotations: {} 53 | # This is for setting Kubernetes Labels to a Pod. 54 | # For more information checkout: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/ 55 | podLabels: {} 56 | 57 | podSecurityContext: {} 58 | # fsGroup: 2000 59 | 60 | securityContext: {} 61 | # capabilities: 62 | # drop: 63 | # - ALL 64 | # readOnlyRootFilesystem: true 65 | # runAsNonRoot: true 66 | # runAsUser: 1000 67 | 68 | # This is to setup the liveness and readiness probes more information can be found here: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/ 69 | livenessProbe: 70 | httpGet: 71 | path: /health 72 | port: 8080 # tileserver-gl serves /health on 8080 (matches service.port) — confirm against deployment.yaml 73 | initialDelaySeconds: 30 74 | periodSeconds: 10 75 | readinessProbe: 76 | httpGet: 77 | path: /health 78 | port: 8080 # same as livenessProbe: container listens on 8080, not 80 79 | initialDelaySeconds: 5 80 | periodSeconds: 5 81 | 82 | # This section is for setting up autoscaling more information can be found here: https://kubernetes.io/docs/concepts/workloads/autoscaling/ 83 | autoscaling: 84 | enabled: false 85 | minReplicas: 1 86 | maxReplicas: 3 87 | 
targetCPUUtilizationPercentage: 80 88 | # targetMemoryUtilizationPercentage: 80 89 | 90 | # Additional volumes on the output Deployment definition. 91 | volumes: [] 92 | # - name: foo 93 | # secret: 94 | # secretName: mysecret 95 | # optional: false 96 | 97 | # Additional volumeMounts on the output Deployment definition. 98 | volumeMounts: [] 99 | # - name: foo 100 | # mountPath: "/etc/foo" 101 | # readOnly: true 102 | 103 | nodeSelector: {} 104 | 105 | tolerations: [] 106 | 107 | affinity: {} 108 | -------------------------------------------------------------------------------- /.github/workflows/checks.yml: -------------------------------------------------------------------------------- 1 | # This workflow performs basic checks: 2 | # 3 | # 1. run a preparation step to install and cache node modules 4 | # 2. once prep succeeds, lint and test run in parallel 5 | # 6 | # The checks only run on non-draft Pull Requests. They don't run on the main 7 | # branch prior to deploy. It's recommended to use branch protection to avoid 8 | # pushes straight to 'main'. 
9 | 10 | name: Checks 11 | 12 | on: 13 | pull_request: 14 | types: 15 | - opened 16 | - synchronize 17 | - reopened 18 | - ready_for_review 19 | 20 | concurrency: 21 | group: ${{ github.workflow }}-${{ github.ref }} 22 | cancel-in-progress: true 23 | 24 | jobs: 25 | prep: 26 | if: github.event.pull_request.draft == false 27 | runs-on: ubuntu-latest 28 | 29 | steps: 30 | - name: Checkout 31 | uses: actions/checkout@v4 32 | 33 | - name: Install pnpm 34 | uses: pnpm/action-setup@v4 35 | with: 36 | version: 10.6.4 37 | 38 | - name: Use Node.js ${{ env.NODE }} 39 | uses: actions/setup-node@v4 40 | with: 41 | node-version-file: "./frontend/.nvmrc" 42 | cache: "pnpm" 43 | cache-dependency-path: "frontend/pnpm-lock.yaml" 44 | 45 | - name: Cache node_modules 46 | uses: actions/cache@v4 47 | id: cache-node-modules 48 | with: 49 | path: frontend/node_modules 50 | key: ${{ runner.os }}-build-${{ hashFiles('frontend/package.json') }} 51 | 52 | - name: Install 53 | run: cd frontend && pnpm install 54 | 55 | lint: 56 | needs: prep 57 | runs-on: ubuntu-latest 58 | 59 | steps: 60 | - name: Checkout 61 | uses: actions/checkout@v4 62 | 63 | - name: Install pnpm 64 | uses: pnpm/action-setup@v4 65 | with: 66 | version: 10.6.4 67 | 68 | - name: Use Node.js ${{ env.NODE }} 69 | uses: actions/setup-node@v4 70 | with: 71 | node-version-file: "./frontend/.nvmrc" 72 | cache: "pnpm" 73 | cache-dependency-path: "frontend/pnpm-lock.yaml" 74 | 75 | - name: Cache node_modules 76 | uses: actions/cache@v4 77 | id: cache-node-modules 78 | with: 79 | path: frontend/node_modules 80 | key: ${{ runner.os }}-build-${{ hashFiles('frontend/package.json') }} 81 | 82 | - name: Install 83 | run: cd frontend && pnpm install 84 | 85 | - name: Lint 86 | run: cd frontend && pnpm lint 87 | 88 | test: 89 | needs: prep 90 | runs-on: ubuntu-latest 91 | 92 | steps: 93 | - name: Checkout 94 | uses: actions/checkout@v4 95 | 96 | - name: Install pnpm 97 | uses: pnpm/action-setup@v4 98 | with: 99 | version: 10.6.4 100 | 
101 | - name: Use Node.js ${{ env.NODE }} 102 | uses: actions/setup-node@v4 103 | with: 104 | node-version-file: "./frontend/.nvmrc" 105 | cache: "pnpm" 106 | cache-dependency-path: "frontend/pnpm-lock.yaml" 107 | 108 | - name: Cache node_modules 109 | uses: actions/cache@v4 110 | id: cache-node-modules 111 | with: 112 | path: frontend/node_modules 113 | key: ${{ runner.os }}-build-${{ hashFiles('frontend/package.json') }} 114 | 115 | - name: Install 116 | run: cd frontend && pnpm install 117 | 118 | - name: Test 119 | run: cd frontend && pnpm test 120 | -------------------------------------------------------------------------------- /docs/about/timeline.md: -------------------------------------------------------------------------------- 1 | # Timeline 2 | 3 | In reverse chronological order, with most recent events first. 4 | 5 | 6 | 7 | 8 | 9 | >
10 | >
11 | >
2025-05
12 | >

STAC-based OpenAerialMap

13 | > 🚀 Complete overhaul of the OAM backend, in partnership with 14 | > DevelopmentSeed. OAM can now be globally federated / 15 | > decentralised (providing data sovereignty for communities). 16 | >
17 | >
18 | >
19 | >
2025-01
20 | >

Drone-TM Release

21 | > 🚁 A platform to collaboratively collect drone imagery, 22 | > but it needs to go somewhere! 23 | >
24 | >
25 | >
26 | >
2023
27 | >

Global Mosaic

28 | > 🗺️ Global mosaic layer is launched for OAM, developed 29 | > by Kontur. 30 | >
31 | >
32 | >
33 | >
2022
34 | >

HOT x Kontur Collab

35 | > 🤝 HOT & Kontur collaborate to improve OAM (v2). 36 | >
37 | >
38 | >
39 | >
2018
40 | >

Uploader Improvements

41 | > 🚀 Big usability improvements to uploader and tiler for OAM. 42 | >
43 | >
44 | >
45 | >
2017
46 | >

Pacific Drone Imagery Dashboard

47 | > 🏝️ PacDID was developed alongside communities to make drone imagery 48 | > accessible via OAM. 49 | > Details [here](https://www.hotosm.org/updates/2016-08-15_improving_resilience_with_aerial_imagery) 50 | >
51 | >
52 | >
53 | >
2015-11
54 | >

First Version

55 | > 🏁 First version of OAM released by HOT. 56 | >
57 | >
58 | >
59 | >
2014-06
60 | >

Initial Research & Concept

61 | > 🔬 OpenAerialMap & Open Imagery Network concept started. 62 | > [More info here](https://www.elrha.org/news-blogs/openaerialmap-final-blog) 63 | >
64 | >
65 | >
66 | >
2010
67 | >

Imagery Used In Haiti

68 | > ❤️ Imagery from OAM was used to assist the response in the Haiti earthquake. 69 | >
70 | >
71 | >
72 | >
2007
73 | >

First Imagery Becomes Available

74 | > 🛰️ Some satellite imagery providers started making their data freely 75 | > available for disaster response mapping. 76 | >
77 | >
78 | > 79 | >
80 | 81 | 82 | -------------------------------------------------------------------------------- /backend/global-tms/chart/templates/deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: {{ include "chart.fullname" . }} 5 | labels: 6 | {{- include "chart.labels" . | nindent 4 }} 7 | spec: 8 | {{- if not .Values.autoscaling.enabled }} 9 | replicas: {{ .Values.replicaCount }} 10 | {{- end }} 11 | selector: 12 | matchLabels: 13 | {{- include "chart.selectorLabels" . | nindent 6 }} 14 | template: 15 | metadata: 16 | annotations: 17 | {{- with .Values.podAnnotations }}{{ toYaml . | nindent 8 }}{{- end }} 18 | labels: 19 | {{- include "chart.selectorLabels" . | nindent 8 }} 20 | {{- with .Values.podLabels }}{{ toYaml . | nindent 8 }}{{- end }} 21 | spec: 22 | serviceAccountName: {{ include "chart.serviceAccountName" . }} 23 | {{- with .Values.imagePullSecrets }} 24 | imagePullSecrets: {{ toYaml . | nindent 8 }} 25 | {{- end }} 26 | initContainers: 27 | - name: get-tiles 28 | image: {{ .Values.initContainer.image }} 29 | command: [ "sh", "-c" ] 30 | args: 31 | - | 32 | if [ ! -f /tiles/{{ .Values.initContainer.filename }} ]; then 33 | echo "Downloading tiles..." 
34 | curl -fSL {{ .Values.initContainer.url }} -o /tiles/{{ .Values.initContainer.filename }} 35 | else 36 | echo "File exists, skipping" 37 | fi 38 | volumeMounts: 39 | - name: global-coverage-pmtiles 40 | mountPath: /tiles 41 | containers: 42 | - name: {{ .Chart.Name }} 43 | image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" 44 | imagePullPolicy: {{ .Values.image.pullPolicy }} 45 | ports: 46 | - name: http 47 | containerPort: {{ .Values.service.port }} 48 | protocol: TCP 49 | livenessProbe: {{- toYaml .Values.livenessProbe | nindent 12 }} 50 | readinessProbe: {{- toYaml .Values.readinessProbe | nindent 12 }} 51 | {{- with .Values.resources }} 52 | resources: {{ toYaml . | nindent 12 }} 53 | {{- end }} 54 | volumeMounts: 55 | - name: global-coverage-pmtiles 56 | mountPath: /tiles 57 | - name: config 58 | mountPath: /data/config.json 59 | subPath: config.json 60 | - name: style 61 | mountPath: /data/style.json 62 | subPath: style.json 63 | {{- with .Values.volumeMounts }} 64 | {{ toYaml . | nindent 12 }} 65 | {{- end }} 66 | - name: {{ .Chart.Name }}-nginx 67 | image: nginx:1.29-alpine 68 | ports: 69 | - containerPort: 80 70 | name: http 71 | volumeMounts: 72 | - name: nginx-conf 73 | mountPath: /etc/nginx/conf.d/default.conf 74 | subPath: nginx.conf 75 | volumes: 76 | - name: global-coverage-pmtiles 77 | persistentVolumeClaim: 78 | claimName: {{ include "chart.fullname" . }}-pvc 79 | - name: config 80 | configMap: 81 | name: {{ include "chart.fullname" . }}-config 82 | - name: style 83 | configMap: 84 | name: {{ include "chart.fullname" . }}-style 85 | - name: nginx-conf 86 | configMap: 87 | name: {{ include "chart.fullname" . }}-nginx 88 | {{- with .Values.volumes }} 89 | {{ toYaml . 
| nindent 8 }} 90 | {{- end }} 91 | -------------------------------------------------------------------------------- /frontend/app/hooks/useStacCatalog.ts: -------------------------------------------------------------------------------- 1 | import { useQuery } from '@tanstack/react-query'; 2 | import { type StacCatalog, type StacCollection } from 'stac-ts'; 3 | import { StacItemFilter } from '../context/StacContext'; 4 | import { StacFeatureCollection, StacQueryables } from '../types/stac'; 5 | 6 | const STAC_API = import.meta.env.VITE_STAC_API_URL; 7 | const STAC_PATH = import.meta.env.VITE_STAC_API_PATHNAME; 8 | const RASTER_PATH = import.meta.env.VITE_STAC_TILER_PATHNAME; 9 | const STAC_API_PATH = `${STAC_API}/${STAC_PATH}`; 10 | export const RASTER_API_PATH = `${STAC_API}/${RASTER_PATH}`; 11 | const STAC_ITEMS_LIMIT = import.meta.env.VITE_STAC_ITEMS_LIMIT; 12 | /** 13 | * Fetches STAC catalog data from the provided endpoint 14 | */ 15 | export function useStacCatalog() { 16 | return useQuery({ 17 | queryKey: ['stacCatalog'], 18 | queryFn: async () => { 19 | const response = await fetch(STAC_API_PATH); 20 | if (!response.ok) { 21 | throw new Error(`Failed to fetch STAC catalog: ${response.statusText}`); 22 | } 23 | return response.json(); 24 | } 25 | }); 26 | } 27 | 28 | export function useStacCollections() { 29 | return useQuery({ 30 | queryKey: ['stacCollections'], 31 | queryFn: async () => { 32 | const response = await fetch(`${STAC_API_PATH}/collections`); 33 | if (!response.ok) { 34 | throw new Error( 35 | `Failed to fetch STAC collections: ${response.statusText}` 36 | ); 37 | } 38 | return response.json(); 39 | } 40 | }); 41 | } 42 | 43 | /** 44 | * @param collection The STAC collection ID 45 | */ 46 | export function useStacItems( 47 | collection: string | undefined, 48 | filters: StacItemFilter, 49 | bbox: number[] 50 | ) { 51 | return useQuery({ 52 | queryKey: ['stacItems', collection, filters, bbox], 53 | queryFn: async () => { 54 | let 
stacItemsFetchURL = `${STAC_API_PATH}/collections/${collection}/items?limit=${STAC_ITEMS_LIMIT}&bbox=${bbox.join(',')}&sortby=-datetime`; 55 | if ( 56 | filters.dateFilter && 57 | filters.dateFilter.startDate && 58 | filters.dateFilter.endDate 59 | ) { 60 | const datetimeValue = `${new Date(filters.dateFilter.startDate).toISOString()}/${new Date(filters.dateFilter.endDate).toISOString()}`; 61 | stacItemsFetchURL += `&datetime=${encodeURIComponent(datetimeValue)}`; 62 | } 63 | if (filters.itemIdFilter && filters.itemIdFilter.itemId) { 64 | // Add CQL2 text filter for item ID 65 | stacItemsFetchURL += `&filter-lang=cql2-text&filter=${encodeURIComponent(`id = '${filters.itemIdFilter.itemId}'`)}`; 66 | } 67 | const response = await fetch(stacItemsFetchURL); 68 | if (!response.ok) { 69 | throw new Error(`Failed to fetch STAC items: ${response.statusText}`); 70 | } 71 | return response.json(); 72 | }, 73 | enabled: collection !== undefined 74 | }); 75 | } 76 | 77 | /** 78 | * @param collection The STAC collection ID 79 | */ 80 | export function useStacQueryables(collection: string | undefined) { 81 | return useQuery({ 82 | queryKey: ['stacQueryables', collection], 83 | queryFn: async () => { 84 | const response = await fetch( 85 | `${STAC_API_PATH}/collections/${collection}/queryables` 86 | ); 87 | if (!response.ok) { 88 | throw new Error( 89 | `Failed to fetch STAC queryables: ${response.statusText}` 90 | ); 91 | } 92 | return response.json(); 93 | }, 94 | enabled: collection !== undefined 95 | }); 96 | } 97 | -------------------------------------------------------------------------------- /frontend/app/context/StacContext.tsx: -------------------------------------------------------------------------------- 1 | import { createContext, ReactNode, useContext, useState } from 'react'; 2 | import { StacCollection } from 'stac-ts'; 3 | import { 4 | useStacCollections, 5 | useStacItems, 6 | useStacQueryables 7 | } from '../hooks/useStacCatalog'; 8 | import { 
StacFeatureCollection, StacQueryables } from '../types/stac'; 9 | 10 | export interface DateFilter { 11 | startDate: string | undefined; 12 | endDate: string | undefined; 13 | } 14 | 15 | export interface ItemIdFilter { 16 | itemId: string | undefined; 17 | } 18 | 19 | export type StacItemFilter = { 20 | itemIdFilter: ItemIdFilter; 21 | dateFilter: DateFilter; 22 | }; 23 | 24 | interface StacContextType { 25 | selectedCollection?: string; 26 | availableCollections?: StacCollection[]; 27 | stacItems?: StacFeatureCollection; 28 | selectedItem: string | undefined; 29 | 30 | isStacCollectionLoading: boolean; 31 | isStacCollectionsError: Error | null; 32 | isStacItemsLoading: boolean; 33 | isStacItemsError: Error | null; 34 | isStacQueryablesLoading: boolean; 35 | isStacQueryablesError: Error | null; 36 | 37 | handleSelectCollection: (id: string) => void; 38 | handleSelectQueryable: (id: string) => void; 39 | handleSetFilter: (filters: StacItemFilter) => void; 40 | setSelectedItem: (items: string) => void; 41 | 42 | stacQueryables?: StacQueryables; 43 | filters: StacItemFilter; 44 | 45 | bbox: number[]; 46 | setBbox: (bbox: number[]) => void; 47 | } 48 | 49 | const StacContext = createContext(undefined); 50 | 51 | interface StacProviderProps { 52 | children: ReactNode; 53 | } 54 | 55 | export function StacProvider({ children }: StacProviderProps) { 56 | const [selectedCollection, setSelectedCollection] = useState< 57 | string | undefined 58 | >(); 59 | const [selectedQueryable, setSelectedQueryable] = useState< 60 | string | undefined 61 | >(); 62 | const [selectedItem, setSelectedItem] = useState(); 63 | const [filters, setFilters] = useState<{ 64 | itemIdFilter: ItemIdFilter; 65 | dateFilter: DateFilter; 66 | }>({ 67 | itemIdFilter: { itemId: undefined }, 68 | dateFilter: { startDate: undefined, endDate: undefined } 69 | }); 70 | const [bbox, setBbox] = useState([-180, -90, 180, 90]); 71 | 72 | const { 73 | data: stacCollections, 74 | isLoading: 
isStacCollectionLoading, 75 | error: isStacCollectionsError 76 | } = useStacCollections(); 77 | 78 | const { 79 | data: stacItems, 80 | isLoading: isStacItemsLoading, 81 | error: isStacItemsError 82 | } = useStacItems(selectedCollection, filters, bbox); 83 | 84 | const { 85 | data: stacQueryables, 86 | isLoading: isStacQueryablesLoading, 87 | error: isStacQueryablesError 88 | } = useStacQueryables(selectedCollection); 89 | 90 | const handleSelectCollection = (id: string) => { 91 | setSelectedCollection(id); 92 | }; 93 | const handleSelectQueryable = (id: string) => { 94 | setSelectedQueryable(id); 95 | }; 96 | 97 | const handleSetFilter = (filters: StacItemFilter) => setFilters(filters); 98 | 99 | const value = { 100 | selectedCollection, 101 | availableCollections: stacCollections?.collections, 102 | stacItems, 103 | 104 | isStacCollectionLoading, 105 | isStacCollectionsError, 106 | isStacItemsLoading, 107 | isStacItemsError, 108 | 109 | stacQueryables, 110 | selectedQueryable, 111 | isStacQueryablesLoading, 112 | isStacQueryablesError, 113 | 114 | filters, 115 | selectedItem, 116 | 117 | handleSelectCollection, 118 | handleSelectQueryable, 119 | handleSetFilter, 120 | setSelectedItem, 121 | 122 | bbox, 123 | setBbox 124 | }; 125 | 126 | return {children}; 127 | } 128 | 129 | export function useStac() { 130 | const context = useContext(StacContext); 131 | if (context === undefined) { 132 | throw new Error('useStac must be used within a StacProvider'); 133 | } 134 | return context; 135 | } 136 | -------------------------------------------------------------------------------- /metadata/README.md: -------------------------------------------------------------------------------- 1 | # Metadata Specification 2 | 3 | This folder contains the specification of metadata that will be used for Open Imagery Network and OpenAerialMap. 4 | 5 | A metadata file is required for each "image" file. An image file is an RGB GeoTIFF. 
6 | 7 | Below is a table of metadata values and where they would be required: 8 | 9 | | Element | Type | Sample Value | Description | OIN | OAM | Apply to TMS? | 10 | | ------------- | ------------ | ------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | -------- | -------- | ------------- | 11 | | UUID | URI | | Unique URI to file | Auto | Auto | Yes | 12 | | Title | string | San Diego 2015 orthomosaic | Human friendly title of the image | Optional | Optional | Yes | 13 | | Projection | string | EPSG:4326 | CRS of the datasource in EPSG format | Yes | Yes | Yes | 14 | | BBox | string | -180,-90,180,90 | Pair of min and max coordinates in CRS units, (min_x, min_y, max_x, max_y) | Yes? | Yes | Yes | 15 | | Footprint | string (WKT) | POLYGON((-180 -90, -180 90, 180 90, 180 -90, -180 -90)) | Datasource footprint. WKT format, describing the actual footprint of the imagery | Yes? | Yes | Yes | 16 | | GSD | double | 0.35 | Average ground spatial distance (resolution) of the datasource imagery, expressed in meters | Yes | Yes | Yes | 17 | | File size | double | 1024 | File size on disk in bytes | Yes? | Yes | No | 18 | | License | string | Nextview | Usage license of the datasource. This determines visibility of the datasource for authenticated users | No | Yes | Yes | 19 | | Sense Start | Date | 2015-05-03T13:00:00.000 | First date of acquisition in UTC (Combined date and time representation) | Yes | Yes | | 20 | | Sense End | Date | 2015-05-04T13:00:00.000 | Last date of acquisition in UTC (Combined date and time representation) | Yes | Yes | | 21 | | Platform | string | Satellite | List of possible platform sources limited to satellite, aircraft, UAV, balloon, kite | Yes | Yes | | 22 | | Sensor | string | WV-3 | How the data was collected? (image acquisition device... camera, radar, ...) 
| Optional | Yes | | 23 | | Tags | string | #nepal_earthquake_2015 | Any user provided tag | No | Optional | Yes | 24 | | Provider | string | Digital Globe | Provider/owner of the OIN bucket | No | Yes | Yes | 25 | | Contact Email | string | | Name and email address of the data provider | Yes | Yes | Yes | 26 | -------------------------------------------------------------------------------- /backend/global-tms/chart/templates/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: {{ include "chart.fullname" . }}-config 5 | labels: 6 | {{- include "chart.labels" . | nindent 4 }} 7 | data: 8 | config.json: | 9 | { 10 | "options": { "paths": { "root": "." } }, 11 | "data": { 12 | "global-coverage": { 13 | "pmtiles": "/tiles/{{ .Values.initContainer.filename }}" 14 | } 15 | }, 16 | "styles": { 17 | "global-coverage": { 18 | "style": "style.json", 19 | "serve_rendered": true, 20 | "tilejson": { "bounds": [-180, -85, 180, 85] } 21 | } 22 | } 23 | } 24 | --- 25 | apiVersion: v1 26 | kind: ConfigMap 27 | metadata: 28 | name: {{ include "chart.fullname" . }}-style 29 | labels: 30 | {{- include "chart.labels" . | nindent 4 }} 31 | data: 32 | style.json: | 33 | { 34 | "version": 8, 35 | "sources": { 36 | "global-coverage": { 37 | "type": "vector", 38 | "url": "pmtiles:///tiles/{{ .Values.initContainer.filename }}" 39 | } 40 | }, 41 | "layers": [ 42 | { 43 | "id": "global-coverage", 44 | "source": "global-coverage", 45 | "source-layer": "globalcoverage", 46 | "minzoom": 0, 47 | "maxzoom": 15, 48 | "type": "fill", 49 | "paint": { "fill-color": "#ff0000", "fill-opacity": 0.5 }, 50 | "filter": ["==", ["geometry-type"], "Polygon"] 51 | } 52 | ] 53 | } 54 | --- 55 | apiVersion: v1 56 | kind: ConfigMap 57 | metadata: 58 | name: {{ include "chart.fullname" . }}-nginx 59 | labels: 60 | {{- include "chart.labels" . 
| nindent 4 }} 61 | data: 62 | nginx.conf: | 63 | server { 64 | listen 80; 65 | listen [::]:80; 66 | server_name _; 67 | 68 | # Zoom 0-15 → proxy to tileserver (same pod) 69 | location ~ ^/([0-9]|1[0-5])/([0-9]+)/([0-9]+)\.png$ { 70 | set $zoom_level $1; 71 | set $x $2; 72 | set $y $3; 73 | 74 | proxy_pass http://127.0.0.1:8080/styles/global-coverage/256/$zoom_level/$x/$y.png; 75 | proxy_set_header Host $host; 76 | proxy_set_header X-Real-IP $remote_addr; 77 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; 78 | proxy_set_header X-Forwarded-Proto $scheme; 79 | proxy_set_header X-Forwarded-Host $host:$server_port; 80 | proxy_redirect off; 81 | proxy_connect_timeout 5s; 82 | proxy_send_timeout 10s; 83 | proxy_read_timeout 10s; 84 | } 85 | 86 | # Zoom >=16 → redirect to TiTiler (eoAPI) 87 | location ~ ^/(1[6-9]|[2-9][0-9])/([0-9]+)/([0-9]+)\.png$ { 88 | set $zoom_level $1; 89 | set $x $2; 90 | set $y $3; 91 | 92 | return 302 https://api.imagery.hotosm.org/raster/collections/openaerialmap/tiles/WebMercatorQuad/$zoom_level/$x/$y.png?assets=visual; 93 | } 94 | 95 | # Friendly root page with info 96 | location = / { 97 | default_type text/html; 98 | return 200 ' 99 | 100 | 101 | 102 | 103 | Tile Server 104 | 109 | 110 | 111 |

Tile Server

112 |

This server provides map tiles.

113 |

Request tiles using the format: /z/x/y.png

114 |

Example: /0/0/0.png

115 | 116 | '; 117 | } 118 | 119 | location /health { 120 | access_log off; 121 | return 200 "healthy\n"; 122 | add_header Content-Type text/plain; 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # 🤗 Welcome 2 | 3 | :+1::tada: First off, We are really glad you're reading this, because we need 4 | volunteer developers to help improve OpenAerialMap! 5 | :tada::+1: 6 | 7 | We welcome and encourage contributors of all skill levels, and we are committed 8 | to making sure your participation is inclusive, enjoyable, and rewarding. If 9 | you have never contributed to an open source project before, we are a good 10 | place to start, and we will make sure you are supported every step of the way. 11 | If you have **any** questions, please ask! 12 | 13 | There are many ways to contribute to the **OpenAerialMap**, including: 14 | 15 | ## Testing 16 | 17 | - User testing the functionality and reporting any issues. 18 | - Writing automated tests for the existing code. 19 | 20 | ## Code contributions 21 | 22 | Create pull requests (PRs) for changes that you think are needed. We would 23 | really appreciate your help! 24 | 25 | Skills with the following would be beneficial: 26 | 27 | - Python 28 | - React 29 | - TypeScript / JavaScript 30 | - Docker 31 | - CI/CD workflows 32 | 33 | ## Report bugs and suggest improvements 34 | 35 | The [issue queue][3] is the best way to get started. There are issue templates 36 | for BUGs and FEATURES that you can use, you could also create your own. 37 | 38 | Once you have submitted an issue, it will be assigned one label from the 39 | following [label categories][4]. 40 | 41 | If you are wondering where to start, you can filter by the 42 | **good first issue label**. 
43 | 44 | ## Report security vulnerabilities 45 | 46 | Please inform a maintainer as soon as possible, including the CVE code. 47 | 48 | Message via the [HOTOSM Slack][9] or [direct email][10] would be preferred, 49 | but via Github issue is also possible. 50 | 51 | ## :handshake: Thank you 52 | 53 | Thank you very much in advance for your contributions!! Please ensure you refer 54 | to our **Code of Conduct**. 55 | If you've read the guidelines, but are still not sure how to contribute on 56 | Github, please reach out to us via our Slack **#geospatial-tech-and-innovation**. 57 | 58 | ## Code Contribution guidelines 59 | 60 | ### Workflow 61 | 62 | We operate the "Fork & Pull" model explained at [About Pull Requests][5] 63 | 64 | Further details of our development workflow can be found [on this page][8] 65 | 66 | ### If you are reporting a problem 67 | 68 | - Describe exactly what you were trying to achieve, what you did, what you 69 | expected to happen and what did happen instead. Include relevant information 70 | about the platform, OS version etc. you are using. Include shell commands you 71 | typed in, log files, error messages etc. 72 | 73 | - Please open a separate issue for each problem, question, or comment you have. 74 | Do not reuse existing issues for other topics, even if they are similar. This 75 | keeps issues small and manageable and makes it much easier to follow through 76 | and make sure each problem is taken care of. 77 | 78 | ### Documentation 79 | 80 | Project documentation should be in [Markdown format][6], and in a _docs_ 81 | subdirectory. While it is possible to use HTML in Markdown documents 82 | for tables and images, it is preferred to use the Markdown style as 83 | it's much easier to read. 84 | 85 | See a detailed guide on documentation contributions 86 | [on this page](https://docs.hotosm.org/techdoc). 
87 | 88 | ### Pre-Commit Hooks 89 | 90 | [Pre-Commit Hooks][7] are used in this repo to enforce coding style: 91 | 92 | - Python adheres mostly to PEP8 convention, amongst others, using the 93 | tool `ruff`. 94 | - TypeScript / JavaScript code is formatted using `prettier`. 95 | - Markdown files are formatted using `markdownlint`. 96 | - Raw SQL is formatted using `sqlfluff`. 97 | 98 | Please install the pre-commit hooks before contributing: 99 | 100 | ```bash 101 | pip install pre-commit 102 | pre-commit install 103 | ``` 104 | 105 | ### Commit Sign-Off Policy 106 | 107 | - In order to commit to this repository, please read and accept our 108 | [commit sign-off policy](https://developercertificate.org) 109 | - This is simply to verify that you are the author of the commits you make. 110 | - If possible, please add to your commit footer the `Signed-off-by` info: 111 | `Signed-off-by: John Doe ` 112 | 113 | ## Our Development Practices 114 | 115 | To see more detail on the development practices used at HOT, 116 | please visit [this page](https://docs.hotosm.org/dev-practices) 117 | 118 | [3]: https://github.com/hotosm/openaerialmap/issues "issue queue" 119 | [4]: https://github.com/hotosm/openaerialmap/labels "label categories" 120 | [5]: https://help.github.com/articles/about-pull-requests/ "About Pull Requests" 121 | [6]: https://www.markdownguide.org/ "Markdown format" 122 | [7]: https://docs.hotosm.org/dev-guide/repo-management/pre-commit "Pre-commit" 123 | [8]: https://docs.hotosm.org/dev-guide/repo-management/git/#git-flow "Git Flow" 124 | [9]: https://slack.hotosm.org "HOT Slack" 125 | [10]: mailto:sysadmin@hotosm.org "Sysadmin email" 126 | -------------------------------------------------------------------------------- /backend/stac-api/.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[codz] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # 
Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py.cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # UV 98 | # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. 
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | #uv.lock 102 | 103 | # poetry 104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 105 | # This is especially recommended for binary packages to ensure reproducibility, and is more 106 | # commonly ignored for libraries. 107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 108 | #poetry.lock 109 | #poetry.toml 110 | 111 | # pdm 112 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 113 | # pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python. 114 | # https://pdm-project.org/en/latest/usage/project/#working-with-version-control 115 | #pdm.lock 116 | #pdm.toml 117 | .pdm-python 118 | .pdm-build/ 119 | 120 | # pixi 121 | # Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control. 122 | #pixi.lock 123 | # Pixi creates a virtual environment in the .pixi directory, just like venv module creates one 124 | # in the .venv directory. It is recommended not to include this directory in version control. 125 | .pixi 126 | 127 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 128 | __pypackages__/ 129 | 130 | # Celery stuff 131 | celerybeat-schedule 132 | celerybeat.pid 133 | 134 | # SageMath parsed files 135 | *.sage.py 136 | 137 | # Environments 138 | .env 139 | .envrc 140 | .venv 141 | env/ 142 | venv/ 143 | ENV/ 144 | env.bak/ 145 | venv.bak/ 146 | 147 | # Spyder project settings 148 | .spyderproject 149 | .spyproject 150 | 151 | # Rope project settings 152 | .ropeproject 153 | 154 | # mkdocs documentation 155 | /site 156 | 157 | # mypy 158 | .mypy_cache/ 159 | .dmypy.json 160 | dmypy.json 161 | 162 | # Pyre type checker 163 | .pyre/ 164 | 165 | # pytype static type analyzer 166 | .pytype/ 167 | 168 | # Cython debug symbols 169 | cython_debug/ 170 | 171 | # PyCharm 172 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 173 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 174 | # and can be added to the global gitignore or merged into this file. For a more nuclear 175 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 176 | #.idea/ 177 | 178 | # Abstra 179 | # Abstra is an AI-powered process automation framework. 180 | # Ignore directories containing user credentials, local state, and settings. 181 | # Learn more at https://abstra.io/docs 182 | .abstra/ 183 | 184 | # Visual Studio Code 185 | # Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore 186 | # that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore 187 | # and can be added to the global gitignore or merged into this file. However, if you prefer, 188 | # you could uncomment the following to ignore the entire vscode folder 189 | # .vscode/ 190 | 191 | # Ruff stuff: 192 | .ruff_cache/ 193 | 194 | # PyPI configuration file 195 | .pypirc 196 | 197 | # Cursor 198 | # Cursor is an AI-powered code editor. 
`.cursorignore` specifies files/directories to 199 | # exclude from AI features like autocomplete and code analysis. Recommended for sensitive data 200 | # refer to https://docs.cursor.com/context/ignore-files 201 | .cursorignore 202 | .cursorindexingignore 203 | 204 | # Marimo 205 | marimo/_static/ 206 | marimo/_lsp/ 207 | __marimo__/ 208 | 209 | # Streamlit 210 | .streamlit/secrets.toml 211 | -------------------------------------------------------------------------------- /backend/stac-ingester/.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[codz] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py.cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # UV 98 | # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | #uv.lock 102 | 103 | # poetry 104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 105 | # This is especially recommended for binary packages to ensure reproducibility, and is more 106 | # commonly ignored for libraries. 
107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 108 | #poetry.lock 109 | #poetry.toml 110 | 111 | # pdm 112 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 113 | # pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python. 114 | # https://pdm-project.org/en/latest/usage/project/#working-with-version-control 115 | #pdm.lock 116 | #pdm.toml 117 | .pdm-python 118 | .pdm-build/ 119 | 120 | # pixi 121 | # Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control. 122 | #pixi.lock 123 | # Pixi creates a virtual environment in the .pixi directory, just like venv module creates one 124 | # in the .venv directory. It is recommended not to include this directory in version control. 125 | .pixi 126 | 127 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 128 | __pypackages__/ 129 | 130 | # Celery stuff 131 | celerybeat-schedule 132 | celerybeat.pid 133 | 134 | # SageMath parsed files 135 | *.sage.py 136 | 137 | # Environments 138 | .env 139 | .envrc 140 | .venv 141 | env/ 142 | venv/ 143 | ENV/ 144 | env.bak/ 145 | venv.bak/ 146 | 147 | # Spyder project settings 148 | .spyderproject 149 | .spyproject 150 | 151 | # Rope project settings 152 | .ropeproject 153 | 154 | # mkdocs documentation 155 | /site 156 | 157 | # mypy 158 | .mypy_cache/ 159 | .dmypy.json 160 | dmypy.json 161 | 162 | # Pyre type checker 163 | .pyre/ 164 | 165 | # pytype static type analyzer 166 | .pytype/ 167 | 168 | # Cython debug symbols 169 | cython_debug/ 170 | 171 | # PyCharm 172 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 173 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 174 | # and can be added to the global gitignore or merged into this file. 
For a more nuclear 175 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 176 | #.idea/ 177 | 178 | # Abstra 179 | # Abstra is an AI-powered process automation framework. 180 | # Ignore directories containing user credentials, local state, and settings. 181 | # Learn more at https://abstra.io/docs 182 | .abstra/ 183 | 184 | # Visual Studio Code 185 | # Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore 186 | # that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore 187 | # and can be added to the global gitignore or merged into this file. However, if you prefer, 188 | # you could uncomment the following to ignore the entire vscode folder 189 | # .vscode/ 190 | 191 | # Ruff stuff: 192 | .ruff_cache/ 193 | 194 | # PyPI configuration file 195 | .pypirc 196 | 197 | # Cursor 198 | # Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to 199 | # exclude from AI features like autocomplete and code analysis. 
Recommended for sensitive data 200 | # refer to https://docs.cursor.com/context/ignore-files 201 | .cursorignore 202 | .cursorindexingignore 203 | 204 | # Marimo 205 | marimo/_static/ 206 | marimo/_lsp/ 207 | __marimo__/ 208 | 209 | # Streamlit 210 | .streamlit/secrets.toml 211 | -------------------------------------------------------------------------------- /backend/global-mosaic/.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[codz] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py.cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # UV 98 | # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | #uv.lock 102 | 103 | # poetry 104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 105 | # This is especially recommended for binary packages to ensure reproducibility, and is more 106 | # commonly ignored for libraries. 
107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 108 | #poetry.lock 109 | #poetry.toml 110 | 111 | # pdm 112 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 113 | # pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python. 114 | # https://pdm-project.org/en/latest/usage/project/#working-with-version-control 115 | #pdm.lock 116 | #pdm.toml 117 | .pdm-python 118 | .pdm-build/ 119 | 120 | # pixi 121 | # Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control. 122 | #pixi.lock 123 | # Pixi creates a virtual environment in the .pixi directory, just like venv module creates one 124 | # in the .venv directory. It is recommended not to include this directory in version control. 125 | .pixi 126 | 127 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 128 | __pypackages__/ 129 | 130 | # Celery stuff 131 | celerybeat-schedule 132 | celerybeat.pid 133 | 134 | # SageMath parsed files 135 | *.sage.py 136 | 137 | # Environments 138 | .env 139 | .envrc 140 | .venv 141 | env/ 142 | venv/ 143 | ENV/ 144 | env.bak/ 145 | venv.bak/ 146 | 147 | # Spyder project settings 148 | .spyderproject 149 | .spyproject 150 | 151 | # Rope project settings 152 | .ropeproject 153 | 154 | # mkdocs documentation 155 | /site 156 | 157 | # mypy 158 | .mypy_cache/ 159 | .dmypy.json 160 | dmypy.json 161 | 162 | # Pyre type checker 163 | .pyre/ 164 | 165 | # pytype static type analyzer 166 | .pytype/ 167 | 168 | # Cython debug symbols 169 | cython_debug/ 170 | 171 | # PyCharm 172 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 173 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 174 | # and can be added to the global gitignore or merged into this file. 
For a more nuclear 175 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 176 | #.idea/ 177 | 178 | # Abstra 179 | # Abstra is an AI-powered process automation framework. 180 | # Ignore directories containing user credentials, local state, and settings. 181 | # Learn more at https://abstra.io/docs 182 | .abstra/ 183 | 184 | # Visual Studio Code 185 | # Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore 186 | # that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore 187 | # and can be added to the global gitignore or merged into this file. However, if you prefer, 188 | # you could uncomment the following to ignore the entire vscode folder 189 | # .vscode/ 190 | 191 | # Ruff stuff: 192 | .ruff_cache/ 193 | 194 | # PyPI configuration file 195 | .pypirc 196 | 197 | # Cursor 198 | # Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to 199 | # exclude from AI features like autocomplete and code analysis. Recommended for sensitive data 200 | # refer to https://docs.cursor.com/context/ignore-files 201 | .cursorignore 202 | .cursorindexingignore 203 | 204 | # Marimo 205 | marimo/_static/ 206 | marimo/_lsp/ 207 | __marimo__/ 208 | 209 | # Streamlit 210 | .streamlit/secrets.toml 211 | 212 | # Tile output 213 | output/global-mosaic.pmtiles 214 | output/global_mosaic_error.log 215 | output/global-coverage.pmtiles 216 | output/global-coverage.pmtiles-journal 217 | output/global-coverage.geojson 218 | 219 | # pgstac dump 220 | **/**/*.dump 221 | -------------------------------------------------------------------------------- /Justfile: -------------------------------------------------------------------------------- 1 | # Copyright (c) Humanitarian OpenStreetMap Team 2 | # 3 | # This file is part of OpenAerialMap. 
4 | # 5 | # OpenAerialMap is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # OpenAerialMap is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with OpenAerialMap. If not, see <https://www.gnu.org/licenses/>. 17 | # 18 | 19 | set dotenv-load 20 | 21 | mod prep 'recipes/prep/Justfile' 22 | 23 | # List available commands 24 | [private] 25 | default: 26 | just help 27 | 28 | # List available commands 29 | help: 30 | just --justfile {{justfile()}} --list 31 | 32 | # Generate the .env file from scratch, using .env.example and substitutions 33 | [no-cd] 34 | generate-dotenv branch="main": 35 | #!/usr/bin/env sh 36 | set -e 37 | 38 | # By default we deploy from 'main' branch, but can be overridden 39 | 40 | cd {{justfile_directory()}} 41 | 42 | # Re-export .env to the environment, with cleaned variables 43 | if [ -f .env ]; then 44 | just _echo-yellow "'.env' file already exists. Skipping dotenv generation."
45 | echo ".env file content:" 46 | cat .env 47 | exit 0 48 | fi 49 | 50 | just manage _install_envsubst 51 | 52 | # Generate a .env file from .env.example, substituting values from environment 53 | ./envsubst -i .env.example | grep -vE '^\s*#|^\s*$' > .env 54 | echo ".env file content:" 55 | cat .env 56 | 57 | # Build the frontend container image 58 | build-frontend branch="main": 59 | #!/usr/bin/env bash 60 | # Note we set -a here to export sourced vars 61 | set -euoa pipefail 62 | 63 | just generate-dotenv 64 | source .env 65 | 66 | GIT_BRANCH="{{ branch }}" 67 | docker build ./frontend --tag "ghcr.io/hotosm/openaerialmap/frontend:${GIT_BRANCH}" \ 68 | --build-arg VITE_STAC_API_URL=${VITE_STAC_API_URL} \ 69 | --build-arg VITE_STAC_API_PATHNAME=${VITE_STAC_API_PATHNAME} \ 70 | --build-arg VITE_STAC_TILER_PATHNAME=${VITE_STAC_TILER_PATHNAME} \ 71 | --build-arg VITE_STAC_ITEMS_LIMIT=${VITE_STAC_ITEMS_LIMIT} 72 | 73 | # Get temp AWS credentials using CI/CD OIDC 74 | get-aws-creds: 75 | #!/usr/bin/env bash 76 | # NOTE this should be moved into a generic remote justfile 77 | # https://just.systems/man/en/remote-justfiles.html 78 | # It essentially just replicates aws-actions/configure-aws-credentials@v4 79 | 80 | set -euo pipefail 81 | 82 | just prep _curl 83 | 84 | # NOTE this part is specific to Github 85 | # Gitlab has a slightly simpler config: 86 | # https://docs.gitlab.com/ci/cloud_services/aws/ 87 | echo "Requesting GitHub OIDC token..." 88 | OIDC_TOKEN=$(curl -s -H "Authorization: bearer $ACTIONS_ID_TOKEN_REQUEST_TOKEN" \ 89 | "$ACTIONS_ID_TOKEN_REQUEST_URL&audience=sts.amazonaws.com" \ 90 | | sed -E 's/.*"value":"([^"]+)".*/\1/') 91 | export OIDC_TOKEN 92 | 93 | echo "Requesting AWS credentials..." 
94 | aws_sts_output=$(aws sts assume-role-with-web-identity \ 95 | --role-arn "$AWS_OIDC_ROLE_ARN" \ 96 | --role-session-name "GH-Actions-${GITHUB_RUN_ID:-local}-${GITHUB_RUN_ATTEMPT:-0}" \ 97 | --web-identity-token ${OIDC_TOKEN} \ 98 | --duration-seconds 3600 \ 99 | --query 'Credentials.[AccessKeyId,SecretAccessKey,SessionToken]' \ 100 | --output text) 101 | 102 | # NOTE that env vars cannot be read in future recipes, so this is required. 103 | # NOTE also consider we cannot use multiline heredoc syntax here 104 | export $(printf "AWS_ACCESS_KEY_ID=%s AWS_SECRET_ACCESS_KEY=%s AWS_SESSION_TOKEN=%s" $aws_sts_output) 105 | echo "Writing credentials to .aws.env file for future recipes" 106 | echo "AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID" > .aws.env 107 | echo "AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY" >> .aws.env 108 | echo "AWS_SESSION_TOKEN=$AWS_SESSION_TOKEN" >> .aws.env 109 | 110 | # Deploy the frontend to S3 and CDN (in Github workflow) 111 | deploy-frontend: 112 | #!/usr/bin/env bash 113 | set -euo pipefail 114 | 115 | export GIT_BRANCH=$(git rev-parse --abbrev-ref HEAD) 116 | echo "Current branch: ${GIT_BRANCH}" 117 | 118 | just build-frontend ${GIT_BRANCH} 119 | just get-aws-creds 120 | 121 | echo "Uploading to dist to aws:oam-frontend/${GIT_BRANCH}..." 122 | docker run --rm \ 123 | --entrypoint /bin/sh \ 124 | --env-file .aws.env \ 125 | ghcr.io/hotosm/openaerialmap/frontend:${GIT_BRANCH} \ 126 | -c "rclone config create aws s3 \ 127 | provider=AWS \ 128 | env_auth=true \ 129 | region=${AWS_REGION} \ 130 | && rclone sync ./ aws:oam-frontend/${GIT_BRANCH}" 131 | echo "Upload done." 132 | 133 | echo "Invalidating cloudfront cache..." 
134 | docker run --rm \ 135 | --entrypoint /bin/sh \ 136 | --env-file .aws.env \ 137 | public.ecr.aws/aws-cli/aws-cli:2.28.11 \ 138 | -c " 139 | cf_dist_id=\$(aws cloudfront list-distributions \ 140 | --query 'DistributionList.Items[?contains(Origins.Items[].DomainName, '\''oam-frontend.s3.amazonaws.com'\'')].Id | [0]' \ 141 | --output text) 142 | 143 | echo \"Found cloudfront distribution \$cf_dist_id\" 144 | aws cloudfront create-invalidation --distribution-id \$cf_dist_id --paths \"/${GIT_BRANCH}/*\" 145 | " 146 | echo "Cloudfront config done." 147 | 148 | # Echo to terminal with blue colour 149 | [no-cd] 150 | _echo-blue text: 151 | #!/usr/bin/env sh 152 | printf "\033[0;34m%s\033[0m\n" "{{ text }}" 153 | 154 | # Echo to terminal with yellow colour 155 | [no-cd] 156 | _echo-yellow text: 157 | #!/usr/bin/env sh 158 | printf "\033[0;33m%s\033[0m\n" "{{ text }}" 159 | 160 | # Echo to terminal with red colour 161 | [no-cd] 162 | _echo-red text: 163 | #!/usr/bin/env sh 164 | printf "\033[0;41m%s\033[0m\n" "{{ text }}" 165 | -------------------------------------------------------------------------------- /backend/global-mosaic/scripts/gen_coverage_vector.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Generate simple coverage vector tiles, based on GeoJSON output 4 | from pgSTAC catalogue. 
5 | """ 6 | 7 | import json 8 | import logging 9 | import os 10 | import sys 11 | import subprocess 12 | from pathlib import Path 13 | from typing import Tuple 14 | from psycopg import connect 15 | from minio import Minio 16 | from minio.error import S3Error 17 | 18 | 19 | PG_DSN = os.getenv("PG_DSN") 20 | if not PG_DSN: 21 | PGHOST = os.getenv("PGHOST") 22 | PGUSER = os.getenv("PGUSER") 23 | PGPASSWORD = os.getenv("PGPASSWORD") 24 | PGPORT = int(os.getenv("PGPORT", 5432)) 25 | PGDATABASE = os.getenv("PGDATABASE", "eoapi") 26 | 27 | if not (PGHOST and PGUSER and PGPASSWORD): 28 | raise ValueError("Must set either PG_DSN, or (PGHOST,PGUSER,PGPASSWORD)") 29 | 30 | PG_DSN = f"postgresql://{PGUSER}:{PGPASSWORD}@{PGHOST}:{PGPORT}/{PGDATABASE}" 31 | 32 | COLLECTION = os.getenv("COLLECTION", "openaerialmap") 33 | OUTPUT_GEOJSON = os.getenv("OUTPUT_GEOJSON", "/app/output/global-coverage.geojson") 34 | OUTPUT_PMTILES = os.getenv("OUTPUT_PMTILES", "/app/output/global-coverage.pmtiles") 35 | ZOOM_MIN = int(os.getenv("ZOOM_MIN", "0")) 36 | ZOOM_MAX = int(os.getenv("ZOOM_MAX", "15")) 37 | 38 | TEST_MODE = os.getenv("TEST_MODE", "").lower() in {"true", "1", "yes"} 39 | 40 | BBOX: Tuple[float, float, float, float] = ( 41 | (-20.0, 0.0, 10.0, 30.0) # large test bbox 42 | if TEST_MODE 43 | else (-180.0, -85.05112878, 180.0, 85.05112878) 44 | ) 45 | 46 | 47 | logging.basicConfig( 48 | stream=sys.stdout, 49 | level=os.getenv("LOG_LEVEL", "INFO").upper(), 50 | format="%(asctime)s %(levelname)s: %(message)s", 51 | ) 52 | log = logging.getLogger("gen_mosaic") 53 | 54 | 55 | def get_features() -> None: 56 | """ 57 | Query PgSTAC for imagery features in BBOX and write them as newline-delimited GeoJSON. 58 | 59 | Returns: 60 | None. Writes OUTPUT_GEOJSON file for Tippecanoe ingestion. 
61 | """ 62 | where_bbox = "AND geometry && ST_MakeEnvelope(%s, %s, %s, %s, 4326)" 63 | params = [COLLECTION] + list(BBOX) 64 | 65 | query = f""" 66 | SELECT 67 | id::text AS id, 68 | ST_AsGeoJSON(geometry) AS geom 69 | FROM pgstac.items 70 | WHERE collection = %s 71 | {where_bbox} 72 | ORDER BY (content->>'datetime')::timestamptz DESC; 73 | """ 74 | 75 | log.info(f"Querying PgSTAC for features (bbox={BBOX})...") 76 | row_count = 0 77 | try: 78 | with ( 79 | connect(PG_DSN) as conn, 80 | conn.cursor() as cur, 81 | open(OUTPUT_GEOJSON, "w") as f, 82 | ): 83 | cur.execute(query, params) 84 | for row in cur: 85 | feature_id, geom_json = row 86 | if not geom_json: 87 | continue 88 | try: 89 | geom = json.loads(geom_json) 90 | feature = { 91 | "type": "Feature", 92 | "geometry": geom, 93 | "properties": {"id": feature_id}, 94 | } 95 | f.write(json.dumps(feature)) 96 | f.write("\n") 97 | row_count += 1 98 | except json.JSONDecodeError: 99 | log.warning(f"Invalid geometry for {feature_id}, skipping.") 100 | except Exception as e: 101 | log.error(f"PgSTAC query failed: {e}") 102 | raise 103 | 104 | log.info(f"Wrote {row_count} features to {OUTPUT_GEOJSON}") 105 | 106 | 107 | def geojson_to_pmtiles() -> None: 108 | """ 109 | Use Tippecanoe to generate PMTiles from GeoJSON. 110 | """ 111 | log.info("Generating vector tiles with tippecanoe...") 112 | try: 113 | subprocess.run( 114 | [ 115 | "tippecanoe", 116 | "-o", 117 | OUTPUT_PMTILES, 118 | f"--minimum-zoom={ZOOM_MIN}", 119 | f"--maximum-zoom={ZOOM_MAX}", 120 | "--drop-densest-as-needed", 121 | OUTPUT_GEOJSON, 122 | ], 123 | check=True, 124 | ) 125 | except subprocess.CalledProcessError as e: 126 | log.error(f"Tippecanoe failed with exit code {e.returncode}") 127 | raise 128 | log.info(f"PMTiles written to {OUTPUT_PMTILES}") 129 | 130 | 131 | def upload_to_s3() -> None: 132 | """ 133 | Upload the generated PMTiles to S3 (or S3-compatible) using MinIO client. 
134 | Skips upload if required environment variables are not present. 135 | """ 136 | endpoint = os.getenv("S3_ENDPOINT", "s3.amazonaws.com") 137 | bucket = os.getenv("S3_BUCKET", "oin-hotosm-temp") 138 | access_key = os.getenv("S3_ACCESS_KEY") 139 | secret_key = os.getenv("S3_SECRET_KEY") 140 | region = os.getenv("S3_REGION", "us-east-1") 141 | pmtiles_obj_key = Path(OUTPUT_PMTILES).name 142 | 143 | if not (access_key and secret_key): 144 | log.warning("S3 upload skipped: missing required env vars.") 145 | return 146 | 147 | client = Minio( 148 | endpoint, 149 | access_key=access_key, 150 | secret_key=secret_key, 151 | region=region, 152 | secure=True, 153 | ) 154 | 155 | try: 156 | if not client.bucket_exists(bucket): 157 | log.error(f"Bucket {bucket} does not exist. Exiting upload.") 158 | return 159 | except S3Error as e: 160 | log.error(f"Error checking bucket: {e}") 161 | return 162 | 163 | log.info(f"Uploading {OUTPUT_PMTILES} to s3://{bucket}/{pmtiles_obj_key}") 164 | try: 165 | client.fput_object( 166 | bucket, 167 | pmtiles_obj_key, 168 | OUTPUT_PMTILES, 169 | content_type="application/vnd.pmtiles", 170 | metadata={"x-amz-acl": "public-read"}, 171 | ) 172 | log.info(f"Upload complete: s3://{bucket}/{pmtiles_obj_key}") 173 | except S3Error as e: 174 | log.error(f"S3 upload failed: {e}") 175 | 176 | 177 | if __name__ == "__main__": 178 | log.info(f"Starting global coverage PMTiles generation (TEST_MODE={TEST_MODE})") 179 | 180 | if not Path(OUTPUT_PMTILES).exists(): 181 | get_features() 182 | geojson_to_pmtiles() 183 | else: 184 | log.info(f"{OUTPUT_PMTILES} already exists, skipping generation.") 185 | 186 | upload_to_s3() 187 | -------------------------------------------------------------------------------- /backend/stac-api/scripts/wait-for-it.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # shellcheck disable=SC2260,SC2128,SC2064,SC2206 3 | # Use this script to test if a given TCP 
host/port are available 4 | 5 | ###################################################### 6 | # Copied from https://github.com/vishnubob/wait-for-it 7 | ###################################################### 8 | 9 | WAITFORIT_cmdname=${0##*/} 10 | 11 | echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } 12 | 13 | usage() 14 | { 15 | cat << USAGE >&2 16 | Usage: 17 | $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args] 18 | -h HOST | --host=HOST Host or IP under test 19 | -p PORT | --port=PORT TCP port under test 20 | Alternatively, you specify the host and port as host:port 21 | -s | --strict Only execute subcommand if the test succeeds 22 | -q | --quiet Don't output any status messages 23 | -t TIMEOUT | --timeout=TIMEOUT 24 | Timeout in seconds, zero for no timeout 25 | -- COMMAND ARGS Execute command with args after the test finishes 26 | USAGE 27 | exit 1 28 | } 29 | 30 | wait_for() 31 | { 32 | if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then 33 | echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT \ 34 | seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" 35 | else 36 | echoerr "$WAITFORIT_cmdname: waiting for \ 37 | $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout" 38 | fi 39 | WAITFORIT_start_ts=$(date +%s) 40 | while :; do 41 | if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then 42 | nc -z "$WAITFORIT_HOST" "$WAITFORIT_PORT" 43 | WAITFORIT_result=$? 44 | else 45 | (echo -n > /dev/tcp/"$WAITFORIT_HOST"/"$WAITFORIT_PORT") > \ 46 | /dev/null 2>&1 47 | WAITFORIT_result=$? 
48 | fi 49 | if [[ $WAITFORIT_result -eq 0 ]]; then 50 | WAITFORIT_end_ts=$(date +%s) 51 | echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is \ 52 | available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) \ 53 | seconds" 54 | break 55 | fi 56 | sleep 1 57 | done 58 | return "$WAITFORIT_result" 59 | } 60 | 61 | wait_for_wrapper() 62 | { 63 | # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 64 | if [[ $WAITFORIT_QUIET -eq 1 ]]; then 65 | timeout $WAITFORIT_BUSYTIMEFLAG "$WAITFORIT_TIMEOUT" "$0" \ 66 | --quiet \ 67 | --child \ 68 | --host="$WAITFORIT_HOST" \ 69 | --port="$WAITFORIT_PORT" \ 70 | --timeout="$WAITFORIT_TIMEOUT" & 71 | else 72 | timeout $WAITFORIT_BUSYTIMEFLAG "$WAITFORIT_TIMEOUT" "$0" \ 73 | --child \ 74 | --host="$WAITFORIT_HOST" \ 75 | --port="$WAITFORIT_PORT" \ 76 | --timeout="$WAITFORIT_TIMEOUT" & 77 | fi 78 | WAITFORIT_PID=$! 79 | trap "kill -INT -$WAITFORIT_PID" INT 80 | wait $WAITFORIT_PID 81 | WAITFORIT_RESULT=$? 82 | if [[ $WAITFORIT_RESULT -ne 0 ]]; then 83 | echoerr "$WAITFORIT_cmdname: timeout occurred after waiting \ 84 | $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" 85 | fi 86 | return $WAITFORIT_RESULT 87 | } 88 | 89 | # process arguments 90 | while [[ $# -gt 0 ]]; do 91 | case "$1" in 92 | *:* ) 93 | WAITFORIT_hostport=(${1//:/ }) 94 | WAITFORIT_HOST=${WAITFORIT_hostport[0]} 95 | WAITFORIT_PORT=${WAITFORIT_hostport[1]} 96 | shift 1 97 | ;; 98 | --child) 99 | WAITFORIT_CHILD=1 100 | shift 1 101 | ;; 102 | -q | --quiet) 103 | WAITFORIT_QUIET=1 104 | shift 1 105 | ;; 106 | -s | --strict) 107 | WAITFORIT_STRICT=1 108 | shift 1 109 | ;; 110 | -h) 111 | WAITFORIT_HOST="$2" 112 | if [[ $WAITFORIT_HOST == "" ]]; then 113 | break 114 | fi 115 | shift 2 116 | ;; 117 | --host=*) 118 | WAITFORIT_HOST="${1#*=}" 119 | shift 1 120 | ;; 121 | -p) 122 | WAITFORIT_PORT="$2" 123 | if [[ $WAITFORIT_PORT == "" ]]; then 124 | break 125 | fi 126 | shift 2 127 | ;; 128 | --port=*) 129 | 
WAITFORIT_PORT="${1#*=}" 130 | shift 1 131 | ;; 132 | -t) 133 | WAITFORIT_TIMEOUT="$2" 134 | if [[ $WAITFORIT_TIMEOUT == "" ]]; then 135 | break 136 | fi 137 | shift 2 138 | ;; 139 | --timeout=*) 140 | WAITFORIT_TIMEOUT="${1#*=}" 141 | shift 1 142 | ;; 143 | --) 144 | shift 145 | WAITFORIT_CLI=("$@") 146 | break 147 | ;; 148 | --help) 149 | usage 150 | ;; 151 | *) 152 | echoerr "Unknown argument: $1" 153 | usage 154 | ;; 155 | esac 156 | done 157 | 158 | if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then 159 | echoerr "Error: you need to provide a host and port to test." 160 | usage 161 | fi 162 | 163 | WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15} 164 | WAITFORIT_STRICT=${WAITFORIT_STRICT:-0} 165 | WAITFORIT_CHILD=${WAITFORIT_CHILD:-0} 166 | WAITFORIT_QUIET=${WAITFORIT_QUIET:-0} 167 | 168 | # Check to see if timeout is from busybox? 169 | WAITFORIT_TIMEOUT_PATH=$(type -p timeout) 170 | WAITFORIT_TIMEOUT_PATH=$(realpath "$WAITFORIT_TIMEOUT_PATH" 2>/dev/null \ 171 | || readlink -f "$WAITFORIT_TIMEOUT_PATH") 172 | 173 | WAITFORIT_BUSYTIMEFLAG="" 174 | if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then 175 | WAITFORIT_ISBUSY=1 176 | # Check if busybox timeout uses -t flag 177 | # (recent Alpine versions don't support -t anymore) 178 | if timeout &>/dev/stdout | grep -q -e '-t '; then 179 | WAITFORIT_BUSYTIMEFLAG="-t" 180 | fi 181 | else 182 | WAITFORIT_ISBUSY=0 183 | fi 184 | 185 | if [[ $WAITFORIT_CHILD -gt 0 ]]; then 186 | wait_for 187 | WAITFORIT_RESULT=$? 188 | exit $WAITFORIT_RESULT 189 | else 190 | if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then 191 | wait_for_wrapper 192 | WAITFORIT_RESULT=$? 193 | else 194 | wait_for 195 | WAITFORIT_RESULT=$? 
196 | fi 197 | fi 198 | 199 | if [[ $WAITFORIT_CLI != "" ]]; then 200 | if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then 201 | echoerr "$WAITFORIT_cmdname: strict mode, \ 202 | refusing to execute subprocess" 203 | exit $WAITFORIT_RESULT 204 | fi 205 | exec "${WAITFORIT_CLI[@]}" 206 | else 207 | exit $WAITFORIT_RESULT 208 | fi 209 | -------------------------------------------------------------------------------- /backend/stac-api/app/main.py: -------------------------------------------------------------------------------- 1 | """FastAPI application using PGStac. 2 | 3 | Enables the extensions specified as a comma-delimited list in 4 | the ENABLED_EXTENSIONS environment variable (e.g. `transactions,sort,query`). 5 | If the variable is not set, enables all extensions. 6 | """ 7 | 8 | import os 9 | from contextlib import asynccontextmanager 10 | 11 | from brotli_asgi import BrotliMiddleware 12 | from fastapi import FastAPI 13 | from fastapi.responses import ORJSONResponse 14 | from stac_fastapi.api.app import StacApi 15 | from stac_fastapi.api.middleware import CORSMiddleware, ProxyHeaderMiddleware 16 | from stac_fastapi.api.models import ( 17 | EmptyRequest, 18 | ItemCollectionUri, 19 | create_get_request_model, 20 | create_post_request_model, 21 | create_request_model, 22 | ) 23 | from stac_fastapi.extensions.core import ( 24 | CollectionSearchExtension, 25 | CollectionSearchFilterExtension, 26 | FieldsExtension, 27 | FreeTextExtension, 28 | ItemCollectionFilterExtension, 29 | OffsetPaginationExtension, 30 | SearchFilterExtension, 31 | SortExtension, 32 | TokenPaginationExtension, 33 | TransactionExtension, 34 | ) 35 | from stac_fastapi.extensions.core.fields import FieldsConformanceClasses 36 | from stac_fastapi.extensions.core.free_text import FreeTextConformanceClasses 37 | from stac_fastapi.extensions.core.query import QueryConformanceClasses 38 | from stac_fastapi.extensions.core.sort import SortConformanceClasses 39 | from 
from stac_fastapi.extensions.third_party import BulkTransactionExtension
from starlette.middleware import Middleware

from stac_fastapi.pgstac.core import CoreCrudClient
from stac_fastapi.pgstac.db import close_db_connection, connect_to_db
from stac_fastapi.pgstac.extensions import QueryExtension
from stac_fastapi.pgstac.extensions.filter import FiltersClient
from stac_fastapi.pgstac.transactions import BulkTransactionsClient, TransactionsClient
from stac_fastapi.pgstac.types.search import PgstacSearch

from app.settings import Settings

settings = Settings()

# Extensions wired onto the /search endpoints (GET and POST).
search_extensions_map = {
    "query": QueryExtension(),
    "sort": SortExtension(),
    "fields": FieldsExtension(),
    "filter": SearchFilterExtension(client=FiltersClient()),
    "pagination": TokenPaginationExtension(),
}

# Extensions wired onto the /collections endpoint (collection search).
cs_extensions_map = {
    "query": QueryExtension(conformance_classes=[QueryConformanceClasses.COLLECTIONS]),
    "sort": SortExtension(conformance_classes=[SortConformanceClasses.COLLECTIONS]),
    "fields": FieldsExtension(
        conformance_classes=[FieldsConformanceClasses.COLLECTIONS]
    ),
    "filter": CollectionSearchFilterExtension(client=FiltersClient()),
    "free_text": FreeTextExtension(
        conformance_classes=[FreeTextConformanceClasses.COLLECTIONS],
    ),
    "pagination": OffsetPaginationExtension(),
}

# Extensions wired onto /collections/{collectionId}/items.
itm_col_extensions_map = {
    "query": QueryExtension(
        conformance_classes=[QueryConformanceClasses.ITEMS],
    ),
    "sort": SortExtension(
        conformance_classes=[SortConformanceClasses.ITEMS],
    ),
    "fields": FieldsExtension(conformance_classes=[FieldsConformanceClasses.ITEMS]),
    "filter": ItemCollectionFilterExtension(client=FiltersClient()),
    "pagination": TokenPaginationExtension(),
}

# All extension keys above are enabled, plus collection search itself.
enabled_extensions = {
    *search_extensions_map.keys(),
    *cs_extensions_map.keys(),
    *itm_col_extensions_map.keys(),
    "collection_search",
}

application_extensions = []

# Write access (single and bulk transactions) is opt-in via the
# ENABLE_TRANSACTIONS_EXTENSIONS environment variable.
if os.environ.get("ENABLE_TRANSACTIONS_EXTENSIONS", "").lower() in ["yes", "true", "1"]:
    application_extensions.append(
        TransactionExtension(
            client=TransactionsClient(),
            settings=settings,
            response_class=ORJSONResponse,
        ),
    )
    application_extensions.append(
        BulkTransactionExtension(client=BulkTransactionsClient()),
    )

# Request models for /search.
search_extensions = [
    extension
    for key, extension in search_extensions_map.items()
    if key in enabled_extensions
]
post_request_model = create_post_request_model(
    search_extensions, base_model=PgstacSearch
)
get_request_model = create_get_request_model(search_extensions)
application_extensions.extend(search_extensions)

# Request model for /collections/{collectionId}/items.
items_get_request_model = ItemCollectionUri
itm_col_extensions = [
    extension
    for key, extension in itm_col_extensions_map.items()
    if key in enabled_extensions
]
if itm_col_extensions:
    items_get_request_model = create_request_model(
        model_name="ItemCollectionUri",
        base_model=ItemCollectionUri,
        extensions=itm_col_extensions,
        request_type="GET",
    )
    application_extensions.extend(itm_col_extensions)

# Request model for /collections.
collections_get_request_model = EmptyRequest
if "collection_search" in enabled_extensions:
    cs_extensions = [
        extension
        for key, extension in cs_extensions_map.items()
        if key in enabled_extensions
    ]
    collection_search_extension = CollectionSearchExtension.from_extensions(
        cs_extensions
    )
    collections_get_request_model = collection_search_extension.GET
    application_extensions.append(collection_search_extension)


@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan: open the pgstac pool on startup, close it on shutdown."""
    await connect_to_db(app)
    yield
    await close_db_connection(app)


# Assemble the STAC API application from the models and extensions above.
api = StacApi(
    app=FastAPI(
        openapi_url=settings.openapi_url,
        docs_url=settings.docs_url,
        redoc_url=None,
        root_path=settings.root_path,
        title=settings.stac_fastapi_title,
        version=settings.stac_fastapi_version,
        description=settings.stac_fastapi_description,
        lifespan=lifespan,
    ),
    settings=settings,
    extensions=application_extensions,
    client=CoreCrudClient(pgstac_search_model=post_request_model),
    response_class=ORJSONResponse,
    items_get_request_model=items_get_request_model,
    search_get_request_model=get_request_model,
    search_post_request_model=post_request_model,
    collections_get_request_model=collections_get_request_model,
    middlewares=[
        Middleware(BrotliMiddleware),
        Middleware(ProxyHeaderMiddleware),
        Middleware(
            CORSMiddleware,
            allow_origins=settings.cors_origins,
            allow_methods=settings.cors_methods,
        ),
    ],
)
app = api.app


def run():
    """Run app from command line using uvicorn if available.

    Raises:
        RuntimeError: if uvicorn is not installed.
    """
    try:
        import uvicorn

        uvicorn.run(
            "app.main:app",
            host=settings.app_host,
            port=settings.app_port,
            log_level="info",
            reload=settings.reload,
            root_path=os.getenv("UVICORN_ROOT_PATH", ""),
        )
    except ImportError as e:
        raise RuntimeError("Uvicorn must be installed in order to use command") from e


if __name__ == "__main__":
    run()
= require('./package.json'); 2 | 3 | /* 4 | * For a detailed explanation regarding each configuration property, visit: 5 | * https://jestjs.io/docs/configuration 6 | */ 7 | 8 | module.exports = { 9 | // All imported modules in your tests should be mocked automatically 10 | // automock: false, 11 | 12 | // Stop running tests after `n` failures 13 | // bail: 0, 14 | 15 | // The directory where Jest should store its cached dependency information 16 | // cacheDirectory: "/private/var/folders/bz/vry80ww15sg533jytwfj7fdc0000gn/T/jest_dx", 17 | 18 | // Automatically clear mock calls, instances and results before every test 19 | // clearMocks: false, 20 | 21 | // Indicates whether the coverage information should be collected while executing the test 22 | // collectCoverage: false, 23 | 24 | // An array of glob patterns indicating a set of files for which coverage information should be collected 25 | // collectCoverageFrom: undefined, 26 | 27 | // The directory where Jest should output its coverage files 28 | // coverageDirectory: undefined, 29 | 30 | // An array of regexp pattern strings used to skip coverage collection 31 | // coveragePathIgnorePatterns: [ 32 | // "/node_modules/" 33 | // ], 34 | 35 | // Indicates which provider should be used to instrument code for coverage 36 | // coverageProvider: "babel", 37 | 38 | // A list of reporter names that Jest uses when writing coverage reports 39 | // coverageReporters: [ 40 | // "json", 41 | // "text", 42 | // "lcov", 43 | // "clover" 44 | // ], 45 | 46 | // An object that configures minimum threshold enforcement for coverage results 47 | // coverageThreshold: undefined, 48 | 49 | // A path to a custom dependency extractor 50 | // dependencyExtractor: undefined, 51 | 52 | // Make calling deprecated APIs throw helpful error messages 53 | // errorOnDeprecated: false, 54 | 55 | // Force coverage collection from ignored files using an array of glob patterns 56 | // forceCoverageMatch: [], 57 | 58 | // A path to a module which 
exports an async function that is triggered once before all test suites 59 | // globalSetup: undefined, 60 | 61 | // A path to a module which exports an async function that is triggered once after all test suites 62 | // globalTeardown: undefined, 63 | 64 | // A set of global variables that need to be available in all test environments 65 | globals: { 66 | NODE_ENV: 'test' 67 | }, 68 | 69 | // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers. 70 | // maxWorkers: "50%", 71 | 72 | // An array of directory names to be searched recursively up from the requiring module's location 73 | moduleDirectories: ['node_modules'], 74 | 75 | // An array of file extensions your modules use 76 | moduleFileExtensions: ['js', 'jsx', 'ts', 'tsx', 'json', 'node', 'css'], 77 | 78 | // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module 79 | // This has to be kept in sync with the alias field of package.json 80 | moduleNameMapper: { 81 | // To simplify keeping the alias in sync the code below converts the aliases 82 | // defined in the package.json to module mappings: 83 | // From: 84 | // "$styles": "~/app/scripts/styles" 85 | // To: 86 | // '^\\$styles(.*)$': '/app/scripts/styles$1' 87 | ...Object.entries(pkg.alias ?? {}).reduce((acc, [key, value]) => { 88 | return value.startsWith('~/') 89 | ? 
{ 90 | ...acc, 91 | [`^\\${key}(.*)$`]: `${value.substring(1)}$1` 92 | } 93 | : acc; 94 | }, {}), 95 | '.+\\.(css|styl|less|sass|scss)$': 96 | '/node_modules/jest-css-modules-transform' 97 | }, 98 | 99 | // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader 100 | // modulePathIgnorePatterns: [], 101 | 102 | // Activates notifications for test results 103 | // notify: false, 104 | 105 | // An enum that specifies notification mode. Requires { notify: true } 106 | // notifyMode: "failure-change", 107 | 108 | // A preset that is used as a base for Jest's configuration 109 | preset: 'ts-jest', 110 | 111 | // Run tests from one or more projects 112 | // projects: undefined, 113 | 114 | // Use this configuration option to add custom reporters to Jest 115 | // reporters: undefined, 116 | 117 | // Automatically reset mock state before every test 118 | // resetMocks: false, 119 | 120 | // Reset the module registry before running each individual test 121 | // resetModules: false, 122 | 123 | // A path to a custom resolver 124 | // resolver: undefined, 125 | 126 | // Automatically restore mock state and implementation before every test 127 | // restoreMocks: false, 128 | 129 | // The root directory that Jest should scan for tests and modules within 130 | // rootDir: undefined, 131 | 132 | // A list of paths to directories that Jest should use to search for files in 133 | // roots: [ 134 | // "" 135 | // ], 136 | 137 | // Allows you to use a custom runner instead of Jest's default test runner 138 | // runner: "jest-runner", 139 | 140 | // The paths to modules that run some code to configure or set up the testing environment before each test 141 | // setupFiles: [], 142 | 143 | // A list of paths to modules that run some code to configure or set up the testing framework before each test 144 | // setupFilesAfterEnv: [], 145 | 146 | // The number of seconds after which a test is considered as slow and reported as 
such in the results. 147 | // slowTestThreshold: 5, 148 | 149 | // A list of paths to snapshot serializer modules Jest should use for snapshot testing 150 | // snapshotSerializers: [], 151 | 152 | // The test environment that will be used for testing 153 | testEnvironment: 'jsdom', 154 | 155 | // Options that will be passed to the testEnvironment 156 | // testEnvironmentOptions: {}, 157 | 158 | // Adds a location field to test results 159 | // testLocationInResults: false, 160 | 161 | // The glob patterns Jest uses to detect test files 162 | // testMatch: [ 163 | // "**/__tests__/**/*.[jt]s?(x)", 164 | // "**/?(*.)+(spec|test).[tj]s?(x)" 165 | // ], 166 | 167 | // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped 168 | // testPathIgnorePatterns: [ 169 | // "/node_modules/" 170 | // ], 171 | 172 | // The regexp pattern or array of patterns that Jest uses to detect test files 173 | // testRegex: [], 174 | 175 | // This option allows the use of a custom results processor 176 | // testResultsProcessor: undefined, 177 | 178 | // This option allows use of a custom test runner 179 | // testRunner: "jest-circus/runner", 180 | 181 | // This option sets the URL for the jsdom environment. 
It is reflected in properties such as location.href 182 | // testURL: "http://localhost", 183 | 184 | // Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout" 185 | // timers: "real", 186 | 187 | // A map from regular expressions to paths to transformers 188 | transform: { 189 | '^.+\\.(js|jsx)$': 'babel-jest', 190 | '^.+\\.(ts|tsx)?$': 'ts-jest' 191 | }, 192 | 193 | // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation 194 | // transformIgnorePatterns: [ 195 | // "/node_modules/", 196 | // "\\.pnp\\.[^\\/]+$" 197 | // ], 198 | 199 | // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them 200 | // unmockedModulePathPatterns: undefined, 201 | 202 | // Indicates whether each individual test should be reported during the run 203 | verbose: true 204 | 205 | // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode 206 | // watchPathIgnorePatterns: [], 207 | 208 | // Whether to use watchman for file crawling 209 | // watchman: true, 210 | }; 211 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 |

3 | 4 | HOTOSM Logo 5 | 6 |

7 | 8 |
9 |

OpenAerialMap

10 |

OpenAerialMap is an open service to provide access to a commons of openly licensed imagery and map layer services.

11 | 12 | Release Version 13 | 14 |
15 | 16 |
17 | 18 | 19 |
20 | 21 | | **CI/CD** | | [![Deploy](https://github.com/hotosm/openaerialmap/actions/workflows/deploy.yml/badge.svg?branch=main)](https://github.com/hotosm/openaerialmap/actions/workflows/deploy.yml?query=branch%3Amain) | 22 | | :--- | :--- | :--- | 23 | | **Tech Stack** | | ![React](https://img.shields.io/badge/react-%2320232a.svg?style=for-the-badge&logo=react&logoColor=%2361DAFB) ![Postgres](https://img.shields.io/badge/postgres-%23316192.svg?style=for-the-badge&logo=postgresql&logoColor=white) ![Kubernetes](https://img.shields.io/badge/kubernetes-%23326ce5.svg?style=for-the-badge&logo=kubernetes&logoColor=white) ![Docker](https://img.shields.io/badge/docker-%230db7ed.svg?style=for-the-badge&logo=docker&logoColor=white) | 24 | | **Code Style** | | [![Backend Style](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/format.json&labelColor=202235)](https://github.com/astral-sh/ruff) [![Frontend Style](https://img.shields.io/badge/code%20style-prettier-F7B93E?logo=Prettier)](https://github.com/prettier/prettier) [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/hotosm/openaerialmap/main.svg)](https://results.pre-commit.ci/latest/github/hotosm/openaerialmap/main) | 25 | | **Community** | | [![Slack](https://img.shields.io/badge/Slack-Join%20the%20community!-d63f3f?style=for-the-badge&logo=slack&logoColor=d63f3f)](https://slack.hotosm.org) [![All Contributors](https://img.shields.io/github/contributors/hotosm/openaerialmap?logo=github)](#contributors-) | 26 | | **Other Info** | | [![docs](https://github.com/hotosm/openaerialmap/blob/main/docs/images/docs_badge.svg?raw=true)](https://docs.imagery.hotosm.org/) [![license-code](https://img.shields.io/github/license/hotosm/openaerialmap.svg)](https://github.com/hotosm/openaerialmap/blob/main/LICENSE.md) | 27 | 28 |
29 | 30 | --- 31 | 32 | 33 | 34 | 35 | A revamp of OpenAerialMap, originally developed back in the 2010's. 36 | 37 | ## Components 38 | 39 | - Backend 40 | - [STAC API][4] deployment of eoAPI. 41 | - [STAC Extension][3] for OAM metadata requirements, data ingestion. 42 | - New Frontend: Hosted in this repo. 43 | - Old Frontend: (currently used as 44 | frontend + uploader) 45 | - Old API: (currently used for login / upload) 46 | 47 | ### Frontend Parts 48 | 49 | The frontend prototype was developed as part of the revamp deliverables. 50 | 51 | The idea has since morphed into [stac-map](https://github.com/developmentseed/stac-map). 52 | We should probably migrate to that and consolidate efforts within the community. 53 | 54 | Main parts: 55 | 56 | - Main OpenAerialMap landing page, with links to different parts and docs / info. 57 | - Documentation site with tutorials etc. 58 | - The uploader site. This should be HOT themed, with shared auth and consistent 59 | look / style. 60 | - `stac-browser` for a catalog search from the backend STAC. 61 | - `stac-map` to display the global coverage pmtiles layer (giving indication of 62 | where imagery is currently present), plus map-based search of the STAC once 63 | the user zooms into their area of interest (+ filtering based on various 64 | criteria). 65 | 66 | ## Contributing 👍🎉 67 | 68 | We would really welcome contributions for: 69 | 70 | - Backend Python development 71 | - Frontend Typescript development 72 | - Documentation writers 73 | - UI / UX designers 74 | - Testers! 75 | 76 | Please take a look at our [Documentation][1] and 77 | [contributor guidance][2] for more details! 78 | 79 | Reach out to us if any questions! 80 | 81 | ## Roadmap 82 | 83 | 84 | | Status | Feature | Description | Version | Effort (person-months) | 85 | |:------:|:-------|:------------|:-------:|:----------------------:| 86 | | ✅ | New OAM backend based on STAC | Core backend using pgSTAC, FastAPI STAC endpoints, and TiTiler integration. 
| v2.0-alpha | 1.5 | 87 | | ✅ | Kubernetes-based deployment of eoAPI for OAM STAC | A scalable, open-source infrastructure to support the growing volume of imagery hosted and indexed in OAM. | v2.0-alpha | 1.0 | 88 | | ✅ | STAC extension for OAM and metadata ingested from old API | STAC is the industry standard to describe geospatial information - including imagery - so it can be more easily indexed, discovered, and worked with. This extension aligns legacy OAM metadata with STAC. | v2.0-alpha | 1.5 | 89 | | ✅ | Revamped global mosaic | The global tiled mosaic of OAM imagery (previously available from Kontur) has been redesigned to use a combined approach: visualize footprints at lower zoom levels, and dynamically switch to full-resolution imagery at higher zoom levels. | v2.0-alpha | 0.5 | 90 | | ✅ | Prototype frontend based on STAC | This initial prototype lays the foundation for the new frontend, enabling rich interactions with available imagery, dynamic filtering, advanced search, and an overall modern user experience on the OAM platform. | v2.0-alpha | 1.0 | 91 | | ✅ | Documentation site | Public docs with setup, endpoints, and usage guides for API, mosaic/TMS, and frontend. | v2.0-alpha | 0.5 | 92 | | 🔄 | New frontend feature parity with old frontend | Features from the old Node.js frontend are being implemented in the new OAM Browser to ensure continuity in user experience and functionality. | v2.0 | 2.0 | 93 | | 📅 | Improvements to the STAC catalog search | Allow users to search across the full STAC metadata, beyond the basic set of elements currently supported in OAM. | v2.1 | 1.5 | 94 | | 📅 | Preset + advanced filtering | Improve user experience and efficiency by creating preset filters (e.g. all imagery for the selected AOI collected in the last week) and advanced filtering to find specific imagery. 
| v2.1 | 1.5 | 95 | | 📅 | Migrate frontend to stac-map | Move the UI to the community stac-map component (OAM theme) to reduce maintenance while keeping feature parity. | v2.1 | 1.5 | 96 | | 📅 | Cross-catalog search and display | Develop automations to harvest external STAC catalog metadata and cache previews, enabling faster, seamless display of available imagery in a unified OAM Browser interface. | v2.2 | 1.0 | 97 | | 📅 | Better visualization of imagery | Improve how imagery distribution and density are visualized in the OAM Browser, so users can quickly see what is available and what the imagery looks like before downloading. | v2.2 | 0.5 | 98 | | 📅 | Dynamic tile creation | Provide dynamic Tile Map Service (TMS) generated on the fly using TiTiler, so imagery can be easily used in JOSM/iD and other mapping software. | v2.2 | 2.0 | 99 | | 📅 | New user management and API | Create a system for user accounts, allowing drone pilots and satellite providers to log in (via OSM OAuth and Google), manage the imagery they have uploaded (delete, rename, etc.), and see contribution statistics. | v2.3 | 4.0 | 100 | | 📅 | New uploader API & UI | Develop an efficient web application that allows users to upload very large imagery files from their computer or from cloud services like Google Drive or Dropbox. This is critical to remove contribution barriers, since many imagery files are too big for the current uploader. | v2.3 | 4.0 | 101 | | 📅 | Catalog expansion | Add additional STAC catalog ingestion workflows by engaging more providers, and create ingestion processes to “map” publicly available STACs to the OAM metadata schema. This will significantly expand the amount of imagery available through OAM’s unified discovery interface. | v2.4 | 2.0 | 102 | | 📅 | Integration with ODM | Develop a plugin for OpenDroneMap to allow drone pilots to publish imagery directly to OAM (without having to download the GeoTIFF and manually upload it). 
| v2.4 | 2.0 | 103 | | 📅 | Imagery and user statistics | Provide rich user and data statistics to foster the open imagery community and more clearly visualize growth and usage over time. | v2.4 | 2.0 | 104 | | 📅 | Support for multispectral and non-optical imagery | Allow users to upload more advanced imagery formats and non-optical data that can be rendered and visualized alongside common RGB imagery. | v2.5 | 2.0 | 105 | | 📅 | Support for DEMs | Add capabilities to upload Digital Elevation Models (DEMs) and 3D point clouds - common byproducts of drone mapping - that can be used for risk modeling and humanitarian mapping (e.g. DTMs for flood modeling). | v2.5 | 2.0 | 106 | 107 | 108 | [1]: https://hotosm.github.io/openaerialmap 109 | [2]: https://github.com/hotosm/openaerialmap/blob/main/CONTRIBUTING.md 110 | [3]: https://github.com/hotosm/stactools-hotosm 111 | [4]: https://github.com/hotosm/k8s-infra/tree/main/kubernetes/helm 112 | -------------------------------------------------------------------------------- /frontend/app/components/map.tsx: -------------------------------------------------------------------------------- 1 | import { RASTER_API_PATH, useStacItems } from '$hooks/useStacCatalog'; 2 | import { Feature, FeatureCollection, Geometry } from 'geojson'; 3 | import { Protocol as PMTilesProtocol } from 'pmtiles'; 4 | import { schemeSet3 } from 'd3-scale-chromatic'; 5 | import maplibregl, { 6 | GeoJSONSource, 7 | LngLatBounds, 8 | Map, 9 | Marker, 10 | RasterTileSource 11 | } from 'maplibre-gl'; 12 | import 'maplibre-gl/dist/maplibre-gl.css'; 13 | import { useEffect, useRef } from 'react'; 14 | import { StacItem } from 'stac-ts'; 15 | import { useStac } from '../context/StacContext'; 16 | import { StacFeatureCollection } from '../types/stac'; 17 | 18 | interface MapComponentProps { 19 | centerCoordinates?: [number, number]; 20 | containerId?: string; 21 | features?: Feature[]; 22 | zoom?: number; 23 | onSelect?: (itemId: string) => void; 24 | } 25 | 26 | 
export default function MapComponent({ 27 | centerCoordinates = [0, 0], 28 | containerId = 'map', 29 | features, 30 | zoom = 1, 31 | onSelect 32 | }: MapComponentProps) { 33 | const { selectedCollection, filters, setSelectedItem, setBbox, bbox } = 34 | useStac(); 35 | const map = useRef(null); 36 | const markersRef = useRef([]); 37 | const { data: stacItems, isLoading } = useStacItems( 38 | selectedCollection, 39 | filters, 40 | bbox 41 | ); 42 | const fillOpacity = 0.7; 43 | const fillHighlightOpacity = 0.4; 44 | 45 | useEffect(() => { 46 | const protocol = new PMTilesProtocol(); 47 | maplibregl.addProtocol('pmtiles', protocol.tile); 48 | 49 | if (!map.current) { 50 | map.current = new Map({ 51 | container: containerId, 52 | style: { 53 | version: 8, 54 | sources: { 55 | 'osm-tiles': { 56 | type: 'raster', 57 | tiles: ['https://tile.openstreetmap.org/{z}/{x}/{y}.png'], 58 | tileSize: 256, 59 | attribution: '© OpenStreetMap contributors' 60 | }, 61 | 'global-coverage': { 62 | type: 'vector', 63 | url: 'pmtiles://https://s3.amazonaws.com/oin-hotosm-temp/global-coverage.pmtiles' 64 | } 65 | }, 66 | 67 | layers: [ 68 | { 69 | id: 'osm-tiles', 70 | type: 'raster', 71 | source: 'osm-tiles', 72 | minzoom: 0, 73 | maxzoom: 19 74 | }, 75 | { 76 | id: 'global-coverage', 77 | source: 'global-coverage', 78 | 'source-layer': 'globalcoverage', 79 | minzoom: 0, 80 | maxzoom: 15, 81 | type: 'fill', 82 | paint: { 83 | // https://d3js.org/d3-scale-chromatic/categorical 84 | 'fill-color': schemeSet3[3], 85 | 'fill-opacity': [ 86 | 'case', 87 | ['boolean', ['feature-state', 'hover'], false], 88 | fillHighlightOpacity, 89 | fillOpacity 90 | ] 91 | }, 92 | filter: ['==', ['geometry-type'], 'Polygon'] 93 | } 94 | ] 95 | }, 96 | center: centerCoordinates, 97 | zoom: zoom 98 | }); 99 | 100 | map.current.on('load', () => { 101 | // Add collection mosaic source and layer 102 | if (!map.current?.getSource('stac-collection-data')) { 103 | // Add the raster source first 104 | 
map.current?.addSource('stac-collection-data', { 105 | type: 'raster', 106 | tiles: [], 107 | tileSize: 256, 108 | minzoom: 10, 109 | maxzoom: 22 110 | }); 111 | 112 | map.current?.addLayer({ 113 | id: 'stac-collection-layer', 114 | type: 'raster', 115 | source: 'stac-collection-data', 116 | paint: { 117 | 'raster-opacity': 1 118 | } 119 | }); 120 | 121 | // Then add STAC items source and layers 122 | map.current?.addSource('stac-items-data', { 123 | type: 'geojson', 124 | data: { 125 | type: 'FeatureCollection', 126 | features: [] 127 | } 128 | }); 129 | 130 | // Add all items layer (base layer) 131 | map.current?.addLayer({ 132 | id: 'stac-items-layer', 133 | type: 'fill', 134 | source: 'stac-items-data', 135 | paint: { 136 | 'fill-color': '#000', 137 | 'fill-opacity': 0.25, 138 | 'fill-outline-color': '#fff', 139 | 'fill-antialias': true 140 | } 141 | }); 142 | 143 | // Add selected items layer (highlighted on top) 144 | map.current?.addLayer({ 145 | id: 'selected-items-layer', 146 | type: 'fill', 147 | source: 'stac-items-data', 148 | paint: { 149 | 'fill-color': '#b30000', 150 | 'fill-opacity': 0.4, 151 | 'fill-outline-color': '#b30000', 152 | 'fill-antialias': true 153 | }, 154 | filter: ['in', ['get', 'id'], ['literal', []]] 155 | }); 156 | 157 | map.current?.on('mouseenter', 'stac-items-layer', () => { 158 | if (map.current) { 159 | map.current.getCanvas().style.cursor = 'pointer'; 160 | } 161 | }); 162 | 163 | map.current?.on('mouseleave', 'stac-items-layer', () => { 164 | if (map.current) { 165 | map.current.getCanvas().style.cursor = ''; 166 | } 167 | }); 168 | 169 | map.current?.on('moveend', () => { 170 | if (map.current) { 171 | const bounds = map.current.getBounds(); 172 | setBbox(bounds.toArray().flat()); 173 | } 174 | }); 175 | } 176 | }); 177 | } 178 | return () => { 179 | maplibregl.removeProtocol('pmtiles'); 180 | }; 181 | }, [ 182 | features, 183 | centerCoordinates, 184 | containerId, 185 | zoom, 186 | onSelect, 187 | selectedCollection 188 | 
]); 189 | 190 | useEffect(() => { 191 | // Clean up markers when component unmounts 192 | return () => { 193 | markersRef.current.forEach((marker) => marker.remove()); 194 | markersRef.current = []; 195 | }; 196 | }, []); 197 | 198 | // Update the stac items data and add markers 199 | useEffect(() => { 200 | if ( 201 | map.current && 202 | selectedCollection && 203 | !isLoading && 204 | stacItems && 205 | map.current.getSource('stac-items-data') 206 | ) { 207 | function prepareStacItemsForMapLibre( 208 | stacItems: StacFeatureCollection 209 | ): FeatureCollection { 210 | return { 211 | type: 'FeatureCollection', 212 | features: stacItems.features 213 | .filter( 214 | ( 215 | feature 216 | ): feature is StacItem & { 217 | geometry: NonNullable; 218 | } => feature.geometry !== null 219 | ) 220 | .map((feature) => ({ 221 | type: 'Feature', 222 | geometry: feature.geometry as Geometry, 223 | properties: { 224 | ...feature.properties, 225 | id: feature.id 226 | }, 227 | id: feature.id 228 | })) 229 | }; 230 | } 231 | 232 | // Update the GeoJSON source for the fill layers 233 | const preparedData = prepareStacItemsForMapLibre( 234 | stacItems 235 | ) as FeatureCollection; 236 | (map.current.getSource('stac-items-data') as GeoJSONSource).setData( 237 | preparedData 238 | ); 239 | map.current.triggerRepaint(); 240 | 241 | // First, clear any existing markers 242 | markersRef.current.forEach((marker) => marker.remove()); 243 | markersRef.current = []; 244 | 245 | // Add new markers for each feature 246 | preparedData.features.forEach((feature) => { 247 | if (feature.geometry.type === 'Point') { 248 | // For Point geometries, use the coordinates directly 249 | const [lng, lat] = feature.geometry.coordinates; 250 | const marker = new Marker({ color: '#3388ff' }) 251 | .setLngLat([lng, lat]) 252 | .addTo(map.current!); 253 | 254 | // Add click handler to zoom to this point 255 | const markerElement = marker.getElement(); 256 | markerElement.style.cursor = 'pointer'; 257 | 258 
| markerElement.addEventListener('click', () => { 259 | // For point geometries, fly to the point location with zoom 260 | map.current?.flyTo({ 261 | center: [lng, lat], 262 | zoom: 14, 263 | speed: 0.8, 264 | essential: true 265 | }); 266 | 267 | // Select this item in the context 268 | if (feature.id) { 269 | setSelectedItem(feature.id.toString()); 270 | 271 | // Also call the onSelect prop if provided 272 | if (onSelect) { 273 | onSelect(feature.id.toString()); 274 | } 275 | } 276 | }); 277 | 278 | markersRef.current.push(marker); 279 | } else if ( 280 | feature.geometry.type === 'Polygon' || 281 | feature.geometry.type === 'MultiPolygon' 282 | ) { 283 | // For polygons, calculate the centroid 284 | const bounds = new LngLatBounds(); 285 | 286 | if (feature.geometry.type === 'Polygon') { 287 | feature.geometry.coordinates[0].forEach((coord) => { 288 | bounds.extend([coord[0], coord[1]]); 289 | }); 290 | } else { 291 | // Handle MultiPolygon 292 | feature.geometry.coordinates.forEach((polygon) => { 293 | polygon[0].forEach((coord) => { 294 | bounds.extend([coord[0], coord[1]]); 295 | }); 296 | }); 297 | } 298 | 299 | const center = bounds.getCenter(); 300 | const marker = new Marker({ color: '#3388ff' }) 301 | .setLngLat(center) 302 | .addTo(map.current!); 303 | 304 | // Add click handler to zoom to this polygon's bounds 305 | const markerElement = marker.getElement(); 306 | markerElement.style.cursor = 'pointer'; 307 | 308 | markerElement.addEventListener('click', () => { 309 | // For polygon geometries, fit to the bounds 310 | map.current?.fitBounds(bounds, { 311 | padding: 100, 312 | maxZoom: 16 313 | }); 314 | 315 | // Select this item in the context 316 | if (feature.id) { 317 | setSelectedItem(feature.id.toString()); 318 | } 319 | }); 320 | 321 | markersRef.current.push(marker); 322 | } 323 | }); 324 | } 325 | }, [stacItems, isLoading, selectedCollection, setSelectedItem, onSelect]); 326 | 327 | // Update mosaic raster data when collection changes 328 | 
useEffect(() => { 329 | if ( 330 | map.current && 331 | selectedCollection && 332 | map.current.getSource('stac-collection-data') 333 | ) { 334 | // Clear any existing markers 335 | markersRef.current.forEach((marker) => marker.remove()); 336 | markersRef.current = []; 337 | 338 | const params = new URLSearchParams(); 339 | params.append('assets', 'visual'); 340 | const queryParams = params.toString() ? `?${params.toString()}` : ''; 341 | const itemsSource = map.current.getSource( 342 | 'stac-items-data' 343 | ) as GeoJSONSource; 344 | itemsSource.setData({ 345 | type: 'FeatureCollection', 346 | features: [] 347 | } as FeatureCollection); 348 | 349 | const collectionSource = map.current.getSource( 350 | 'stac-collection-data' 351 | ) as RasterTileSource; 352 | 353 | collectionSource.setTiles([ 354 | `${RASTER_API_PATH}/collections/${selectedCollection}/tiles/WebMercatorQuad/{z}/{x}/{y}.png${queryParams}` 355 | ]); 356 | } 357 | }, [selectedCollection]); 358 | 359 | return ( 360 |
364 | ); 365 | } 366 | -------------------------------------------------------------------------------- /frontend/app/components/detail.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | import '@awesome.me/webawesome/dist/components/drawer/drawer.js'; 4 | import '@awesome.me/webawesome/dist/components/button/button.js'; 5 | import '@awesome.me/webawesome/dist/components/button-group/button-group.js'; 6 | import '@awesome.me/webawesome/dist/components/icon/icon.js'; 7 | import '@awesome.me/webawesome/dist/components/divider/divider.js'; 8 | 9 | import { useStac } from '../context/StacContext'; 10 | import { StacItem } from 'stac-ts'; 11 | 12 | interface DetailProps { 13 | isDetailPaneShown: boolean; 14 | setShowDetailPane: (isShown: boolean) => void; 15 | } 16 | 17 | export default function Detail({ 18 | isDetailPaneShown, 19 | setShowDetailPane 20 | }: DetailProps) { 21 | const { selectedItem, stacItems, setSelectedItem } = useStac(); 22 | 23 | const itemData = stacItems?.features.find( 24 | (item) => item.id === selectedItem 25 | ) as StacItem | undefined; 26 | 27 | const formatDate = (dateString?: string) => { 28 | if (!dateString || dateString === 'N/A') return ''; 29 | return new Date(dateString).toISOString().split('T')[0]; 30 | }; 31 | 32 | const title = itemData?.id 33 | ? `${itemData.id} - ${formatDate(itemData.properties?.datetime as string)}` 34 | : 'Item Details'; 35 | 36 | const currentIndex = 37 | stacItems?.features.findIndex((item) => item.id === selectedItem) ?? -1; 38 | 39 | const totalResults = stacItems?.features.length ?? 0; 40 | 41 | return ( 42 | setShowDetailPane(false)} 47 | style={{ '--size': '420px' } as React.CSSProperties} 48 | > 49 |
59 | 60 | {itemData ? ( 61 |
62 |
70 |
71 | UPLOADED BY 72 |
73 | {itemData.properties['oam:producer_name'] 74 | ? (itemData.properties['oam:producer_name'] as string) 75 | : 'N/A'} 76 |
77 |
78 | 79 |
92 | {itemData.assets?.thumbnail ? ( 93 | {itemData.id} 98 | ) : ( 99 |
No image preview available
100 | )} 101 |
102 | 103 |
110 | 111 | 112 | Display as 113 | 114 |
115 | 116 | 117 | TMS 118 | 119 | 120 | Thumbnail 121 | 122 | 123 |
124 |
125 |
126 | 127 | {/* Action links */} 128 |
129 |
137 |
140 | 141 | Open in 142 |
143 | 161 |
162 | 163 |
170 |
173 | 174 | Copy image URL 175 |
176 | 194 |
195 |
196 | 197 | 198 | 199 | {/* Metadata table */} 200 |
201 | 202 | 203 | 204 | 209 | 214 | 215 | 216 | 219 | 222 | 223 | 224 | 227 | 235 | 236 | 237 | 240 | 245 | 246 | 247 | 248 | 253 | 254 | 255 | 258 | 263 | 264 | 265 | 266 | 269 | 270 | 271 | 274 | 284 | 285 | 286 | 287 | 290 | 291 | 292 |
207 | DATE 208 | 210 | {itemData.properties.created 211 | ? formatDate(itemData.properties.created) 212 | : 'N/A'} 213 |
217 | RESOLUTION 218 | 220 | {itemData.properties.gsd ? itemData.properties.gsd : 'N/A'} 221 |
225 | PROVIDER 226 | 228 | {itemData.properties.providers && 229 | itemData.properties.providers.length 230 | ? itemData.properties.providers 231 | .map((provider) => provider.name) 232 | .join(',') 233 | : 'N/A'} 234 |
238 | PLATFORM 239 | 241 | {itemData.properties.platform 242 | ? itemData.properties.platform 243 | : 'N/A'} 244 |
SENSOR 249 | {itemData.properties.instruments 250 | ? itemData.properties.instruments.join(',') 251 | : 'N/A'} 252 |
256 | IMAGE SIZE 257 | 259 | {itemData.assets.visual 260 | ? (itemData.assets.visual['file:size'] as number) 261 | : 'N/A'} 262 |
TYPE 267 | {/* Image + Map Layer */} 268 |
272 | LICENSE 273 | 275 | 279 | {itemData.properties.license 280 | ? itemData.properties.license 281 | : 'N/A'} 282 | 283 |
ID 288 | {itemData.id} 289 |
293 |
294 |
295 | ) : ( 296 |
297 | No item selected or data unavailable 298 |
299 | )} 300 | 301 | {/* Footer navigation */} 302 |
311 | { 316 | if (currentIndex > 0 && stacItems?.features) { 317 | const prevItem = stacItems.features[currentIndex - 1]; 318 | setSelectedItem(prevItem.id); 319 | } 320 | }} 321 | > 322 | 323 | Previous 324 | 325 | 326 |
327 | {currentIndex + 1} of {totalResults} results 328 |
329 | 330 | = totalResults - 1} 334 | onClick={() => { 335 | if (currentIndex < totalResults - 1 && stacItems?.features) { 336 | const nextItem = stacItems.features[currentIndex + 1]; 337 | setSelectedItem(nextItem.id); 338 | } 339 | }} 340 | > 341 | Next 342 | 343 | 344 |
345 | 346 | ); 347 | } 348 | --------------------------------------------------------------------------------