├── webapp
│ ├── src
│ │ ├── assets
│ │ │ └── .gitkeep
│ │ ├── favicon.ico
│ │ ├── environments
│ │ │ ├── environment.prod.ts
│ │ │ └── environment.ts
│ │ ├── tsconfig.app.json
│ │ ├── styles.scss
│ │ ├── tsconfig.spec.json
│ │ ├── tslint.json
│ │ ├── app
│ │ │ ├── server.service.spec.ts
│ │ │ ├── server.service.ts
│ │ │ ├── app.module.ts
│ │ │ ├── app.component.scss
│ │ │ ├── app.component.spec.ts
│ │ │ ├── material-components
│ │ │ │ └── material-components.module.ts
│ │ │ ├── app.component.ts
│ │ │ ├── app.component.html
│ │ │ └── locations.ts
│ │ ├── browserslist
│ │ ├── main.ts
│ │ ├── index.html
│ │ ├── theme.scss
│ │ ├── test.ts
│ │ ├── karma.conf.js
│ │ └── polyfills.ts
│ ├── deploy.sh
│ ├── app.yaml
│ ├── e2e
│ │ ├── src
│ │ │ ├── app.po.ts
│ │ │ └── app.e2e-spec.ts
│ │ ├── tsconfig.e2e.json
│ │ └── protractor.conf.js
│ ├── .editorconfig
│ ├── tsconfig.json
│ ├── .gcloudignore
│ ├── README.md
│ ├── package.json
│ ├── tslint.json
│ └── angular.json
├── worker
│ ├── requirements.txt
│ ├── Dockerfile
│ ├── .gcloudignore
│ ├── deploy.sh
│ └── main.py
├── dispatch
│ ├── requirements.txt
│ ├── deploy.sh
│ └── main.py
├── server
│ ├── requirements.txt
│ ├── tile_landsat.py
│ ├── .gcloudignore
│ ├── config.py
│ ├── deploy.sh
│ ├── tile_landcover.py
│ ├── region_upload.py
│ ├── landsat_image.py
│ ├── region_classify.py
│ ├── submit.py
│ ├── templates
│ │ └── index.html
│ ├── main.py
│ └── devops_tools.py
├── prediction
│ ├── single_prediction
│ │ ├── Dockerfile
│ │ ├── README.md
│ │ └── single_predict.py
│ └── serving
│   ├── Dockerfile
│   ├── README.md
│   └── service.py
├── .gcloudignore
├── .gitignore
├── deploy.sh
├── setup.sh
├── README.md
└── train-model.py
/webapp/src/assets/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/worker/requirements.txt:
--------------------------------------------------------------------------------
1 | google-cloud-pubsub
2 | google-cloud-storage
3 | tensorflow
--------------------------------------------------------------------------------
/dispatch/requirements.txt:
--------------------------------------------------------------------------------
1 | google-cloud-pubsub
2 | google-cloud-storage
3 | requests
4 | tensorflow
--------------------------------------------------------------------------------
/webapp/src/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GoogleCloudPlatform/earth-ml/HEAD/webapp/src/favicon.ico
--------------------------------------------------------------------------------
/webapp/deploy.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Deploying the web application.
4 | ( cd webapp ; ng build --prod )
5 | gcloud app deploy webapp
6 |
--------------------------------------------------------------------------------
/webapp/src/environments/environment.prod.ts:
--------------------------------------------------------------------------------
1 | export const environment = {
2 | production: true,
3 | serverURL: 'https://server-dot-project-earth-2018.appspot.com',
4 | };
5 |
--------------------------------------------------------------------------------
/server/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask
2 | Flask-Caching
3 | earthengine-api
4 | google-api-python-client
5 | google-cloud-storage
6 | mercantile
7 | oauth2client
8 | requests
9 | tensorflow
--------------------------------------------------------------------------------
/webapp/app.yaml:
--------------------------------------------------------------------------------
1 | runtime: nodejs10
2 |
3 | handlers:
4 | - url: /
5 | static_files: dist/project-earth/index.html
6 | upload: dist/project-earth/index.html
7 |
8 | - url: /
9 | static_dir: dist/project-earth
--------------------------------------------------------------------------------
/webapp/src/tsconfig.app.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "../tsconfig.json",
3 | "compilerOptions": {
4 | "outDir": "../out-tsc/app",
5 | "types": ["node"]
6 | },
7 | "exclude": [
8 | "test.ts",
9 | "**/*.spec.ts"
10 | ]
11 | }
12 |
--------------------------------------------------------------------------------
/webapp/src/styles.scss:
--------------------------------------------------------------------------------
1 | /* You can add global styles to this file, and also import other style files */
2 |
3 | @import 'theme.scss';
4 |
5 | * {
6 | font-family: Roboto;
7 | }
8 |
9 | body, html {
10 | height: 100%;
11 | margin: 0 auto;
12 | }
--------------------------------------------------------------------------------
/webapp/e2e/src/app.po.ts:
--------------------------------------------------------------------------------
1 | import { browser, by, element } from 'protractor';
2 |
3 | export class AppPage {
4 | navigateTo() {
5 | return browser.get('/');
6 | }
7 |
8 | getTitleText() {
9 | return element(by.css('app-root h1')).getText();
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/webapp/e2e/tsconfig.e2e.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "../tsconfig.json",
3 | "compilerOptions": {
4 | "outDir": "../out-tsc/app",
5 | "module": "commonjs",
6 | "target": "es5",
7 | "types": [
8 | "jasmine",
9 | "jasminewd2",
10 | "node"
11 | ]
12 | }
13 | }
--------------------------------------------------------------------------------
/prediction/single_prediction/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM tensorflow/tensorflow:1.12.0-py3
2 |
3 | ADD ./single_predict.py /opt/single_predict.py
4 | ADD ./project-earth-2018-sa.json /opt/project-earth-2018-sa.json
5 | RUN chmod +x /opt/single_predict.py
6 |
7 | ENTRYPOINT ["/usr/bin/python", "/opt/single_predict.py"]
--------------------------------------------------------------------------------
/webapp/.editorconfig:
--------------------------------------------------------------------------------
1 | # Editor configuration, see https://editorconfig.org
2 | root = true
3 |
4 | [*]
5 | charset = utf-8
6 | indent_style = space
7 | indent_size = 2
8 | insert_final_newline = true
9 | trim_trailing_whitespace = true
10 |
11 | [*.md]
12 | max_line_length = off
13 | trim_trailing_whitespace = false
14 |
--------------------------------------------------------------------------------
/webapp/src/tsconfig.spec.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "../tsconfig.json",
3 | "compilerOptions": {
4 | "outDir": "../out-tsc/spec",
5 | "types": [
6 | "jasmine",
7 | "node"
8 | ]
9 | },
10 | "files": [
11 | "test.ts",
12 | "polyfills.ts"
13 | ],
14 | "include": [
15 | "**/*.spec.ts",
16 | "**/*.d.ts"
17 | ]
18 | }
19 |
--------------------------------------------------------------------------------
/worker/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM tensorflow/tensorflow:latest-py3
2 |
3 | COPY requirements.txt /
4 | RUN pip install -U -r /requirements.txt
5 |
6 | ENV WORKDIR /app
7 | WORKDIR ${WORKDIR}
8 | COPY . ${WORKDIR}
9 |
10 | ENV GOOGLE_APPLICATION_CREDENTIALS credentials.json
11 | ENV PROJECT project-earth-2018
12 | ENV ML_ENGINE_TOPIC ml-engine
13 |
14 | CMD ["python", "main.py"]
--------------------------------------------------------------------------------
/webapp/e2e/src/app.e2e-spec.ts:
--------------------------------------------------------------------------------
1 | import { AppPage } from './app.po';
2 |
3 | describe('workspace-project App', () => {
4 | let page: AppPage;
5 |
6 | beforeEach(() => {
7 | page = new AppPage();
8 | });
9 |
10 | it('should display welcome message', () => {
11 | page.navigateTo();
12 | expect(page.getTitleText()).toEqual('Welcome to project-earth!');
13 | });
14 | });
15 |
--------------------------------------------------------------------------------
/webapp/src/tslint.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "../tslint.json",
3 | "rules": {
4 | "directive-selector": [
5 | true,
6 | "attribute",
7 | "app",
8 | "camelCase"
9 | ],
10 | "component-selector": [
11 | true,
12 | "element",
13 | "app",
14 | "kebab-case"
15 | ]
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/webapp/src/app/server.service.spec.ts:
--------------------------------------------------------------------------------
1 | import { TestBed } from '@angular/core/testing';
2 |
3 | import { ServerService } from './server.service';
4 |
5 | describe('ServerService', () => {
6 | beforeEach(() => TestBed.configureTestingModule({}));
7 |
8 | it('should be created', () => {
9 | const service: ServerService = TestBed.get(ServerService);
10 | expect(service).toBeTruthy();
11 | });
12 | });
13 |
--------------------------------------------------------------------------------
/webapp/src/browserslist:
--------------------------------------------------------------------------------
1 | # This file is currently used by autoprefixer to adjust CSS to support the below specified browsers
2 | # For additional information regarding the format and rule options, please see:
3 | # https://github.com/browserslist/browserslist#queries
4 | #
5 | # For IE 9-11 support, please remove 'not' from the last line of the file and adjust as needed
6 |
7 | > 0.5%
8 | last 2 versions
9 | Firefox ESR
10 | not dead
11 | not IE 9-11
--------------------------------------------------------------------------------
/webapp/src/main.ts:
--------------------------------------------------------------------------------
1 | import 'hammerjs';
2 |
3 | import { enableProdMode } from '@angular/core';
4 | import { platformBrowserDynamic } from '@angular/platform-browser-dynamic';
5 |
6 | import { AppModule } from './app/app.module';
7 | import { environment } from './environments/environment';
8 |
9 | if (environment.production) {
10 | enableProdMode();
11 | }
12 |
13 | platformBrowserDynamic().bootstrapModule(AppModule)
14 | .catch(err => console.error(err));
15 |
--------------------------------------------------------------------------------
/server/tile_landsat.py:
--------------------------------------------------------------------------------
1 | import ee
2 |
3 | import landsat_image
4 |
5 |
6 | def run(x, y, zoom, year):
7 | # Return the tile URL for the input landsat image used for classification.
8 | # mapid = ee.ImageCollection(f"projects/project-earth/landsat_test").getMapId({
9 | mapid = landsat_image.get(year).getMapId({
10 | 'bands': ['B4', 'B3', 'B2'],
11 | 'min': 0.0,
12 | 'max': 0.3,
13 | 'gamma': 1.5,
14 | })
15 | return ee.data.getTileUrl(mapid, x, y, zoom)
16 |
--------------------------------------------------------------------------------
/webapp/src/app/server.service.ts:
--------------------------------------------------------------------------------
1 | import { Injectable } from '@angular/core';
2 |
3 | import { environment } from '../environments/environment';
4 |
5 | @Injectable({
6 | providedIn: 'root'
7 | })
8 | export class ServerService {
9 | landsatTileURL(x: number, y: number, zoom: number, year: number) {
10 | return `${environment.serverURL}/tile/landsat/${x}/${y}/${zoom}/${year}`
11 | }
12 |
13 | landcoverTileURL(x: number, y: number, zoom: number, year: number) {
14 | return `${environment.serverURL}/tile/landcover/${x}/${y}/${zoom}/${year}`
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/webapp/src/index.html:
--------------------------------------------------------------------------------
 1 | <!doctype html>
 2 | <html lang="en">
 3 | <head>
 4 |   <meta charset="utf-8">
 5 |   <title>What is on Earth?</title>
 6 |   <base href="/">
 7 |   <meta name="viewport" content="width=device-width, initial-scale=1">
 8 |   <link rel="icon" type="image/x-icon" href="favicon.ico">
 9 |   <link href="https://fonts.googleapis.com/css?family=Roboto:300,400,500" rel="stylesheet">
10 |   <link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
11 | </head>
12 | <body>
13 |   <app-root></app-root>
14 | </body>
15 | </html>
16 |
--------------------------------------------------------------------------------
/dispatch/deploy.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Check the required environment variables.
4 | : ${PROJECT:?"Please set PROJECT to your Google Cloud Project ID"}
5 | : ${BUCKET:?"Please set BUCKET to your Cloud Storage bucket (without gs:// prefix)"}
6 | : ${ML_ENGINE_TOPIC:?"Please set ML_ENGINE_TOPIC to a PubSub topic"}
7 |
8 | # Deploying the Cloud Functions dispatcher.
9 | ( cd dispatch ; gcloud functions deploy dispatch \
10 | --runtime python37 \
11 | --memory 2048MB \
12 | --timeout 540s \
13 | --trigger-bucket $BUCKET \
14 | --set-env-vars PROJECT=$PROJECT,ML_ENGINE_TOPIC=$ML_ENGINE_TOPIC )
15 |
--------------------------------------------------------------------------------
/.gcloudignore:
--------------------------------------------------------------------------------
1 | # This file specifies files that are *not* uploaded to Google Cloud Platform
2 | # using gcloud. It follows the same syntax as .gitignore, with the addition of
3 | # "#!include" directives (which insert the entries of the given .gitignore-style
4 | # file at that point).
5 | #
6 | # For more information, run:
7 | # $ gcloud topic gcloudignore
8 | #
9 | .gcloudignore
10 | # If you would like to upload your .git directory, .gitignore file or files
11 | # from your .gitignore file, remove the corresponding line
12 | # below:
13 | .git
14 | .gitignore
15 |
16 | node_modules
17 | #!include:.gitignore
18 |
--------------------------------------------------------------------------------
/webapp/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compileOnSave": false,
3 | "compilerOptions": {
4 | "baseUrl": "./",
5 | "outDir": "./dist/out-tsc",
6 | "sourceMap": true,
7 | "declaration": false,
8 | "module": "es2015",
9 | "moduleResolution": "node",
10 | "emitDecoratorMetadata": true,
11 | "experimentalDecorators": true,
12 | "importHelpers": true,
13 | "target": "es5",
14 | "typeRoots": [
15 | "node_modules/@types"
16 | ],
17 | "lib": [
18 | "es2018",
19 | "dom"
20 | ],
21 | "downlevelIteration": true,
22 | "strict": true,
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/worker/.gcloudignore:
--------------------------------------------------------------------------------
1 | # This file specifies files that are *not* uploaded to Google Cloud Platform
2 | # using gcloud. It follows the same syntax as .gitignore, with the addition of
3 | # "#!include" directives (which insert the entries of the given .gitignore-style
4 | # file at that point).
5 | #
6 | # For more information, run:
7 | # $ gcloud topic gcloudignore
8 | #
9 | .gcloudignore
10 | # If you would like to upload your .git directory, .gitignore file or files
11 | # from your .gitignore file, remove the corresponding line
12 | # below:
13 | .git
14 | .gitignore
15 |
16 | # Python pycache:
17 | __pycache__/
18 | # Ignored by the build system
19 | /setup.cfg
--------------------------------------------------------------------------------
/webapp/src/theme.scss:
--------------------------------------------------------------------------------
1 | // @import "~@angular/material/prebuilt-themes/indigo-pink.css";
2 | // @import "~@angular/material/prebuilt-themes/deeppurple-amber.css";
3 | // @import "~@angular/material/prebuilt-themes/pink-bluegrey.css";
4 | // @import "~@angular/material/prebuilt-themes/purple-green.css";
5 |
6 | @import '~@angular/material/theming';
7 | @include mat-core();
8 |
9 | // Available color palettes: https://material.io/design/color/
10 | $primary: mat-palette($mat-green, 400);
11 | $accent: mat-palette($mat-green, 600);
12 | $warn: mat-palette($mat-deep-orange);
13 | $theme: mat-dark-theme($primary, $accent, $warn);
14 |
15 | @include angular-material-theme($theme);
--------------------------------------------------------------------------------
/server/.gcloudignore:
--------------------------------------------------------------------------------
1 | # This file specifies files that are *not* uploaded to Google Cloud Platform
2 | # using gcloud. It follows the same syntax as .gitignore, with the addition of
3 | # "#!include" directives (which insert the entries of the given .gitignore-style
4 | # file at that point).
5 | #
6 | # For more information, run:
7 | # $ gcloud topic gcloudignore
8 | #
9 | .gcloudignore
10 | # If you would like to upload your .git directory, .gitignore file or files
11 | # from your .gitignore file, remove the corresponding line
12 | # below:
13 | .git
14 | .gitignore
15 |
16 | # Python pycache:
17 | __pycache__/
18 | env/
19 |
20 | # Ignored by the build system
21 | /setup.cfg
22 |
--------------------------------------------------------------------------------
/webapp/src/test.ts:
--------------------------------------------------------------------------------
1 | // This file is required by karma.conf.js and loads recursively all the .spec and framework files
2 |
3 | import 'zone.js/dist/zone-testing';
4 | import { getTestBed } from '@angular/core/testing';
5 | import {
6 | BrowserDynamicTestingModule,
7 | platformBrowserDynamicTesting
8 | } from '@angular/platform-browser-dynamic/testing';
9 |
10 | declare const require: any;
11 |
12 | // First, initialize the Angular testing environment.
13 | getTestBed().initTestEnvironment(
14 | BrowserDynamicTestingModule,
15 | platformBrowserDynamicTesting()
16 | );
17 | // Then we find all the tests.
18 | const context = require.context('./', true, /\.spec\.ts$/);
19 | // And load the modules.
20 | context.keys().map(context);
21 |
--------------------------------------------------------------------------------
/webapp/src/environments/environment.ts:
--------------------------------------------------------------------------------
1 | // This file can be replaced during build by using the `fileReplacements` array.
2 | // `ng build --prod` replaces `environment.ts` with `environment.prod.ts`.
3 | // The list of file replacements can be found in `angular.json`.
4 |
5 | export const environment = {
6 | production: false,
7 | serverURL: 'http://127.0.0.1:5000'
8 | };
9 |
10 | /*
11 | * For easier debugging in development mode, you can import the following file
12 | * to ignore zone related error stack frames such as `zone.run`, `zoneDelegate.invokeTask`.
13 | *
14 | * This import should be commented out in production mode because it will have a negative impact
15 | * on performance if an error is thrown.
16 | */
17 | // import 'zone.js/dist/zone-error'; // Included with Angular CLI.
18 |
--------------------------------------------------------------------------------
/server/config.py:
--------------------------------------------------------------------------------
1 | import ee
2 | import json
3 | import os
4 | from google.cloud import storage
5 |
6 | # Environment variables.
7 | PROJECT = os.environ['PROJECT']
8 | BUCKET = os.environ['BUCKET']
9 | ASSET_ID = os.environ['ASSET_ID']
10 | CREDENTIALS_FILE = os.environ['GOOGLE_APPLICATION_CREDENTIALS']
11 |
12 | # Constants.
13 | region_zoom_level = 6
14 |
15 | # Initialize Earth Engine.
16 | with open(CREDENTIALS_FILE) as f:
17 | credentials = json.load(f)
18 | credentials_email = credentials['client_email']
19 | ee.Initialize(ee.ServiceAccountCredentials(credentials_email, CREDENTIALS_FILE))
20 |
21 | earthengine = ['earthengine', '--service_account_file', CREDENTIALS_FILE]
22 |
23 | # Initialize the Google Cloud client libraries.
24 | storage_client = storage.Client()
25 | bucket = storage_client.bucket(BUCKET)
26 |
--------------------------------------------------------------------------------
/webapp/.gcloudignore:
--------------------------------------------------------------------------------
1 | # This file specifies files that are *not* uploaded to Google Cloud Platform
2 | # using gcloud. It follows the same syntax as .gitignore, with the addition of
3 | # "#!include" directives (which insert the entries of the given .gitignore-style
4 | # file at that point).
5 | #
6 | # For more information, run:
7 | # $ gcloud topic gcloudignore
8 | #
9 | .gcloudignore
10 | # If you would like to upload your .git directory, .gitignore file or files
11 | # from your .gitignore file, remove the corresponding line
12 | # below:
13 | .git
14 | .gitignore
15 |
16 | # NodeJS installed modules, they will be installed automatically
17 | node_modules
18 |
19 | # Python pycache:
20 | __pycache__/
21 |
22 | # Python virtualenv:
23 | env/
24 |
25 | # Ignored by the build system
26 | /setup.cfg
27 |
28 | # Visual Studio Code files.
29 | .vscode
--------------------------------------------------------------------------------
/webapp/e2e/protractor.conf.js:
--------------------------------------------------------------------------------
1 | // Protractor configuration file, see link for more information
2 | // https://github.com/angular/protractor/blob/master/lib/config.ts
3 |
4 | const { SpecReporter } = require('jasmine-spec-reporter');
5 |
6 | exports.config = {
7 | allScriptsTimeout: 11000,
8 | specs: [
9 | './src/**/*.e2e-spec.ts'
10 | ],
11 | capabilities: {
12 | 'browserName': 'chrome'
13 | },
14 | directConnect: true,
15 | baseUrl: 'http://localhost:4200/',
16 | framework: 'jasmine',
17 | jasmineNodeOpts: {
18 | showColors: true,
19 | defaultTimeoutInterval: 30000,
20 | print: function() {}
21 | },
22 | onPrepare() {
23 | require('ts-node').register({
24 | project: require('path').join(__dirname, './tsconfig.e2e.json')
25 | });
26 | jasmine.getEnv().addReporter(new SpecReporter({ spec: { displayStacktrace: true } }));
27 | }
28 | };
--------------------------------------------------------------------------------
/webapp/src/app/app.module.ts:
--------------------------------------------------------------------------------
1 | import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
2 | import { BrowserModule } from '@angular/platform-browser';
3 | import { HttpClientModule } from '@angular/common/http';
4 | import { NgModule } from '@angular/core';
5 |
6 | import { MaterialComponentsModule } from './material-components/material-components.module';
7 | import { AppComponent } from './app.component';
8 |
9 | import { AgmCoreModule } from '@agm/core';
10 |
11 | import { credentials } from 'src/assets/credentials';
12 |
13 | @NgModule({
14 | declarations: [
15 | AppComponent,
16 | ],
17 | imports: [
18 | BrowserModule,
19 | BrowserAnimationsModule,
20 | MaterialComponentsModule,
21 | AgmCoreModule.forRoot({
22 | apiKey: credentials.mapsKey,
23 | libraries: ['places'],
24 | }),
25 | HttpClientModule,
26 | ],
27 | providers: [],
28 | bootstrap: [AppComponent]
29 | })
30 | export class AppModule { }
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # See http://help.github.com/ignore-files/ for more about ignoring files.
2 |
3 | # compiled output
4 | dist/
5 | tmp/
6 | out-tsc/
7 |
8 | # dependencies
9 | node_modules/
10 |
11 | # profiling files
12 | chrome-profiler-events.json
13 | speed-measure-plugin.json
14 |
15 | # IDEs and editors
16 | .idea
17 | .project
18 | .classpath
19 | .c9/
20 | *.launch
21 | .settings/
22 | *.sublime-workspace
23 |
24 | # IDE - VSCode
25 | .vscode
26 | .vscode/*
27 | !.vscode/settings.json
28 | !.vscode/tasks.json
29 | !.vscode/launch.json
30 | !.vscode/extensions.json
31 | .history/*
32 |
33 | # misc
34 | .sass-cache/
35 | connect.lock
36 | coverage
37 | libpeerconnection.log
38 | npm-debug.log
39 | yarn-error.log
40 | testem.log
41 | typings
42 |
43 | # System Files
44 | .DS_Store
45 | Thumbs.db
46 |
47 | # Python
48 | __pycache__/
49 | *.py[cod]
50 | *$py.class
51 |
52 | # Environment and credentials
53 | env*/
54 | credentials.*
55 |
56 | # Skip the server's app.yaml since it's generated on deploy.sh
57 | server/app.yaml
58 |
--------------------------------------------------------------------------------
/deploy.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Check the required environment variables.
4 | : ${BUCKET:?"Please set BUCKET to your Cloud Storage bucket (without gs:// prefix)"}
5 | # : ${ML_ENGINE_TOPIC:?"Please set ML_ENGINE_TOPIC to a PubSub topic"}
6 | : ${ASSET_ID:?"Please set ASSET_ID to users/your-ee-username/landcover or projects/your-ee-project/landcover"}
7 | : ${GOOGLE_APPLICATION_CREDENTIALS:?"Please set GOOGLE_APPLICATION_CREDENTIALS to point to the path/to/your/credentials.json"}
8 |
9 | export PROJECT=$(gcloud config get-value project)
10 |
11 | echo "PROJECT=$PROJECT"
12 | echo "BUCKET=$BUCKET"
13 | # echo "ML_ENGINE_TOPIC=$ML_ENGINE_TOPIC"
14 | echo "ASSET_ID=$ASSET_ID"
15 | echo "GOOGLE_APPLICATION_CREDENTIALS=$GOOGLE_APPLICATION_CREDENTIALS"
16 |
17 | # Deploy the web application to App Engine.
18 | bash webapp/deploy.sh
19 |
20 | # Deploy the server to App Engine.
21 | bash server/deploy.sh
22 |
23 | # Deploy the dispatcher to Cloud Functions.
24 | bash dispatch/deploy.sh
25 |
26 | # Deploy the workers to Kubernetes.
27 | # bash worker/deploy.sh
28 |
--------------------------------------------------------------------------------
/webapp/src/app/app.component.scss:
--------------------------------------------------------------------------------
1 | .stretch {
2 | position: absolute;
3 | height: auto;
4 | width: auto;
5 | top: 0;
6 | bottom: 0;
7 | left: 0;
8 | right: 0;
9 | }
10 |
11 | #main-content {
12 | top: 7.6ex;
13 | }
14 |
15 | mat-sidenav-container {
16 | position: relative;
17 | height: 100%;
18 | }
19 |
20 | mat-slider {
21 | width: 100%;
22 | }
23 |
24 | #search-box {
25 | margin: 1ex;
26 | }
27 |
28 | #year-div {
29 | position: absolute;
30 | z-index: 1;
31 | left: 0.4ex;
32 | font-size: 8ex;
33 | text-shadow: 0 2px 0.8ex black;
34 | }
35 |
36 | #controls-div {
37 | z-index: 1;
38 | position: absolute;
39 | top: 1ex;
40 | right: 1ex;
41 | background-color: black;
42 | }
43 |
44 | #legend-div {
45 | z-index: 1;
46 | position: absolute;
47 | top: 18ex;
48 | right: 1ex;
49 | background-color: black;
50 | }
51 |
52 | #map-div {
53 | bottom: 8ex;
54 | }
55 |
56 | agm-map {
57 | height: 100%;
58 | }
59 |
60 | #slider-div {
61 | position: absolute;
62 | left: 2ex;
63 | right: 2ex;
64 | bottom: 0;
65 | }
66 |
--------------------------------------------------------------------------------
/server/deploy.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Check the required environment variables.
4 | : ${PROJECT:?"Please set PROJECT to your Google Cloud Project ID"}
5 | : ${BUCKET:?"Please set BUCKET to your Cloud Storage bucket (without gs:// prefix)"}
6 | : ${ASSET_ID:?"Please set ASSET_ID to users/your-ee-username/landcover or projects/your-ee-project/landcover"}
7 | : ${GOOGLE_APPLICATION_CREDENTIALS:?"Please set GOOGLE_APPLICATION_CREDENTIALS to point to the path/to/your/credentials.json"}
8 |
9 | # Copy your credentials to upload to the server.
10 | cp $GOOGLE_APPLICATION_CREDENTIALS server/credentials.json
11 |
12 | # Generate the server's App Engine yaml file with the environment variables.
13 | cat > server/app.yaml <<EOF
14 | runtime: python37
15 | service: server
16 |
17 | env_variables:
18 |   PROJECT: $PROJECT
19 |   BUCKET: $BUCKET
20 |   ASSET_ID: $ASSET_ID
21 |   GOOGLE_APPLICATION_CREDENTIALS: credentials.json
22 | EOF
23 |
24 | # Deploy the server to App Engine.
25 | gcloud app deploy server
--------------------------------------------------------------------------------
/prediction/single_prediction/README.md:
--------------------------------------------------------------------------------
 1 | # Single Prediction
 2 |
 3 | ### Build the Docker Image
 4 |
 5 | In order to build and register your docker image, you may run the following commands:
 6 |
 7 | ```bash
 8 | export PROJECT_ID=<your-project-id>
 9 | docker build . -t prediction
10 | docker tag prediction gcr.io/$PROJECT_ID/prediction
11 | docker push gcr.io/$PROJECT_ID/prediction
12 | ```
--------------------------------------------------------------------------------
/webapp/src/karma.conf.js:
--------------------------------------------------------------------------------
1 | // Karma configuration file, see link for more information
2 | // https://karma-runner.github.io/1.0/config/configuration-file.html
3 |
4 | module.exports = function (config) {
5 | config.set({
6 | basePath: '',
7 | frameworks: ['jasmine', '@angular-devkit/build-angular'],
8 | plugins: [
9 | require('karma-jasmine'),
10 | require('karma-chrome-launcher'),
11 | require('karma-jasmine-html-reporter'),
12 | require('karma-coverage-istanbul-reporter'),
13 | require('@angular-devkit/build-angular/plugins/karma')
14 | ],
15 | client: {
16 | clearContext: false // leave Jasmine Spec Runner output visible in browser
17 | },
18 | coverageIstanbulReporter: {
19 | dir: require('path').join(__dirname, '../coverage'),
20 | reports: ['html', 'lcovonly', 'text-summary'],
21 | fixWebpackSourcePaths: true
22 | },
23 | reporters: ['progress', 'kjhtml'],
24 | port: 9876,
25 | colors: true,
26 | logLevel: config.LOG_INFO,
27 | autoWatch: true,
28 | browsers: ['Chrome'],
29 | singleRun: false
30 | });
31 | };
--------------------------------------------------------------------------------
/webapp/src/app/app.component.spec.ts:
--------------------------------------------------------------------------------
1 | import { TestBed, async } from '@angular/core/testing';
2 | import { RouterTestingModule } from '@angular/router/testing';
3 | import { AppComponent } from './app.component';
4 |
5 | describe('AppComponent', () => {
6 | beforeEach(async(() => {
7 | TestBed.configureTestingModule({
8 | imports: [
9 | RouterTestingModule
10 | ],
11 | declarations: [
12 | AppComponent
13 | ],
14 | }).compileComponents();
15 | }));
16 |
17 | it('should create the app', () => {
18 | const fixture = TestBed.createComponent(AppComponent);
19 | const app = fixture.debugElement.componentInstance;
20 | expect(app).toBeTruthy();
21 | });
22 |
23 | it(`should have as title 'project-earth'`, () => {
24 | const fixture = TestBed.createComponent(AppComponent);
25 | const app = fixture.debugElement.componentInstance;
26 | expect(app.title).toEqual('project-earth');
27 | });
28 |
29 | it('should render title in a h1 tag', () => {
30 | const fixture = TestBed.createComponent(AppComponent);
31 | fixture.detectChanges();
32 | const compiled = fixture.debugElement.nativeElement;
33 | expect(compiled.querySelector('h1').textContent).toContain('Welcome to project-earth!');
34 | });
35 | });
36 |
--------------------------------------------------------------------------------
/server/tile_landcover.py:
--------------------------------------------------------------------------------
1 | import ee
2 | import os
3 |
4 | import config
5 |
6 | # Model classifications with the palette for visualization.
7 | classifications = [
8 | {'color': 'fbc02d', 'name': 'Farmland'}, # Yellow 700
9 | {'color': '689f38', 'name': 'Forest'}, # Light Green 600
10 | {'color': 'aed581', 'name': 'Grassland'}, # Light Green 300
11 | {'color': 'e6ee9c', 'name': 'Shrublands'}, # Lime 200
12 | {'color': '26a69a', 'name': 'Wetland'}, # Teal 400
13 | {'color': '90caf9', 'name': 'Water'}, # Blue 200
14 | {'color': 'ffab91', 'name': 'Tundra'}, # Deep Orange 200
15 | {'color': 'e0e0e0', 'name': 'Impervious'}, # Gray 300
16 | {'color': 'ffecb3', 'name': 'Barren land'}, # Amber 100
17 | {'color': 'fafafa', 'name': 'Snow/Ice'}, # Gray 50
18 | ]
19 | palette = [classification['color'] for classification in classifications]
20 |
21 | def run(tile_x, tile_y, zoom, year):
22 | # Return the tile URL for the landcover ImageCollection.
23 | mapid = (
24 | ee.ImageCollection(config.ASSET_ID)
25 | .filterDate(f"{year}-1-1", f"{year}-12-31")
26 | .getMapId({
27 | 'min': 0.0,
28 | 'max': len(classifications)-1,
29 | 'palette': palette,
30 | })
31 | )
32 | return ee.data.getTileUrl(mapid, tile_x, tile_y, zoom)
33 |
--------------------------------------------------------------------------------
/setup.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Check the required environment variables.
4 | : ${BUCKET:?"Please set BUCKET to your Cloud Storage bucket (without gs:// prefix)"}
5 | : ${ASSET_ID:?"Please set ASSET_ID to users/your-ee-username/landcover or projects/your-ee-project/landcover"}
6 |
7 | export PROJECT=$(gcloud config get-value project)
8 | # export ML_ENGINE_TOPIC='ml-engine'
9 |
10 | echo "PROJECT=$PROJECT"
11 | echo "BUCKET=$BUCKET"
12 | echo "ASSET_ID=$ASSET_ID"
13 | # echo "ML_ENGINE_TOPIC=$ML_ENGINE_TOPIC"
14 |
15 | # Create the Cloud Storage bucket.
16 | gsutil mb gs://$BUCKET
17 |
18 | # # Create the Pub/Sub topic and subscription.
19 | # gcloud pubsub topics create projects/$PROJECT/topics/$ML_ENGINE_TOPIC
20 | # gcloud pubsub subscriptions create projects/$PROJECT/subscriptions/$ML_ENGINE_TOPIC \
21 | # --topic projects/$PROJECT/topics/$ML_ENGINE_TOPIC
22 |
23 | # # Configure Compute Engine for Kubernetes.
24 | # gcloud config set compute/zone us-central1-a
25 | # gcloud components install kubectl
26 | # gcloud auth configure-docker
27 |
28 | # # Create a Google Kubernetes Engine cluster with autoscaling.
29 | # CLUSTER=workers-cluster
30 | # gcloud container clusters create $CLUSTER \
31 | # --enable-autoscaling \
32 | # --min-nodes 1 \
33 | # --max-nodes 50
34 |
35 | # # Configure kubectl to connect to our newly created cluster.
36 | # gcloud container clusters get-credentials $CLUSTER
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Project Earth
2 |
3 | > * Results in Image Collection [`projects/project-earth/landcover`](https://code.earthengine.google.com/?asset=projects/project-earth/landcover)
4 | > * Visualize in [Earth Engine](https://code.earthengine.google.com/55eeefb8448ebe418e531fabbfd9ddc7)
5 |
6 | ## Development environment
7 |
8 | Before you start, make sure you have the following installed:
9 | * [Google Cloud SDK](https://cloud.google.com/sdk/install)
10 | * [Docker](https://docs.docker.com/install/)
11 | * [Python 3](https://www.python.org/downloads/) (3.6 or higher)
12 |
13 | Test your development environment.
14 |
15 | ```sh
16 | gcloud version
17 | docker --version
18 | python3 --version
19 | ```
20 |
21 | ## Setup
22 |
23 | To set up the project in Google Cloud, you will have to set the following environment variables.
24 | Note that the Cloud Storage bucket will be created by the setup script.
25 | ```sh
26 | export BUCKET=your-cloud-storage-bucket
27 | export ASSET_ID=users/your-ee-username/landcover # or projects/your-ee-project/landcover
28 | ```
29 |
30 | Then run the [setup.sh](setup.sh) script to configure your project with the required resources.
31 | Please refer to that file for further details about what it does.
32 | ```sh
33 | bash setup.sh
34 | ```
35 |
36 | ## Deploying the project
37 |
38 | Once all the resources are set, please run the [deploy.sh](deploy.sh) script to deploy all the required services.
39 | ```sh
40 | bash deploy.sh
41 | ```
42 |
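43 | ## Trying it out
44 |
45 | Once deployed, the server exposes the tile endpoints used by the web app
46 | (see `webapp/src/app/server.service.ts` for the URL format). As a quick
47 | smoke test, you can request a tile directly. The tile coordinates below
48 | are arbitrary examples, and the host assumes the default `server` service
49 | domain on App Engine:
50 |
51 | ```sh
52 | # Request a Landsat tile at tile coordinates (x=10, y=12), zoom level 6, for 2018.
53 | curl "https://server-dot-$PROJECT.appspot.com/tile/landsat/10/12/6/2018"
54 | ```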
--------------------------------------------------------------------------------
/worker/deploy.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Check the required environment variables.
4 | : ${PROJECT:?"Please set PROJECT to your Google Cloud Project ID"}
5 | : ${ML_ENGINE_TOPIC:?"Please set ML_ENGINE_TOPIC to a PubSub topic"}
6 | : ${GOOGLE_APPLICATION_CREDENTIALS:?"Please set GOOGLE_APPLICATION_CREDENTIALS to point to the path/to/your/credentials.json"}
7 |
8 | # Copy your credentials to upload to the server.
9 | cp $GOOGLE_APPLICATION_CREDENTIALS worker/credentials.json
10 |
11 | # Generate the worker's Dockerfile with the environment variables.
12 | cat > worker/Dockerfile <<EOF
13 | FROM tensorflow/tensorflow:latest-py3
14 |
15 | COPY requirements.txt /
16 | RUN pip install -U -r /requirements.txt
17 |
18 | ENV WORKDIR /app
19 | WORKDIR \${WORKDIR}
20 | COPY . \${WORKDIR}
21 |
22 | ENV GOOGLE_APPLICATION_CREDENTIALS credentials.json
23 | ENV PROJECT $PROJECT
24 | ENV ML_ENGINE_TOPIC $ML_ENGINE_TOPIC
25 |
26 | CMD ["python", "main.py"]
27 | EOF
--------------------------------------------------------------------------------
/prediction/serving/Dockerfile:
--------------------------------------------------------------------------------
 9 | # Downloading the gcloud package
10 | RUN curl https://dl.google.com/dl/cloudsdk/channels/rapid/google-cloud-sdk.tar.gz > /tmp/google-cloud-sdk.tar.gz
11 |
12 | # Installing the package
13 | RUN mkdir -p /usr/local/gcloud \
14 |   && tar -C /usr/local/gcloud -xvf /tmp/google-cloud-sdk.tar.gz \
15 |   && /usr/local/gcloud/google-cloud-sdk/install.sh
16 |
17 | # Adding the package path to local
18 | ENV PATH $PATH:/usr/local/gcloud/google-cloud-sdk/bin
19 |
20 | RUN gcloud config set project project-earth-2018
21 | RUN gcloud auth activate-service-account project-earth@project-earth-2018.iam.gserviceaccount.com --key-file=$GOOGLE_APPLICATION_CREDENTIALS --project=project-earth-2018
22 | RUN gcloud container clusters get-credentials projectearth2018 --zone us-central1-a --project project-earth-2018
23 | RUN gcloud config list
24 |
25 | # Set the Kubernetes version as found in the UCP Dashboard or API
26 | ENV k8sversion v1.13.1
27 |
28 | # Get the kubectl binary.
29 | RUN curl -LO https://storage.googleapis.com/kubernetes-release/release/$k8sversion/bin/linux/amd64/kubectl
30 |
31 | # Make the kubectl binary executable.
32 | RUN chmod +x ./kubectl
33 |
34 | # Move the kubectl executable to /usr/local/bin.
35 | RUN mv ./kubectl /usr/local/bin/kubectl
36 |
37 | RUN pip install flask pyyaml
38 |
39 | EXPOSE 5000
40 |
41 | ENTRYPOINT ["/usr/bin/python", "/opt/service.py"]
--------------------------------------------------------------------------------
/prediction/serving/README.md:
--------------------------------------------------------------------------------
1 | # Serving the Model
2 |
3 | This directory contains the code to create a flask server to create prediction jobs on a Kubernetes cluster.
4 |
5 | The code in `service.py` uses the following endpoints:
6 |
 7 | * `process`: a POST request which takes a JSON object with 3 keys (see the example request at the end of this README):
 8 |   * `input`: GCS path to the input TFRecord file
 9 |   * `output`: GCS path to the output TFRecord file
10 |   * `name`: The job name on Kubernetes
11 | * `list`: a GET request with no parameters. It returns a list of all created jobs.
12 | * `delete`: a GET request with no parameters. It deletes all the created jobs, whether they are completed or not.
13 |
14 | **Note:** For simplicity, the `Dockerfile` in this directory expects the service account file to be available in this folder, and it will package it up within the docker image. In practice this approach is not recommended; we encourage you to follow best practices for attaching a service account to your container, such as mounting it as a Kubernetes secret.
15 |
16 | ### Build the Docker Image
17 |
18 | In order to build and register your docker image, you may run the following commands:
19 |
20 | ```bash
21 | export PROJECT_ID=<your-project-id>
22 | docker build . -t earth-server
23 | docker tag earth-server gcr.io/$PROJECT_ID/earth-server
24 | docker push gcr.io/$PROJECT_ID/earth-server
25 | ```
26 |
27 | ### Create the Service
28 |
29 | To create an endpoint in your kubernetes cluster:
30 |
31 | ```bash
32 | kubectl run landcover-predictor --image=gcr.io/$PROJECT_ID/earth-server --port 5000 -n kubeflow
33 | kubectl expose deployment landcover-predictor --type=LoadBalancer --port 7070 --target-port 5000 -n kubeflow
34 | ```
35 |
36 | Once the endpoint is created, you may find the IP address on the Kubernetes cluster page (under Services).
37 |
38 | ### Updating the Service
39 |
40 | To update the served docker image:
41 | ```bash
42 | kubectl set image deployment/landcover-predictor landcover-predictor=gcr.io/$PROJECT_ID/earth-server:v2 -n kubeflow
43 | ```
44 |
45 |
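46 | ### Example Request
47 |
48 | As a sketch, assuming the load balancer IP from the previous step is stored
49 | in `$SERVICE_IP` and the TFRecord paths point to files in your own bucket:
50 |
51 | ```bash
52 | # Create a prediction job.
53 | curl -X POST "http://$SERVICE_IP:7070/process" \
54 |   -H "Content-Type: application/json" \
55 |   -d '{
56 |     "input": "gs://your-bucket/regions/00000.tfrecord.gz",
57 |     "output": "gs://your-bucket/landcover/00000.tfrecord",
58 |     "name": "landcover-example"
59 |   }'
60 |
61 | # List the jobs created so far, then delete them all.
62 | curl "http://$SERVICE_IP:7070/list"
63 | curl "http://$SERVICE_IP:7070/delete"
64 | ```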
--------------------------------------------------------------------------------
/server/landsat_image.py:
--------------------------------------------------------------------------------
1 | import ee
2 | import math
3 |
4 | def get(year):
5 | # Create the Landsat 8 Surface Reflectance image.
6 | image = (
7 | ee.ImageCollection('LANDSAT/LC08/C01/T1_SR')
8 | .filterDate(f"{year}-1-1", f"{year}-12-31")
9 | .map(mask_clouds)
10 | .median()
11 | )
12 |
13 | # Normalize the band values to a range from 0 to 1.
14 | optical_bands = ['B1', 'B2', 'B3', 'B4', 'B5', 'B6', 'B7']
15 | max_optical_value = 10000
16 |
17 | thermal_bands = ['B10', 'B11']
18 | min_thermal_value = 273.15 # Kelvin, freezing point
19 | max_thermal_value = 373.15 # Kelvin, boiling point
20 |
21 | image = (
22 | image.select(optical_bands).divide(max_optical_value)
23 | .addBands(
24 | image.select(thermal_bands).multiply(0.1)
25 | .clamp(min_thermal_value, max_thermal_value)
26 | .subtract(min_thermal_value).multiply(0.01)
27 | )
28 | )
29 |
30 | # Add normalized elevation.
31 | max_elevation = 4373
32 | elevation = (
33 | ee.Image('JAXA/ALOS/AW3D30_V1_1')
34 | .select('AVE').divide(max_elevation).rename('elevation')
35 | )
36 | image = image.addBands(elevation)
37 |
38 | # Add normalized latitude band.
39 | max_angle = 90
40 | latitude = ee.Image.pixelLonLat().select(['latitude']).divide(max_angle)
41 | image = image.addBands(latitude)
42 |
43 | # Add normalized time projection bands.
44 | time_angle = ee.Date(f"{year}-1-1").getFraction('year').multiply(2 * math.pi)
45 | time_projected_x = ee.Image(time_angle.sin()).rename('time_x')
46 | time_projected_y = ee.Image(time_angle.cos()).rename('time_y')
47 | image = image.addBands(time_projected_x).addBands(time_projected_y)
48 |
49 | return image.double()
50 |
51 |
52 | def mask_clouds(image):
53 | qa = image.select('pixel_qa')
54 |
55 | cloud_shadow_bit = ee.Number(2**3).int()
56 | cloud_shadow_mask = qa.bitwiseAnd(cloud_shadow_bit).eq(0)
57 |
58 | clouds_bit = ee.Number(2**5).int()
59 | clouds_mask = qa.bitwiseAnd(clouds_bit).eq(0)
60 |
61 | return image.updateMask(cloud_shadow_mask.And(clouds_mask))
62 |
--------------------------------------------------------------------------------
/webapp/src/app/material-components/material-components.module.ts:
--------------------------------------------------------------------------------
1 | import { A11yModule } from '@angular/cdk/a11y';
2 | import { DragDropModule } from '@angular/cdk/drag-drop';
3 | import { ScrollingModule } from '@angular/cdk/scrolling';
4 | import { CdkStepperModule } from '@angular/cdk/stepper';
5 | import { CdkTableModule } from '@angular/cdk/table';
6 | import { CdkTreeModule } from '@angular/cdk/tree';
7 | import { NgModule } from '@angular/core';
8 | import {
9 | MatAutocompleteModule,
10 | MatBadgeModule,
11 | MatBottomSheetModule,
12 | MatButtonModule,
13 | MatButtonToggleModule,
14 | MatCardModule,
15 | MatCheckboxModule,
16 | MatChipsModule,
17 | MatDatepickerModule,
18 | MatDialogModule,
19 | MatDividerModule,
20 | MatExpansionModule,
21 | MatGridListModule,
22 | MatIconModule,
23 | MatInputModule,
24 | MatListModule,
25 | MatMenuModule,
26 | MatNativeDateModule,
27 | MatPaginatorModule,
28 | MatProgressBarModule,
29 | MatProgressSpinnerModule,
30 | MatRadioModule,
31 | MatRippleModule,
32 | MatSelectModule,
33 | MatSidenavModule,
34 | MatSliderModule,
35 | MatSlideToggleModule,
36 | MatSnackBarModule,
37 | MatSortModule,
38 | MatStepperModule,
39 | MatTableModule,
40 | MatTabsModule,
41 | MatToolbarModule,
42 | MatTooltipModule,
43 | MatTreeModule,
44 | } from '@angular/material';
45 |
46 | @NgModule({
47 | exports: [
48 | A11yModule,
49 | CdkStepperModule,
50 | CdkTableModule,
51 | CdkTreeModule,
52 | DragDropModule,
53 | MatAutocompleteModule,
54 | MatBadgeModule,
55 | MatBottomSheetModule,
56 | MatButtonModule,
57 | MatButtonToggleModule,
58 | MatCardModule,
59 | MatCheckboxModule,
60 | MatChipsModule,
61 | MatStepperModule,
62 | MatDatepickerModule,
63 | MatDialogModule,
64 | MatDividerModule,
65 | MatExpansionModule,
66 | MatGridListModule,
67 | MatIconModule,
68 | MatInputModule,
69 | MatListModule,
70 | MatMenuModule,
71 | MatNativeDateModule,
72 | MatPaginatorModule,
73 | MatProgressBarModule,
74 | MatProgressSpinnerModule,
75 | MatRadioModule,
76 | MatRippleModule,
77 | MatSelectModule,
78 | MatSidenavModule,
79 | MatSliderModule,
80 | MatSlideToggleModule,
81 | MatSnackBarModule,
82 | MatSortModule,
83 | MatTableModule,
84 | MatTabsModule,
85 | MatToolbarModule,
86 | MatTooltipModule,
87 | MatTreeModule,
88 | ScrollingModule,
89 | ]
90 | })
91 | export class MaterialComponentsModule { }
92 |
--------------------------------------------------------------------------------
/worker/main.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import os
4 | from google.cloud import pubsub
5 | from google.cloud import storage
6 |
7 | import tensorflow as tf
8 | tf.enable_eager_execution()
9 |
10 | PROJECT = os.environ['PROJECT']
11 | ML_ENGINE_TOPIC = os.environ['ML_ENGINE_TOPIC']
12 |
13 | subscriber = pubsub.SubscriberClient()
14 | storage_client = storage.Client()
15 |
16 |
17 | def run(ml_engine_file=None):
18 | if ml_engine_file:
19 | logging.info('Running for file: ' + ml_engine_file)
20 | json2tfrecord(ml_engine_file)
21 | else:
22 | logging.info("Listening...")
23 | future = subscriber.subscribe(
24 | subscription=subscriber.subscription_path(PROJECT, ML_ENGINE_TOPIC),
25 | callback=ml_engine_callback,
26 | )
27 | future.result()
28 |
29 |
30 | def ml_engine_callback(message):
31 | message.ack()
32 | json2tfrecord(message.data.decode('utf-8'))
33 |
34 |
35 | def json2tfrecord(ml_engine_file):
36 | logging.info('ml_engine_file: ' + ml_engine_file)
37 |
38 | # gs://<bucket>/ml-engine/<x>/<y>/<year>/<part>/prediction.results-00000-of-00001
39 | bucket, _, x, y, year, part, _ = ml_engine_file[len('gs://'):].split('/')
40 | landcover_file_prefix = 'landcover/{}/{}/{}/{}.tfrecord'.format(x, y, year, part)
41 | landcover_file = 'gs://{}/{}'.format(bucket, landcover_file_prefix)
42 | logging.info('landcover_file: ' + landcover_file)
43 |
44 | with tf.io.TFRecordWriter(landcover_file) as output_file:
45 | with tf.gfile.Open(ml_engine_file) as input_file:
46 | for line in input_file:
47 | # Make a patch tf.train.Example from prediction data.
48 | data = json.loads(line)
49 | patch = tf.convert_to_tensor(data['predictions'])
50 | print('patch')
51 | example = tf.train.Example(
52 | features=tf.train.Features(
53 | feature={
54 | 'landcover': tf.train.Feature(
55 | # int64_list=tf.train.Int64List(value=tf.reshape(patch, [-1]))
56 | bytes_list=tf.train.BytesList(value=tf.reshape(patch, [-1]))
57 | )
58 | }
59 | )
60 | )
61 | output_file.write(example.SerializeToString())
62 | logging.info('done: ' + landcover_file)
63 |
64 |
65 | if __name__ == '__main__':
66 | import argparse
67 |
68 | parser = argparse.ArgumentParser()
69 | parser.add_argument(
70 | '--ml-engine-file',
71 | type=str,
72 | default=None,
73 | )
74 | args = parser.parse_args()
75 |
76 | logging.basicConfig(level=logging.INFO)
77 | run(args.ml_engine_file)
78 |
--------------------------------------------------------------------------------
/server/region_classify.py:
--------------------------------------------------------------------------------
1 | import os
2 | import time
3 | from googleapiclient import discovery
4 | from googleapiclient import errors
5 |
6 | import config
7 |
8 | model = 'landcover'
9 | version = None
10 | batch_size = 16
11 | gce_region = 'us-central1'
12 | # max_workers = 50
13 |
14 |
15 | def run(x, y, year, part):
16 | input_path = f"gs://{config.BUCKET}/regions/{x}/{y}/{year}/{part:05}.tfrecord.gz"
17 | print(f"input_path: {input_path}")
18 |
19 | output_path = f"gs://{config.BUCKET}/ml-engine/{x}/{y}/{year}/{part:05}/"
20 | print(f"output_path: {output_path}")
21 |
22 | # Create a unique job name using the current timestamp.
23 | timestamp = time.strftime('%Y%m%d_%H%M%S', time.gmtime())
24 | job_id = f"{model}_{x}_{y}_{year}_{part}__{timestamp}"
25 |
26 | # Start building the request dictionary with required information.
27 | job_body = {
28 | 'jobId': job_id,
29 | 'predictionInput': {
30 | 'inputPaths': input_path,
31 | 'outputPath': output_path,
32 | 'dataFormat': 'tf-record-gzip',
33 | 'batchSize': batch_size,
34 | 'region': gce_region,
35 | # 'maxWorkerCount': max_workers,
36 | }
37 | }
38 |
39 | # Use the version if present, the model (its default version) if not.
40 | project_path = f"projects/{config.PROJECT}"
41 | print(f"project_path: {project_path}")
42 |
43 | model_path = f"{project_path}/models/{model}"
44 | print(f"model_path: {model_path}")
45 |
46 | if version:
47 | version_path = f"{model_path}/versions/{version}"
48 | job_body['predictionInput']['versionName'] = version_path
49 | print(f"version_path: {version_path}")
50 | else:
51 | job_body['predictionInput']['modelName'] = model_path
52 |
53 | # Create the ML Engine job request.
54 | ml = discovery.build('ml', 'v1')
55 | request = ml.projects().jobs().create(
56 | parent=project_path,
57 | body=job_body,
58 | )
59 |
60 | retry = True
61 | while retry:
62 | try:
63 | request.execute()
64 | print(f"Job requested, job_id: {job_id}")
65 | retry = False
66 |
67 | except errors.HttpError as error:
68 | # Something went wrong, print out some information.
69 | error_message = error._get_reason()
70 | print(error_message)
71 | quota_error = (
72 | 'The allowed Cloud ML quota for API calls in the '
73 | '"Job submission requests" group is exceeded'
74 | )
75 | if quota_error in error_message:
76 | # There is a quota for 60 requests every 60 seconds,
77 | # so try again in 61 seconds.
78 | time.sleep(61)
79 | retry = True
80 |
81 | return job_id
--------------------------------------------------------------------------------
/server/submit.py:
--------------------------------------------------------------------------------
1 | import ee
2 | import mercantile
3 | import os
4 |
5 | import config
6 | import landsat_image
7 |
8 |
9 | def region(x, y, start_year, end_year, dry_run=False):
10 | return [
11 | request_ee_task(x, y, year, dry_run)
12 | for year in range(start_year, end_year+1)
13 | ]
14 |
15 |
16 | def point(lng, lat, start_year, end_year, dry_run=False):
17 | tile = mercantile.tile(lng, lat, config.region_zoom_level)
18 | return region(tile.x, tile.y, start_year, end_year, dry_run)
19 |
20 |
21 | def bounds(west, south, east, north, start_year, end_year, dry_run=False):
22 | return [
23 | region(tile.x, tile.y, start_year, end_year, dry_run)
24 | for tile in mercantile.tiles(west, south, east, north, config.region_zoom_level)
25 | ]
26 |
27 |
28 | def tile(x, y, zoom, start_year, end_year, dry_run=False):
29 | xy_bounds = mercantile.xy_bounds(x, y, zoom)
30 | west, north = mercantile.lnglat(xy_bounds.left, xy_bounds.top)
31 | east, south = mercantile.lnglat(xy_bounds.right, xy_bounds.bottom)
32 | return bounds(west, south, east, north, start_year, end_year, dry_run)
33 |
34 |
35 | def request_ee_task(x, y, year, dry_run=False):
36 | # Get the region bounds to build a polygon.
37 | bounds = mercantile.bounds(x, y, config.region_zoom_level)
38 | north = bounds.north
39 | east = bounds.east + 0.1
40 | south = bounds.south - 0.1
41 | west = bounds.west
42 |
43 | # Start the task asynchronously to export to Google Cloud Storage.
44 | region_id = f"{x}-{y}-{year}"
45 | output_path_prefix = f"regions/{x}/{y}/{year}/"
46 | output_path = f"gs://{config.BUCKET}/{output_path_prefix}"
47 | task = ee.batch.Export.image.toCloudStorage(
48 | image=landsat_image.get(year),
49 | description=region_id,
50 | bucket=config.BUCKET,
51 | fileNamePrefix=output_path_prefix,
52 | region=[
53 | [east, north],
54 | [west, north],
55 | [west, south],
56 | [east, south],
57 | [east, north],
58 | ],
59 | scale=30,
60 | maxPixels=int(1e10),
61 | crs='EPSG:4326',
62 | fileFormat='TFRecord',
63 | formatOptions={
64 | 'patchDimensions': [256, 256],
65 | 'kernelSize': [32, 32],
66 | 'compressed': True,
67 | },
68 | maxWorkers=2000,
69 | )
70 |
71 | if dry_run:
72 | print(f"This is a dry run, task {task.id} will NOT be submitted.")
73 | elif config.bucket.blob(output_path_prefix + '00000.tfrecord.gz').exists():
74 | # A file already exists, that means an extraction is already in process.
75 | print(f"Skipping extraction, found: {output_path_prefix + '00000.tfrecord.gz'}")
76 | else:
77 | task_found = False
78 | for t in ee.batch.Task.list():
79 | if t.state in ('READY', 'RUNNING') and \
80 | t.task_type == 'EXTRACT_IMAGE' and \
81 | t.config['description'] == region_id:
82 | print(f"Skipping extraction, found task: {t.id}")
83 | task_found = True
84 | break
85 | if not task_found:
86 | task.start()
87 | print(f"{x}-{y}-{year}: started task {task.id} [{west},{south},{east},{north}]")
88 |
89 | return {
90 | 'task_id': task.id,
91 | 'output_path': output_path,
92 | 'north': north,
93 | 'east': east,
94 | 'south': south,
95 | 'west': west,
96 | }
97 |
--------------------------------------------------------------------------------
/prediction/serving/service.py:
--------------------------------------------------------------------------------
1 | import time
2 | import subprocess
3 | from flask import Flask
4 | from flask import request
5 | import yaml
6 | import json
7 |
8 | app = Flask(__name__)
9 | MODEL_DIR = 'gs://project-earth-kubeflow-model/landcover/4/metagraph'
10 | JOB_NAMES = []
11 |
12 |
13 | def make_job(name, in_file, output_file, yaml_file):
14 | global MODEL_DIR
15 | job = {'apiVersion': 'batch/v1',
16 | 'kind': 'Job',
17 | 'metadata': {'name': name},
18 | 'spec': {'backoffLimit': 1,
19 | 'template': {'spec': {'containers': [
20 | {'env': [{'name': 'GOOGLE_APPLICATION_CREDENTIALS',
21 | 'value': '/opt/project-earth-2018-sa.json'},
22 | {'name': 'MODELDIR',
23 | 'value': MODEL_DIR},
24 | {'name': 'INPUT',
25 | 'value': in_file},
26 | {'name': 'OUTPUT',
27 | 'value': output_file}],
28 | 'image': 'gcr.io/project-earth-2018/prediction',
29 | 'imagePullPolicy': 'Always',
30 | 'name': name,
31 | 'resources': {
32 | 'requests': {'memory': '4G', 'cpu': '2000m'}},
33 | 'volumeMounts': [
34 | {'mountPath': '/secret/gcp-credentials',
35 | 'name': 'gcp-credentials',
36 | 'readOnly': True}]}],
37 | 'restartPolicy': 'Never',
38 | 'volumes': [
39 | {'name': 'gcp-credentials',
40 | 'secret': {
41 | 'secretName': 'user-gcp-sa'}
42 | }
43 | ]
44 | }
45 | }
46 | }
47 | }
48 |
49 | with open(yaml_file, 'w') as f:
50 | yaml.dump(job, f)
51 |
52 |
53 | @app.route('/process', methods=['POST'])
54 | def process_tfrecord():
55 | global JOB_NAMES
56 | data = request.get_json(silent=True)
57 | start_time = int(time.time() * 10)
58 | name = data.get('name', 'landcover-{}'.format(start_time))
59 | in_file = data['input']
60 | out_file = data['output']
61 | yaml_file = '/tmp/{}.yaml'.format(name)
62 | make_job(name, in_file, out_file, yaml_file)
63 | p = subprocess.Popen(
64 | ["kubectl", "-n", "kubeflow", "apply", "-f", yaml_file],
65 | stdout=subprocess.PIPE,
66 | stderr=subprocess.PIPE)
67 | output = p.communicate()
68 | JOB_NAMES.append(name)
69 | return json.dumps({'workload_name': name, 'output': str(output)})
70 |
71 |
72 | @app.route('/delete', methods=['GET'])
73 | def delete_jobs():
74 | global JOB_NAMES
75 | n = len(JOB_NAMES)
76 | for jn in JOB_NAMES:
77 | subprocess.call(["kubectl", "delete", "job", jn, "-n", "kubeflow"])
78 | JOB_NAMES.clear()
79 | return json.dumps({'deleted_jobs': n})
80 |
81 |
82 | @app.route('/list', methods=['GET'])
83 | def list_jobs():
84 | return json.dumps({'jobs': JOB_NAMES})
85 |
86 |
87 | if __name__ == '__main__':
88 | app.run(debug=True, host='0.0.0.0', port=5000)
89 |
--------------------------------------------------------------------------------
/webapp/tslint.json:
--------------------------------------------------------------------------------
1 | {
2 | "rulesDirectory": [
3 | "codelyzer"
4 | ],
5 | "rules": {
6 | "arrow-return-shorthand": true,
7 | "callable-types": true,
8 | "class-name": true,
9 | "comment-format": [
10 | true,
11 | "check-space"
12 | ],
13 | "curly": true,
14 | "deprecation": {
15 | "severity": "warn"
16 | },
17 | "eofline": true,
18 | "forin": true,
19 | "import-blacklist": [
20 | true,
21 | "rxjs/Rx"
22 | ],
23 | "import-spacing": true,
24 | "indent": [
25 | true,
26 | "spaces"
27 | ],
28 | "interface-over-type-literal": true,
29 | "label-position": true,
30 | "max-line-length": [
31 | true,
32 | 140
33 | ],
34 | "member-access": false,
35 | "member-ordering": [
36 | true,
37 | {
38 | "order": [
39 | "static-field",
40 | "instance-field",
41 | "static-method",
42 | "instance-method"
43 | ]
44 | }
45 | ],
46 | "no-arg": true,
47 | "no-bitwise": true,
48 | "no-console": [
49 | true,
50 | "debug",
51 | "info",
52 | "time",
53 | "timeEnd",
54 | "trace"
55 | ],
56 | "no-construct": true,
57 | "no-debugger": true,
58 | "no-duplicate-super": true,
59 | "no-empty": false,
60 | "no-empty-interface": true,
61 | "no-eval": true,
62 | "no-inferrable-types": [
63 | true,
64 | "ignore-params"
65 | ],
66 | "no-misused-new": true,
67 | "no-non-null-assertion": true,
68 | "no-redundant-jsdoc": true,
69 | "no-shadowed-variable": true,
70 | "no-string-literal": false,
71 | "no-string-throw": true,
72 | "no-switch-case-fall-through": true,
73 | "no-trailing-whitespace": true,
74 | "no-unnecessary-initializer": true,
75 | "no-unused-expression": true,
76 | "no-use-before-declare": true,
77 | "no-var-keyword": true,
78 | "object-literal-sort-keys": false,
79 | "one-line": [
80 | true,
81 | "check-open-brace",
82 | "check-catch",
83 | "check-else",
84 | "check-whitespace"
85 | ],
86 | "prefer-const": true,
87 | "quotemark": [
88 | true,
89 | "single"
90 | ],
91 | "radix": true,
92 | "semicolon": [
93 | true,
94 | "always"
95 | ],
96 | "triple-equals": [
97 | true,
98 | "allow-null-check"
99 | ],
100 | "typedef-whitespace": [
101 | true,
102 | {
103 | "call-signature": "nospace",
104 | "index-signature": "nospace",
105 | "parameter": "nospace",
106 | "property-declaration": "nospace",
107 | "variable-declaration": "nospace"
108 | }
109 | ],
110 | "unified-signatures": true,
111 | "variable-name": false,
112 | "whitespace": [
113 | true,
114 | "check-branch",
115 | "check-decl",
116 | "check-operator",
117 | "check-separator",
118 | "check-type"
119 | ],
120 | "no-output-on-prefix": true,
121 | "use-input-property-decorator": true,
122 | "use-output-property-decorator": true,
123 | "use-host-property-decorator": true,
124 | "no-input-rename": true,
125 | "no-output-rename": true,
126 | "use-life-cycle-interface": true,
127 | "use-pipe-transform-interface": true,
128 | "component-class-suffix": true,
129 | "directive-class-suffix": true
130 | }
131 | }
132 |
--------------------------------------------------------------------------------
/webapp/src/polyfills.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * This file includes polyfills needed by Angular and is loaded before the app.
3 | * You can add your own extra polyfills to this file.
4 | *
5 | * This file is divided into 2 sections:
6 | * 1. Browser polyfills. These are applied before loading ZoneJS and are sorted by browsers.
7 | * 2. Application imports. Files imported after ZoneJS that should be loaded before your main
8 | * file.
9 | *
10 | * The current setup is for so-called "evergreen" browsers; the last versions of browsers that
11 | * automatically update themselves. This includes Safari >= 10, Chrome >= 55 (including Opera),
12 | * Edge >= 13 on the desktop, and iOS 10 and Chrome on mobile.
13 | *
14 | * Learn more in https://angular.io/guide/browser-support
15 | */
16 |
17 | /***************************************************************************************************
18 | * BROWSER POLYFILLS
19 | */
20 |
21 | /** IE9, IE10, IE11, and Chrome <55 require all of the following polyfills.
22 |  * This also includes Android emulators with older versions of Chrome, and Google Search/Googlebot.
23 | */
24 |
25 | // import 'core-js/es6/symbol';
26 | // import 'core-js/es6/object';
27 | // import 'core-js/es6/function';
28 | // import 'core-js/es6/parse-int';
29 | // import 'core-js/es6/parse-float';
30 | // import 'core-js/es6/number';
31 | // import 'core-js/es6/math';
32 | // import 'core-js/es6/string';
33 | // import 'core-js/es6/date';
34 | // import 'core-js/es6/array';
35 | // import 'core-js/es6/regexp';
36 | // import 'core-js/es6/map';
37 | // import 'core-js/es6/weak-map';
38 | // import 'core-js/es6/set';
39 |
40 | /** IE10 and IE11 require the following for NgClass support on SVG elements */
41 | // import 'classlist.js'; // Run `npm install --save classlist.js`.
42 |
43 | /** IE10 and IE11 require the following for the Reflect API. */
44 | // import 'core-js/es6/reflect';
45 |
46 | /**
47 | * Web Animations `@angular/platform-browser/animations`
48 | * Only required if AnimationBuilder is used within the application and using IE/Edge or Safari.
49 | * Standard animation support in Angular DOES NOT require any polyfills (as of Angular 6.0).
50 | */
51 | // import 'web-animations-js'; // Run `npm install --save web-animations-js`.
52 |
53 | /**
54 |  * By default, zone.js patches all possible macroTasks and DOM events.
55 |  * Parts of the macroTask/DOM-event patching can be disabled with the flags below,
56 |  * but the flags need to be set before `zone.js` is loaded, and webpack hoists
57 |  * imports to the top of the bundle. So create a separate file in this directory
58 |  * (for example: zone-flags.ts), put the flags into that file, and then add the
59 |  * following import before importing zone.js:
60 | * import './zone-flags.ts';
61 | *
62 | * The flags allowed in zone-flags.ts are listed here.
63 | *
64 | * The following flags will work for all browsers.
65 | *
66 |  * (window as any).__Zone_disable_requestAnimationFrame = true; // disable patching requestAnimationFrame
67 |  * (window as any).__Zone_disable_on_property = true; // disable patching on-properties such as onclick
68 |  * (window as any).__zone_symbol__BLACK_LISTED_EVENTS = ['scroll', 'mousemove']; // disable patching the named events
69 | *
70 |  * In IE/Edge developer tools, addEventListener is also wrapped by zone.js.
71 |  * With the following flag, it will bypass the `zone.js` patch for IE/Edge:
72 | *
73 | * (window as any).__Zone_enable_cross_context_check = true;
74 | *
75 | */
76 |
77 | /***************************************************************************************************
78 | * Zone JS is required by default for Angular itself.
79 | */
80 | import 'zone.js/dist/zone'; // Included with Angular CLI.
81 |
82 |
83 | /***************************************************************************************************
84 | * APPLICATION IMPORTS
85 | */
86 |
--------------------------------------------------------------------------------
/server/templates/index.html:
--------------------------------------------------------------------------------
1 | <!DOCTYPE html>
2 | <html>
3 | <head>
4 |   <meta charset="utf-8">
5 |   <title>Project Earth Server</title>
6 | </head>
7 | <body>
8 |   <h1>Project Earth Server</h1>
9 |
10 |   <h2>Environment</h2>
11 |   <pre>
12 | PROJECT={{project}}
13 | BUCKET={{bucket}}
14 | ASSET_ID={{asset_id}}
15 | REGION_ZOOM_LEVEL={{zoom_level}}
16 |   </pre>
17 |   <p>
18 |     The world is subdivided into 2^zoom_level x 2^zoom_level regions.
19 |     {{2**zoom_level}} x {{2**zoom_level}} = {{2**(zoom_level*2)}} regions.
20 |   </p>
21 |
22 |   <h2>Submit</h2>
23 |   <!-- Form fields follow the /submit/* routes in main.py. -->
24 |   <form action="/submit/region" method="post">
25 |     x <input type="text" name="x">
26 |     y <input type="text" name="y">
27 |     start_year <input type="text" name="start_year">
28 |     end_year <input type="text" name="end_year">
29 |     dry_run <input type="checkbox" name="dry_run" value="yes">
30 |     <input type="submit" value="Submit region">
31 |   </form>
32 |   <form action="/submit/point" method="post">
33 |     lng <input type="text" name="lng">
34 |     lat <input type="text" name="lat">
35 |     start_year <input type="text" name="start_year">
36 |     end_year <input type="text" name="end_year">
37 |     dry_run <input type="checkbox" name="dry_run" value="yes">
38 |     <input type="submit" value="Submit point">
39 |   </form>
40 |   <form action="/submit/bounds" method="post">
41 |     west <input type="text" name="west">
42 |     south <input type="text" name="south">
43 |     east <input type="text" name="east">
44 |     north <input type="text" name="north">
45 |     start_year <input type="text" name="start_year">
46 |     end_year <input type="text" name="end_year">
47 |     dry_run <input type="checkbox" name="dry_run" value="yes">
48 |     <input type="submit" value="Submit bounds">
49 |   </form>
50 |   <form action="/submit/tile" method="post">
51 |     x <input type="text" name="x">
52 |     y <input type="text" name="y">
53 |     zoom <input type="text" name="zoom">
54 |     start_year <input type="text" name="start_year">
55 |     end_year <input type="text" name="end_year">
56 |     dry_run <input type="checkbox" name="dry_run" value="yes">
57 |     <input type="submit" value="Submit tile">
58 |   </form>
59 |
60 |   <h2>Tools</h2>
61 |   <ul>
62 |     <li>
63 |       <form action="/check-progress" method="post">
64 |         x <input type="text" name="x">
65 |         y <input type="text" name="y">
66 |         <input type="submit" value="Check progress">
67 |       </form>
68 |     </li>
69 |     <li>
70 |       <form action="/list-ee-tasks" method="post">
71 |         <input type="submit" value="List EE tasks">
72 |       </form>
73 |     </li>
74 |     <li>
75 |       <form action="/clear-image-collection" method="post">
76 |         <input type="submit" value="Clear image collection">
77 |       </form>
78 |     </li>
79 |   </ul>
80 | </body>
81 | </html>
--------------------------------------------------------------------------------
/webapp/src/app/app.component.ts:
--------------------------------------------------------------------------------
1 | /// <reference types="googlemaps" />
2 |
3 | import { Component } from '@angular/core';
4 |
5 | import { DemoLocation, makeDemoLocations } from './locations';
6 | import { ServerService } from './server.service';
7 |
8 | class Overlay {
9 | constructor(
10 | public landsat: google.maps.ImageMapType,
11 | public landcover: google.maps.ImageMapType,
12 | ) { }
13 | }
14 |
15 | @Component({
16 | selector: 'app-root',
17 | templateUrl: './app.component.html',
18 | styleUrls: ['./app.component.scss']
19 | })
20 | export class AppComponent {
21 | title = 'Pick a location';
22 | state = {
23 | lat: 37.8195428011924,
24 | lng: -122.49165319668896,
25 | zoom: 13,
26 | }
27 |
28 | // Locations
29 | locations: DemoLocation[] = []
30 |
31 | // Yearly animation
32 | readonly startYear = 2013
33 | readonly endYear = 2018
34 | year = this.startYear
35 |   overlays = new Map<number, Overlay>() // year -> Overlay
36 | yearChangeInterval = 1200 // milliseconds
37 | animationTimer: NodeJS.Timer | null = null
38 | landcoverOn = 1.0
39 |
40 | constructor(private readonly server: ServerService) { }
41 |
42 | // @ts-ignore: uninitialized value, gets initialized at onMapReady.
43 | setLocation: (location: DemoLocation) => void
44 |
45 | // @ts-ignore: uninitialized value, gets initialized at onMapReady.
46 | updateOverlays: () => void
47 |
48 | // @ts-ignore: uninitialized value, gets initialized at onMapReady.
49 | toggleAnimation: (start: boolean) => void
50 |
51 | onMapReady($map: google.maps.Map) {
52 | // Initialize functions with closures to include a reference to $map.
53 | this.initMapMethods($map)
54 |
55 | // Set the map markers for all the locations.
56 | this.locations = makeDemoLocations()
57 | this.setLocation(this.locations[0])
58 |
59 | // Initialize the landsat and landcover overlays for every year.
60 | for (let year = this.startYear; year <= this.endYear; year++) {
61 |       const overlay = {
62 | landsat: new google.maps.ImageMapType({
63 | getTileUrl: (tile, zoom) => {
64 | return this.server.landsatTileURL(tile.x, tile.y, zoom, year)
65 | },
66 | tileSize: new google.maps.Size(256, 256),
67 | }),
68 | landcover: new google.maps.ImageMapType({
69 | getTileUrl: (tile, zoom) => {
70 | return this.server.landcoverTileURL(tile.x, tile.y, zoom, year)
71 | },
72 | tileSize: new google.maps.Size(256, 256),
73 | }),
74 | }
75 | this.overlays.set(year, overlay)
76 | }
77 |
78 | // Load the landcover overlays first.
79 |     for (const [, overlay] of this.overlays) {
80 | $map.overlayMapTypes.push(overlay.landcover)
81 | }
82 |
83 | // Then load the landsat overlays.
84 |     for (const [, overlay] of this.overlays) {
85 | $map.overlayMapTypes.push(overlay.landsat)
86 | }
87 |
88 | this.updateOverlays()
89 |
90 | // Start the timelapse animation.
91 | this.toggleAnimation(true)
92 | }
93 |
94 | initMapMethods(map: google.maps.Map) {
95 | this.setLocation = (location: DemoLocation) => {
96 | map.setZoom(location.zoom)
97 | map.panTo(location.coords)
98 |
99 | // Restrict the user movements to stay in bounds.
100 | map.set('restriction', {
101 | latLngBounds: location.bounds,
102 | strictBounds: true,
103 | })
104 |
105 | map.setZoom(location.zoom)
106 | map.panTo(location.coords)
107 | }
108 |
109 | this.updateOverlays = () => {
110 |       // `this.year` is updated from the [(value)]="year" binding in the template.
111 |       for (const [year, overlay] of this.overlays) {
112 | if (this.landcoverOn) {
113 | overlay.landsat.setOpacity(0)
114 | overlay.landcover.setOpacity(year <= this.year ? 1 : 0)
115 | } else {
116 | overlay.landsat.setOpacity(year <= this.year ? 1 : 0)
117 | overlay.landcover.setOpacity(0)
118 | }
119 | }
120 | }
121 |
122 | this.toggleAnimation = (start: boolean) => {
123 | if (start) {
124 | this.animationTimer = setInterval(() => {
125 | this.year++
126 | if (this.year > this.endYear)
127 | this.year = this.startYear
128 | this.updateOverlays()
129 | }, this.yearChangeInterval)
130 | } else if (this.animationTimer) {
131 | clearInterval(this.animationTimer)
132 | }
133 | }
134 | }
135 | }
136 |
--------------------------------------------------------------------------------
/webapp/angular.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "./node_modules/@angular/cli/lib/config/schema.json",
3 | "version": 1,
4 | "newProjectRoot": "projects",
5 | "projects": {
6 | "project-earth": {
7 | "root": "",
8 | "sourceRoot": "src",
9 | "projectType": "application",
10 | "prefix": "app",
11 | "schematics": {
12 | "@schematics/angular:component": {
13 | "styleext": "scss"
14 | }
15 | },
16 | "architect": {
17 | "build": {
18 | "builder": "@angular-devkit/build-angular:browser",
19 | "options": {
20 | "outputPath": "dist/project-earth",
21 | "index": "src/index.html",
22 | "main": "src/main.ts",
23 | "polyfills": "src/polyfills.ts",
24 | "tsConfig": "src/tsconfig.app.json",
25 | "assets": [
26 | "src/favicon.ico",
27 | "src/assets"
28 | ],
29 | "styles": [
30 | "src/styles.scss"
31 | ],
32 | "scripts": []
33 | },
34 | "configurations": {
35 | "production": {
36 | "fileReplacements": [
37 | {
38 | "replace": "src/environments/environment.ts",
39 | "with": "src/environments/environment.prod.ts"
40 | }
41 | ],
42 | "optimization": true,
43 | "outputHashing": "all",
44 | "sourceMap": false,
45 | "extractCss": true,
46 | "namedChunks": false,
47 | "aot": true,
48 | "extractLicenses": true,
49 | "vendorChunk": false,
50 | "buildOptimizer": true,
51 | "budgets": [
52 | {
53 | "type": "initial",
54 | "maximumWarning": "2mb",
55 | "maximumError": "5mb"
56 | }
57 | ]
58 | }
59 | }
60 | },
61 | "serve": {
62 | "builder": "@angular-devkit/build-angular:dev-server",
63 | "options": {
64 | "browserTarget": "project-earth:build"
65 | },
66 | "configurations": {
67 | "production": {
68 | "browserTarget": "project-earth:build:production"
69 | }
70 | }
71 | },
72 | "extract-i18n": {
73 | "builder": "@angular-devkit/build-angular:extract-i18n",
74 | "options": {
75 | "browserTarget": "project-earth:build"
76 | }
77 | },
78 | "test": {
79 | "builder": "@angular-devkit/build-angular:karma",
80 | "options": {
81 | "main": "src/test.ts",
82 | "polyfills": "src/polyfills.ts",
83 | "tsConfig": "src/tsconfig.spec.json",
84 | "karmaConfig": "src/karma.conf.js",
85 | "styles": [
86 | "src/styles.scss"
87 | ],
88 | "scripts": [],
89 | "assets": [
90 | "src/favicon.ico",
91 | "src/assets"
92 | ]
93 | }
94 | },
95 | "lint": {
96 | "builder": "@angular-devkit/build-angular:tslint",
97 | "options": {
98 | "tsConfig": [
99 | "src/tsconfig.app.json",
100 | "src/tsconfig.spec.json"
101 | ],
102 | "exclude": [
103 | "**/node_modules/**"
104 | ]
105 | }
106 | }
107 | }
108 | },
109 | "project-earth-e2e": {
110 | "root": "e2e/",
111 | "projectType": "application",
112 | "prefix": "",
113 | "architect": {
114 | "e2e": {
115 | "builder": "@angular-devkit/build-angular:protractor",
116 | "options": {
117 | "protractorConfig": "e2e/protractor.conf.js",
118 | "devServerTarget": "project-earth:serve"
119 | },
120 | "configurations": {
121 | "production": {
122 | "devServerTarget": "project-earth:serve:production"
123 | }
124 | }
125 | },
126 | "lint": {
127 | "builder": "@angular-devkit/build-angular:tslint",
128 | "options": {
129 | "tsConfig": "e2e/tsconfig.e2e.json",
130 | "exclude": [
131 | "**/node_modules/**"
132 | ]
133 | }
134 | }
135 | }
136 | }
137 | },
138 | "defaultProject": "project-earth"
139 | }
--------------------------------------------------------------------------------
/webapp/src/app/app.component.html:
--------------------------------------------------------------------------------
[Angular Material template (markup stripped in this dump). It renders: a toolbar showing {{title}}, the Google Map, a year slider bound to {{year}}, "Layers" and "Animation" controls (including a "Land cover" toggle), and a land-cover legend listing Water, Wetland, Forest, Grassland, Shrubland, Barren land, Farmland, Tundra, Snow / Ice, and Urban.]
--------------------------------------------------------------------------------
/server/main.py:
--------------------------------------------------------------------------------
1 | import flask
2 | import json
3 | from flask_caching import Cache
4 |
5 | import config
6 | import devops_tools
7 | import tile_landsat
8 | import tile_landcover
9 | import region_classify
10 | import region_upload
11 | import submit
12 |
13 | app = flask.Flask(__name__)
14 | app.config.from_mapping({
15 | "DEBUG": True, # some Flask specific configs
16 | "CACHE_TYPE": "simple", # Flask-Caching related configs
17 | "CACHE_DEFAULT_TIMEOUT": 300
18 | })
19 | cache = Cache(app)
20 |
21 |
22 | @app.route('/')
23 | def index():
24 | return flask.render_template(
25 | 'index.html',
26 | project=config.PROJECT,
27 | bucket=config.BUCKET,
28 | asset_id=config.ASSET_ID,
29 | zoom_level=config.region_zoom_level,
30 | )
31 |
32 |
33 | #===--- DevOps tools ---===#
34 | @app.route('/check-progress', methods=['POST'])
35 | def app_check_progress():
36 | args = flask.request.form
37 | x = int(args['x'])
38 | y = int(args['y'])
39 | result = dump(devops_tools.check_progress(x, y), pretty=True)
40 |     return f"""<pre>{result}</pre><a href="/">Go back</a>"""
41 |
42 |
43 | @app.route('/list-ee-tasks', methods=['POST'])
44 | def app_list_ee_tasks():
45 | result = dump(devops_tools.list_ee_tasks(), pretty=True)
46 |     return f"""<pre>{result}</pre><a href="/">Go back</a>"""
47 |
48 |
49 | @app.route('/clear-image-collection', methods=['POST'])
50 | def app_clear_image_collection():
51 | result = devops_tools.clear_image_collections()
52 |     return f"""<pre>{result}</pre><a href="/">Go back</a>"""
53 |
54 |
55 | #===--- tile ---===#
56 | @app.route('/tile/landsat/<int:x>/<int:y>/<int:zoom>/<int:year>')
57 | @cache.cached()
58 | def app_tile_landsat(x, y, zoom, year):
59 | return flask.redirect(tile_landsat.run(x, y, zoom, year))
60 |
61 |
62 | @app.route('/tile/landcover/<int:x>/<int:y>/<int:zoom>/<int:year>')
63 | @cache.cached()
64 | def app_tile_landcover(x, y, zoom, year):
65 | return flask.redirect(tile_landcover.run(x, y, zoom, year))
66 |
67 |
68 | #===--- region ---===#
69 | # @app.route('/region/classify', methods=['POST'])
70 | # def app_region_classify():
71 | # args = flask.request.args
72 | # return dump(region_classify.run(
73 | # x=int(args['x']),
74 | # y=int(args['y']),
75 | # year=int(args['year']),
76 | # part=int(args['part']),
77 | # ))
78 |
79 |
80 | @app.route('/region/upload', methods=['POST'])
81 | def app_region_upload():
82 | args = flask.request.args
83 | return dump(region_upload.run(
84 | x=int(args['x']),
85 | y=int(args['y']),
86 | year=int(args['year']),
87 | parts=int(args['parts']),
88 | ))
89 |
90 |
91 | #===--- submit ---===#
92 | @app.route('/submit/region', methods=['POST'])
93 | def app_submit_region():
94 | args = flask.request.form
95 | return dump(submit.region(
96 | x=int(args['x']),
97 | y=int(args['y']),
98 | start_year=int(args['start_year']),
99 | end_year=int(args['end_year']),
100 | dry_run=as_bool(args.get('dry_run', 'n')),
101 | ))
102 |
103 |
104 | @app.route('/submit/point', methods=['POST'])
105 | def app_submit_point():
106 | args = flask.request.form
107 | return dump(submit.point(
108 | lng=float(args['lng']),
109 | lat=float(args['lat']),
110 | start_year=int(args['start_year']),
111 | end_year=int(args['end_year']),
112 | dry_run=as_bool(args.get('dry_run', 'n')),
113 | ))
114 |
115 |
116 | @app.route('/submit/bounds', methods=['POST'])
117 | def app_submit_bounds():
118 | args = flask.request.form
119 | return dump(submit.bounds(
120 | west=float(args['west']),
121 | south=float(args['south']),
122 | east=float(args['east']),
123 | north=float(args['north']),
124 | start_year=int(args['start_year']),
125 | end_year=int(args['end_year']),
126 | dry_run=as_bool(args.get('dry_run', 'n')),
127 | ))
128 |
129 |
130 | @app.route('/submit/tile', methods=['POST'])
131 | def app_submit_tile():
132 | args = flask.request.form
133 | return dump(submit.tile(
134 | x=int(args['x']),
135 | y=int(args['y']),
136 | zoom=int(args['zoom']),
137 | start_year=int(args['start_year']),
138 | end_year=int(args['end_year']),
139 | dry_run=as_bool(args.get('dry_run', 'n')),
140 | ))
141 |
142 |
143 | #===--- helper functions ---===#
144 | def as_bool(string_value):
145 | return string_value.lower() in ('y', 'yes', 't', 'true', '1')
146 |
147 |
148 | def dump(data, pretty=False):
149 | if pretty:
150 | return json.dumps(data, indent=2, separators=(', ', ': '))
151 | return json.dumps(data, separators=(',', ':'))
152 |
153 |
154 | if __name__ == '__main__':
155 | app.run(host='127.0.0.1', debug=True)
156 |
--------------------------------------------------------------------------------
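
For a quick sanity check against a locally running server (python main.py serves on
127.0.0.1:5000 by default, assuming the Earth Engine and Cloud Storage credentials are
configured); the tile coordinates and point below are arbitrary examples:

    import requests

    server = 'http://127.0.0.1:5000'

    # Tile routes answer with a redirect to the rendered map tile.
    r = requests.get(f"{server}/tile/landsat/3/5/6/2018", allow_redirects=False)
    print(r.status_code, r.headers.get('Location'))

    # Submit routes take form-encoded POSTs; dry_run avoids launching real work.
    r = requests.post(f"{server}/submit/point", data={
        'lng': -122.49, 'lat': 37.82,
        'start_year': 2013, 'end_year': 2018,
        'dry_run': 'yes',
    })
    print(r.text)
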
/server/devops_tools.py:
--------------------------------------------------------------------------------
1 | import ee
2 | import os
3 | import subprocess as sp
4 | from collections import OrderedDict
5 |
6 | import config
7 |
8 |
9 | def check_progress(x, y):
10 | result = OrderedDict()
11 | result['x'] = x
12 | result['y'] = y
13 | result['config'] = {
14 | 'PROJECT': config.PROJECT,
15 | 'BUCKET': config.BUCKET,
16 | 'ASSET_ID': config.ASSET_ID,
17 | 'region_zoom_level': config.region_zoom_level,
18 | }
19 |
20 |     # List the recent Earth Engine tasks; used below to match asset ingest uploads.
21 | tasks = ee.batch.Task.list()
22 |
23 | # Analyze the contents for the regions directory.
24 | info = result['regions'] = OrderedDict()
25 | prefix = f"regions/{x}/{y}/"
26 | info['path'] = f"gs://{config.BUCKET}/{prefix}"
27 | for blob in config.bucket.list_blobs(prefix=prefix):
28 | abs_path = f"gs://{config.BUCKET}/{blob.name}"
29 | valid = True
30 | try:
31 | year, filename = blob.name[len(prefix):].split('/', 1)
32 | year = int(year)
33 | if year not in info:
34 | info[year] = OrderedDict()
35 | info[year]['tfrecord_files'] = 0
36 | info[year]['mixer_file'] = False
37 | if filename.endswith('.tfrecord.gz'):
38 | info[year]['tfrecord_files'] += 1
39 | elif filename == 'mixer.json':
40 | info[year]['mixer_file'] = True
41 | else:
42 | valid = False
43 |         except Exception:
44 | valid = False
45 | if not valid:
46 | if 'unknown_files' not in info:
47 | info['unknown_files'] = []
48 | info['unknown_files'] += [abs_path]
49 |
50 | # Analyze the contents for the ml-engine directory.
51 | info = result['ml-engine'] = OrderedDict()
52 | prefix = f"ml-engine/{x}/{y}/"
53 | info['path'] = f"gs://{config.BUCKET}/{prefix}"
54 | for blob in config.bucket.list_blobs(prefix=prefix):
55 | abs_path = f"gs://{config.BUCKET}/{blob.name}"
56 | valid = True
57 | try:
58 | year, _, filename = blob.name[len(prefix):].split('/', 2)
59 | year = int(year)
60 | if year not in info:
61 | info[year] = 0
62 | if filename.startswith('prediction.results'):
63 | info[year] += 1
64 | elif filename.startswith('prediction.errors'):
65 | if blob.size != 0:
66 | if 'errors' not in info:
67 | info['errors'] = []
68 | info['errors'] += [{
69 | 'file': abs_path,
70 | 'error': blob.download_as_string().decode('utf-8'),
71 | }]
72 | else:
73 | valid = False
74 |         except Exception:
75 | valid = False
76 | if not valid:
77 | if 'unknown_files' not in info:
78 | info['unknown_files'] = []
79 | info['unknown_files'] += [abs_path]
80 |
81 | # Analyze the contents for the landcover directory.
82 | info = result['landcover'] = OrderedDict()
83 | prefix = f"landcover/{x}/{y}/"
84 | info['path'] = f"gs://{config.BUCKET}/{prefix}"
85 | for blob in config.bucket.list_blobs(prefix=prefix):
86 | abs_path = f"gs://{config.BUCKET}/{blob.name}"
87 | valid = True
88 | try:
89 | year, filename = blob.name[len(prefix):].split('/', 1)
90 | year = int(year)
91 | if year not in info:
92 | info[year] = 0
93 | if filename.endswith('.tfrecord'):
94 | info[year] += 1
95 | else:
96 | valid = False
97 |         except Exception:
98 | valid = False
99 | if not valid:
100 | if 'unknown_files' not in info:
101 | info['unknown_files'] = []
102 | info['unknown_files'] += [abs_path]
103 |
104 | # Check the Earth Engine asset upload task.
105 | info = result['upload'] = OrderedDict()
106 | for task in tasks:
107 | if task.task_type == 'INGEST':
108 | # Example description:
109 | # "Asset ingestion: projects/project/landcover/46-97-2018"
110 | region_id = task.config['description'].rsplit('/', 1)[-1]
111 | task_x, task_y, year = [int(part) for part in region_id.split('-')]
112 | if task_x != x or task_y != y:
113 | continue
114 | if year not in info:
115 | info[year] = OrderedDict()
116 | info[year]['task_id'] = task.id
117 |
118 | return result
119 |
120 |
121 | def list_ee_tasks():
122 | return [
123 | str(task).lstrip('<').rstrip('>')
124 | for task in ee.batch.Task.list()
125 | ]
126 |
127 |
128 | def clear_image_collections():
129 | outputs = ''
130 | command = config.earthengine + ['rm', '-v', '-r', config.ASSET_ID]
131 | result = sp.run(command, capture_output=True)
132 | outputs += f">> {command}\n"
133 | outputs += f"{result.stdout.decode('utf-8')}"
134 | outputs += f"stderr: {result.stderr.decode('utf-8')}"
135 | outputs += "\n\n"
136 |
137 | command = config.earthengine + ['create', 'collection', config.ASSET_ID]
138 | result = sp.run(command, capture_output=True)
139 | outputs += f">> {command}\n"
140 | outputs += f"{result.stdout.decode('utf-8')}"
141 | outputs += f"stderr: {result.stderr.decode('utf-8')}"
142 | outputs += "\n\n"
143 |
144 | return outputs
145 |
--------------------------------------------------------------------------------
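
These helpers can also be driven from a Python shell. A rough sketch, assuming Earth
Engine credentials are configured, config.py points at a real project and bucket, and
using made-up region indices:

    import json

    import ee
    import devops_tools

    ee.Initialize()  # check_progress() calls ee.batch.Task.list() internally
    progress = devops_tools.check_progress(x=3, y=5)  # hypothetical region indices
    print(json.dumps(progress, indent=2, default=str))
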
/dispatch/main.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import requests
4 | from datetime import datetime
5 | # from google.cloud import pubsub
6 | from google.cloud import storage
7 |
8 | PROJECT = os.environ['PROJECT']
9 | # ML_ENGINE_TOPIC = os.environ['ML_ENGINE_TOPIC']
10 |
11 | server_url = f"https://server-dot-{PROJECT}.appspot.com"
12 | classifier_url = "http://35.222.125.215:7070"
13 |
14 | # Configure the Google Cloud client libraries.
15 | storage_client = storage.Client()
16 | # publisher = pubsub.PublisherClient()
17 | # ml_engine_topic = publisher.topic_path(PROJECT, ML_ENGINE_TOPIC)
18 |
19 |
20 | def dispatch(event, context):
21 | """Triggered by a change to a Cloud Storage bucket.
22 | Args:
23 | event (dict): Event payload.
24 | context (google.cloud.functions.Context): Metadata for the event.
25 | """
26 | bucket = event['bucket']
27 | file_path = event['name']
28 |
29 | # Ignore beam temporary files.
30 | if 'beam-temp' in file_path:
31 | return
32 |
33 | run(bucket, file_path)
34 |
35 |
36 | def run(bucket, file_path):
37 | print(f"===--- {file_path} ---===")
38 |
39 | abs_path = f"gs://{bucket}/{file_path}"
40 | dirname, x, y, year, filename = file_path.split('/', 4)
41 | if dirname == 'regions':
42 | handle_regions_file(abs_path, bucket, x, y, year, filename)
43 | # elif dirname == 'ml-engine':
44 | # handle_ml_engine_file(abs_path, bucket, x, y, year, filename)
45 | elif dirname == 'landcover':
46 | handle_landcover_file(abs_path, bucket, x, y, year, filename)
47 | else:
48 | print(f"No matching handler, ignoring: {abs_path}")
49 |
50 |
51 | def handle_regions_file(abs_path, bucket, x, y, year, filename):
52 | if filename.endswith('.tfrecord.gz'):
53 | # As data is extracted, .tfrecord.gz files start appearing.
54 | part = filename.split('.', 1)[0]
55 | timestamp = datetime.now().strftime('%Y%m%d-%H%M%S')
56 | job_id = f"landcover-{x}-{y}-{year}-{part}-{timestamp}"
57 | input_path = f"gs://{bucket}/regions/{x}/{y}/{year}/{filename}"
58 | output_path_prefix = f"landcover/{x}/{y}/{year}/{part}.tfrecord"
59 | output_path = f"gs://{bucket}/{output_path_prefix}"
60 | gcs_bucket = storage_client.bucket(bucket)
61 | if gcs_bucket.blob(output_path_prefix).exists():
62 | print(f"Skipping classification, landcover file exists: {output_path}")
63 | return
64 | requests.post(f"{classifier_url}/process", json={
65 | 'name': job_id,
66 | 'input': input_path,
67 | 'output': output_path,
68 | })
69 | print(f"Classification job requested: {abs_path}")
70 | print(f" job_id: {job_id}")
71 | print(f" input_path: {input_path}")
72 | print(f" output_path: {output_path}")
73 | else:
74 | print(f"No action for file, ignoring: {abs_path}")
75 |
76 |
77 | # def handle_ml_engine_file(abs_path, bucket, x, y, year, filename):
78 | # if 'prediction.results' in filename:
79 | # import tensorflow as tf
80 | # tf.enable_eager_execution()
81 |
82 | # # We are using a pre-computed Example header to make it faster
83 | # # since all the patches are the same shape.
84 | # example_header = b'\n\x9b\x80\x04\n\x97\x80\x04\n\tlandcover\x12\x88\x80\x04\x1a\x84\x80\x04\n\x80\x80\x04'
85 |
86 | # # These are the output files from ML Engine batch prediction.
87 | # # We publish them to ml_engine_topic to have the workers convert it to TFRecord.
88 | # part, _ = filename.split('/', 1)
89 | # ml_engine_file = abs_path
90 | # landcover_file = f"gs://{bucket}/landcover/{x}/{y}/{year}/{part}.tfrecord"
91 | # with tf.io.TFRecordWriter(landcover_file) as output_file:
92 | # with tf.gfile.Open(ml_engine_file) as input_file:
93 | # for line in input_file:
94 | # # Make a serialized tf.train.Example for all the patches.
95 | # data = json.loads(line)
96 | # patch = tf.convert_to_tensor(data['predictions'], tf.int8)
97 | # array_as_bytes = tf.reshape(patch, [-1]).numpy().tobytes()
98 | # serialized_example = example_header + array_as_bytes
99 | # output_file.write(serialized_example)
100 | # else:
101 | # print(f"No action for file, ignoring: {abs_path}")
102 |
103 |
104 | def handle_landcover_file(abs_path, bucket, x, y, year, filename):
105 | if filename.endswith('.tfrecord'):
106 | # These are the now converted results from ML Engine as TFRecords.
107 | # Check that all the files have finished.
108 | gcs_bucket = storage_client.bucket(bucket)
109 |
110 | regions_prefix = f"regions/{x}/{y}/{year}"
111 | total_parts = 0
112 | mixer_file_found = False
113 | for blob in gcs_bucket.list_blobs(prefix=regions_prefix):
114 | if blob.name.endswith('.tfrecord.gz'):
115 | total_parts += 1
116 | elif blob.name.endswith('mixer.json'):
117 | mixer_file_found = True
118 |
119 | if not mixer_file_found:
120 | print(f"Mixer file not found, extraction is not done: {abs_path}")
121 | return
122 |
123 | parts_found = 0
124 | for part in range(total_parts):
125 | landcover_prefix = f"landcover/{x}/{y}/{year}/{part:05}.tfrecord"
126 | if gcs_bucket.blob(landcover_prefix).exists():
127 | parts_found += 1
128 | print(f"{parts_found} of {total_parts} parts finished: {abs_path}")
129 | if parts_found == total_parts:
130 | print(f"All parts finished, requesting upload to Earth Engine: {abs_path}")
131 | request('region/upload', x=x, y=y, year=year, parts=total_parts)
132 | else:
133 | print(f"No action for file, ignoring: {abs_path}")
134 |
135 |
136 | def request(action, **kwargs):
137 |     # Send the POST and ignore the response body; callers don't use the result.
138 | url = f"{server_url}/{action}"
139 | print(f"POST {url} params={kwargs}")
140 | requests.post(url, params=kwargs)
141 |
142 |
143 | if __name__ == '__main__':
144 | import argparse
145 |
146 | parser = argparse.ArgumentParser()
147 | parser.add_argument('gcs_path', help='Google Cloud Storage path')
148 | args = parser.parse_args()
149 |
150 |     bucket, path_prefix = args.gcs_path.replace('gs://', '', 1).split('/', 1)  # lstrip('gs://') would strip characters, not the prefix
151 | gcs_bucket = storage_client.bucket(bucket)
152 | for blob in gcs_bucket.list_blobs(prefix=path_prefix):
153 | print(blob.name)
154 | run(bucket, blob.name)
155 |
--------------------------------------------------------------------------------
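
The entry point can be exercised locally with a synthetic event. A minimal sketch,
assuming Google Cloud credentials are available; PROJECT must be set before the module
is imported, and the bucket and object names are placeholders:

    import os

    os.environ.setdefault('PROJECT', 'my-project')  # hypothetical project ID
    from main import dispatch  # dispatch/main.py

    event = {
        'bucket': 'my-bucket',                         # hypothetical bucket
        'name': 'regions/3/5/2018/00000.tfrecord.gz',  # a freshly extracted shard
    }
    dispatch(event, None)  # the context argument is unused by this handler
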
/webapp/src/app/locations.ts:
--------------------------------------------------------------------------------
1 | export class DemoLocation {
2 | coords: google.maps.LatLng
3 | bounds: google.maps.LatLngBounds
4 | constructor(
5 | public name: string, public description: string,
6 | latLng: [number, number], public zoom: number,
7 | nesw: [number, number, number, number],
8 | ) {
9 | this.coords = new google.maps.LatLng(latLng[0], latLng[1])
10 | this.bounds = new google.maps.LatLngBounds(
11 | new google.maps.LatLng(nesw[2], nesw[3]),
12 | new google.maps.LatLng(nesw[0], nesw[1]),
13 | )
14 | }
15 | }
16 |
17 | export function makeDemoLocations() {
18 | return [
19 |
20 | // Not yet visible from Landsat 8
21 | // - Great Green Wall in Africa
22 | // - Grain-for-Green Project in China
23 | // - Amazon Reforestation in Brazil and Peru
24 |
25 | new DemoLocation(
26 | 'Boeng Peae Wildlife Sanctuary', 'Cambodia',
27 | [13.2066924,104.9906209], 11,
28 | [21.943045533438177, 106.975, 11.078401873711782, 95.625],
29 | ),
30 |
31 | new DemoLocation(
32 | 'Madre de Dios Mine', 'Peru',
33 | [-12.9227111,-69.9724235], 12,
34 | [0.0, -56.15, -16.73619187839766, -73.125],
35 | ),
36 |
37 | new DemoLocation(
38 | 'Rondônia', 'Brazil',
39 | [-9.708591,-64.3064042], 9,
40 | [0.0, -56.15, -16.73619187839766, -73.125],
41 | ),
42 |
43 | new DemoLocation(
44 | 'Lagos', 'Nigeria',
45 | [6.7369913,3.8314242], 10,
46 | [11.178401873711781, 5.725, 5.515985819155334, 0.0],
47 | ),
48 |
49 | new DemoLocation(
50 | 'Lake Urmia دریاچه ارومیه', 'Iran',
51 | [37.54009,45.94244], 10,
52 | [40.97989806962013, 50.725, 31.852162238024967, 39.375],
53 | ),
54 |
55 | new DemoLocation(
56 | 'United States', '',
57 | [38.8713334,-98.6173605], 3,
58 | [52.48278022207821, -61.775, 16.536191878397652, -129.375],
59 | ),
60 |
61 | // new DemoLocation(
62 | // 'Beijing 北京市', 'China',
63 | // [39.8865627,116.3692311], 9,
64 | // [45.089035564831015, 123.85, 36.49788913307021, 112.5],
65 | // ),
66 |
67 | // new DemoLocation(
68 | // 'Shenzhen 深圳市', 'Guangdong, China',
69 | // [22.6342193,113.8135933], 10,
70 | // [31.952162238024968, 123.85, 21.843045533438175, 106.875],
71 | // ),
72 |
73 | new DemoLocation(
74 | 'Suzhou 苏州市', 'Jiangsu, China',
75 | [31.226347,120.2169554], 10,
76 | [31.952162238024968, 123.85, 21.843045533438175, 106.875],
77 | ),
78 |
79 | // new DemoLocation(
80 | // 'Giza الجيزة', 'Al Omraneyah, Egypt',
81 | // [30.0265822,31.2736007], 11,
82 | // [31.952162238024968, 33.85, 26.959125784374052, 28.125],
83 | // ),
84 |
85 | new DemoLocation(
86 | 'Al Khiran الخيران', 'Kuwait',
87 | [28.6567696,48.2603528], 11,
88 | [31.952162238024968, 56.35, 21.843045533438175, 45.0],
89 | ),
90 |
91 | new DemoLocation(
92 | 'Cananea Mine', 'Sonora, Mexico',
93 | [30.9795476,-110.3203144], 12,
94 | [52.48278022207821, -61.775, 16.536191878397652, -129.375],
95 | ),
96 |
97 | // new DemoLocation(
98 | // 'Guadalajara', 'Jalisco, Mexico',
99 | // [20.4738452,-103.2796652], 10,
100 | // [52.48278022207821, -61.775, 16.536191878397652, -129.375],
101 | // ),
102 |
103 | // new DemoLocation(
104 | // 'Taupo', 'New Zealand',
105 | // [-38.9254622,175.9834146], 9,
106 | // [-31.952162238024968, 180.1, -49.02249926375824, 163.125],
107 | // ),
108 |
109 | new DemoLocation(
110 | 'Singapore', 'Singapore',
111 | [1.3492432,103.748114], 11,
112 | [5.615985819155334, 106.975, -0.1, 101.25],
113 | ),
114 |
115 | // new DemoLocation(
116 | // 'Istanbul', 'Turkey',
117 | // [41.1815404,28.7262407], 11,
118 | // [45.089035564831015, 33.85, 36.49788913307021, 22.5],
119 | // ),
120 |
121 | // new DemoLocation(
122 | // 'Cape Cod', 'Massachusetts',
123 | // [41.7988246, -70.5882979], 10,
124 | // [52.48278022207821, -61.775, 16.536191878397652, -129.375],
125 | // ),
126 |
127 | // new DemoLocation(
128 | // 'Las Vegas', 'Nevada',
129 | // [36.1251958, -115.1], 10,
130 | // [52.48278022207821, -61.775, 16.536191878397652, -129.375],
131 | // ),
132 |
133 | new DemoLocation(
134 | 'Dubai دبي', 'United Arab Emirates',
135 | [25.1566713,55.2189301], 10,
136 | [31.952162238024968, 56.35, 21.843045533438175, 45.0],
137 | ),
138 |
139 | ]
140 | }
141 |
142 |
143 | // Cologne, Germany
144 | // 6.435233992240934
145 | // 50.94999148361736
146 |
147 | // New Cairo City مدينة القاهرة الجديدة, Cairo Governorate, Egypt
148 | // Urban growth, very fast
149 | // 31.3470295
150 | // 30.0177916
151 |
152 | // Doha الدوحة, Qatar
153 | // Farming in the desert
154 | // 51.31268909575576
155 | // 25.19030787757275
156 |
157 | // Shenzhen
158 | // 113.1325497
159 | // 22.8628474
160 |
161 | // Houston
162 | // -95.44529631421166
163 | // 29.74140736826767
164 |
165 | // Brisbane, Queensland, Australia
166 | // Coast reshaping
167 | // 153.19638244061258
168 | // -27.412562979777352
169 |
170 | // Cananea Mine, Sonora, Mexico
171 | // Mining
172 | // -110.37536271621093
173 | // 30.97172792497669
174 |
175 | // Singapore
176 | // 103.564045
177 | // 1.3139843
178 |
179 | // Rwanda
180 | // 28.7589167
181 | // -1.9432847
182 |
183 | // Swiss Alps
184 | // 7.6055472
185 | // 46.4016925
186 |
187 | // Mexico City
188 | // -99.15498598857971
189 | // 19.410761813668095
190 |
191 | // Seattle
192 | // -122.34658356313494
193 | // 47.71131339800126
194 |
195 | // Vancouver
196 | // -122.50195172968668
197 | // 49.10252831912233
198 |
199 | // Oslo
200 | // 10.577184567656625
201 | // 59.927105532599036
202 |
203 | // Pompeii
204 | // 14.44492103112291
205 | // 40.785579197365585
206 |
207 | // Istanbul
208 | // 28.99432926475204
209 | // 41.09019648474241
210 |
211 | // Prypyat
212 | // 30.063155477083793
213 | // 51.37953821370403
214 |
215 | // Jakarta
216 | // 106.80738893418841
217 | // -6.129147133417564
218 |
219 | // Lena River
220 | // 124.9444087420382
221 | // 64.79987391232582
222 |
223 |
224 |
225 |
226 | // ------
227 |
228 |
229 | // Aral Sea, Orol Dengizi, Kazakhstan
230 | // Decreasing water level
231 | // 59.404562793604235
232 | // 45.200090904146826
233 |
234 | // Keller, Texas, United States (?)
235 | // Urban growth
236 | // -97.28903854596484
237 | // 32.929769555965095
238 |
239 | // Beijing 北京市, China
240 | // Urban growth, fast
241 | // 116.37269927441412
242 | // 39.922748854953895
243 |
244 | // Brisbane, Queensland, Australia
245 | // Coast reshaping
246 | // 153.19638244061258
247 | // -27.412562979777352
248 |
249 | // Busan 부산광역시, South Korea
250 | // Urban growth
251 | // 128.94304317363276
252 | // 35.150826985330916
253 |
254 | // New Cairo City مدينة القاهرة الجديدة, Cairo Governorate, Egypt
255 | // Urban growth, very fast
256 | // 31.3470295
257 | // 30.0177916
258 |
259 | // Cananea Mine, Sonora, Mexico
260 | // Mining
261 | // -110.37536271621093
262 | // 30.97172792497669
263 |
264 | // Chongqing 重庆市, China
265 | // Urban growth
266 | // 106.56895140628589
267 | // 29.514408488378315
268 |
269 | // Cologne Köln, Germany
270 | // Terraforming
271 | // 6.313014213291215
272 | // 50.94088068353434
273 |
274 | // Columbia Glacier, Alaska (?)
275 | // Decreasing ice levels
276 | // -149.99915419801934
277 | // 59.970415439023036
278 |
279 | // Dead Sea
280 | // Decreasing water levels
281 | // 35.46757539121945
282 | // 31.330154894320078
283 |
284 | // Doha الدوحة, Qatar
285 | // Farming in the desert
286 | // 51.31268909575576
287 | // 25.19030787757275
288 |
289 | // Drebkau, Germany
290 | // Terraforming
291 | // 14.240889368953527
292 | // 51.66589691482953
293 |
--------------------------------------------------------------------------------
/train-model.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import ee
3 | import os
4 | import tensorflow as tf
5 | from tensorflow import keras
6 |
7 | import landsat
8 | from classifier import config
9 |
10 | tf.enable_eager_execution()
11 |
12 |
13 | def extract_data(bucket, max_data_points=None):
14 | # Get the neighboring patch for every feature.
15 | def get_neighboring_patch(feature):
16 | # Get the start and end date from the date the feature was collected.
17 | date = ee.String(feature.get('image')).split('_').get(-1)
18 | year = ee.Number.parse(ee.String(date).slice(0, 4))
19 | month = ee.Number.parse(ee.String(date).slice(4, 6))
20 | day = ee.Number.parse(ee.String(date).slice(6, 8))
21 |
22 | # Get the image that matches where/when the point was extracted.
23 | image = landsat.get_image(year)
24 |
25 | # Set the label property to the expected name.
26 | feature = feature.set({config.label: feature.get('level1')})
27 |
28 | # Set the shard index from an initial random number from 0 to 1.
29 | feature = feature.set({'shard': (
30 | ee.Number(feature.get('shard'))
31 | .multiply(config.train_and_test_files)
32 | .toInt()
33 | )})
34 |
35 | # Return the feature as a feature collection of 1 element.
36 | return ee.FeatureCollection([
37 | image
38 | .neighborhoodToArray(ee.Kernel.square(config.cnn_padding))
39 | .sampleRegions(
40 | collection=ee.FeatureCollection([feature]),
41 | scale=30,
42 | )
43 | .first()
44 | ])
45 |
46 | # Get a patch of pixels for every data point in the Gong dataset.
47 | dataset = (
48 | ee.FeatureCollection('ft:1Olr1sJoEBs9mznunucZxk8M2a1Si8eMapOERn92K')
49 | .randomColumn('shard', 42)
50 | .map(get_neighboring_patch)
51 | .flatten()
52 | )
53 |
54 | if max_data_points is not None:
55 | dataset = dataset.limit(max_data_points)
56 |
57 | # Shard and write all the data points.
58 | print("Creating tasks...")
59 | tasks = [
60 | ee.batch.Export.table.toCloudStorage(
61 | collection=dataset.filter(ee.Filter.eq('shard', shard)),
62 | description=f"gong {shard+1} of {config.train_and_test_files}",
63 | bucket=bucket,
64 | fileNamePrefix=f"data/train/{shard}_",
65 | selectors=config.features + [config.label],
66 | fileFormat='TFRecord',
67 | )
68 | for shard in range(config.train_and_test_files)
69 | ]
70 | print(f"Starting {len(tasks)} tasks")
71 | for task in tasks:
72 | task.start()
73 |
74 | for task in tasks:
75 | print(f"Waiting for {task.config['description']} ({task.id})")
76 | landsat.wait_for_task(task.id)
77 |
78 |
79 | def make_dataset(filenames, batch_size, shuffle):
80 | # The features_dict is like a schema for the tf.Example protos.
81 | features_dict = {
82 | name: tf.FixedLenFeature(
83 | shape=[config.cnn_patch_size, config.cnn_patch_size], dtype=tf.float32)
84 | for name in config.features
85 | }
86 | features_dict[config.label] = tf.FixedLenFeature(
87 | shape=[1, 1], dtype=tf.float32)
88 |
89 | # Create a TFRecordDataset with all the files and apply the parsing function.
90 | dataset = (
91 | tf.data.TFRecordDataset(filenames, compression_type='GZIP')
92 | .map(
93 | lambda example: tf.parse_single_example(example, features_dict),
94 | num_parallel_calls=config.dataset_num_parallel_calls,
95 | )
96 | )
97 |
98 | # Get the input feature vectors and label vector.
99 | def get_feature_and_label_vectors(features_dict):
100 | label_value = tf.cast(features_dict.pop(config.label), tf.int32)
101 | label_vec = tf.one_hot(label_value, len(config.classifications))
102 | features_vec = [features_dict[name] for name in config.features]
103 | # (bands, x, y) -> (x, y, bands)
104 | features_vec = tf.transpose(features_vec, [1, 2, 0])
105 | return features_vec, label_vec
106 | dataset = dataset.map(get_feature_and_label_vectors)
107 |
108 | # Shuffle, repeat, and batch the elements.
109 | if shuffle:
110 | dataset = dataset.apply(
111 | tf.data.experimental.shuffle_and_repeat(batch_size * 16))
112 | else:
113 | dataset = dataset.repeat()
114 | dataset = dataset.batch(batch_size)
115 | dataset = dataset.prefetch(1)
116 | return dataset
117 |
118 |
119 | def train_files(bucket):
120 | return [
121 | f"gs://{bucket}/data/train/{shard}_ee_export.tfrecord.gz"
122 | for shard in range(config.train_files)
123 | ]
124 |
125 |
126 | def test_files(bucket):
127 | return [
128 | f"gs://{bucket}/data/train/{shard}_ee_export.tfrecord.gz"
129 | for shard in range(config.train_files, config.train_and_test_files)
130 | ]
131 |
132 |
133 | def train_dataset(bucket, batch_size=512, shuffle=True):
134 | return make_dataset(train_files(bucket), batch_size, shuffle)
135 |
136 |
137 | def test_dataset(bucket, batch_size=128, shuffle=False):
138 |     return make_dataset(test_files(bucket), batch_size, shuffle)
139 |
140 |
141 | def make_model():
142 | model = keras.models.Sequential([
143 | keras.layers.Conv2D(
144 | name='image',
145 | input_shape=(None, None, len(config.features)),
146 | filters=32,
147 | kernel_size=config.cnn_patch_size,
148 | activation='relu',
149 | ),
150 | keras.layers.Conv2DTranspose(
151 | name='output',
152 | filters=len(config.classifications),
153 | kernel_size=config.cnn_patch_size,
154 | activation='softmax',
155 | ),
156 | ])
157 |
158 | model.compile(
159 | # optimizer=keras.optimizers.Adam(),
160 | optimizer=tf.train.AdamOptimizer(),
161 | loss='categorical_crossentropy',
162 | metrics=['categorical_accuracy'],
163 | )
164 |
165 | return model
166 |
167 |
168 | def run(
169 | bucket,
170 | train_epochs,
171 | train_batches_per_epoch,
172 | test_batches,
173 | max_train_data_points=None,
174 | force_data_extract=False,
175 | ):
176 |     should_extract_data = force_data_extract
177 |     for filename in test_files(bucket) + train_files(bucket):
178 |         if not tf.gfile.Exists(filename):
179 |             print(f"Missing data file: {filename}")
180 |             should_extract_data = True
181 |             break
182 |
183 |     if should_extract_data:
184 |         extract_data(bucket, max_train_data_points)
185 |
186 | model = make_model()
187 |     model.summary()  # prints the layer summary itself
188 |
189 | print('Training model')
190 | model.fit(
191 | train_dataset(bucket),
192 | epochs=train_epochs,
193 | steps_per_epoch=train_batches_per_epoch,
194 | validation_data=test_dataset(bucket),
195 | validation_steps=test_batches,
196 | )
197 |
198 | print(f"Saving model to {config.model_file}")
199 | keras.models.save_model(model, config.model_file)
200 |
201 |
202 | if __name__ == '__main__':
203 | parser = argparse.ArgumentParser()
204 | parser.add_argument(
205 | '--bucket',
206 | type=str,
207 | required=True,
208 | )
209 | parser.add_argument(
210 | '--train-epochs',
211 | type=int,
212 | default=10,
213 | )
214 | parser.add_argument(
215 | '--train-batches-per-epoch',
216 | type=int,
217 | default=30,
218 | )
219 | parser.add_argument(
220 | '--test-batches',
221 | type=int,
222 | default=5,
223 | )
224 | parser.add_argument(
225 | '--max-train-data-points',
226 | type=int,
227 | default=None,
228 | )
229 |     parser.add_argument(
230 |         '--force-data-extract',
231 |         # type=bool would treat any non-empty string, including 'False', as True.
232 |         action='store_true',
233 |     )
234 | args = parser.parse_args()
235 |
236 | run(
237 | args.bucket,
238 | args.train_epochs,
239 | args.train_batches_per_epoch,
240 | args.test_batches,
241 | args.max_train_data_points,
242 | args.force_data_extract,
243 | )
244 |
--------------------------------------------------------------------------------
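
Training is driven entirely by the flags defined above. A typical invocation, with a
hypothetical bucket name:

    import subprocess

    # Train/test shards are extracted first if they are missing from
    # gs://<bucket>/data/train/ (or when --force-data-extract is passed).
    subprocess.run([
        'python', 'train-model.py',
        '--bucket', 'my-bucket',
        '--train-epochs', '10',
        '--train-batches-per-epoch', '30',
        '--test-batches', '5',
    ], check=True)
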
/prediction/single_prediction/single_predict.py:
--------------------------------------------------------------------------------
1 | import os
2 | import time
3 | import datetime
4 | import sys
5 | import tensorflow as tf
6 | slim = tf.contrib.slim
7 |
8 | SPATIAL_FEATURES_KEY = 'spatial_features'
9 | FEATURES_KEY = 'features'
10 | LABELS_KEY = 'labels'
11 |
12 | LANDCOVER_CLASSES_N = 10
13 |
14 | PREDICT_OUTPUT_L = 256
15 | PREDICT_PADDING = 16
16 |
17 |
18 | config = {'encoder_extra_convolutions': 1,
19 | 'encoder_channel_init': 29,
20 | 'encoder_channel_mul': 1.8030832537902222,
21 | 'encoder_core_n': 1,
22 | 'extra_block': True,
23 | 'classes': LANDCOVER_CLASSES_N}
24 |
25 |
26 | def blindspotv2_encoder(inputs, channel_in_n, channel_out_n, config):
27 | rdx = inputs
28 |
29 | for _ in range(0, 1 + config['encoder_extra_convolutions']):
30 | rdx = slim.separable_conv2d(rdx, num_outputs=channel_in_n)
31 |
32 | residual = slim.conv2d(inputs, num_outputs=channel_in_n, kernel_size=[1, 1])
33 |
34 | pre_downscale = tf.concat([rdx, residual], axis=3)
35 | return slim.separable_conv2d(pre_downscale,
36 | num_outputs=channel_out_n,
37 | stride=2)
38 |
39 |
40 | def blindspotv2_decoder(inputs, residual, residual_pre_n, channel_out_n, conf):
41 | newSize = [tf.shape(residual)[1], tf.shape(residual)[2]]
42 | block_scaled = tf.image.resize_images(inputs, newSize)
43 | block_scaled = slim.conv2d(
44 | block_scaled,
45 | kernel_size=[1, 1],
46 | num_outputs=channel_out_n)
47 |
48 | proc_res = slim.conv2d(
49 | residual,
50 | kernel_size=[1, 1],
51 | num_outputs=residual_pre_n)
52 | block_scaled = tf.concat([block_scaled, proc_res], axis=3)
53 | block_scaled = slim.separable_conv2d(
54 | block_scaled,
55 | num_outputs=channel_out_n)
56 | return block_scaled
57 |
58 |
59 | def get_channels(stage, config):
60 | return int(config['encoder_channel_init'] *
61 | pow(config['encoder_channel_mul'], stage))
62 |
63 |
64 | def blindspotv2(spatial_inputs, instance_inputs, training, config):
65 | with slim.arg_scope([slim.conv2d, slim.separable_conv2d],
66 | kernel_size=[3, 3],
67 | normalizer_fn=slim.batch_norm,
68 | normalizer_params={'is_training': training}):
69 | with slim.arg_scope([slim.separable_conv2d], depth_multiplier=1):
70 | input0 = slim.conv2d(spatial_inputs, kernel_size=[1, 1],
71 | num_outputs=get_channels(0, config))
72 | input1 = slim.conv2d(input0, num_outputs=get_channels(0, config),
73 | stride=2)
74 |
75 | block = input1
76 | block_first = blindspotv2_encoder(block,
77 | get_channels(1, config),
78 | get_channels(2, config),
79 | config)
80 | block = block_first
81 |
82 | block_last = blindspotv2_encoder(block,
83 | get_channels(2, config),
84 | get_channels(3, config),
85 | config)
86 |
87 | block = block_last
88 | if config['extra_block']:
89 | block = blindspotv2_encoder(block,
90 | get_channels(3, config),
91 | get_channels(4, config),
92 | config)
93 |
94 | core_channel_count = 3 + (1 if config['extra_block'] else 0)
95 |
96 | blockRows = tf.shape(block)[1]
97 | blockCols = tf.shape(block)[2]
98 | expanded_features = tf.expand_dims(instance_inputs, axis=1)
99 | expanded_features = tf.expand_dims(expanded_features, axis=2)
100 | expanded_features = tf.tile(expanded_features,
101 | [1, blockRows, blockCols, 1])
102 |
103 | block = tf.concat([expanded_features, block], axis=3)
104 | block_res = slim.conv2d(
105 | block,
106 | kernel_size=[1, 1],
107 | num_outputs=get_channels(core_channel_count, config))
108 |
109 | for i in range(0, config['encoder_core_n']):
110 | block = slim.separable_conv2d(
111 | block,
112 | num_outputs=get_channels(core_channel_count, config))
113 |
114 | block = tf.concat([block_res, block], axis=3)
115 | block = slim.conv2d(
116 | block,
117 | get_channels(core_channel_count, config),
118 | kernel_size=[1, 1])
119 |
120 | block_scaled = blindspotv2_decoder(block,
121 | block_first,
122 | get_channels(1, config),
123 | get_channels(
124 | core_channel_count - 1,
125 | config),
126 | config)
127 |
128 | block_scaled = blindspotv2_decoder(block_scaled,
129 | input0,
130 | get_channels(0, config),
131 | get_channels(
132 | core_channel_count - 2,
133 | config),
134 | config)
135 |
136 | block_scaled = slim.separable_conv2d(
137 | block_scaled,
138 | num_outputs=get_channels(core_channel_count - 2, config))
139 |
140 | return slim.conv2d(block_scaled, config['classes'],
141 | kernel_size=[1, 1],
142 | activation_fn=None)
143 |
144 |
145 | def create_model_fn(config):
146 | def model_fn(features, labels, mode):
147 | spatial = features['spatial']
148 | inp_features = features['fixed']
149 | mask = features['mask']
150 |
151 | output = blindspotv2(
152 | spatial, inp_features, mode == tf.estimator.ModeKeys.TRAIN, config)
153 |
154 | if mode == tf.estimator.ModeKeys.PREDICT:
155 | land_cover = tf.argmax(output, axis=3)
156 | land_cover = tf.slice(
157 | land_cover,
158 | [0, PREDICT_PADDING, PREDICT_PADDING],
159 | [-1, PREDICT_OUTPUT_L, PREDICT_OUTPUT_L])
160 | return tf.estimator.EstimatorSpec(
161 | mode=mode, predictions={'predictions': land_cover})
162 | return None
163 |
164 | return model_fn
165 |
166 |
167 | def predict_fn(file_names):
168 | ds = tf.data.TFRecordDataset(file_names, 'GZIP')
169 |
170 | side_l = PREDICT_OUTPUT_L + 2 * PREDICT_PADDING
171 |
172 | def parse(example_proto):
173 | feature_columns = {
174 | 'B1': tf.FixedLenFeature([side_l, side_l], dtype=tf.float32),
175 | 'B2': tf.FixedLenFeature([side_l, side_l], dtype=tf.float32),
176 | 'B3': tf.FixedLenFeature([side_l, side_l], dtype=tf.float32),
177 | 'B4': tf.FixedLenFeature([side_l, side_l], dtype=tf.float32),
178 | 'B5': tf.FixedLenFeature([side_l, side_l], dtype=tf.float32),
179 | 'B6': tf.FixedLenFeature([side_l, side_l], dtype=tf.float32),
180 | 'B7': tf.FixedLenFeature([side_l, side_l], dtype=tf.float32),
181 | 'B10': tf.FixedLenFeature([side_l, side_l], dtype=tf.float32),
182 | 'B11': tf.FixedLenFeature([side_l, side_l], dtype=tf.float32),
183 | 'latitude': tf.FixedLenFeature([side_l, side_l],
184 | dtype=tf.float32),
185 | 'elevation': tf.FixedLenFeature([side_l, side_l],
186 | dtype=tf.float32),
187 | }
188 | parsed_features = tf.parse_single_example(example_proto,
189 | feature_columns)
190 | centered = tf.stack([
191 | parsed_features['B1'],
192 | parsed_features['B2'],
193 | parsed_features['B3'],
194 | parsed_features['B4'],
195 | parsed_features['B5'],
196 | parsed_features['B6'],
197 | parsed_features['B7'],
198 | parsed_features['B10'],
199 | parsed_features['B11'],
200 | parsed_features['latitude'],
201 | parsed_features['elevation'],
202 | ],
203 | axis=0)
204 | spatial_features = tf.transpose(centered, [1, 2, 0])
205 | test_features = tf.stack([
206 | tf.constant(-0.8403242422804175),
207 | tf.constant(-0.5420840966453842)],
208 | axis=0)
209 | test_features = tf.reshape(test_features, [2])
210 | fixed_features = test_features
211 |
212 | return spatial_features, fixed_features
213 |
214 | ds = ds.map(parse, num_parallel_calls=5)
215 | ds = ds.batch(1)
216 |
217 | iterator = ds.make_one_shot_iterator()
218 | (spatial, fixed) = iterator.get_next()
219 | return {'spatial': spatial, 'fixed': fixed,
220 | 'mask': tf.ones([side_l, side_l, 1])}
221 |
222 |
223 | def make_example(pred_dict):
224 | return tf.train.Example(
225 | features=tf.train.Features(
226 | feature={
227 | 'p': tf.train.Feature(
228 | int64_list=tf.train.Int64List(
229 | value=pred_dict['predictions'].flatten()))
230 | }
231 | )
232 | )
233 |
234 |
235 | def predict(input_files, model_dir, output_file):
236 | now = datetime.datetime.now()
237 | print('Starting at: {}'.format(now.strftime("%Y-%m-%d %H:%M")))
238 | model = tf.estimator.Estimator(model_fn=create_model_fn(config),
239 | model_dir=model_dir)
240 |
241 | predictions = model.predict(
242 | input_fn=lambda: predict_fn(file_names=input_files))
243 |
244 |     MAX_RECORDS_PER_FILE = 100000
245 |
246 |     # Open the output file exactly once; reopening it inside a loop
247 |     # would truncate patches already written to the same path.
248 |     writer = tf.python_io.TFRecordWriter(output_file)
249 |     print('Writing file: {}'.format(output_file))
250 |     sys.stdout.flush()
251 |     total_patches = 0
252 |     try:
253 |         for pred_dict in predictions:
254 |             writer.write(make_example(pred_dict).SerializeToString())
255 |
256 |             total_patches += 1
257 |
258 |             if total_patches % 5 == 0:
259 |                 print('  Writing patch: {}'.format(total_patches))
260 |                 sys.stdout.flush()
261 |
262 |             if total_patches >= MAX_RECORDS_PER_FILE:
263 |                 print('Reached MAX_RECORDS_PER_FILE; stopping early.')
264 |                 break
265 |     except Exception as e:
266 |         # Log the error but fall through so the writer is closed.
267 |         print(str(e))
268 |     finally:
269 |         writer.close()
270 |
271 |
272 |
273 | print('Wrote: {} patches.'.format(total_patches))
274 |
275 |
276 | if __name__ == '__main__':
277 | start_time = time.time()
278 | model_dir = os.environ['MODELDIR']
279 | in_file = os.environ['INPUT']
280 | out_file = os.environ['OUTPUT']
281 |
282 | print('in_file: {}'.format(in_file))
283 | print('out_file: {}'.format(out_file))
284 | print('model_dir: {}'.format(model_dir))
285 |
286 | tf.logging.set_verbosity(tf.logging.DEBUG)
287 |
288 | predict([in_file, ], model_dir, out_file)
289 |
290 |     print('Time to finish predictions: {:.1f} seconds'.format(time.time() - start_time))
291 |
292 |
293 |
--------------------------------------------------------------------------------
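
The script is configured purely through environment variables (see its __main__ block).
A minimal sketch of running it, with placeholder paths:

    import os
    import subprocess

    env = dict(
        os.environ,
        MODELDIR='gs://my-bucket/model',  # trained Estimator checkpoint directory
        INPUT='gs://my-bucket/regions/3/5/2018/00000.tfrecord.gz',  # one input shard
        OUTPUT='/tmp/00000.tfrecord',  # where the predicted patches are written
    )
    subprocess.run(['python', 'single_predict.py'], env=env, check=True)
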