├── .nvmrc
├── Brewfile
├── test
├── mocha.opts
├── fixtures
│ ├── NE1_50M_SR.tif
│ ├── oin-buckets.json
│ ├── NE1_50M_SR.input.json
│ ├── upload-status.json
│ ├── NE1_50M_SR.output.json
│ ├── metadata.json
│ └── meta_db_objects.json
├── .setup.js
├── specs
│ ├── test_removeDuplicateVertices.js
│ ├── test_auth_getToken.js
│ ├── helper.js
│ ├── test_upload.js
│ ├── test_user.js
│ ├── test_user_jwt.js
│ ├── test_auth.js
│ ├── test_dronedeploy.js
│ ├── test_meta_update.js
│ └── test_meta.js
├── docker-compose.yml
├── integration
│ ├── helper.js
│ └── test_imagery.js
└── helper.js
├── Procfile
├── postgres
├── Dockerfile
└── schema.sql
├── .dockerignore
├── routes
├── root.js
├── analytics.js
├── auth.js
├── user.js
├── meta.js
└── uploads.js
├── .eslintrc
├── .gitignore
├── models
├── analytics.js
├── createToken.js
├── metaValidations.js
├── meta.js
└── user.js
├── index.js
├── .build_scripts
├── run_frontend_tests.sh
├── prepare_dotenv.sh
├── docs.sh
└── merge_images_into_user.js
├── controllers
├── user.js
├── analytics.js
└── meta.js
├── newrelic.js
├── docker-compose.production.yml
├── bin
└── transcoder.js
├── Dockerfile
├── monq
├── db.js
├── connection.js
└── queue.js
├── services
├── db.js
├── removeDuplicateVertices.js
├── server.js
├── s3_sync.js
└── transcoder.js
├── .github
└── workflows
│ └── deploy.yaml
├── docker-compose.staging.yml
├── .env.local.sample
├── plugins
├── response-meta.js
├── paginate.js
└── authentication.js
├── LICENSE
├── .env.sample
├── .travis.yml
├── CONTRIBUTING.md
├── package.json
├── docker-compose.yml
├── config.js
├── README.md
└── catalog-worker.js
/.nvmrc:
--------------------------------------------------------------------------------
1 | 6.10.2
2 |
--------------------------------------------------------------------------------
/Brewfile:
--------------------------------------------------------------------------------
1 | brew 'mongodb'
2 |
--------------------------------------------------------------------------------
/test/mocha.opts:
--------------------------------------------------------------------------------
1 | --require test/.setup.js
2 |
--------------------------------------------------------------------------------
/Procfile:
--------------------------------------------------------------------------------
1 | web: node index.js
2 | worker: node catalog-worker.js
3 | transcoder: node bin/transcoder.js
4 |
--------------------------------------------------------------------------------
/postgres/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM postgis/postgis:14-3.3
2 |
3 | COPY schema.sql /docker-entrypoint-initdb.d/
4 |
--------------------------------------------------------------------------------
/test/fixtures/NE1_50M_SR.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hotosm/oam-api/HEAD/test/fixtures/NE1_50M_SR.tif
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | Dockerfile
2 | node_modules
3 | newrelic_agent.log
4 | npm-debug.log
5 | .travis.yml
6 | .build_scripts
7 | .env
8 | .git
9 | local.env
10 |
--------------------------------------------------------------------------------
/routes/root.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | module.exports = [
4 | {
5 | method: 'GET',
6 | path: '/',
7 | handler: function (request, reply) {
8 | reply('The OAM API');
9 | }
10 | }
11 | ];
12 |
--------------------------------------------------------------------------------
/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "extends": ["standard", "standard-react"],
3 | "env": {
4 | "es6": true,
5 | "mocha": true
6 | },
7 | "rules": {
8 | "semi": [2, "always"],
9 | "no-extra-semi": 2,
10 | "semi-spacing": [2, { "before": false, "after": true }]
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/test/fixtures/oin-buckets.json:
--------------------------------------------------------------------------------
1 | {
2 | "nodes": [{
3 | "name": "Testing/Development Bucket",
4 | "contact": "info@hotosm.org",
5 | "locations": [
6 | {
7 | "type": "s3",
8 | "bucket_name": "oin-hotosm-staging"
9 | }
10 | ]
11 | }]
12 | }
13 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | bower_components
3 | .tmp
4 | *.log
5 | *.out
6 | *.pid
7 | npm-debug.log
8 | *~
9 | *#
10 | .DS_STORE
11 | .netbeans
12 | .env*
13 | !.env.sample
14 | !.env.local.sample
15 | .idea
16 | .node_history
17 | temp
18 | tmp
19 | scratch.js
20 | docs
21 | local.env
22 |
23 |
--------------------------------------------------------------------------------
/models/analytics.js:
--------------------------------------------------------------------------------
'use strict';

var mongoose = require('mongoose');

// One point-in-time snapshot of catalog-wide statistics, as served by
// GET /analytics (see routes/analytics.js for the field semantics).
// Presumably written periodically by the catalog worker -- TODO confirm.
var analyticsSchema = new mongoose.Schema({
  count: Number, // number of unique images in the catalog
  sensor_count: Number, // number of unique sensors
  provider_count: Number, // number of unique providers
  date: Date // when this snapshot was taken
});

module.exports = mongoose.model('analytics', analyticsSchema);
13 |
--------------------------------------------------------------------------------
/test/.setup.js:
--------------------------------------------------------------------------------
// Global mocha setup (required via test/mocha.opts).
// Forces the test environment and loads .env before any spec runs.
process.env.NODE_ENV = 'test'; // FIX: missing semicolon (eslint "semi": always)

require('dotenv').config();

// Reduce verbosity unless explicitly requested: silence console.info and
// console.debug so spec output stays readable. Set OAM_DEBUG=true to keep them.
if (process.env.OAM_DEBUG !== 'true') {
  ['info', 'debug'].forEach(function (method) {
    console[method] = function () {};
  });
}
12 |
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
'use strict';

// Load .env into process.env before anything else reads configuration.
require('dotenv').config();
// newrelic must be required as early as possible so it can instrument
// the modules loaded after it.
require('newrelic');

var config = require('./config');

var Conn = require('./services/db.js');
var Server = require('./services/server.js');

// Open the MongoDB connection. Note services/db.js connects in the
// constructor; start() only attaches the error/open logging handlers.
var db = new Conn();
db.start();

// Start the HTTP API on the configured port.
var server = new Server(config.port);
server.start();
16 |
--------------------------------------------------------------------------------
/.build_scripts/run_frontend_tests.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -e

# Clone the oam-browser frontend and run its integration suite against
# this API.
# Note that these run on BrowserStack and so need the BROWSERSTACK*
# credentials in the ENV. FRONTEND_VERSION selects the branch/tag to test.

# Quote expansions so paths/refs with spaces or empty values fail loudly
# instead of word-splitting.
pushd "$HOME"
git clone https://github.com/hotosm/oam-browser.git
cd oam-browser
git checkout "$FRONTEND_VERSION"
npm install
./test/integration/run.sh chrome
popd
14 |
15 |
--------------------------------------------------------------------------------
/controllers/user.js:
--------------------------------------------------------------------------------
1 | var Boom = require('boom');
2 |
3 | module.exports = {
4 | isOwnerOfRequestedObject: function (request, reply) {
5 | if (request.auth.credentials._id.toString() !== request.app.requestedObject.user.toString()) {
6 | reply(Boom.forbidden('Authenticated user does not have permission.'));
7 | return;
8 | }
9 | reply();
10 | }
11 | };
12 |
--------------------------------------------------------------------------------
/test/fixtures/NE1_50M_SR.input.json:
--------------------------------------------------------------------------------
1 | {
2 | "scenes": [
3 | {
4 | "title": "Natural Earth Image",
5 | "provider": "Natural Earth",
6 | "license": "CC-BY",
7 | "sensor": "Some Algorithm",
8 | "platform": "satellite",
9 | "acquisition_start": "2015-04-01T00:00:00.000",
10 | "acquisition_end": "2015-04-30T00:00:00.000",
11 | "tms": null,
12 | "urls": [
13 | "http://localhost:8080/fixtures/NE1_50M_SR.tif"
14 | ]
15 | }
16 | ]
17 | }
18 |
--------------------------------------------------------------------------------
/models/createToken.js:
--------------------------------------------------------------------------------
'use strict';

const jwt = require('jsonwebtoken');
const config = require('../config');

/**
 * Build a signed JWT for a user.
 *
 * @param {string} id - user id, stored in the token as `_id`
 * @param {string} name - display name
 * @param {string} email - stored as `contact_email`
 * @param {string|string[]} scope - authorization scope(s)
 * @param {string|number} expiration - passed straight through as `expiresIn`
 * @returns {string} the signed token
 */
function createToken (id, name, email, scope, expiration) {
  const payload = {
    _id: id,
    name: name,
    contact_email: email,
    scope
  };
  const signOptions = {
    algorithm: 'HS256',
    expiresIn: expiration
  };
  return jwt.sign(payload, config.jwtSecret, signOptions);
}

module.exports = createToken;
23 |
--------------------------------------------------------------------------------
/.build_scripts/prepare_dotenv.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -e

# Prepare a .env for CI runs, starting from the committed sample.
cp .env.sample .env

# docker-compose gives precedence to values in .env, so remove these empty values
# so that the ENV values can take their place.
sed -i '/AWS_ACCESS/d' .env
sed -i '/AWS_SECRET/d' .env
sed -i '/FACEBOOK_APP_ID/d' .env
sed -i '/FACEBOOK_APP_SECRET/d' .env

# Namespace the bucket, otherwise builds will find imagery from previous
# test runs.
sed -i '/OIN_BUCKET_PREFIX/d' .env
echo "OIN_BUCKET_PREFIX=$TRAVIS_BUILD_NUMBER" >> .env

# Verbose logging during CI so failures are diagnosable.
echo "OAM_DEBUG=true" >> .env
19 |
--------------------------------------------------------------------------------
/.build_scripts/docs.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
set -e # halt script on error

# Build docs and push to gh-pages, only for non-PR builds of the
# production branch.
# FIX: quote the variables -- with the originals unquoted, an unset
# TRAVIS_PULL_REQUEST/TRAVIS_BRANCH made `[` see a malformed expression
# (e.g. `[ = "false" ]`) and abort under `set -e`.
if [ "$TRAVIS_PULL_REQUEST" = "false" ] && [ "$TRAVIS_BRANCH" = "${PRODUCTION_BRANCH}" ]; then
  echo "Get ready, we're pushing to gh-pages!"
  npm run docs
  cd docs
  git init
  git config user.name "Travis-CI"
  git config user.email "travis@somewhere.com"
  git add .
  git commit -m "CI deploy to gh-pages"
  # --quiet keeps the GH_TOKEN-bearing URL out of the build log.
  git push --force --quiet "https://${GH_TOKEN}@${GH_REF}" master:gh-pages
else
  echo "Not a publishable branch so we're all done here"
fi
18 |
--------------------------------------------------------------------------------
/newrelic.js:
--------------------------------------------------------------------------------
/**
 * New Relic agent configuration.
 *
 * See lib/config.defaults.js in the agent distribution for a more complete
 * description of configuration variables and their potential values.
 *
 * NOTE(review): no license key is set here; presumably it is supplied via
 * the NEW_RELIC_LICENSE_KEY environment variable (see .env.local.sample) --
 * confirm before relying on this.
 */
exports.config = {
  /**
   * Array of application names.
   */
  app_name: ['oam-catalog'],
  logging: {
    /**
     * Level at which to log. 'trace' is most useful to New Relic when diagnosing
     * issues with the agent, 'info' and higher will impose the least overhead on
     * production applications.
     */
    level: 'info'
  }
};
21 |
22 |
--------------------------------------------------------------------------------
/docker-compose.production.yml:
--------------------------------------------------------------------------------
1 | version: "2"
2 |
3 | services:
4 | app:
5 | environment:
6 | - NODE_ENV=production
7 | env_file: .env
8 | build:
9 | context: .
10 | args:
11 | - NODE_ENV=production
12 | command: "false"
13 |
14 | api:
15 | restart: always
16 | extends: app
17 | environment:
18 | - PORT=4000
19 | - NODE_OPTIONS=--max_old_space_size=2048
20 | command: node index.js
21 | ports:
22 | - 4000:4000
23 |
24 | worker:
25 | restart: always
26 | extends: app
27 | environment:
28 | - DOT_ENV_FILENAME=.env
29 | command: node catalog-worker.js
30 |
--------------------------------------------------------------------------------
/bin/transcoder.js:
--------------------------------------------------------------------------------
var monq = require('monq');

var config = require('../config');
var transcoder = require('../services/transcoder');

// Worker process: consumes 'transcoder' jobs from the monq queue and hands
// each one to the transcoding service.
var client = monq(config.dbUri);
var worker = client.worker(['transcoder']);

worker.register({
  transcode: function (params, callback) {
    return transcoder.transcode(
      params.sourceUrl,
      params.output,
      params.metaUrl,
      callback
    );
  }
});

// Log queue lifecycle events for observability.
['dequeued', 'complete'].forEach(function (event) {
  worker.on(event, function (data) { console.log(event + ':', data); });
});
['failed', 'error'].forEach(function (event) {
  worker.on(event, function (data) { console.error(event + ':', data); });
});

worker.start();
26 |
--------------------------------------------------------------------------------
/test/specs/test_removeDuplicateVertices.js:
--------------------------------------------------------------------------------
const chai = require('chai');
const getGeom = require('@turf/invariant').getGeom;
const removeDuplicateVertices = require('../../services/removeDuplicateVertices');
const expect = chai.expect;

describe('removeDuplicateVertices', () => {
  it('Removes a duplicate vertice', () => {
    const geom = getGeom(require('../fixtures/geojson.json'));
    expect(geom.coordinates[1][0].length).to.equal(166);
    // BUG FIX: the original passed `geojson`, an undefined identifier,
    // which threw a ReferenceError. Pass the geometry under test,
    // mirroring the spec below.
    removeDuplicateVertices(geom);
    expect(geom.coordinates[1][0].length).to.equal(165);
    // A second pass must be a no-op (idempotence).
    removeDuplicateVertices(geom);
    expect(geom.coordinates[1][0].length).to.equal(165);
  });

  it('doesn\'t choke on real world input', () => {
    const geom = getGeom(require('../fixtures/5aebf2ee8153990013b938ef.json'));
    expect(geom.coordinates[8][18].length).to.equal(11);
    removeDuplicateVertices(geom, [2, 6]);
    expect(geom.coordinates[8][18].length).to.equal(10);
  });
});
23 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
# Base image bundles the marblecutter imagery tooling this API shells out to.
FROM public.ecr.aws/r4e3a2l5/marblecutter-tools:latest

# Overridable at build time (docker-compose files pass NODE_ENV explicitly).
ARG NODE_ENV=production

# Install Node.js 16 from NodeSource, then clean apt caches to keep the
# layer small.
RUN apt-get update \
  && apt-get upgrade -y \
  && apt-get install -y --no-install-recommends \
    apt-transport-https \
    ca-certificates \
    curl \
    git \
    software-properties-common \
  && curl -sf https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add - \
  && add-apt-repository -s "deb https://deb.nodesource.com/node_16.x $(lsb_release -c -s) main" \
  && apt-get update \
  && apt-get install --no-install-recommends -y nodejs \
  && apt-get clean \
  && rm -rf /var/lib/apt/lists/*

RUN npm install --global yarn

WORKDIR /app

# Copy only the dependency manifests first so this layer is cached unless
# dependencies change.
COPY yarn.lock /app
COPY package.json /app

RUN yarn install \
  && rm -rf /root/.npm

# Now copy the application source.
COPY . /app

# Make locally-installed node binaries (nf, http-server, ...) available.
ENV PATH /app/node_modules/.bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
EXPOSE 4000

# foreman starts the processes declared in the Procfile (web/worker/transcoder).
CMD ["nf", "start"]
36 |
--------------------------------------------------------------------------------
/monq/db.js:
--------------------------------------------------------------------------------
1 | exports.index = function (collection) {
2 | collection.indexes(function (err, indexes) {
3 | if (err) {
4 | if (err.code === 26) {
5 | // MongoError: no collection
6 | return;
7 | }
8 | return console.log(err);
9 | }
10 |
11 | dropIndex('status_1_queue_1_enqueued_1');
12 | dropIndex('status_1_queue_1_enqueued_1_delay_1');
13 |
14 | function dropIndex (name) {
15 | if (indexes.some(function (index) { return index.name == name; })) {
16 | collection.dropIndex(name, function (err) {
17 | if (err) { console.error(err); }
18 | });
19 | }
20 | }
21 | });
22 |
23 | // FIXED ensureIndex was deprecated in favour of createIndex
24 | // Ensures there's a reasonable index for the polling dequeue
25 | // Status is first because querying by status = queued should be very selective
26 | collection.createIndex({ status: 1, queue: 1, priority: -1, _id: 1, delay: 1 }, function (err) {
27 | if (err) console.error(err);
28 | });
29 | };
30 |
--------------------------------------------------------------------------------
/services/db.js:
--------------------------------------------------------------------------------
'use strict';

var mongoose = require('mongoose');
var config = require('../config');

// Thin wrapper around the shared mongoose connection.
// Note: the connection attempt begins in the constructor; start() only
// attaches logging handlers.
var Connection = function () {
  // avoid DeprecationWarning for mongoose v6 --> v7
  mongoose.set('strictQuery', true);
  mongoose.Promise = global.Promise;
  mongoose.connect(config.dbUri);
  this.db = mongoose.connection;
};

// Attach error/open handlers; invokes cb (when given) once connected.
Connection.prototype.start = function (cb) {
  this.db.on('error', console.error.bind(console, 'connection error:'));
  this.db.once('open', function () {
    console.info('Successfully connected to ' + config.dbUri);
    if (cb) cb();
  });
};

// Drop the whole database; errors are logged rather than propagated.
Connection.prototype.deleteDb = function (cb) {
  var done = cb || function () {};
  this.db.db.dropDatabase(function (err) {
    if (err) console.log(err);
    done();
  });
};

Connection.prototype.close = function () {
  mongoose.connection.close();
};

module.exports = Connection;
40 |
--------------------------------------------------------------------------------
/.github/workflows/deploy.yaml:
--------------------------------------------------------------------------------
1 | name: deploy
2 |
3 | on:
4 | push:
5 | branches:
6 | - develop
7 |
8 | jobs:
9 | deploy:
10 | runs-on: ubuntu-latest
11 |
12 | steps:
13 | - name: Checkout code
14 | uses: actions/checkout@v3
15 |
16 | - name: Setup ssh
17 | uses: webfactory/ssh-agent@v0.5.3
18 | with:
19 | ssh-private-key: ${{ secrets.SSH_PRIVATE_KEY }}
20 |
21 | - name: Create SSH key
22 | run: |
23 | mkdir -p ~/.ssh/
24 | echo "$SSH_PRIVATE_KEY" > ../private.key
25 | chmod 600 ../private.key
26 | echo "$SSH_KNOWN_HOSTS" > ~/.ssh/known_hosts
27 | shell: bash
28 | env:
29 | SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
30 | SSH_KNOWN_HOSTS: ${{ secrets.SSH_KNOWN_HOSTS }}
31 |
32 | - name: Deploy to server
33 | run: |
34 | ssh -p 33322 -o "StrictHostKeyChecking no" gis@geocint.kontur.io << 'ENDSSH'
35 | cd ~/oam-api && git pull
36 | docker-compose up -d --build
37 | ENDSSH
38 |
--------------------------------------------------------------------------------
/docker-compose.staging.yml:
--------------------------------------------------------------------------------
1 | version: "2"
2 |
3 | services:
4 | app:
5 | environment:
6 | - NODE_ENV=production
7 | - DB_URI=mongodb://mongo/oam-api
8 | env_file: .env
9 | build:
10 | context: .
11 | args:
12 | - NODE_ENV=production
13 | command: "false"
14 |
15 | api:
16 | restart: always
17 | extends: app
18 | environment:
19 | - PORT=4000
20 | - NODE_OPTIONS=--max_old_space_size=2048
21 | command: node index.js
22 | ports:
23 | - 4000:4000
24 |
25 | worker:
26 | restart: always
27 | extends: app
28 | environment:
29 | - DOT_ENV_FILENAME=.env
30 | - NODE_TLS_REJECT_UNAUTHORIZED=0
31 | command: node catalog-worker.js
32 |
33 | mongo:
34 | image: mongo:3
35 | volumes:
36 | - mongodb:/data/db
37 |
38 | mongo-express:
39 | image: mongo-express:1.0.0-alpha.4
40 | ports:
41 | - 8081:8081
42 | links:
43 | - mongo
44 | environment:
45 | ME_CONFIG_MONGODB_URL: mongodb://mongo:27017/
46 |
47 | volumes:
48 | mongodb: ~
49 |
--------------------------------------------------------------------------------
/.env.local.sample:
--------------------------------------------------------------------------------
1 | # OAuth login for users
2 | # ---------------------
3 |
4 | FACEBOOK_APP_ID=fillme
5 | FACEBOOK_APP_SECRET=fillme
6 |
7 | GOOGLE_CLIENT_ID=fillme
8 | GOOGLE_CLIENT_SECRET=fillme
9 |
10 | GDRIVE_KEY=fillme
11 |
12 | # OAM-specific
13 | # ------------
14 |
15 | # The bucket into which images uploaded by users are placed
16 | OIN_BUCKET=fillme
17 | # Convenience for not having to create new buckets. Eg; Travis CI tests require this.
18 | # Remove if not needed, do not leave empty.
19 | # OIN_BUCKET_PREFIX=development
20 | # A bucket just for somewhere to place imagery uploaded from the browser, before it
21 | # gets fully processed.
22 | UPLOAD_BUCKET=fillme
23 | # Used to identify imagery after it has been uploaded.
24 | S3_PUBLIC_DOMAIN=s3.eu-central-1.amazonaws.com
25 |
26 | # AWS credentials
27 | AWS_ACCESS_KEY_ID=fillme
28 | AWS_SECRET_ACCESS_KEY=fillme
29 | AWS_REGION=eu-central-1 # the same region as S3_PUBLIC_DOMAIN
30 |
31 | # For sending emails
32 | SENDGRID_API_KEY=fillme
33 | SENDGRID_FROM=fillme
34 |
35 | # required to run locally
36 | NEW_RELIC_LICENSE_KEY=a1b2c3
--------------------------------------------------------------------------------
/models/metaValidations.js:
--------------------------------------------------------------------------------
var Joi = require('joi');

// Joi schema for a single scene's metadata.
const getMetaValidation = () => {
  const contactSchema = Joi.object().keys({
    name: Joi.string().min(1).max(30).required(),
    email: Joi.string().email()
  }).allow(null);

  const urlSchema = Joi.string().uri({scheme: ['http', 'https', 'gdrive']});

  return Joi.object().keys({
    contact: contactSchema,
    title: Joi.string().min(1).required(),
    provider: Joi.string().min(1).required(),
    platform: Joi.any().allow('satellite', 'aircraft', 'uav', 'balloon', 'kite').required(),
    sensor: Joi.string(),
    acquisition_start: Joi.date().required(),
    acquisition_end: Joi.date().required(),
    tms: Joi.string().allow(null),
    license: Joi.string().required(),
    tags: Joi.string().allow(''),
    urls: Joi.array().items(urlSchema).min(1).required()
  });
};

const metaValidations = {
  // Top-level upload payload: a non-empty array of scenes.
  getSceneValidations: () => {
    return Joi.object().keys({
      scenes: Joi.array().items(getMetaValidation()).min(1).required()
    });
  }
};

module.exports = metaValidations;
32 |
--------------------------------------------------------------------------------
/plugins/response-meta.js:
--------------------------------------------------------------------------------
'use strict';

var _ = require('lodash');

// Hapi plugin that injects a metadata object into every response payload.
// When the payload already carries the meta key, the configured content is
// merged into it; otherwise the original payload is moved under the results
// key next to a fresh meta object. Routes tagged 'disablePlugins' opt out.
var responseMeta = {
  register: function (server, options, next) {
    var metaKey = options.key || 'meta';
    var metaContent = options.content || {credit: 'response-meta'};
    var resultsKey = options.results || 'results';

    server.ext('onPreResponse', function (request, reply) {
      var tags = request.route.settings.tags;
      if (tags && tags.includes('disablePlugins')) {
        // skip processing by this plugin
        return reply.continue();
      }

      var source = request.response.source;
      if (_.has(source, metaKey)) {
        request.response.source[metaKey] = _.merge(source[metaKey], metaContent);
      } else {
        var wrapped = {};
        wrapped[metaKey] = metaContent;
        wrapped[resultsKey] = source;
        request.response.source = wrapped;
      }

      return reply.continue();
    });

    next();
  }
};

responseMeta.register.attributes = {
  name: 'response-meta',
  version: '0.1.0'
};

module.exports = responseMeta;
39 |
--------------------------------------------------------------------------------
/test/fixtures/upload-status.json:
--------------------------------------------------------------------------------
1 | {
2 | "_id": "55e0c86b24c379c000544d24",
3 | "uploader": {
4 | "name": "Lady Stardust",
5 | "email": "lady@stardust.xyz"
6 | },
7 | "scenes": [
8 | {
9 | "contact": {
10 | "name": "Ziggy",
11 | "email": "ziggy@bowie.net"
12 | },
13 | "title": "Natural Earth Image",
14 | "provider": "Natural Earth",
15 | "sensor": "Some Algorithm",
16 | "platform": "satellite",
17 | "acquisition_start": "2015-04-01T00:00:00.000Z",
18 | "acquisition_end": "2015-04-30T00:00:00.000Z",
19 | "tms": null,
20 | "images": [
21 | {
22 | "_id": "55e0c86a24c379c000544d23",
23 | "url": "http://localhost:8080/uploader/fixture/NE1_50M_SR.tif",
24 | "status": "initial",
25 | "messages": [],
26 | "startedAt": "2015-08-28T20:45:31.062Z",
27 | "stoppedAt": "2015-08-28T20:45:31.247Z"
28 | }
29 | ]
30 | }
31 | ],
32 | "createdAt": "2015-08-28T20:45:30.820Z"
33 | }
34 |
--------------------------------------------------------------------------------
/test/fixtures/NE1_50M_SR.output.json:
--------------------------------------------------------------------------------
1 | {
2 | "uuid": "http://oam-uploader.s3.amazonaws.com/uploads/2015-08-18/55d3b052f885a1bb0221434b/scene/0/scene-0-image-0-NE1_50M_SR.tif",
3 | "title": "Natural Earth Image",
4 | "projection": "GEOGCS[\"WGS 84\",DATUM[\"WGS_1984\",SPHEROID[\"WGS 84\",6378137,298.257223563,AUTHORITY[\"EPSG\",\"7030\"]],AUTHORITY[\"EPSG\",\"6326\"]],PRIMEM[\"Greenwich\",0],UNIT[\"degree\",0.0174532925199433],AUTHORITY[\"EPSG\",\"4326\"]]",
5 | "bbox": [
6 | 128.99999999999997,
7 | 29.000000000000004,
8 | 146,
9 | 54
10 | ],
11 | "footprint": "POLYGON((128.99999999999997 54,146 54,146 29.000000000000004,128.99999999999997 29.000000000000004,128.99999999999997 54))",
12 | "gsd": 0.03333333333333333,
13 | "file_size": 402897,
14 | "acquisition_start": "2015-04-01T00:00:00.000Z",
15 | "acquisition_end": "2015-04-30T00:00:00.000Z",
16 | "platform": "satellite",
17 | "provider": "Natural Earth",
18 | "contact": "Ziggy,ziggy@bowie.net",
19 | "properties": {
20 | "sensor": "Some Algorithm",
21 | "thumbnail": "http://oam-uploader.s3.amazonaws.com/uploads/2015-08-18/55d3b052f885a1bb0221434b/scene/0/scene-0-image-0-NE1_50M_SR.tif.thumb.png",
22 | "tms": null
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/routes/analytics.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var analytics = require('../controllers/analytics.js');
4 |
5 | /**
6 | * @api {get} /analytics Platform metadata
7 | * @apiGroup Analytics
8 | * @apiDescription Provides metadata about the catalog
9 | *
10 | * @apiSuccess {integer} count Number of unique images in catalog
11 | * @apiSuccess {integer} sensor_count Number of unique sensors in catalog
12 | * @apiSuccess {integer} provider_count Number of unique providers in catalog
13 | * @apiSuccess {date} date Date and time of data point
14 | *
15 | * @apiSuccessExample {json} Success Response:
16 | * HTTP/1.1 200 OK
17 | * [{
18 | * "date": "2015-07-17T18:49:22.452Z",
19 | * "count": 856,
20 | * "sensor_count": 22,
21 | * "provider_count": 43
22 | * },
23 | * {
24 | * "date": "2015-07-17T17:49:22.452Z",
25 | * "count": 856,
26 | * "sensor_count": 22,
27 | * "provider_count": 43
28 | * }]
29 | */
30 | module.exports = [
31 | {
32 | method: 'GET',
33 | path: '/analytics',
34 | handler: function (request, reply) {
35 | analytics.query(request.page, request.limit, function (err, records, count) {
36 | if (err) {
37 | console.log(err);
38 | return reply(err.message);
39 | }
40 |
41 | request.count = count;
42 | return reply(records);
43 | });
44 | }
45 | }
46 | ];
47 |
--------------------------------------------------------------------------------
/test/docker-compose.yml:
--------------------------------------------------------------------------------
1 | # Run from project root with:
2 | # `docker-compose -f test/docker-compose.yml up`
3 |
4 | version: '2'
5 | services:
6 | test-app:
7 | extends:
8 | service: app
9 | file: ../docker-compose.yml
10 |
11 | logging:
12 | options:
13 | max-size: "10k"
14 |
15 | environment:
16 | - INTEGRATION_TESTS=true
17 | - NODE_ENV=test
18 |
19 | # Variables that can be inherited from the host ENV. All
20 | # others will come from `.env`
21 | # Note that typically specifying these empty values would
22 | # override any values in .env, however, when developing
23 | # locally, the codebase is mounted from the host, including
24 | # the `.env` file, so the `dotenv` npm module takes responsibility
25 | # for loading these values.
26 | - OAM_DEBUG
27 | - AWS_ACCESS_KEY_ID
28 | - AWS_SECRET_ACCESS_KEY
29 | - FACEBOOK_APP_ID
30 | - FACEBOOK_APP_SECRET
31 |
32 | ports:
33 | - 4000:4000
34 |
35 | # Mounts the host code live into the container so you don't need
36 | # to rebuild the container everytime. Disable with APP_FROM=container
37 | volumes:
38 | - ../:/host-app
39 |
40 | # Defaults to using the live host code for the app.
41 | # Runs both foreman and a static file server for testing purposes.
42 | command: /bin/bash -c
43 | "[[ \"$APP_FROM\" != \"container\" ]] && cd /host-app;
44 | ./node_modules/.bin/http-server -p8080 ./test &
45 | ./node_modules/.bin/nf start"
46 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2016, Humanitarian Open Street Map Team
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | * Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | * Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | * Neither the name of oam-design-system nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/services/removeDuplicateVertices.js:
--------------------------------------------------------------------------------
// Remove at most one duplicate of `point` from `coordinates`, mutating the
// array in place. An entry counts as a duplicate when it has the same x/y
// as `point`, is not the entry at `index` itself, and is neither the first
// nor the last entry (so a ring's closing vertex is preserved). When several
// candidates exist, the last one found is the one removed. Returns the
// (possibly shortened) coordinates array.
function removeDuplicate (point, coordinates, index) {
  let matchIndex = null;
  const lastIndex = coordinates.length - 1;
  coordinates.forEach((candidate, candidateIndex) => {
    const interior = candidateIndex !== 0 && candidateIndex < lastIndex;
    const samePosition = candidate[0] === point[0] && candidate[1] === point[1];
    if (samePosition && interior && candidateIndex !== index) {
      matchIndex = candidateIndex;
    }
  });
  if (matchIndex !== null) {
    coordinates.splice(matchIndex, 1);
  }
  return coordinates;
}
17 |
// Strip duplicate interior vertices from Polygon / MultiPolygon coordinates,
// mutating the argument in place. Accepts either a GeoJSON Feature or a bare
// Geometry object (the specs pass turf getGeom() results, which are bare
// geometries). Other geometry types pass through untouched. Returns the
// original argument.
function processFeature (feature) {
  const geometry = feature.geometry || feature;
  if (geometry.type === 'Polygon') {
    // BUG FIX: the original iterated the *points* of the first ring as if
    // each point were a ring, so it never removed a duplicate and ended up
    // overwriting coordinates[0] with the ring's final point. Iterate the
    // rings themselves, mirroring the MultiPolygon branch below.
    geometry.coordinates.forEach((ring, ringIndex) => {
      ring.forEach((point, pointIndex) => {
        geometry.coordinates[ringIndex] =
          removeDuplicate(point, ring, pointIndex);
      });
    });
  } else if (geometry.type === 'MultiPolygon') {
    geometry.coordinates.forEach((polygon, polygonIndex) => {
      polygon.forEach((ring, ringIndex) => {
        ring.forEach((point, pointIndex) => {
          geometry.coordinates[polygonIndex][ringIndex] =
            removeDuplicate(point, ring, pointIndex);
        });
      });
    });
  }
  return feature;
}
38 |
39 | // Mutates geoJSON argument
40 | module.exports = function (geojson) {
41 | if (geojson.features) {
42 | geojson.features.forEach((feature, featureIndex) => {
43 | geojson.features[featureIndex] = processFeature(feature);
44 | });
45 | } else {
46 | processFeature(geojson);
47 | }
48 | };
49 |
--------------------------------------------------------------------------------
/monq/connection.js:
--------------------------------------------------------------------------------
1 | var mongoose = require('mongoose');
2 | const job = require('./job');
3 | const Queue = require('./queue');
4 | const Worker = require('./worker');
5 |
module.exports = Connection;

/**
 * Opens a mongoose connection and acts as a factory for {@link Queue}
 * and {@link Worker} instances bound to it.
 *
 * @constructor
 * @param {string} uri - MongoDB connection string
 * @param {Object} [options] - connection options (defaults to {})
 */
function Connection (uri, options) {
  this.uri = uri;
  this.options = options || {};

  // avoid DeprecationWarning for mongoose v6 --> v7
  mongoose.set('strictQuery', true);
  mongoose.Promise = global.Promise;
  // The connection is initiated immediately on construction and exposed
  // on `this.db` for consumers (e.g. close()).
  mongoose.connect(uri, this.options);
  this.db = mongoose.connection;
}
23 |
/**
 * Returns a new {@link Worker} listening on the given queues.
 * @param {string[]|string} queues - list of queue names, a single queue name, or '*' for a universal worker
 * @param {Object} [options] - an object with worker options
 *   (options.collection defaults to 'jobs')
 */
Connection.prototype.worker = function (queues, options) {
  var self = this;

  options || (options = {});

  var collection = options.collection || 'jobs';

  if (queues === '*') {
    // A universal worker watches a single wildcard queue.
    options.universal = true;

    queues = [self.queue('*', {
      universal: true,
      collection: collection
    })];
  } else {
    if (!Array.isArray(queues)) {
      queues = [queues];
    }

    // Resolve queue names to Queue instances; already-constructed queues
    // pass through unchanged. (Previously this re-declared `queues` with
    // `var`, redundantly shadowing the parameter binding.)
    queues = queues.map(function (queue) {
      if (typeof queue === 'string') {
        queue = self.queue(queue, {
          collection: collection
        });
      }

      return queue;
    });
  }

  return new Worker(queues, options);
};
61 |
// Factory for a named {@link Queue} bound to this connection.
Connection.prototype.queue = function (name, options) {
  var queue = new Queue(this, name, options);
  return queue;
};
65 |
/**
 * Closes the underlying MongoDB connection.
 *
 * BUGFIX: the connection is stored on `this.db` by the constructor;
 * `this.client` was never assigned anywhere in this module, so the
 * previous implementation always threw a TypeError.
 */
Connection.prototype.close = function () {
  this.db.close();
};
69 |
--------------------------------------------------------------------------------
/test/fixtures/metadata.json:
--------------------------------------------------------------------------------
1 | {
2 | "uuid": "http://oin-hotosm.s3.amazonaws.com/593164d3e407d7001138610d/0/6f407df9-a342-4fe8-a802-6cd81ca8974a.tif",
3 | "title": "Finca La escalera",
4 | "platform": "uav",
5 | "provider": "Aeracoop",
6 | "contact": "Aeracoop,aeracoop@gmail.com",
7 | "properties": {
8 | "thumbnail": "http://oin-hotosm.s3.amazonaws.com/593164d3e407d7001138610d/0/6f407df9-a342-4fe8-a802-6cd81ca8974a_thumb.png",
9 | "tms": "https://tiles.openaerialmap.org/593164d3e407d7001138610d/0/6f407df9-a342-4fe8-a802-6cd81ca8974a/{z}/{x}/{y}",
10 | "wmts": "https://tiles.openaerialmap.org/593164d3e407d7001138610d/0/6f407df9-a342-4fe8-a802-6cd81ca8974a/wmts",
11 | "sensor": "DJI Mavic",
12 | "license": "CC-BY 4.0",
13 | "tags": "La Marina, mr10"
14 | },
15 | "acquisition_start": "2017-05-31T22:00:00.000Z",
16 | "acquisition_end": "2017-06-01T17:01:26.728Z",
17 | "uploaded_at": "2017-06-02T13:15:01.400Z",
18 | "file_size": 28943182,
19 | "projection": "PROJCS[\"WGS 84 / UTM zone 30N\",GEOGCS[\"WGS 84\",DATUM[\"WGS_1984\",SPHEROID[\"WGS 84\",6378137,298.257223563,AUTHORITY[\"EPSG\",\"7030\"]],AUTHORITY[\"EPSG\",\"6326\"]],PRIMEM[\"Greenwich\",0,AUTHORITY[\"EPSG\",\"8901\"]],UNIT[\"degree\",0.0174532925199433,AUTHORITY[\"EPSG\",\"9122\"]],AUTHORITY[\"EPSG\",\"4326\"]],PROJECTION[\"Transverse_Mercator\"],PARAMETER[\"latitude_of_origin\",0],PARAMETER[\"central_meridian\",-3],PARAMETER[\"scale_factor\",0.9996],PARAMETER[\"false_easting\",500000],PARAMETER[\"false_northing\",0],UNIT[\"metre\",1,AUTHORITY[\"EPSG\",\"9001\"]],AXIS[\"Easting\",EAST],AXIS[\"Northing\",NORTH],AUTHORITY[\"EPSG\",\"32630\"]]",
20 | "gsd": 0.013380000000015912,
21 | "bbox": [
22 | -0.6393963452039436,
23 | 38.144621849739245,
24 | -0.6371749068107655,
25 | 38.145986701138774
26 | ],
27 | "footprint": "POLYGON ((-0.639396345203944 38.1446218497392,-0.637174906810765 38.1446218497392,-0.637174906810765 38.1459867011388,-0.639396345203944 38.1459867011388,-0.639396345203944 38.1446218497392))"
28 | }
29 |
--------------------------------------------------------------------------------
/controllers/analytics.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var Analytics = require('../models/analytics.js');
4 |
/**
 * Fetch one page of analytics records, newest first.
 * (The previous JSDoc here was copy-pasted from getLastUpdateTime.)
 *
 * @param {number} page - 1-based page number
 * @param {number} limit - maximum records per page
 * @param {dataCallback} cb - callback with format (error, records, totalCount)
 */
module.exports.query = function (page, limit, cb) {
  // Execute the search and return the result via callback

  var skip = limit * (page - 1);

  Analytics.count(function (err, count) {
    if (err) {
      return cb(err, null, null);
    }
    // Sort by date descending so the most recent records come first.
    Analytics.find(null, null, { skip: skip, limit: limit }).sort({ date: -1 }).exec(function (err, records) {
      if (err) {
        return cb(err, null, null);
      }
      cb(err, records, count);
    });
  });
};
27 |
/**
 * Add an analytics record to the database.
 *
 * @param {Object} counts - An object containing
 *   .image_count - number of images in system
 *   .sensor_count - number of unique sensors in system
 *   .provider_count - number of unique providers in system
 * @param {function} cb - callback with format (error)
 */
module.exports.addAnalyticsRecord = function (counts, cb) {
  var record = new Analytics({
    // Timestamp the snapshot at insert time
    date: Date.now(),
    count: counts.image_count,
    sensor_count: counts.sensor_count,
    provider_count: counts.provider_count
  });
  record.save(function (err, record) {
    if (err) {
      cb(err);
      return console.log(err);
    }

    cb(null);
  });
};
52 |
/**
 * Check the analytics collection to find the last time the system was
 * updated, i.e. the date of the newest record.
 *
 * @param {dataCallback} cb - A callback with format (error, date)
 */
module.exports.getLastUpdateTime = function (cb) {
  Analytics.findOne().sort({ date: -1 }).exec(function (err, record) {
    if (err) {
      return cb(err, null);
    }
    // A missing record should never happen in practice; fall back to a
    // date far enough in the past to behave as "never updated".
    var latest = record === null ? { date: new Date('01-01-1970') } : record;
    cb(null, latest.date);
  });
};
72 |
--------------------------------------------------------------------------------
/plugins/paginate.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var _ = require('lodash');
4 |
var pagination = {
  // Hapi plugin that peels `page`/`limit` off the query string on the way
  // in and folds pagination metadata into the response on the way out.
  register: function (server, options, next) {
    var defaultPage = 1;
    var defaultLimit = options.limit || 100;
    var name = options.name || 'meta';
    var results = options.results || 'results';

    // Stash the parsed values on the request object itself.
    // BUGFIX: these were previously kept in plugin-level closure
    // variables shared across concurrent requests, so one request's
    // pagination could leak into another request's response metadata.
    server.ext('onPreHandler', function (request, reply) {
      if (_.has(request.query, 'page')) {
        request.page = _.parseInt(request.query.page);
        request.query = _.omit(request.query, 'page');
      } else {
        request.page = defaultPage;
      }

      if (_.has(request.query, 'limit')) {
        request.limit = _.parseInt(request.query.limit);
        request.query = _.omit(request.query, 'limit');
      } else {
        request.limit = defaultLimit;
      }

      return reply.continue();
    });

    server.ext('onPreResponse', function (request, reply) {
      const { tags } = request.route.settings;
      if (tags && tags.includes('disablePlugins')) {
        // skip processing by this plugin
        return reply.continue();
      }

      // Read the per-request values set in onPreHandler, falling back to
      // the defaults for responses that never passed through it.
      var meta = {
        page: request.page === undefined ? defaultPage : request.page,
        limit: request.limit === undefined ? defaultLimit : request.limit
      };

      if (_.has(request, 'count')) {
        meta['found'] = request.count;
      }

      if (_.has(request.response.source, name)) {
        request.response.source[name] = _.merge(request.response.source[name], meta);
      } else {
        // Because we want to add meta to the top of the source, we have to
        // go through all this hassle of rebuilding the response body
        var temp = request.response.source;
        request.response.source = {};
        request.response.source[name] = meta;
        request.response.source[results] = temp;
      }

      return reply.continue();
    });

    next();
  }
};

pagination.register.attributes = {
  name: 'hapi-paginate',
  version: '0.1.0'
};

module.exports = pagination;
74 |
--------------------------------------------------------------------------------
/test/specs/test_auth_getToken.js:
--------------------------------------------------------------------------------
1 | const Hapi = require('hapi');
2 | const proxyquire = require('proxyquire').noCallThru();
3 | const chai = require('chai');
4 | const sinon = require('sinon');
5 | const sinonChai = require('sinon-chai');
6 | const authentication = require('../../plugins/authentication.js');
7 |
8 | const expect = chai.expect;
9 | chai.should();
10 | chai.use(sinonChai);
11 | const sandbox = sinon.sandbox.create();
12 |
// Builds sinon stubs for the user model and the token factory, plus the
// canned values those stubs resolve with, keyed for proxyquire injection.
const buildStubs = () => {
  const user = {
    _id: 'id',
    name: 'name',
    contact_email: 'email'
  };
  const token = 'token';
  const findOne = sandbox.stub().resolves(user);
  const createToken = sandbox.stub().resolves(token);
  return {
    stubs: {
      '../models/user': { findOne },
      '../models/createToken': createToken
    },
    findOne,
    createToken,
    user,
    token
  };
};
28 |
// Boots a Hapi test server with the auth routes wired to the given stubs.
// Resolves with the configured server instance.
const getServer = (stubs) => {
  const routes = proxyquire('../../routes/auth.js', stubs);
  const server = new Hapi.Server();
  server.connection({ port: 4000 });
  return server
    .register(authentication)
    .then(() => {
      server.route(routes);
      return server;
    });
};
38 |
describe('auth getToken', () => {
  // Reset all stubs between tests so call counts don't bleed over.
  afterEach(() => {
    sandbox.restore();
  });

  it('Should create and return a new JWT token', () => {
    const fixtures = buildStubs();
    const credentials = { session_id: 'id' };
    const options = {
      method: 'GET',
      url: '/getToken',
      credentials
    };
    return getServer(fixtures.stubs)
      .then((server) => server.inject(options))
      .then((res) => {
        // The user lookup is keyed by the session credentials.
        expect(fixtures.findOne).to.have.been.calledWith(credentials);
        const args = fixtures.createToken.firstCall.args;
        expect(args[0]).to.equal(fixtures.user._id);
        expect(args[1]).to.equal(fixtures.user.name);
        expect(args[2]).to.equal(fixtures.user.contact_email);
        expect(args[3]).to.equal('user');
        expect(args[4]).to.equal('365d');
        expect(res.result.token).to.equal(fixtures.token);
      });
  });
});
68 |
--------------------------------------------------------------------------------
/test/specs/helper.js:
--------------------------------------------------------------------------------
1 | var Bell = require('bell');
2 | var Boom = require('boom');
3 | var FB = require('fb');
4 | var aws = require('aws-sdk');
5 | var sinon = require('sinon');
6 |
7 | var config = require('../../config');
8 | var Server = require('../../services/server');
9 | var Conn = require('../../services/db');
10 |
before(function (done) {
  // Mock the OAuth step of logging in by forcing the final OAuth
  // response through values passed in the `/login` URL.
  //
  // This setup isn't ideal because in the future it is possible
  // that extra code will be added that indiscriminately parses and
  // acts upon all URL params. This would only affect test code and
  // not any production code, so it's not such a big issue. The problem
  // is though that Bell.simulate() must be called before the server
  // starts. So a better way of setting this up would be to set all this
  // up in a beforeEach and pass in a custom callback to Bell.simulate().
  Bell.simulate(function (request, next) {
    var error, response;
    if (request.query.test_oauth_response) {
      response = JSON.parse(request.query.test_oauth_response);
    } else {
      response = null;
    }
    if (request.query.test_oauth_error) {
      error = Boom.badRequest(request.query.test_oauth_error);
    } else {
      error = null;
    }
    next(error, response);
  });

  // Stub out external services. BUGFIX: these used to be wrapped in a
  // nested `before(...)` registered from inside this already-running
  // hook, which mocha does not reliably execute -- run them inline
  // instead so tests never hit the real Facebook or S3 APIs.
  sinon.stub(FB, 'api').yields({
    picture: { data: { url: 'http://cdn.facebook.com/123/picture.png' } }
  });

  // s3 file uploading/downloading
  aws.S3.prototype.getObject = sinon.stub().yields(
    null, {Body: Buffer.from('{}', 'utf8')}
  );
  aws.S3.prototype.putObject = sinon.stub().yields(
    null, 'Stub: file uploaded'
  );
  aws.S3.prototype.listObjects = sinon.stub().yields(
    null, {Contents: [{Key: ''}]}
  );
  aws.S3.prototype.deleteObjects = sinon.stub().yields(
    null, 'Stub: file deleted'
  );

  // Start with a clean database and a running API server, then signal
  // mocha we're ready.
  var dbWrapper = new Conn();
  dbWrapper.start(function () {
    dbWrapper.deleteDb(function () {
      var server = new Server(config.port);
      server.start(done);
    });
  });
});
65 |
--------------------------------------------------------------------------------
/test/integration/helper.js:
--------------------------------------------------------------------------------
1 | // These tests require a test docker container to be running:
2 | //
3 | // `docker-compose -f test/docker-compose.yml up -d`
4 | //
5 | // See `.travis.yml` for the (hopefully) latest working version.
6 | //
7 | // Note that these are integration tests running against a real
8 | // s3 bucket. Try to use a unique OIN_BUCKET_PREFIX in the
// running API as often as you can to ensure you're working with
10 | // an empty bucket/folder. For CI tests Travis prepends the build
11 | // number to OIN_BUCKET_PREFIX.
12 |
// NOTE(review): presumably read by config/app code to relax behaviour for
// integration runs -- set before anything else is required; confirm.
process.env.INTEGRATION_TESTS = 'true';

var uuidV4 = require('uuid/v4');
var request = require('request');
var expect = require('chai').expect;

var config = require('../../config');
var commonHelper = require('../helper');
var Conn = require('../../services/db');

// Shared database handle for the hooks below.
var dbWrapper = new Conn();

// Open the database connection once for the whole integration run.
before(function (done) {
  dbWrapper.start(function () {
    done();
  });
});

// Give every test a clean database.
beforeEach(function (done) {
  dbWrapper.deleteDb(function () {
    done();
  });
});
36 |
37 | module.exports = {
38 | login: function (callback) {
39 | commonHelper.createUser({}, function (user) {
40 | commonHelper.generateSecureCookieForUser(user, function (cookie) {
41 | callback(cookie);
42 | });
43 | });
44 | },
45 |
46 | uploadImage: function (cookie, callback) {
47 | // So we can conclusively find the image later
48 | var title = 'test-' + uuidV4();
49 |
50 | var upload = require('../fixtures/NE1_50M_SR.input.json');
51 | upload.scenes[0].title = title;
52 |
53 | var postOptions = {
54 | url: config.apiEndpoint + '/uploads',
55 | json: upload,
56 | headers: {
57 | 'Cookie': cookie
58 | }
59 | };
60 |
61 | request.post(postOptions, function (_err, httpResponse, body) {
62 | expect(httpResponse.statusCode).to.eq(200);
63 | var uploadId = body.results.upload;
64 | callback(uploadId, title);
65 | });
66 | },
67 |
68 | loginAndUpload: function (callback) {
69 | this.login((cookie) => {
70 | this.uploadImage(cookie, (uploadId, title) => {
71 | commonHelper.waitForProcessing(uploadId, title, function (image) {
72 | callback(title, image, cookie);
73 | });
74 | });
75 | });
76 | }
77 | };
78 |
--------------------------------------------------------------------------------
/models/meta.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var mongoose = require('mongoose');
4 |
5 | var S3Sync = require('../services/s3_sync');
6 |
// Mongoose schema for a single image's metadata record.
var metaSchema = new mongoose.Schema({
  // The URI of the image
  uuid: {type: String, unique: true, required: true, dropDups: true},
  // URI of the meta of the image (the _meta.json file on S3)
  meta_uri: {type: String, unique: true, required: false},
  // Owner of the image
  user: {type: mongoose.Schema.Types.ObjectId, ref: 'User'},
  thumb_uri: String,
  title: String,
  projection: String,
  // Bounding box; assumed [west, south, east, north] -- TODO confirm
  bbox: [Number],
  footprint: String,
  gsd: Number,
  file_size: Number,
  license: String,
  acquisition_start: Date,
  acquisition_end: Date,
  platform: String,
  tags: String,
  provider: String,
  contact: String,
  // Geometry indexed for geospatial queries
  geojson: {type: mongoose.Schema.Types.Mixed, index: '2dsphere'},
  properties: mongoose.Schema.Types.Mixed,
  custom_tms: mongoose.Schema.Types.Mixed,
  uploaded_at: Date
});
32 |
metaSchema.methods = {

  // Serialize this record (minus Mongo-internal and tracking attributes)
  // and push it to the correlating _meta.json file on S3.
  oamSync: function (callback) {
    var s3Sync = new S3Sync(this.meta_uri);
    var meta = Object.assign({}, this._doc);

    // remove MongoDB attributes
    delete meta.__v;
    delete meta._id;

    // remove internal tracking
    delete meta.meta_uri;

    s3Sync.uploadMeta(JSON.stringify(meta)).then(callback).catch(callback);
  },

  // Update a metadata object only after the updates have been synced to
  // the correlating _meta.json file on S3.
  oamUpdate: function (newParams, callback) {
    var s3Sync = new S3Sync(this.meta_uri);
    s3Sync.updateRemoteMeta(newParams, () => {
      let updatedMeta = Object.assign(this, newParams);
      updatedMeta.save(function (err) {
        // BUGFIX: Error() ignores a second argument, so the previous
        // `new Error('Error saving meta: ', err)` dropped the detail.
        if (err) throw new Error('Error saving meta: ' + err);
        callback();
      });
    });
  },

  // Delete a metadata object only after its correlating _meta.json file
  // has been deleted on S3.
  oamDelete: function (callback) {
    var s3Sync = new S3Sync(this.meta_uri);
    s3Sync.deleteRemoteMeta(() => {
      this.remove(function (err) {
        if (err) throw new Error('Error deleting meta: ' + err);
        callback();
      });
    });
  }
};

module.exports = mongoose.model('Meta', metaSchema);
76 |
--------------------------------------------------------------------------------
/test/specs/test_upload.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var ObjectID = require('mongodb').ObjectID;
4 | var connection = require('mongoose').connection;
5 | var expect = require('chai').expect;
6 | var request = require('request');
7 | var sinon = require('sinon');
8 |
9 | require('./helper');
10 | var commonHelper = require('../helper');
11 |
12 | var config = require('../../config');
13 | var Meta = require('../../models/meta');
14 | var transcoder = require('../../services/transcoder');
15 |
describe('Uploading imagery', function () {
  var loggedInUser;

  before(function () {
    // Replace the real transcoder queue with a stub that immediately
    // marks the image record as finished and upserts fixture metadata.
    sinon.stub(transcoder, 'queueImage', function (sourceUrl, targetPrefix, metaUrl) {
      // The image id is the last path segment of the target prefix.
      var imageId = targetPrefix.split('/').pop();
      var meta = require('../fixtures/NE1_50M_SR.output.json');

      meta.user = loggedInUser;

      return Promise.all([
        connection.db.collection('images').updateOne({
          _id: new ObjectID(imageId)
        }, {
          $set: {
            status: 'finished',
            metadata: meta
          },
          $currentDate: {
            stoppedAt: true
          }
        }),
        Meta.findOneAndUpdate({uuid: meta.uuid}, meta, {upsert: true})
      ]);
    });
  });

  after(function () {
    transcoder.queueImage.restore();
  });

  // Fresh database and a logged-in user for every test.
  beforeEach(function (done) {
    connection.db.dropDatabase(function () {
      commonHelper.logIn(function (user) {
        loggedInUser = user;
        done();
      });
    });
  });

  it('should upload an image and associate it to the user', function (done) {
    var postOptions = {
      url: config.apiEndpoint + '/uploads',
      json: require('../fixtures/NE1_50M_SR.input.json'),
      jar: commonHelper.cookieJar
    };

    request.post(postOptions, function (_err, httpResponse, body) {
      expect(httpResponse.statusCode).to.eq(200);
      var uploadId = body.results.upload;
      // Wait for the (stubbed) conversion to finish, then verify exactly
      // one Meta record exists and is linked to the logged-in user.
      commonHelper.waitForConversion(uploadId, function () {
        Meta.find({}, function (_err, result) {
          var meta = result[0];
          expect(result.length).to.eq(1);
          expect(meta.user.toString()).to.eq(loggedInUser._id.toString());
          done();
        });
      });
    });
  });
});
77 |
--------------------------------------------------------------------------------
/.env.sample:
--------------------------------------------------------------------------------
1 | # Environment variables.
2 | #
3 | # * This is a canonical reference. *All* ENV vars needed in *all*
4 | # environments should be placed here.
5 | # * A `.env` is a requirement of *all* environments, including tests.
6 | # * Copy `.env.sample` to `.env` for it to be automatically sourced.
7 | # * See /config.js for individual explanations of each variable.
# * Don't wrap values in "" or '' quotes
9 |
10 | # API
11 | # ---
12 |
13 | # Port that the API runs on
14 | PORT=4000
15 | # Used for sending links in emails
16 | BROWSER_URL=http://localhost:4000
17 | COOKIE_PASSWORD=12345678901234567890123456789012
18 | # Used for cookies
19 | HOST_TLD=localhost
20 | DB_URI=mongodb://localhost:27017/oam-api
21 |
22 |
23 | # OAuth login for users
24 | # ---------------------
25 |
26 | FACEBOOK_APP_ID=123
27 | FACEBOOK_APP_SECRET=123abc
28 | GOOGLE_CLIENT_ID=123
29 | GOOGLE_CLIENT_SECRET=asdasd@clientd
30 |
31 |
32 | # OAM-specific
33 | # ------------
34 |
35 | # The list of buckets to index
36 | OIN_REGISTER_URL=http://localhost:8080/fixtures/oin-buckets.json
37 | # The bucket into which images uploaded by users are placed
38 | OIN_BUCKET=oin-hotosm-staging
39 | # Convenience for not having to create new buckets. Eg; Travis CI tests require this.
40 | # Remove if not needed, do not leave empty.
41 | OIN_BUCKET_PREFIX=development
42 | # A bucket just for somewhere to place imagery uploaded from the browser, before it
43 | # gets fully processed.
44 | UPLOAD_BUCKET=oam-uploader-staging-temp
45 | # The location of the dynamic tiler service
46 | TILER_BASE_URL=http://tiles.staging.openaerialmap.org
47 | # How often to run background processes. Run quickly like this only for non-production.
48 | # Will not be needed once Monq is integrated.
49 | CRON_TIME=*/15 * * * * *
50 | # Used to identify imagery after it has been uploaded.
51 | S3_PUBLIC_DOMAIN=s3.amazonaws.com
52 |
53 | # AWS credentials
54 | AWS_ACCESS_KEY_ID=123
55 | AWS_SECRET_ACCESS_KEY=abc
56 | AWS_REGION=us-east-1
57 |
58 | # Application monitoring service
59 | NEW_RELIC_LICENSE_KEY=123abc
60 |
61 | # For sending emails
62 | SENDGRID_API_KEY=123
63 | SENDGRID_FROM=info@hotosm.org
64 |
65 | # For being able to grab imagery from a user's GDrive account
66 | GDRIVE_KEY=abc123
67 |
68 |
69 | # For non-production environments
70 | # -------------------------------
71 |
72 | # Used for testing only
73 | API_ENDPOINT=http://localhost:4000
74 | # More verbosity in the logs. Can also be specified on the command line
75 | OAM_DEBUG=false
76 |
77 |
--------------------------------------------------------------------------------
/plugins/authentication.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 | const Bell = require('bell');
3 | var config = require('../config');
4 | var User = require('../models/user');
5 |
// Hapi plugin wiring up all authentication strategies: Facebook/Google
// OAuth (bell), cookie sessions, and JWT.
var Authentication = {
  register: function (server, options, next) {
    if (config.isCookieOverHTTPS) {
      // Force the reported protocol to https so secure cookies are set.
      // NOTE(review): presumably TLS is terminated upstream -- confirm.
      server.ext('onPreAuth', function (request, reply) {
        request.connection.info.protocol = 'https';
        return reply.continue();
      });
    }

    server.register([
      { register: require('hapi-auth-cookie') },
      // Various OAuth login strategies
      { register: require('bell') },
      { register: require('hapi-auth-jwt2') }
    ], function (err) {
      if (err) throw err;

      // Request the extra profile fields from Facebook.
      const facebookCustom = Bell.providers.facebook({
        fields: 'id,name,email,first_name,last_name,picture.type(small)'
      });
      // Facebook OAuth login flow
      server.auth.strategy('facebook', 'bell', {
        provider: facebookCustom,
        password: config.cookiePassword,
        clientId: config.facebookAppId,
        clientSecret: config.facebookAppSecret,
        isSecure: config.isCookieOverHTTPS
      });

      // Google OAuth login flow
      server.auth.strategy('google', 'bell', {
        provider: 'google',
        password: config.cookiePassword,
        clientId: config.googleClientId,
        clientSecret: config.googleClientSecret,
        isSecure: config.isCookieOverHTTPS
      });

      // Cookie-based sessions, validated against the User model.
      server.auth.strategy('session', 'cookie', {
        ttl: 24 * 60 * 60 * 7000, // 7 days
        keepAlive: true,
        password: config.cookiePassword,
        cookie: config.sessionCookieKey,
        // No domain scoping on localhost
        domain: config.hostTld === 'localhost' ? null : config.hostTld,
        clearInvalid: true,
        redirectTo: false,
        validateFunc: User.validateSession.bind(User),
        isHttpOnly: false, // so JS can see it
        isSecure: config.isCookieOverHTTPS
      });

      // JWT auth: any token with a valid HS256 signature is accepted.
      server.auth.strategy('jwt', 'jwt', {
        key: config.jwtSecret,
        validateFunc: (decoded, request, callback) => callback(null, true),
        verifyOptions: { algorithms: [ 'HS256' ] }
      });
      next();
    });
  }
};

Authentication.register.attributes = {
  name: 'authentication'
};

module.exports = Authentication;
72 |
--------------------------------------------------------------------------------
/test/specs/test_user.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var expect = require('chai').expect;
4 | var request = require('request');
5 |
6 | var connection = require('mongoose').connection;
7 | var config = require('../../config');
8 | var Meta = require('../../models/meta');
9 | var meta = require('../fixtures/meta_db_objects.json');
10 |
11 | require('./helper');
12 | var commonHelper = require('../helper');
13 |
describe('User', function () {
  let savedUser;

  // Reset the database and seed a user plus image metadata before each test.
  beforeEach(function (done) {
    // BUGFIX: dropDatabase() used to be fired without a callback, racing
    // the drop against the seeding below.
    connection.db.dropDatabase(function () {
      commonHelper.createUser({
        facebook_id: 123,
        session_id: null
      }, function (user) {
        savedUser = user;
        Meta.create(meta).then(function (results) {
          // Associate every image with the user and wait for all saves to
          // land (these used to be fire-and-forget) before starting.
          return Promise.all(results.map(function (result) {
            result.user = user;
            return result.save();
          }));
        }).then(function () {
          done();
        }, done);
      });
    });
  });

  it('should return an existing user', function (done) {
    var options = {
      url: config.apiEndpoint + '/user/' + savedUser.id,
      json: true
    };

    request.get(options, function (_err, httpResponse, body) {
      const user = body.results;
      const images = user.images;
      expect(httpResponse.statusCode).to.equal(200);
      expect(user.name).to.eq('Tester');
      // BUGFIX: `expect(images.length).to.eq;` asserted nothing -- check
      // the user actually has all the seeded images.
      expect(images.length).to.eq(meta.length);
      done();
    });
  });

  it('should update the logged in user', function (done) {
    var options = {
      url: config.apiEndpoint + '/user',
      jar: commonHelper.cookieJar,
      json: {
        name: 'Mr. Updated',
        website: 'http://example.com',
        bio: 'This is a test bio'
      }
    };

    commonHelper.logUserIn(savedUser, function (_httpResponse, _body) {
      request.put(options, function (_err, httpResponse, _body) {
        expect(httpResponse.statusCode).to.equal(204);
        var options = {
          url: config.apiEndpoint + '/user',
          jar: commonHelper.cookieJar,
          json: true
        };

        request.get(options, function (_err, httpResponse, body) {
          const user = body.results;
          expect(user.name).to.eq('Mr. Updated');
          expect(user.website).to.eq('http://example.com');
          expect(user.bio).to.eq('This is a test bio');
          done();
        });
      });
    });
  });
});
80 |
--------------------------------------------------------------------------------
/services/server.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var Hapi = require('hapi');
4 | var config = require('../config');
5 | var Qs = require('qs');
6 |
// Parse POST bodies with deep fields like 'field[0]' into nested
// structures via Qs. Raw Buffer payloads are left untouched.
var onPostAuth = function (request, reply) {
  var payload = request.payload;
  var isParseable = typeof payload === 'object' && !Buffer.isBuffer(payload);
  if (isParseable) {
    request.payload = Qs.parse(payload);
  }
  return reply.continue();
};
15 |
// Thin wrapper around a Hapi server configured for the OAM API:
// permissive CORS with credentials, lenient cookie parsing, trailing
// slash stripping, and verbose logging when config.debug === 'true'.
var Server = function (port) {
  this.port = port;
  this.hapi = new Hapi.Server({
    connections: {
      routes: {
        cors: {
          origin: ['*'],
          credentials: true
        },
        state: {
          parse: true,
          // Don't reject requests carrying malformed cookies
          failAction: 'ignore'
        }
      },
      router: {
        stripTrailingSlash: true
      }
    },
    debug: config.debug === 'true' ? {
      log: [ 'error', 'debug', 'info', 'worker' ],
      request: [ 'error', 'received', 'response' ]
    } : false
  });
};
40 |
// Registers plugins and routes, starts listening, and calls cb (if
// given) once the server is up.
Server.prototype.start = function (cb) {
  var self = this;
  self.hapi.connection({ port: self.port, uri: config.host });

  self.hapi.register([
    { register: require('../plugins/authentication') },
    {
      // Auto-load every route module in ./routes
      register: require('hapi-router'),
      options: {
        routes: './routes/*.js',
        ignore: './routes/_apidoc.js'
      }
    },
    {
      // Attach standard attribution metadata to every response
      register: require('../plugins/response-meta.js'),
      options: {
        content: {
          provided_by: 'OpenAerialMap',
          license: 'CC-BY 4.0',
          website: 'http://beta.openaerialmap.org'
        }
      }
    },
    {
      register: require('../plugins/paginate.js')
    },
    {
      // Sentry error reporting
      register: require('hapi-raven'),
      options: {
        dsn: process.env.SENTRY_DSN
      }
    }
  ], function (err) {
    if (err) throw err;
  });

  // Log the failing request path and stack trace for internal errors.
  self.hapi.on('request-error', (req, err) => {
    console.warn(`${req.method.toUpperCase()} ${req.url.path}`);
    console.warn(err.stack);
  });

  self.hapi.start(function () {
    console.info(
      'Server (' + process.env.NODE_ENV + ') running at:',
      self.hapi.info.uri
    );
    if (cb) {
      cb();
    }
  });

  // NOTE(review): registered after hapi.start() -- presumably still
  // applied to requests, but confirm ordering is intentional.
  self.hapi.ext('onPostAuth', onPostAuth);
};

module.exports = Server;
96 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | sudo: required
2 |
3 | language: node_js
4 |
5 | services:
6 | - mongodb
7 | - docker
8 |
9 | env:
10 | global:
11 | # Set OAM_DEBUG=true in Travis UI to see debug output
12 | - OAM_DEBUG=true
13 | - NODE_ENV=test
14 | - GH_REF=github.com/hotosm/oam-catalog.git
15 | - PRODUCTION_BRANCH=master
16 | - BSTACK_USER=thomasbuckleyhou1
17 | - FACEBOOK_APP_ID=228592690987600
18 | # GH_TOKEN for deploying docs to Github
19 | - secure: AhiKIqAYPeXH1HjhYza/VdMJrp8wCri5WNRY+Kdkb4piYEK20dAQPVgcVT2EM1tvlyZL5Q0ZU4rtnATkJOU3k9DQHI99xVjk4MjnqfpkrM9iZtif6o9xDZwf9iWwBeuu9MmKH0tsvKhktxrJkzZ7vAThurZVz89ZPoCAXuudyqk=
20 | # OAM API ENV
21 | - secure: HCCPmMJQhYJ4hG+YCcZc9hmFDThpXsNZDO8ktMQjluvIZAarsoBn6BXOIyyATil9bkGgdzzQzKilimg2TNFFsGCjyd8vxsbSA7igrxahxmM0qSGsVoAzIoWKsqJJHY0muHto/7Mmg47xa0sI2CcHaX5aKIPC2l8FD5GLYcdn39c=
22 | - secure: ZyTkxBgknlW11Aow0jQBv8pFdANlkrpe24QeWgKBh3LHVYqMBQoIbEWOAKL80tH+D7REOqLkZfvpqqGAkHLFBglbfT9D0V8w0zs7MzNlSYBH3txJ3NgI921GjeO/Z4dkp30C/jajKUWqEff3U6spp2RSNsv8rym1E3b/uKHeTI4=
23 | # Facebook App secret
24 | - secure: YNMdxIaCy39lq86vNvWWPSBFaVl4FzBa5v84PC3vSMi8pGhyQFv73i5oHx8/TifD2Q9un7bzUllmCJSco9j3Rva/c2HiS/yLBV9Rqp4GO5g/pkJhSEQOOkCK51YmCqgtdFg7Xd0CmtEwFPND1nSEB6TD2SW3OPX1kE3gg17etSI=
25 | # Browserstack
26 | - secure: OvY20t1Re9jLO6gxkTOUPgNo8bqj4oJJtxIeFzYPNWQHb3fpXkvHm4/zzrOg31cdPKo81ljyhrXAO7wHDZCrs1jZVDWHodxwHX7/OUHoZKvKa4lo9mxxcj7nPXwyacCcUhibmrrJuHDuUXMwxP3DB+tULOaSMaXSBNwFIKBoGqk=
27 |
28 | # Defaults. Can be overridden in individual job stages.
29 | before_script:
30 | - ./.build_scripts/prepare_dotenv.sh
31 | # Build and start the API with all workers and dynamic tiler processor
32 | - docker-compose -f test/docker-compose.yml up -d
33 | - sleep 30
34 | after_failure:
35 | - docker ps
36 | - cat .env
37 | - echo "Logs for API Integration test server:"
38 |       # TODO: When the Travis platform updates docker-compose, use `docker-compose logs` instead
39 | - docker logs test_test-app_1
40 | - sleep 10
41 | - echo "END"
42 |
43 | jobs:
44 | include:
45 | - stage: "Specs :factory:"
46 | before_script:
47 | - ./.build_scripts/prepare_dotenv.sh
48 | - sed -i '/OAM_DEBUG/d' .env
49 | script: npm test
50 | after_failure: true
51 |
52 | - stage: "Imagery processing tests :japan:"
53 | script: mocha test/integration
54 |
55 | - &integration-test
56 | stage: "Integration tests with frontend code :earth_asia:"
57 | env: FRONTEND_VERSION=oauth-login # TODO: Replace with `production` once merged
58 | script: ./.build_scripts/run_frontend_tests.sh
59 | - <<: *integration-test
60 | env: FRONTEND_VERSION=develop
61 |
62 | allow_failures:
63 | - env: FRONTEND_VERSION=develop
64 |
--------------------------------------------------------------------------------
/.build_scripts/merge_images_into_user.js:
--------------------------------------------------------------------------------
#!/usr/bin/env node

// Merge all imagery associated with one email address into a single user
// account.
//
// Usage: merge_images_into_user.js <imagery email> <user email> [confirm]
//
// Note. To connect to a specific remote database prepend DB_URI=... to the command.
// Try not to edit your .env file, accidents happen, for example when running tests
// whatever database is specified in .env is dropped.

require('dotenv').config();
var Meta = require('../models/meta');
var User = require('../models/user');
var Conn = require('../services/db');
var dbWrapper = new Conn();

var imageryEmail = process.argv[2];
var userEmail = process.argv[3];
var confirm = process.argv[4] === 'confirm';

// Both emails are required. Without this check a missing imagery email
// produced `new RegExp(undefined)`, which matches EVERY image's contact
// field and could merge the entire catalogue into one user.
if (!imageryEmail) {
  throw new Error('Must include [imagery email] argument');
}

if (!userEmail) {
  throw new Error('Must include [user email] argument');
}

if (confirm) {
  console.log('This is NOT a dry run.');
} else {
  console.log('This is a dry run. No database changes will be made.' +
    ' Use "confirm" to make changes.');
}

dbWrapper.start(search);
29 |
// Find the imagery and the target user, then (on "confirm") attach each
// image to the user and save everything. Runs once the DB connection is up.
function search () {
  const imagesPromise = Meta.find({ contact: new RegExp(imageryEmail, 'i') });
  const usersPromise = User.find({ contact_email: new RegExp(userEmail, 'i') });
  Promise.all([imagesPromise, usersPromise])
    .then(function (values) {
      const images = values[0];
      const users = values[1];

      if (images.length === 0) {
        throw new Error('No imagery found for ' + imageryEmail);
      }
      if (users.length === 0) {
        throw new Error('No users found matching ' + userEmail);
      }
      if (users.length > 1) {
        throw new Error('Multiple users found matching ' + userEmail);
      }
      console.log(images.length + ' images associated with ' + imageryEmail);

      const user = users[0];
      console.log('The following user matches the email: ' + user.name);

      if (confirm) {
        console.log('Merging images ...');
        images.forEach(function (image) {
          user.images.addToSet(image);
        });
        const savePromises = images.map(function (image) {
          image.user = user;
          return image.save();
        });
        // The user save goes last so the reporting step below can pick the
        // saved user out of the resolved array.
        savePromises.push(user.save());
        return Promise.all(savePromises);
      } else {
        console.log('Run the command again with "confirm" as the last argument');
        dbWrapper.close();
        // Dry run: signal the reporting step that there is nothing to report.
        // (Previously this returned undefined, which fell through to the
        // next .then and crashed on `values.length`.)
        return null;
      }
    })
    .then(function (values) {
      if (!values) {
        return;
      }
      const user = values[values.length - 1];
      console.log(values.length - 1 + ' Images merged for ' + user.name);
      dbWrapper.close();
    })
    .catch(function (error) {
      console.log(error.message);
      dbWrapper.close();
    });
}
79 |
--------------------------------------------------------------------------------
/routes/auth.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | const Boom = require('boom');
4 | const User = require('../models/user');
5 | const createToken = require('../models/createToken');
6 |
// Shared handler for the Facebook/Google OAuth callbacks. On success it
// creates a cookie-backed session and redirects to the URI the login flow
// started from (or the site root).
function oauthHandler (request, reply) {
  if (!request.auth.isAuthenticated) {
    return reply('Authentication failed due to: ' + request.auth.error.message);
  }

  User.login(request.auth.credentials, function (err, sessionId) {
    if (err) {
      return reply(Boom.badImplementation(err));
    }
    request.cookieAuth.set({ sessionId: sessionId });
    reply.redirect(request.auth.credentials.query.original_uri || '/');
  });
}
22 |
// OAuth callback variant that issues a JWT instead of a session cookie.
// NOTE(review): `messageResponse` is an empty template literal and `token`
// is never embedded in it — presumably the HTML that posts the token back
// to the opener window was removed; confirm against the frontend login flow.
function jwtHandler (request, reply) {
  User.jwtLogin(request.auth.credentials).then((token) => {
    const messageResponse = ``;
    const response = reply(messageResponse).type('text/html');
    return response;
  }).catch((err) => {
    // Previously a rejected jwtLogin was never handled, so the request got
    // no reply at all; answer with a 500 like the other handlers do.
    reply(Boom.badImplementation(err));
  });
}
30 |
module.exports = [
  // OAuth callbacks that establish a cookie session (handled by oauthHandler).
  {
    method: ['GET', 'POST'],
    path: '/oauth/facebook',
    config: {
      auth: 'facebook',
      handler: oauthHandler
    }
  },

  {
    method: ['GET', 'POST'],
    path: '/oauth/google',
    config: {
      auth: 'google',
      handler: oauthHandler
    }
  },
  // OAuth callbacks that return a JWT instead of setting a session cookie.
  // The 'disablePlugins' tag presumably opts these routes out of the
  // response-shaping plugins (see plugins/) — confirm in plugin code.
  {
    method: 'GET',
    path: '/oauth/jwtfacebook',
    config: {
      auth: 'facebook',
      handler: jwtHandler,
      tags: ['disablePlugins']
    }
  },
  {
    method: 'GET',
    path: '/oauth/jwtgoogle',
    config: {
      auth: 'google',
      handler: jwtHandler,
      tags: ['disablePlugins']
    }
  },
  {
    // Clear the session cookie and confirm logout.
    method: 'GET',
    path: '/logout',
    config: {
      auth: 'session'
    },
    handler: function (request, reply) {
      request.cookieAuth.clear();
      reply({
        code: 200,
        message: 'Goodbye!'
      });
    }
  },
  {
    // Exchange an existing cookie session for a long-lived (365 day) JWT
    // for the user owning that session.
    method: 'GET',
    path: '/getToken',
    config: {
      auth: 'session',
      tags: ['disablePlugins']
    },
    handler: function (request, reply) {
      User.findOne({
        session_id: request.auth.credentials.session_id
      }).then(function (user) {
        return createToken(
          user._id, user.name, user.contact_email, 'user', '365d'
        );
      }).then(function (token) {
        reply({ token });
      }).catch(function (err) {
        reply(Boom.badImplementation(err));
      });
    }
  }
];
--------------------------------------------------------------------------------
/test/integration/test_imagery.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var expect = require('chai').expect;
4 | var request = require('request');
5 |
6 | var helper = require('./helper');
7 | var config = require('../../config');
8 | var S3Sync = require('../../services/s3_sync');
9 |
// End-to-end CRUD tests: each test logs in and uploads a fixture image via
// helper.loginAndUpload, then exercises the /meta endpoints and verifies the
// corresponding _meta.json object in S3.
describe('Imagery CRUD', function () {
  // Needs time to process the image
  this.timeout(3 * 60 * 1000);

  // Shared state (title, processed image record, session cookie) populated
  // by the upload in beforeEach.
  let prereqs = {};

  beforeEach(function (done) {
    helper.loginAndUpload(function (title, image, cookie) {
      prereqs.title = title;
      prereqs.image = image;
      prereqs.cookie = cookie;
      done();
    });
  });

  it('should upload, convert and process an image', function () {
    expect(prereqs.image.user.name).to.eq('Tester');

    expect(prereqs.image.title).to.eq(prereqs.title);
    expect(prereqs.image.properties.license).to.eq('CC-BY');
    // expect(prereqs.image.projection).to.include('GEOGCS');
    expect(prereqs.image.gsd).to.eq(3706.49755482);
    expect(prereqs.image.bbox).to.deep.eq([129, 29, 146, 54]);

    // TODO: Fetch these and check they're good too
    expect(prereqs.image.properties.tms).to.include('/{z}/{x}/{y}');
    expect(prereqs.image.meta_uri).to.include('_meta.json');
  });

  it('should update meta in S3', function (done) {
    var s3Sync = new S3Sync(prereqs.image.meta_uri);
    var options = {
      url: config.apiEndpoint + '/meta/' + prereqs.image._id,
      json: {
        title: 'Updated title'
      },
      headers: {
        'Cookie': prereqs.cookie
      }
    };

    request.put(options, function (_err, httpResponse, body) {
      expect(httpResponse.statusCode).to.eq(204);
      // Point the S3 client at the meta object before downloading it.
      s3Sync.s3Params.Key = s3Sync.s3Key;
      s3Sync.downloadRemoteMeta().then(function (metadataString) {
        let metadata = JSON.parse(metadataString);
        expect(metadata.bbox).to.deep.eq([129, 29, 146, 54]);
        expect(metadata.title).to.eq('Updated title');
        done();
      });
    });
  });

  it('should delete meta in S3', function (done) {
    var s3Sync = new S3Sync(prereqs.image.meta_uri);
    var options = {
      url: config.apiEndpoint + '/meta/' + prereqs.image._id,
      headers: {
        'Cookie': prereqs.cookie
      }
    };

    request.delete(options, function (_err, httpResponse, body) {
      expect(httpResponse.statusCode).to.eq(204);
      s3Sync.s3Params.Key = s3Sync.s3Key;
      // Deletion succeeded if the download now rejects (expects NoSuchKey).
      s3Sync.downloadRemoteMeta().then(function () {
        throw new Error('Imagery was not deleted');
      }).then(
        function () {},
        function (err) {
          expect(err).to.contain('NoSuchKey');
          done();
        }
      );
    });
  });
});
87 |
--------------------------------------------------------------------------------
/test/fixtures/meta_db_objects.json:
--------------------------------------------------------------------------------
1 | [{
2 | "uuid": "http://www.example.com/some_image1.tif",
3 | "title": "some_image1.tif",
4 | "projection": "PROJCS[\"WGS84/Pseudo-Mercator\",GEOGCS[\"WGS84\",DATUM[\"WGS_1984\",SPHEROID[\"WGS84\",6378137,298.257223563,AUTHORITY[\"EPSG\",\"7030\"]],AUTHORITY[\"EPSG\",\"6326\"]],PRIMEM[\"Greenwich\",0],UNIT[\"degree\",0.0174532925199433],AUTHORITY[\"EPSG\",\"4326\"]],PROJECTION[\"Mercator_1SP\"],PARAMETER[\"central_meridian\",0],PARAMETER[\"scale_factor\",1],PARAMETER[\"false_easting\",0],PARAMETER[\"false_northing\",0],UNIT[\"metre\",1,AUTHORITY[\"EPSG\",\"9001\"]],EXTENSION[\"PROJ4\",\"+proj=merc+a=6378137+b=6378137+lat_ts=0.0+lon_0=0.0+x_0=0.0+y_0=0+k=1.0+units=m+nadgrids=@null+wktext+no_defs\"],AUTHORITY[\"EPSG\",\"3857\"]]",
5 | "footprint": "POLYGON((37.51555277777778 59.79141666666666,41.82819444444445 59.79141666666666,41.82819444444445 57.57304444444445,37.51555277777778 57.57304444444445,37.51555277777778 59.79141666666666))",
6 | "gsd": 0.0004999999999998001,
7 | "file_size": 2121158626,
8 | "acquisition_start": "2015-04-20T00:00:00.000Z",
9 | "acquisition_end": "2015-04-21T00:00:00.000Z",
10 | "platform": "drone",
11 | "provider": "UAViators",
12 | "contact": "someone@nasa.gov",
13 | "properties": {
14 | "thumbnail": "http://www.example.com/some_image1.thumb.jpg"
15 | },
16 | "meta_uri": "http://www.example.com/some_image1_meta.json",
17 | "bbox": [
18 | 37.51555277777778,
19 | 57.57304444444445,
20 | 41.82819444444445,
21 | 59.79141666666666
22 | ]
23 | },
24 | {
25 | "uuid": "http://www.example.com/some_image2.tif",
26 | "title": "some_image2.tif",
27 | "projection": "PROJCS[\"WGS84/UTMzone45N\",GEOGCS[\"WGS84\",DATUM[\"WGS_1984\",SPHEROID[\"WGS84\",6378137,298.257223563,AUTHORITY[\"EPSG\",\"7030\"]],AUTHORITY[\"EPSG\",\"6326\"]],PRIMEM[\"Greenwich\",0],UNIT[\"degree\",0.0174532925199433],AUTHORITY[\"EPSG\",\"4326\"]],PROJECTION[\"Transverse_Mercator\"],PARAMETER[\"latitude_of_origin\",0],PARAMETER[\"central_meridian\",87],PARAMETER[\"scale_factor\",0.9996],PARAMETER[\"false_easting\",500000],PARAMETER[\"false_northing\",0],UNIT[\"metre\",1,AUTHORITY[\"EPSG\",\"9001\"]],AUTHORITY[\"EPSG\",\"32645\"]]",
28 | "footprint": "POLYGON((85.15127222222223 28.07871111111111,85.20211944444445 28.07938888888889,85.20296388888889 28.028730555555555,85.1521388888889 28.028055555555554,85.15127222222223 28.07871111111111))",
29 | "gsd": 4,
30 | "file_size": 2121158626,
31 | "acquisition_start": "2015-01-01T00:00:00.000Z",
32 | "acquisition_end": "2015-01-02T00:00:00.000Z",
33 | "platform": "drone",
34 | "provider": "UAViators",
35 | "contact": "someone@nasa.gov",
36 | "properties": {
37 | "tms": "http://example.com/some.tms",
38 | "thumbnail": "http://www.example.com/some_image2.thumb.jpg"
39 | },
40 | "meta_uri": "http://www.example.com/some_image2_meta.json",
41 | "bbox": [
42 | 85.15127222222223,
43 | 28.028055555555554,
44 | 85.20296388888889,
45 | 28.07938888888889
46 | ]
47 | }]
48 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing guidelines
2 |
3 | There are many ways to contribute to a project, below are some examples:
4 |
5 | - Report bugs, ideas, requests for features by creating “Issues” in the project repository.
6 | - Fork the code and play with it, whether you later choose to make a pull request or not.
7 | - Create pull requests of changes that you think are laudatory. From typos to major design flaws, you will find a target-rich environment for improvements.
8 |
9 | ## Issues
10 |
11 | When creating a task through the issue tracker, please include the following where applicable:
12 |
13 | * A summary of identified tasks related to the issue; and
14 | * Any dependencies related to completion of the task (include links to tickets with the dependency).
15 |
16 | ### Design and feature request issues should include:
17 | * What the goal of the task being accomplished is; and
18 | * The user need being addressed.
19 |
20 | ### Development issues should include:
21 | * Unknowns tasks or dependencies that need investigation.
22 |
23 | Use checklists (via `- [ ]`) to keep track of sub-items wherever possible.
24 |
25 | ## Coding style
26 |
27 | When writing code it is generally a good idea to try and match your
28 | formatting to that of any existing code in the same file, or to other
29 | similar files if you are writing new code. Consistency of layout is
30 | far more important than the layout itself as it makes reading code
31 | much easier.
32 |
33 | One golden rule of formatting -- please don't use tabs in your code
34 | as they will cause the file to be formatted differently for different
35 | people depending on how they have their editor configured.
36 |
37 | ## Comments
38 |
39 | Sometimes it's not apparent from the code itself what it does, or,
40 | more importantly, **why** it does that. Good comments help your fellow
41 | developers to read the code and satisfy themselves that it's doing the
42 | right thing.
43 |
44 | When developing, you should:
45 |
46 | * Comment your code - don't go overboard, but explain the bits which
47 | might be difficult to understand what the code does, why it does it
48 | and why it should be the way it is.
49 | * Check existing comments to ensure that they are not misleading.
50 |
51 | ## Committing
52 |
53 | When you submit patches, the project maintainer has to read them and
54 | understand them. This is difficult enough at the best of times, and
55 | misunderstanding patches can lead to them being more difficult to
56 | merge. To help with this, when submitting you should:
57 |
58 | * Split up large patches into smaller units of functionality.
59 | * Keep your commit messages relevant to the changes in each individual
60 | unit.
61 |
62 | When writing commit messages please try and stick to the same style as
63 | other commits, namely:
64 |
65 | * A one line summary, starting with a capital and with no full stop.
66 | * A blank line.
67 | * Full description, as proper sentences with capitals and full stops.
68 |
69 | For simple commits the one line summary is often enough and the body
70 | of the commit message can be left out.
71 |
72 | If you have forked on GitHub then the best way to submit your patches is to
73 | push your changes back to GitHub and then send a "pull request" on GitHub.
74 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "oam-catalog",
3 | "version": "0.8.1",
4 | "description": "A catalog for Open Aerial Map Imagery",
5 | "main": "index.js",
6 | "scripts": {
7 | "docs": "apidoc -i routes/ -o docs/",
8 | "lint": "eslint . --ext .js",
9 | "test": "mocha test/specs",
10 | "integration": "mocha test/integration"
11 | },
12 | "repository": {
13 | "type": "git",
14 | "url": "https://github.com/hotosm/oam-catalog.git"
15 | },
16 | "author": "Humanitarian OpenStreetMap Team",
17 | "license": "BSD-3-Clause",
18 | "bugs": {
19 | "url": "https://github.com/hotosm/oam-catalog/issues"
20 | },
21 | "homepage": "https://github.com/hotosm/oam-catalog",
22 | "dependencies": {
23 | "@mapbox/sphericalmercator": "^1.0.5",
24 | "@turf/area": "^6.0.1",
25 | "@turf/bbox": "^4.7.3",
26 | "@turf/bbox-polygon": "^6.0.1",
27 | "@turf/envelope": "^5.0.4",
28 | "@turf/intersect": "^6.1.2",
29 | "@turf/invariant": "^4.7.3",
30 | "@turf/union": "^6.0.2",
31 | "async": "2.1.4",
32 | "aws-sdk": "^2.79.0",
33 | "babel": "^5.8.21",
34 | "bell": "^8.9.0",
35 | "bluebird": "^3.4.1",
36 | "boom": "^2.8.0",
37 | "cron": "^1.2.1",
38 | "dotenv": "^4.0.0",
39 | "es6-promisify": "^3.0.0",
40 | "exit-hook": "^1.1.1",
41 | "fb": "^2.0.0",
42 | "foreman": "^2.0.0",
43 | "fs-extra": "^0.18.2",
44 | "good": "^7.2.0",
45 | "good-console": "^6.4.0",
46 | "hapi": "^16.4.3",
47 | "hapi-auth-cookie": "^7.0.0",
48 | "hapi-auth-jwt2": "^7.3.0",
49 | "hapi-raven": "^6.0.0",
50 | "hapi-router": "^3.5.0",
51 | "joi": "^10.6.0",
52 | "jsonwebtoken": "^8.1.0",
53 | "lodash": "^3.10.1",
54 | "moment": "^2.10.6",
55 | "mongodb": "^4.17.2",
56 | "mongoose": "^6.13.8",
57 | "monq": "^0.3.7",
58 | "newrelic": "^1.20.0",
59 | "node-cron": "^1.1.3",
60 | "qs": "^6.4.0",
61 | "queue-async": "^1.0.7",
62 | "request": "^2.60.0",
63 | "sendgrid": "^1.9.2",
64 | "tmp": "0.0.26",
65 | "uuid": "^3.0.1",
66 | "wellknown": "^0.3.1",
67 | "xtend": "^4.0.0",
68 | "pg": "8.10.0"
69 | },
70 | "devDependencies": {
71 | "apidoc": "^0.13.1",
72 | "build-url": "^1.0.10",
73 | "chai": "^2.3.0",
74 | "ecstatic": "^0.8.0",
75 | "eslint": "^3.10.2",
76 | "eslint-config-standard": "^6.2.1",
77 | "eslint-config-standard-react": "^4.2.0",
78 | "eslint-plugin-promise": "^3.4.0",
79 | "eslint-plugin-react": "^6.7.1",
80 | "eslint-plugin-standard": "^2.0.1",
81 | "http-server": "^0.10.0",
82 | "iron": "^4.0.5",
83 | "mocha": "^2.2.4",
84 | "nodemon": "^1.12.1",
85 | "omit-deep": "^0.1.2",
86 | "proxyquire": "^1.8.0",
87 | "sinon": "^2.3.7",
88 | "sinon-chai": "^2.14.0"
89 | },
90 | "apidoc": {
91 | "title": "OAM API",
92 | "name": "OpenAerialMap Catalog API",
93 | "description": "",
94 | "url": "https://api.openaerialmap.org",
95 | "template": {
96 | "withCompare": false
97 | },
98 | "header": {
99 | "title": "Getting Started",
100 | "filename": "about.md"
101 | },
102 | "order": [
103 | "Meta",
104 | "TMS",
105 | "Analytics"
106 | ]
107 | }
108 | }
--------------------------------------------------------------------------------
/postgres/schema.sql:
--------------------------------------------------------------------------------
-- Required by the composite GiST indexes below that mix scalar columns
-- (layer_id, zoom) with geometry.
CREATE EXTENSION btree_gist;

-- STEP1 create table layers
-- Catalogue of map layers; the OAM mosaic is inserted below as one global layer.
DROP TABLE IF EXISTS layers;
CREATE TABLE layers (
	id int4 NOT NULL GENERATED ALWAYS AS IDENTITY,
	public_id text NOT NULL,
	"name" text NULL,
	url text NULL,
	"type" text NULL,
	description text NULL,
	copyrights text NULL,
	last_updated timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP,
	source_updated timestamptz NULL,
	access_time timestamptz NULL,
	"owner" text NULL,
	is_public bool NULL,
	category_id int4 NULL,
	group_id int4 NULL,
	properties jsonb NULL,
	is_dirty bool NULL,
	zoom_visibility_rules jsonb NULL,
	geom geometry NULL,
	is_visible bool NULL DEFAULT false,
	feature_properties jsonb NULL,
	api_key varchar NULL,
	is_global bool NULL DEFAULT false,
	tile_size int4 NULL,
	min_zoom int4 NULL,
	max_zoom int4 NULL,
	mapbox_styles jsonb NULL,
	-- mapbox_styles, when present, must contain a top-level "url" key
	CONSTRAINT layers_mapbox_styles_check CHECK ((mapbox_styles ? 'url'::text)),
	CONSTRAINT layers_pkey PRIMARY KEY (id),
	CONSTRAINT layers_public_id_key UNIQUE (public_id)
);
CREATE INDEX layers_geom_idx ON layers USING gist (geom);
37 |
38 |
-- STEP2 populate table layers with feature for openaerialmap
-- NOTE(review): url is inserted as '' because the per-environment URL
-- selection below is commented out — confirm the tiler URL is set elsewhere.
insert into layers(
	public_id,
	name,
	url,
	type,
	description,
	copyrights,
	last_updated,
	source_updated,
	owner,
	is_public,
	is_visible,
	is_global,
	geom)
select
	'openaerialmap_geocint',
	'OAM Mosaic',
	-- (case '' when 'dev' then 'https://test-apps02.konturlabs.com/raster-tiler/oam/mosaic/{z}/{x}/{y}.png'
	-- when 'test' then 'https://test-apps.konturlabs.com/raster-tiler/oam/mosaic/{z}/{x}/{y}.png'
	-- else 'https://apps.kontur.io/raster-tiler/oam/mosaic/{z}/{x}/{y}.png'
	-- end),
	'',
	'raster',
	'The open collection of openly licensed satellite and unmanned aerial vehicle (UAV) imagery. ',
	'All imagery is publicly licensed and made available through the Humanitarian OpenStreetMap Team''s Open Imagery Network (OIN) Node. All imagery contained in OIN is licensed CC-BY 4.0, with attribution as contributors of Open Imagery Network. All imagery is available to be traced in OpenStreetMap. © OpenAerialMap, © Kontur',
	now(),
	now(),
	'layers-db',
	true,
	false,
	true,
	-- Near-worldwide web-mercator-safe bounding box in EPSG:4326
	(select st_setsrid(ST_MakeBox2D(st_point(-179.0, -85.06), st_point(179.0, 85.06)),4326));
72 |
73 |
-- STEP3 creating table layers_features
-- Per-layer features keyed by (feature_id, layer_id, zoom); zoom defaults
-- to 999 (presumably a sentinel for "all zoom levels" — confirm with readers).
DROP TABLE IF EXISTS layers_features;
CREATE TABLE layers_features (
	feature_id text NOT NULL,
	layer_id int4 NOT NULL,
	properties jsonb NULL,
	geom geometry NULL,
	last_updated timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP,
	zoom int4 NULL DEFAULT 999,
	CONSTRAINT layers_features_feature_id_layer_id_zoom_key UNIQUE (feature_id, layer_id, zoom)
);
-- Spatial indexes both in storage SRS and transformed to web mercator (3857).
CREATE INDEX layers_features_3857_idx ON layers_features USING gist (st_transform(geom, 3857));
CREATE INDEX layers_features_layer_id_3857_idx ON layers_features USING gist (layer_id, st_transform(geom, 3857));
CREATE INDEX layers_features_layer_id_geom_idx ON layers_features USING gist (layer_id, geom);
CREATE INDEX layers_features_layer_id_zoom_geom_idx ON layers_features USING gist (layer_id, zoom, geom);
CREATE INDEX layers_features_zoom_idx ON layers_features USING btree (zoom);

ALTER TABLE layers_features ADD CONSTRAINT layers_features_layer_id_fkey FOREIGN KEY (layer_id) REFERENCES layers(id) ON DELETE CASCADE;
92 |
--------------------------------------------------------------------------------
/monq/queue.js:
--------------------------------------------------------------------------------
1 | const { ObjectId } = require('mongodb');
2 | const db = require('./db');
3 | const Job = require('./job');
4 |
5 | module.exports = Queue;
6 |
// A job queue backed by a MongoDB collection.
//
// Accepts either (connection, name, options) or the two-argument form
// (connection, options), in which case the queue name defaults to 'default'.
function Queue (connection, name, options) {
  if (typeof name === 'object' && options === undefined) {
    options = name;
    name = undefined;
  }

  if (!options) {
    options = {};
  }
  if (!options.collection) {
    options.collection = 'jobs';
  }
  if (!options.universal) {
    options.universal = false;
  }

  this.connection = connection;
  this.name = name || 'default';
  this.options = options;
  this.collection = this.connection.db.collection(this.options.collection);

  // Ensure the job indexes exist unless the caller explicitly opts out.
  if (options.index !== false) {
    db.index(this.collection);
  }
}
27 |
// Wrap raw job data in a Job bound to this queue's collection.
Queue.prototype.job = function (data) {
  var collection = this.collection;
  return new Job(collection, data);
};
31 |
// Fetch a single job by id. String ids are promoted to ObjectId; the lookup
// is scoped to this queue's name unless the queue is universal.
Queue.prototype.get = function (id, callback) {
  var self = this;

  var lookupId = typeof id === 'string' ? new ObjectId(id) : id;

  var query = { _id: lookupId };
  if (!this.options.universal) {
    query.queue = this.name;
  }

  this.collection.findOne(query, function (err, data) {
    if (err) {
      return callback(err);
    }
    callback(null, new Job(self.collection, data));
  });
};
51 |
// Create and persist a new queued job. The `options` argument may be
// omitted, in which case the third argument is the callback.
Queue.prototype.enqueue = function (name, params, options, callback) {
  if (typeof options === 'function' && !callback) {
    callback = options;
    options = {};
  }

  this.job({
    name: name,
    params: params,
    queue: this.name,
    attempts: parseAttempts(options.attempts),
    timeout: parseTimeout(options.timeout),
    delay: options.delay,
    priority: options.priority
  }).enqueue(callback);
};
70 |
// Atomically claim the next runnable job: the highest-priority (then oldest)
// QUEUED job whose delay has passed. Matching and flipping the status to
// DEQUEUED happen in one findOneAndUpdate, so two workers cannot claim the
// same job.
Queue.prototype.dequeue = function (options, callback) {
  var self = this;

  // `options` is optional.
  if (callback === undefined) {
    callback = options;
    options = {};
  }

  var query = {
    status: Job.QUEUED,
    delay: { $lte: new Date() }
  };

  if (!this.options.universal) {
    query.queue = this.name;
  }

  // Only consider jobs at or above the requested priority.
  if (options.minPriority !== undefined) {
    query.priority = { $gte: options.minPriority };
  }

  // Restrict to job names this worker has handlers for.
  if (options.callbacks !== undefined) {
    var callback_names = Object.keys(options.callbacks);
    query.name = { $in: callback_names };
  }

  // Highest priority first; _id ascending breaks ties by insertion order.
  var sort = {
    'priority': -1,
    '_id': 1
  };

  var update = { $set: { status: Job.DEQUEUED, dequeued: new Date() }};

  this.collection.findOneAndUpdate(
    query,
    update,
    { sort: sort, returnDocument: 'after' },
    function (err, result) {
      if (err) return callback(err);
      // No matching job: call back with no arguments.
      if (!result.value) return callback();

      callback(null, self.job(result.value));
    }
  );
};
116 |
117 | // Helpers
118 |
// Normalize a timeout option to an integer; undefined passes through so the
// job document simply omits the field.
function parseTimeout (timeout) {
  return timeout === undefined ? undefined : parseInt(timeout, 10);
}
123 |
// Normalize the attempts option ({count, delay?, strategy?}) to integers.
// Undefined passes through; any non-object value is rejected.
function parseAttempts (attempts) {
  if (attempts === undefined) {
    return undefined;
  }
  if (typeof attempts !== 'object') {
    throw new Error('attempts must be an object');
  }

  var parsed = {
    count: parseInt(attempts.count, 10)
  };

  // delay/strategy only apply when a retry delay was requested.
  if (attempts.delay !== undefined) {
    parsed.delay = parseInt(attempts.delay, 10);
    parsed.strategy = attempts.strategy;
  }

  return parsed;
}
142 |
--------------------------------------------------------------------------------
/test/specs/test_user_jwt.js:
--------------------------------------------------------------------------------
'use strict';
const sinon = require('sinon');
const sinonChai = require('sinon-chai');
const chai = require('chai');
const jwt = require('jsonwebtoken');
const User = require('../../models/user');
const config = require('../../config');

const expect = chai.expect;
chai.use(sinonChai);
// Sandbox so each test can stub User statics and restore them in afterEach.
const sandbox = sinon.sandbox.create();

// Credentials shaped like the Facebook OAuth profile: the picture URL is
// nested under raw.picture.data.url.
const facebookCredentials = {
  provider: 'custom',
  profile: {
    id: 0,
    displayName: 'displayName',
    email: 'email@email.org',
    raw: {
      picture: {
        data: {
          url: 'url'
        }
      }
    }
  }
};

// Google's profile keeps the picture URL directly under raw.picture.
const googleCredentials = {
  provider: 'google',
  profile: {
    id: 1,
    displayName: 'displayName',
    email: 'email@email.org',
    raw: {
      picture: 'url'
    }
  }
};

describe('User', () => {
  afterEach(() => {
    sandbox.restore();
  });
  it('should be invalid if name is empty', () => {
    const user = new User();
    return user.validate().catch((error) => {
      expect(error.errors.name).to.exist;
    });
  });

  it('jwtLogin should find existing user with facebook_id', () => {
    const findOne = sandbox.stub(User, 'findOne').returns(Promise.resolve({}));

    return User.jwtLogin(facebookCredentials).then((token) => {
      expect(findOne).to.have.been
        .calledWith({ facebook_id: facebookCredentials.profile.id });
    });
  });

  it('jwtLogin should find existing user with google_id', () => {
    const findOne = sandbox.stub(User, 'findOne').returns(Promise.resolve({}));

    return User.jwtLogin(googleCredentials).then((token) => {
      expect(findOne).to.have.been
        .calledWith({ google_id: googleCredentials.profile.id });
    });
  });

  it('jwtLogin should create new Facebook user when none is found', () => {
    const createUser = {
      facebook_id: facebookCredentials.profile.id,
      name: facebookCredentials.profile.displayName,
      contact_email: facebookCredentials.profile.email,
      profile_pic_uri: facebookCredentials.profile.raw.picture.data.url
    };

    // findOne resolving null forces the create path.
    sandbox.stub(User, 'findOne').returns(Promise.resolve(null));
    const create = sandbox.stub(User, 'create')
      .returns(Promise.resolve({ _id: 'id', name: 'name' }));

    return User.jwtLogin(facebookCredentials).then((token) => {
      expect(create).to.have.been
        .calledWith(createUser);
    });
  });

  // NOTE(review): "none if found" in the test name is a typo for "none is found".
  it('jwtLogin should create new Google user when none if found', () => {
    const createUser = {
      google_id: googleCredentials.profile.id,
      name: googleCredentials.profile.displayName,
      contact_email: googleCredentials.profile.email,
      profile_pic_uri: googleCredentials.profile.raw.picture
    };

    sandbox.stub(User, 'findOne').returns(Promise.resolve(null));
    const create = sandbox.stub(User, 'create')
      .returns(Promise.resolve({ _id: 'id', name: 'name' }));

    return User.jwtLogin(googleCredentials).then((token) => {
      expect(create).to.have.been
        .calledWith(createUser);
    });
  });

  it('jwtLogin should return promise with valid JWT token', () => {
    // NOTE(review): the stub nests _id under profile, so user._id is
    // undefined and this only asserts decoded.id is also undefined —
    // consider tightening to a concrete id.
    const user = { profile: { _id: 'id', name: 'name' } };
    sandbox.stub(User, 'findOne').returns(Promise.resolve(user));
    return User.jwtLogin(facebookCredentials).then((token) => {
      const decoded = jwt.verify(token, config.jwtSecret);
      expect(decoded.id).to.equal(user._id);
    });
  });
});
115 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
# Local development stack for the OAM API.
volumes:
  # Named volume so MongoDB data survives container re-creation.
  mongodb: ~

networks:
  oam-api:


services:
  # Shared base definition; never run directly (command: "false").
  # The api, worker and transcoder services extend it.
  app:
    environment:
      - API_ENDPOINT=http://api:4000
      - DB_URI=mongodb://mongo/oam-api
      - BROWSER_URL=http://localhost:3000
      - OAM_DEBUG=true
      - HOST=0.0.0.0
    env_file: .env.local
    build:
      context: .
      args:
        - NODE_ENV=development
    command: "false"
    networks:
      - oam-api

  # Main HTTP API (index.js), with the repo's source bind-mounted for
  # live reload via nodemon.
  api:
    extends: app
    environment:
      - PORT=4000
      - HOST_TLD=localhost
      - COOKIE_PASSWORD=12345678901234567890123456789012
      - TILER_BASE_URL=http://localhost:8000
    command: nodemon index.js
    depends_on:
      - mongo
    links:
      - mongo
    ports:
      - 4000:4000
    volumes:
      - ./bin:/app/bin
      - ./controllers:/app/controllers
      - ./models:/app/models
      - ./plugins:/app/plugins
      - ./routes:/app/routes
      - ./services:/app/services
      - ./test:/app/test
      - ./catalog-worker.js:/app/catalog-worker.js
      - ./config.js:/app/config.js
      - ./index.js:/app/index.js
      - ./newrelic.js:/app/newrelic.js
      - ./package.json:/app/package.json
      # override usage of custom mongojs driver in monq
      - ./monq/connection.js:/app/node_modules/monq/lib/connection.js:ro
      - ./monq/queue.js:/app/node_modules/monq/lib/queue.js:ro
      - ./monq/db.js:/app/node_modules/monq/lib/db.js:ro
    networks:
      - oam-api

  # Background cron worker (catalog-worker.js) that polls OIN buckets
  # for new imagery on the CRON_TIME schedule.
  worker:
    extends: app
    environment:
      - DOT_ENV_FILENAME=.env.local
      - CRON_TIME=*/15 * * * *
      - PGHOST=localhost # postgres from mosaic-tiler in dev mode on localhost
      - PGPORT=5432
      - PGUSER=postgres
      - PGPASSWORD=postgres
      - PGDATABASE=postgres
    command: nodemon catalog-worker.js
    depends_on:
      - mongo
      - register
    links:
      - mongo
      - register
    volumes:
      - ./bin:/app/bin
      - ./controllers:/app/controllers
      - ./models:/app/models
      - ./plugins:/app/plugins
      - ./routes:/app/routes
      - ./services:/app/services
      - ./test:/app/test
      - ./catalog-worker.js:/app/catalog-worker.js
      - ./config.js:/app/config.js
      - ./index.js:/app/index.js
      - ./newrelic.js:/app/newrelic.js
      - ./package.json:/app/package.json
    # This allows usage of services running directly on the host machine
    extra_hosts:
      - host.docker.internal:host-gateway
    networks:
      - oam-api

  # Imagery transcoding worker (bin/transcoder.js).
  transcoder:
    extends: app
    command: nodemon bin/transcoder.js
    depends_on:
      - mongo
    links:
      - api
      - mongo
    volumes:
      - ./bin:/app/bin
      - ./controllers:/app/controllers
      - ./models:/app/models
      - ./plugins:/app/plugins
      - ./routes:/app/routes
      - ./services:/app/services
      - ./test:/app/test
      - ./catalog-worker.js:/app/catalog-worker.js
      - ./config.js:/app/config.js
      - ./index.js:/app/index.js
      - ./newrelic.js:/app/newrelic.js
      - ./package.json:/app/package.json
    networks:
      - oam-api

  # Serves the test/ directory over HTTP; used by the worker as a local
  # stand-in for the OIN bucket register.
  register:
    build:
      context: .
      args:
        - NODE_ENV=development
    command: http-server test/
    ports:
      - 8080:8080
    volumes:
      - ./test:/app/test
    networks:
      - oam-api

  mongo:
    image: mongo:6
    volumes:
      - mongodb:/data/db
    networks:
      - oam-api

  # Web UI for inspecting the Mongo database during development.
  mongo-express:
    # this version works with Mongo v6
    # https://github.com/mongo-express/mongo-express/issues/887#issuecomment-1241830515
    # upgrading past this has not been tested, but probably works
    image: mongo-express:1.0
    ports:
      - 8081:8081
    links:
      - mongo
    environment:
      ME_CONFIG_MONGODB_URL: mongodb://mongo:27017/
    networks:
      - oam-api
--------------------------------------------------------------------------------
/services/s3_sync.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var _ = require('lodash');
4 | var url = require('url');
5 | var S3 = require('aws-sdk/clients/s3');
6 |
7 | var config = require('../config');
8 |
9 | /**
10 | * Manage the syncing of meta objects with their *_metadata.json counterparts
11 | */
12 | module.exports = class S3Sync {
13 | constructor (remoteMetaUri) {
14 | this.s3 = new S3();
15 | this.remoteMetaUri = remoteMetaUri;
16 | this.s3Params = {
17 | Bucket: config.oinBucket
18 | };
19 | this.parseS3Key();
20 | }
21 |
22 | // Don't try to manipulate metadata files that aren't in OIN's public bucket.
23 | isOINMeta () {
24 | let isInDomain = this.remoteMetaUri.includes(`${config.oinBucket}.${config.s3PublicDomain}`);
25 | let isInPath = this.remoteMetaUri.includes(`/${config.oinBucket}/`);
26 | return isInDomain || isInPath;
27 | }
28 |
29 | updateRemoteMeta (newDetails, callback) {
30 | if (!this.isOINMeta()) {
31 | callback();
32 | return;
33 | }
34 | this.s3Params.Key = this.s3Key;
35 | this.newMetadataDetails = newDetails;
36 | this.downloadRemoteMeta().then((metadataString) => {
37 | let metadata = this.updateMetadataJSON(metadataString, newDetails);
38 | return this.uploadMeta(metadata);
39 | }).then(function () {
40 | callback();
41 | }).catch(function (err) {
42 | throw new Error(err);
43 | });
44 | }
45 |
46 | deleteRemoteMeta (callback) {
47 | if (!this.isOINMeta()) {
48 | callback();
49 | return;
50 | }
51 | this.setParamsToDeleteDirectory();
52 | console.log(`Attempting to delete: s3://${this.s3Params.Bucket}/${this.s3Params.Key}`);
53 | this.s3Params.Prefix = this.s3Params.Key;
54 | delete this.s3Params.Key;
55 | this.s3.listObjects(this.s3Params, (err, data) => {
56 | if (err) throw new Error(err);
57 | if (data.Contents.length === 0) {
58 | callback();
59 | return;
60 | }
61 |
62 | delete this.s3Params.Prefix;
63 | this.s3Params.Delete = { Objects: [] };
64 |
65 | data.Contents.forEach((content) => {
66 | this.s3Params.Delete.Objects.push({Key: content.Key});
67 | });
68 |
69 | this.s3.deleteObjects(this.s3Params, function (err, data) {
70 | if (err) throw new Error(err);
71 | callback();
72 | });
73 | });
74 | }
75 |
76 | // Find the base folder within which all the assets for a single image exists
77 | setParamsToDeleteDirectory () {
78 | let path;
79 | let directories = this.s3Key.split('/');
80 | if (config.oinBucketPrefix) {
81 | // If there's a bucket prefix we need to include that, otherwise we'll delete the entire prefix
82 | path = `${directories[0]}/${directories[1]}`;
83 | } else {
84 | path = directories[0];
85 | }
86 | this.s3Params.Key = path;
87 | }
88 |
89 | parseS3Key () {
90 | this.s3Key = url.parse(this.remoteMetaUri).pathname.replace(/^\//, '');
91 | // For whatever reason the bucket name may have been placed in the path rather than
92 | // as a prefix to the subdomain. For instance if using an S3-compatible service.
93 | if (this.remoteMetaUri.includes(`${config.s3PublicDomain}/${config.oinBucket}`)) {
94 | this.s3Key = this.s3Key.replace(`${config.oinBucket}/`, '');
95 | }
96 | }
97 |
98 | downloadRemoteMeta () {
99 | return new Promise((resolve, reject) => {
100 | console.info(`Downloading metadata file: ${this.s3Params.Key}`);
101 | this.s3.getObject(this.s3Params, function (err, response) {
102 | if (err) {
103 | reject('Unable to download metadata: ' + err.stack);
104 | } else {
105 | resolve(response.Body.toString('utf8'));
106 | }
107 | });
108 | });
109 | }
110 |
111 | updateMetadataJSON (metadataString, newDetails) {
112 | let metadata = JSON.parse(metadataString);
113 | metadata = _.merge(metadata, newDetails);
114 | return JSON.stringify(metadata);
115 | }
116 |
117 | uploadMeta (metadata) {
118 | this.s3Params.Key = this.s3Key;
119 | return new Promise((resolve, reject) => {
120 | console.info(`Uploading metadata file: ${this.s3Params.Key}`);
121 | this.s3Params.Body = Buffer.from(metadata, 'utf8');
122 | this.s3.putObject(this.s3Params, function (err, _response) {
123 | if (err) {
124 | reject(new Error('Unable to replace metadata: ' + err.stack));
125 | } else {
126 | resolve();
127 | }
128 | });
129 | });
130 | }
131 | };
132 |
--------------------------------------------------------------------------------
/config.js:
--------------------------------------------------------------------------------
// Canonical configuration settings.
// All configuration and 'magic numbers' should be defined here.
// Strive to set all ENV values through `.env`.

// Amendments for test env
if (process.env.NODE_ENV === 'test') {
  process.env.DB_URI = 'mongodb://localhost:27017/oam-api-test';
  process.env.OAM_DEBUG = process.env.OAM_DEBUG || 'false';
  process.env.NEW_RELIC_ENABLED = false;
  process.env.PORT = 47357;
  process.env.API_ENDPOINT = 'http://localhost:' + process.env.PORT;
}

// Amendments for integration tests, the ones that run in Docker against
// a S3 bucket.
if (process.env.INTEGRATION_TESTS === 'true') {
  process.env.PORT = 4000;
  process.env.API_ENDPOINT = 'http://localhost:' + process.env.PORT;
}

const config = {
  env: process.env.NODE_ENV,
  debug: process.env.OAM_DEBUG,

  // Server setup
  host: process.env.HOST,
  port: process.env.PORT,
  apiEndpoint: process.env.API_ENDPOINT,
  browserURL: process.env.BROWSER_URL,

  // Mosaic layer
  oamMosacLayerId: process.env.OAM_LAYER_ID || 'openaerialmap',

  // DB connection
  dbUri: process.env.DB_URI,

  // OIN bucket in which imagery ultimately lives
  oinBucket: process.env.OIN_BUCKET,
  // Place imagery in a folder. Useful for running multiple OAM instances
  // or testing.
  oinBucketPrefix: process.env.OIN_BUCKET_PREFIX,
  // OIN bucket for temporary storage of direct uploads
  uploadBucket: process.env.UPLOAD_BUCKET,
  // Base domain for public read access to OIN bucket
  s3PublicDomain: process.env.S3_PUBLIC_DOMAIN,

  // How often to poll OIN buckets for new imagery
  cronTime: process.env.CRON_TIME,
  // Location of master record of OIN buckets to poll
  oinRegisterUrl: process.env.OIN_REGISTER_URL,
  // Approximate thumbnail size, in kilobytes
  thumbnailSize: 300,

  facebookAppId: process.env.FACEBOOK_APP_ID,
  facebookAppSecret: process.env.FACEBOOK_APP_SECRET,

  googleClientId: process.env.GOOGLE_CLIENT_ID,
  googleClientSecret: process.env.GOOGLE_CLIENT_SECRET,
  gdriveKey: process.env.GDRIVE_KEY,

  // Base URL for accessing the slippy map TMS endpoint for imagery. This is
  // the entrypoint for using the Dynamic Tiler to serve imagery.
  tilerBaseUrl: process.env.TILER_BASE_URL,

  // AWS credentials
  awsKey: process.env.AWS_ACCESS_KEY_ID,
  awsSecret: process.env.AWS_SECRET_ACCESS_KEY,
  awsRegion: process.env.AWS_REGION,

  // Sendgrid sends emails
  sendgridApiKey: process.env.SENDGRID_API_KEY,
  sendgridFrom: process.env.SENDGRID_FROM,
  emailNotification: {
    subject: '[ OAM Uploader ] Imagery upload submitted',
    text: 'Your upload has been successfully submitted and is now being ' +
          'processed. You can check on the status of the upload at ' +
          process.env.BROWSER_URL + '/#/upload/status/{UPLOAD_ID}'
  },

  // For encrypting/decrypting cookie data
  cookiePassword: process.env.COOKIE_PASSWORD,
  // Guard against BROWSER_URL being unset: matching directly on the raw env
  // var used to crash the whole config module with a TypeError at require
  // time in environments that don't define it.
  isCookieOverHTTPS: !!(process.env.BROWSER_URL || '').match(/https/),
  sessionCookieKey: 'oam-session',

  // Key for signing JWT tokens
  jwtSecret: process.env.JWT_SECRET_KEY,

  hostTld: process.env.HOST_TLD,

  logOptions: {
    opsInterval: 3000,
    reporters: [{
      reporter: require('good-console'),
      events: {
        request: '*',
        error: '*',
        response: '*',
        info: '*',
        log: '*'
      }
    }]
  },

  useBatch: process.env.USE_BATCH === 'true' && process.env.AWS_BATCH_JD_NAME != null && process.env.AWS_BATCH_JQ_NAME != null,
  batch: {
    jobDefinition: process.env.AWS_BATCH_JD_NAME,
    jobQueue: process.env.AWS_BATCH_JQ_NAME
  },
  maxBatchMemoryMB: process.env.MAX_BATCH_MEMORY_MB || 60000, // 60GB

  // NOTE(review): any non-empty string (including 'false') is truthy here;
  // confirm whether this should compare against 'true' like USE_BATCH does.
  useTitiler: process.env.USE_TITILER || false,

  // TODO: Deprecate the following once user accounts have been implemented.
  // Credentials for Uploader Admin
  adminPassword: process.env.ADMIN_PASSWORD,
  adminUsername: process.env.ADMIN_USERNAME,
  // Token to access POST requests to /tms and /meta
  tokenForPostRequests: process.env.SECRET_TOKEN
};

// Override json.stringify behavior so we don't accidentally log secret keys
config.toJSON = function () {
  return '[ hidden ]';
};
module.exports = config;
126 |
--------------------------------------------------------------------------------
/test/helper.js:
--------------------------------------------------------------------------------
var request = require('request');
var _ = require('lodash');
var Iron = require('iron');

var config = require('../config');
var User = require('../models/user');

// Shared helpers for the API test suites: user creation, login simulation
// and polling for upload processing.
module.exports = {
  // Shared cookie jar so consecutive requests made by the helpers keep the
  // same logged-in session.
  cookieJar: request.jar(),

  // When running against the containerised API it's pretty hard to go through the whole
  // OAuth flow, ie logging into Facebook, accepting the app's terms, etc. So it's easier
  // just to generate the cookie ourselves.
  generateSecureCookieForUser: function (user, callback) {
    user.updateSession(function (_err, sessionId) {
      var session = {
        sessionId: sessionId
      };
      // Seal the session the same way the API does so the server accepts it.
      Iron.seal(session, config.cookiePassword, Iron.defaults, function (_err, sealed) {
        var cookie =
          config.sessionCookieKey + '=' +
          sealed + '; ' +
          'Path=/; SameSite=Strict; hostOnly=true; aAge=10ms; cAge=770ms;';
        callback(cookie);
      });
    });
  },

  // Pass a fake OAuth response as a request parameter. Used
  // in conjunction with Bell.simulate().
  setTestOauthResponse: function (response) {
    return {
      test_oauth_response: JSON.stringify(response)
    };
  },

  // Create a user directly in the DB. Calls back with the created user, or
  // with `false` if creation failed.
  createUser: function (userDetails, callback) {
    var user = _.defaults(userDetails, {
      name: 'Tester',
      facebook_id: 123
    });

    User.create(user).then(function (result, e) {
      callback(result);
    }).catch(function (err) {
      console.error(err);
      callback(false);
    });
  },

  // Log `oauthUser` in through the simulated Facebook OAuth endpoint.
  // `redirect` optionally sets the post-login redirect URI.
  logUserIn: function (oauthUser, callback, redirect) {
    var options = {
      url: config.apiEndpoint + '/oauth/facebook',
      qs: this.setTestOauthResponse({
        profile: { id: oauthUser.facebook_id }
      }),
      jar: this.cookieJar
    };

    if (redirect) {
      options.qs.original_uri = redirect;
    }

    request.get(options, (err, httpResponse, body) => {
      if (err) {
        throw new Error(err);
      }
      callback(httpResponse, body);
    });
  },

  // Convenience wrapper: create a default user and log them in.
  logIn: function (callback) {
    this.createUser({}, (user) => {
      this.logUserIn(user, function () {
        callback(user);
      });
    });
  },

  // Wait for images to be fully uploaded, processed and indexed
  // inside a local test Docker instance of the current codebase.
  waitForProcessing: function (id, title, processedCb) {
    this.waitForConversion(id, () => {
      this.waitForIndexing(title, processedCb);
    });
  },

  // The dynamic tiler fetches the images, processes, transcodes it,
  // creates metadata for it, and so on.
  // Polls /uploads/:id every 100ms until the first image reports 'finished'.
  waitForConversion: function (id, callback) {
    var getOptions = {
      url: config.apiEndpoint + '/uploads/' + id,
      json: true
    };

    request.get(getOptions, (_err, httpResponse, body) => {
      var status = body.results.scenes[0].images[0].status;
      if (status === 'finished') {
        callback();
      } else {
        // bind(this) keeps the module as `this` for the recursive poll.
        setTimeout(this.waitForConversion.bind(this), 100, id, callback);
      }
    });
  },

  // This comes from the periodic cron to check the bucket for
  // new imagery. Once a new image's *meta.json is found it is
  // parsed and added to the DB.
  // TODO:
  // This is no longer necessary as the _meta.json URI is actually
  // added at the point of conversion. Before the conversion step
  // added the metadata URI we had to wait for the background cron
  // worker process to index new metadata files from S3. This was
  // good for testing, because it meant that this function was
  // testing that background worker process (`/catalog-worker.js`
  // at time of writing). I'm not aware that this indexing step
  // adds anything at all new that we can detect. Therefore the
  // best solution would just be to write dedicated tests for the
  // indexing process.
  waitForIndexing: function (title, processedCb) {
    var getOptions = {
      url: config.apiEndpoint + '/meta?title=' + title,
      json: true
    };

    request.get(getOptions, (_err, httpResponse, body) => {
      if (body.results.length > 0 && body.results[0].meta_uri != null) {
        processedCb(body.results[0]);
      } else {
        setTimeout(this.waitForIndexing.bind(this), 100, title, processedCb);
      }
    });
  }
};
135 |
--------------------------------------------------------------------------------
/services/transcoder.js:
--------------------------------------------------------------------------------
var cp = require('child_process');
var url = require('url');

var AWS = require('aws-sdk');
var monq = require('monq');
var promisify = require('es6-promisify');
var request = require('request');

var config = require('../config');

// AWS Batch client, used when config.useBatch is enabled.
var batch = new AWS.Batch();
// MongoDB-backed monq job queue, used for non-Batch (local) transcoding.
var client = monq(config.dbUri);
var queue = client.queue('transcoder');
var s3 = new AWS.S3();
var s3bucket = config.oinBucket;
16 |
/**
 * Spawn the `process.py` transcoder for a single image.
 *
 * @param {string} sourceUrl - URL to download the source image from.
 * @param {string} output - s3:// URI prefix for the transcoded assets.
 * @param {string} metaUrl - Metadata/callback URL passed through to process.py.
 * @param {Function} callback - Called exactly once: with an Error on spawn
 *   failure or non-zero exit, or with no arguments on success.
 */
module.exports.transcode = (sourceUrl, output, metaUrl, callback) => {
  var args = [sourceUrl, output, metaUrl];

  // The AWS credentials and thumbnail size must be passed under the `env`
  // option; previously they sat at the top level of the options object,
  // where child_process ignores them, so the child never received them
  // explicitly.
  var child = cp.spawn('process.py', args, {
    env: Object.assign({}, process.env, {
      AWS_ACCESS_KEY_ID: config.awsKey,
      AWS_SECRET_ACCESS_KEY: config.awsSecret,
      AWS_DEFAULT_REGION: config.awsRegion,
      AWS_REGION: config.awsRegion,
      THUMBNAIL_SIZE: config.thumbnailSize
    })
  });

  var stderr = [];
  child.stdout.pipe(process.stdout);
  child.stderr.pipe(process.stderr);
  child.stderr.on('data', chunk => stderr.push(chunk));

  child.on('error', err => {
    // prevent callback from being called twice
    var _callback = callback;
    callback = function () {};

    return _callback(err);
  });

  child.on('exit', code => {
    // prevent callback from being called twice
    var _callback = callback;
    callback = function () {};

    if (code !== 0) {
      return _callback(
        new Error(
          'Exited with ' + code + ': ' + Buffer.concat(stderr).toString()
        )
      );
    }

    // Use the saved reference: `callback` has just been replaced with a
    // no-op above, so calling it here silently dropped the success
    // notification (this was a bug).
    return _callback();
  });
};
57 |
// Determine the byte size of a remote file: via S3 headObject for s3://
// URIs, otherwise via an HTTP HEAD request.
var getSize = (sourceUrl, callback) => {
  var parsed = url.parse(sourceUrl);

  if (parsed.protocol === 's3:') {
    var headParams = {
      Bucket: parsed.hostname,
      Key: parsed.pathname.slice(1)
    };
    return s3.headObject(headParams, (err, data) => {
      if (err) {
        return callback(err);
      }
      return callback(null, data.ContentLength);
    });
  }

  return request.head(sourceUrl, (err, rsp) => {
    if (err) {
      return callback(err);
    }
    return callback(null, rsp.headers['content-length']);
  });
};
84 |
// Estimate the memory (in MB) an AWS Batch transcode job will need, based
// on the size of the source file. Falls back to 3000MB whenever the size
// cannot be determined.
var guessMemoryAllocation = (sourceUrl, callback) => {
  return getSize(sourceUrl, (err, size) => {
    var fallbackMB = 3000;

    if (err) {
      console.warn(err.stack);
      return callback(null, fallbackMB);
    }

    if (!size) {
      console.warn('Unable to get file size by url');
      return callback(null, fallbackMB);
    }

    var sizeInMB = Math.ceil(size / (1024 * 1024));

    // Be optimistic about the source encoding: assume it's the smallest it
    // can be, enforce a floor for small images and cap the allocation at
    // config.maxBatchMemoryMB.
    var recommendedMB = Math.max(fallbackMB, Math.min(config.maxBatchMemoryMB, sizeInMB * 10));

    return callback(null, recommendedMB);
  });
};
106 |
// Submit a transcode job to AWS Batch, sizing its memory requirement from
// the source file's size.
var batchTranscode = (jobName, input, output, callbackUrl, callback) => {
  return guessMemoryAllocation(input, (_, memory) => {
    var jobSpec = {
      jobDefinition: config.batch.jobDefinition,
      jobName: jobName,
      jobQueue: config.batch.jobQueue,
      parameters: {
        input: input,
        output: output,
        callback_url: callbackUrl
      },
      containerOverrides: {
        'resourceRequirements': [
          {
            type: 'MEMORY',
            value: String(memory)
          }
        ]
      }
    };
    return batch.submitJob(jobSpec, (err, data) => callback(err));
  });
};
131 |
// Enqueue a transcode job on the MongoDB-backed monq queue (single attempt).
var monqTranscode = (sourceUrl, output, metaUrl, callback) => {
  var payload = {
    sourceUrl: sourceUrl,
    output: output,
    metaUrl: metaUrl
  };
  var jobOptions = {
    attempts: {
      count: 1
    }
  };
  return queue.enqueue('transcode', payload, jobOptions, (err, job) => callback(err));
};
147 |
// Queue an image for transcoding, dispatching to AWS Batch or the local
// monq queue depending on configuration.
var queueImage = (sourceUrl, targetPrefix, metaUrl, callback) => {
  // Namespace the uploaded image under a directory
  if (config.oinBucketPrefix) {
    targetPrefix = config.oinBucketPrefix + '/' + targetPrefix;
  }

  // Google drive url comes in the form of gdrive://FILE_ID
  // We need this because large files can only be downloaded with an api key.
  var gdriveMatch = sourceUrl.match(/gdrive:\/\/(.+)/);
  if (gdriveMatch) {
    sourceUrl = `https://www.googleapis.com/drive/v3/files/${gdriveMatch[1]}?alt=media&key=${config.gdriveKey}`;
  }

  var output = `s3://${s3bucket}/${targetPrefix}`;
  var jobName = targetPrefix.replace(/\//g, '_');

  return config.useBatch
    ? batchTranscode(jobName, sourceUrl, output, metaUrl, callback)
    : monqTranscode(sourceUrl, output, metaUrl, callback);
};

module.exports.queueImage = promisify(queueImage);
172 |
--------------------------------------------------------------------------------
/test/specs/test_auth.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var expect = require('chai').expect;
4 | var request = require('request');
5 |
6 | var connection = require('mongoose').connection;
7 | var config = require('../../config');
8 | var User = require('../../models/user');
9 |
10 | require('./helper');
11 | var commonHelper = require('../helper');
12 |
// End-to-end tests for the OAuth login flow, run against a live API
// instance (config.apiEndpoint) with Bell's OAuth simulation.
describe('Auth', function () {
  // Start each test from an empty database.
  beforeEach(function (done) {
    connection.db.dropDatabase(done);
  });

  context('For an already existent user', function () {
    var existingUser;

    beforeEach(function (done) {
      commonHelper.createUser({
        facebook_id: 123,
        session_id: null
      }, function (result) {
        existingUser = result;
        done();
      });
    });

    it('should set a new session for OAuth login', function (done) {
      commonHelper.logUserIn(existingUser, function (httpResponse, body) {
        expect(httpResponse.statusCode).to.equal(200);
        User.findOne({
          facebook_id: existingUser.facebook_id
        }).then(function (updatedUser) {
          // A fresh, unexpired session should have been written to the DB.
          expect(updatedUser.session_id).to.not.eq(null);
          expect(updatedUser.session_id).to.not.eq(existingUser.session_id);
          expect(updatedUser.session_expiration).to.be.gt(new Date());
          done();
        });
      });
    });

    it('should redirect to the URL specified by a param', function (done) {
      commonHelper.logUserIn(existingUser, function (httpResponse, body) {
        expect(httpResponse.statusCode).to.equal(200);
        expect(httpResponse.request.uri.path).to.equal('/user');
        done();
      }, '/user');
    });

    it('should log a user in based on their session', function (done) {
      var options = {
        url: config.apiEndpoint + '/user',
        // Re-use the jar holding the session cookie from logUserIn below.
        jar: commonHelper.cookieJar,
        json: true
      };

      commonHelper.logUserIn(existingUser, function (loggedUserHttpResponse, _body) {
        request.get(options, function (_err, httpResponse, body) {
          expect(httpResponse.statusCode).to.equal(200);
          expect(body.results.name).to.equal(existingUser.name);
          done();
        });
      });
    });

    context('Preventing bad logins', function () {
      it('should not log a user if their session ID is wrong', function (done) {
        var options = {
          url: config.apiEndpoint + '/user',
          jar: commonHelper.cookieJar
        };

        commonHelper.logUserIn(existingUser, function (loggedUserHttpResponse, _body) {
          // Invalidate the DB-side session so the cookie no longer matches.
          existingUser.session_id = 'wrong123';
          existingUser.save(function () {
            request.get(options, function (_err, httpResponse, body) {
              expect(httpResponse.statusCode).to.equal(401);
              done();
            });
          });
        });
      });

      it('should not log a user if their session is old', function (done) {
        var options = {
          url: config.apiEndpoint + '/user',
          jar: commonHelper.cookieJar
        };

        commonHelper.logUserIn(existingUser, function (loggedUserHttpResponse, _body) {
          // Expire the session immediately.
          existingUser.session_expiration = new Date();
          existingUser.save(function () {
            request.get(options, function (_err, httpResponse, body) {
              expect(httpResponse.statusCode).to.equal(401);
              done();
            });
          });
        });
      });
    });
  });

  context('For a non-existent user', function () {
    it('should create a new user and set their session', function (done) {
      var options = {
        url: config.apiEndpoint + '/oauth/facebook',
        qs: commonHelper.setTestOauthResponse({
          profile: {
            id: 456,
            displayName: 'Tester'
          }
        })
      };

      request.get(options, function (_err, httpResponse, body) {
        expect(httpResponse.statusCode).to.equal(200);
        User.count({}, function (_err, count) {
          expect(count).to.eq(1);
          User.findOne({
            facebook_id: 456
          }).then(function (user) {
            expect(user.name).to.eq('Tester');
            expect(user.session_id).to.not.eq(null);
            done();
          });
        });
      });
    });

    context('Preventing bad logins', function () {
      it('should not set a session if OAuth flow fails', function (done) {
        var options = {
          url: config.apiEndpoint + '/oauth/facebook',
          qs: {
            test_oauth_error: 'Fake OAuth error'
          },
          json: true
        };

        request.get(options, function (_err, httpResponse, body) {
          expect(httpResponse.statusCode).to.equal(400);
          expect(body.message).to.eq('Fake OAuth error');
          // No user should have been created for a failed OAuth handshake.
          User.count({}, function (_err, count) {
            expect(count).to.eq(0);
            done();
          });
        });
      });
    });
  });
});
155 |
--------------------------------------------------------------------------------
/test/specs/test_dronedeploy.js:
--------------------------------------------------------------------------------
/* eslint camelcase: 0 */
const Hapi = require('hapi');
const proxyquire = require('proxyquire').noCallThru();
const chai = require('chai');
const sinon = require('sinon');
const sinonChai = require('sinon-chai');
const buildUrl = require('build-url');
const authentication = require('../../plugins/authentication.js');

const expect = chai.expect;
chai.should();
chai.use(sinonChai);
// Shared sandbox; restored after each test in the describe block below.
const sandbox = sinon.sandbox.create();

// Canned values shared between the stubbed dependencies and the assertions.
const ObjectId = 'ObjectId';
const download_path = 'https://s3.com';
const apiEndpoint = 'apiEndpoint';
const contact_email = 'Test@test.com';
const sendgridFrom = 'sendgridFrom';
const subject = 'subject';
const text = 'text';
22 |
// Build sinon stubs for every external dependency of routes/uploads.js:
// the Mongo collections, the transcoder queue and the sendgrid mailer.
// Returns the proxyquire stub map plus the individual stubs for assertions.
const buildStubs = () => {
  const insertMany = sandbox.stub().resolves([1]);
  const insertOne = sandbox.stub().resolves(true);
  const collection = sandbox.stub().returns({ insertMany, insertOne });
  const queueImage = sandbox.stub().resolves(true);
  const send = sandbox.stub().callsArgWith(1, null, {});

  // Fake mongodb.ObjectID whose string form is always the canned ObjectId.
  const ObjectID = function () {
  };
  ObjectID.prototype.toString = () => ObjectId;

  const stubs = {
    'mongoose': {
      connection: { collection }
    },
    'mongodb': {
      ObjectID
    },
    '../services/transcoder': {
      queueImage
    },
    'sendgrid': () => ({ send }),
    '../config': {
      apiEndpoint,
      sendgridFrom,
      emailNotification: { subject, text }
    }
  };

  return { stubs, collection, insertMany, insertOne, queueImage, send };
};
57 |
// Build a Hapi server with the authentication plugin registered and the
// uploads routes loaded against the stubbed dependencies. Resolves with
// the ready-to-inject server.
const getServer = (stubs) => {
  const uploads = proxyquire('../../routes/uploads.js', stubs);
  const server = new Hapi.Server();
  server.connection({ port: 4000 });
  return server.register(authentication).then(() => {
    server.route(uploads);
    return server;
  });
};
67 |
// Build the server.inject() options that mimic DroneDeploy's webhook call,
// including a pre-authenticated user in `credentials`.
const buildWebhook = () => {
  const queryParams = {
    acquisition_start: encodeURIComponent(new Date(2018, 1, 1).toISOString()),
    acquisition_end: encodeURIComponent(new Date(2018, 1, 2).toISOString()),
    sensor: 'Sensor',
    provider: 'Provider',
    tags: '',
    title: 'Title'
  };

  const url = buildUrl('http://oam.com', {
    path: 'dronedeploy',
    queryParams
  });

  return {
    method: 'POST',
    url,
    credentials: {
      _id: 'id',
      name: 'Test',
      contact_email: contact_email
    },
    payload: {
      download_path
    }
  };
};
103 |
describe('Uploading image from DroneDeploy', () => {
  afterEach(() => {
    sandbox.restore();
  });

  it('Process the url and provides reply', () => {
    const options = buildWebhook();
    const {
      stubs,
      collection,
      insertMany,
      insertOne,
      queueImage,
      send
    } = buildStubs();
    return getServer(stubs)
      .then((server) => {
        return server.inject(options).then((res) => {
          // The route should write to both Mongo collections...
          expect(collection).to.have.been.calledWith('images');
          expect(collection).to.have.been.calledWith('uploads');
          expect(insertMany.firstCall.args[0].length).to.equal(1);
          const upload = insertOne.firstCall.args[0];
          expect(upload.scenes[0].images.length).to.equal(1);
          expect(upload.user).to.equal('id');
          expect(upload._id).to.not.equal(null);
          // ...queue the image for transcoding with the expected key/URLs...
          const queuedImage = queueImage.firstCall.args;
          expect(queuedImage[0]).to.equal(download_path);
          expect(queuedImage[1]).to.equal(`${ObjectId}/0/${ObjectId}`);
          expect(queuedImage[2])
            .to.equal(`${apiEndpoint}/uploads/${ObjectId}/0/${ObjectId}`);
          // ...and notify the uploader by email.
          expect(send.firstCall.args[0].to).to.equal(contact_email);
          expect(res.result.upload.toString()).to.equal(ObjectId);
        });
      });
  });

  it('It provides failure reply when transcoder rejects', () => {
    const options = buildWebhook();
    const { stubs } = buildStubs();
    stubs['../services/transcoder'].queueImage = sandbox.stub().rejects('failed');
    return getServer(stubs)
      .then((server) => {
        return server.inject(options).then((res) => {
          expect(res.result.statusCode).to.equal(500);
        });
      });
  });

  //it('Sendgrid callback wrapper rejects with error and forces catch', () => {
  //const options = buildWebhook();
  //const { stubs } = buildStubs();
  //stubs.sendgrid = () => {
  //return { send: sandbox.stub().callsArgWith(1, 'error', null) };
  //};
  //return getServer(stubs)
  //.then((server) => {
  //return server.inject(options).then((res) => {
  //expect(res.result.statusCode).to.equal(500);
  //});
  //});
  //});
});
166 |
--------------------------------------------------------------------------------
/models/user.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var uuidV4 = require('uuid/v4');
4 | var mongoose = require('mongoose');
5 | var FB = require('fb');
6 | const createToken = require('./createToken');
7 |
// Mongoose schema for an OAM user account. Accounts are created via the
// Facebook or Google OAuth flows, hence the provider-specific id fields.
var userSchema = mongoose.Schema({
  name: {type: String, required: true},
  website: String,
  bio: String,
  facebook_id: Number,
  facebook_token: String,
  google_id: Number,
  contact_email: String,
  profile_pic_uri: String,
  // Optional user-owned bucket; unique only where present (sparse index).
  bucket_url: {type: String, unique: true, sparse: true},
  // Current login session; valid until session_expiration.
  session_id: String,
  session_expiration: Date,
  // Imagery uploaded by this user.
  images: [{type: mongoose.Schema.Types.ObjectId, ref: 'Meta'}]
});
22 |
23 | userSchema.statics = {
  /**
   * Find-or-create a user from OAuth credentials and resolve with a signed
   * JWT ('user' role, 1 day expiry).
   *
   * `provider === 'custom'` is the Facebook flow; 'google' is Google.
   *
   * NOTE(review): if the provider is neither 'custom' nor 'google', `idKey`
   * stays undefined and the lookup/create below will misbehave — confirm
   * callers only ever pass these two providers.
   * NOTE(review): the catch below swallows errors, so the promise resolves
   * with `undefined` instead of a JWT on failure; callers must handle that.
   */
  jwtLogin: function (credentials) {
    let idKey;
    let profilePicUrl;
    if (credentials.provider === 'custom') {
      idKey = 'facebook_id';
      profilePicUrl = credentials.profile.raw.picture.data.url;
    } else if (credentials.provider === 'google') {
      idKey = 'google_id';
      profilePicUrl = credentials.profile.raw.picture;
    }
    return this.findOne({
      [idKey]: credentials.profile.id
    })
    .then((user) => {
      if (user) {
        return user;
      } else {
        // First login: create the account from the OAuth profile.
        return this.create({
          [idKey]: credentials.profile.id,
          name: credentials.profile.displayName,
          contact_email: credentials.profile.email,
          profile_pic_uri: profilePicUrl
        });
      }
    })
    .then((user) => {
      const userJWT = createToken(
        user._id, user.name, user.contact_email, 'user', '1d');
      return userJWT;
    })
    .catch((error) => {
      console.error(error);
    });
  },
58 |
59 | login: function (credentials, callback) {
60 | if (credentials.provider === 'custom') {
61 | this.facebookLogin(credentials, callback);
62 | } else
63 | if (credentials.provider === 'google') {
64 | this.googleLogin(credentials, callback);
65 | } else {
66 | throw new Error(
67 | //`The ${credentials.provider} provider hasn't been setup yet.`
68 | );
69 | }
70 | },
71 |
72 | facebookLogin: function (credentials, callback) {
73 | this.findOne({
74 | facebook_id: credentials.profile.id
75 | }).then((fbUser) => {
76 | this.postFbAuth(fbUser, credentials, callback);
77 | }).catch(function (err) {
78 | console.error(callback(err));
79 | });
80 | },
81 |
82 | googleLogin: function (credentials, callback) {
83 | this.findOne({
84 | google_id: credentials.profile.id
85 | }).then((googleUser) => {
86 | this.postGoogleAuth(googleUser, credentials, callback);
87 | }).catch(function (err) {
88 | console.error(callback(err));
89 | });
90 | },
91 |
92 | postFbAuth: function (user, credentials, callback) {
93 | if (user) {
94 | user.postFbAuthSuccess(
95 | { facebook_token: credentials.token },
96 | callback
97 | );
98 | } else {
99 | this.create({
100 | facebook_id: credentials.profile.id,
101 | name: credentials.profile.displayName,
102 | contact_email: credentials.profile.email
103 | }).then(function (newUser) {
104 | newUser.postFbAuthSuccess(
105 | { facebook_token: credentials.token },
106 | callback
107 | );
108 | }).catch(function (err) {
109 | console.error(callback(err));
110 | });
111 | }
112 | },
113 |
114 | postGoogleAuth: function (user, credentials, callback) {
115 | if (user) {
116 | user.generateNewSessionValues(callback);
117 | } else {
118 | this.create({
119 | google_id: credentials.profile.id,
120 | name: credentials.profile.displayName,
121 | contact_email: credentials.profile.email,
122 | profile_pic_uri: credentials.profile.raw.picture
123 | }).then(function (newUser) {
124 | newUser.generateNewSessionValues(callback);
125 | }).catch(function (err) {
126 | console.error(callback(err));
127 | });
128 | }
129 | },
130 |
131 | validateSession: function (_request, session, callback) {
132 | this.findOne({session_id: session.sessionId}).then(function (user) {
133 | if (user && user.session_expiration > Date.now()) {
134 | callback(null, true, user);
135 | } else {
136 | callback(null, false);
137 | }
138 | }).catch(function (err) {
139 | console.error(callback(err, false));
140 | });
141 | }
142 | };
143 |
userSchema.methods = {
  // Rotate the session values and persist them, then yield the new
  // session id via `callback(err, sessionId)`.
  updateSession: function (callback) {
    this.generateNewSessionValues();
    this.save((err) => {
      if (err) {
        callback(err);
        return;
      }
      callback(null, this.session_id);
    });
  },

  // Assign a fresh session id and a 7-day expiration. When a callback is
  // supplied the document is also saved and `callback(err, sessionId)`
  // fires; without one, only the in-memory fields are set.
  generateNewSessionValues: function (callback) {
    var now = new Date();
    this.session_id = uuidV4();
    this.session_expiration = now.setDate(now.getDate() + 7);
    if (typeof callback === 'function') {
      this.save((err) => {
        if (err) {
          callback(err);
          return;
        }
        callback(null, this.session_id);
      });
    }
  },

  // Apply post-login updates (e.g. the new Facebook token), rotate the
  // session, refresh the profile picture, then save and yield the new
  // session id.
  postFbAuthSuccess: function (updates, callback) {
    Object.assign(this, updates);
    this.generateNewSessionValues();
    this.getFBProfilePic((profilePicURI) => {
      this.profile_pic_uri = profilePicURI;
      this.save((err) => {
        if (err) {
          callback(err);
          return;
        }
        callback(null, this.session_id);
      });
    });
  },

  // Fetch the user's small profile picture URL from the Facebook Graph
  // API; yields null on failure.
  getFBProfilePic: function (callback) {
    FB.setAccessToken(this.facebook_token);
    FB.api('me', { fields: 'picture.type(small)' }, function (result) {
      if (!result || result.error) {
        console.error(!result ? 'Error getting FB profile pic' : result.error);
        // Bug fix: previously execution fell through and invoked the
        // callback a second time, crashing on `result.picture`.
        return callback(null);
      }
      callback(result.picture.data.url);
    });
  }
};
197 |
198 | module.exports = mongoose.model('User', userSchema);
199 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | OAM Catalog API
2 |
3 |
4 |
5 |
6 |
15 |
A catalog and processor for OpenAerialMap imagery. The application indexes all metadata available within the Open Imagery Network and creates an API to search and find imagery. It also takes imagery from users, converts it to a standard format, and places it in the Open Imagery Network. The API powers the frontend search tool, [OAM Imagery Browser](https://github.com/hotosm/oam-browser). Read the [ecosystem documentation](https://docs.openaerialmap.org/ecosystem/getting-started/) for more information about OpenAerialMap.
17 |
Full details of the API endpoints are available at: https://hotosm.github.io/oam-catalog
19 |
20 | ## Installation
21 |
22 | First you will need to have [MongoDB](https://www.mongodb.org/) and [Node.js](https://nodejs.org/) on your machine.
23 |
Then you will need to install the application's Node module dependencies:
25 |
26 | ```
27 | npm install
28 | ```
29 |
30 | ## Usage
31 |
32 | The API can be started with: `node index.js`
33 |
34 | And the background OIN indexer can be run with: `node catalog-worker.js`
35 |
36 | ## Development Using Docker
37 |
38 | Before running `oam-api` you need to run `oam-mosaic`
39 |
40 | ```bash
41 | git clone https://github.com/hotosm/oam-mosaic-map
42 | cd oam-mosaic-map
43 | docker-compose up
44 | ```
45 |
46 | To start a self-contained development instance using Docker Compose, run:
47 |
48 | ```bash
49 | docker-compose up
50 | ```
51 |
52 | This will download Docker image dependencies and install code for this project into a set of containers.
53 |
54 | Once it has started, connect to `http://localhost:4000` to access the API.
55 |
56 | The MongoDB command line interface can be run within its container like so: `docker-compose exec mongo mongo`
57 |
58 | Use `http://localhost:8081` to inspect the `mongo` database content with `mongo-express`.
59 |
60 | The following environment variables should be set (probably in `.env.local`; see `.env.local.sample` for defaults and additional information)
61 |
62 | If `OIN_REGISTER_URL` env variable is not set, the `OIN_BUCKET` will be used for indexing by default.
63 | You can also set `OIN_REGISTER_URL=http://register:8080/fixtures/oin-buckets.json` to the `worker` service on `docker-compose.yml` and then modify `test/fixtures/oin-buckets.json` to specify a bucket for indexing.
64 |
65 |
66 | Instructions for generating the JWT signing key can be found [here](https://github.com/dwyl/hapi-auth-jwt2#generating-your-secret-key).
67 | If you find that additional environment variables are needed, please submit a pull request!
68 |
69 | ## Testing
70 |
71 | There are 3 test suites:
72 |
73 | **Unit-like tests**, under `test/specs`
74 | These should be isolated and fast, with as much mocking/stubbing as possible, suitable for TDD. Run with: `mocha test/specs`
75 |
76 | **Integration tests**, under `test/integration`
These test the actual interaction of the API against real imagery uploads to Amazon S3. You will need AWS credentials in your `.env` and, in order to use the imagery processing binaries, a running instance of the Docker image for this repo. There is a customised Docker Compose config at `test/docker-compose.yml` which already has all the necessary and useful changes to run on a local developer machine (such as mounting the live codebase into the running container). It can be run with `docker-compose -f test/docker-compose.yml up`. The actual tests can be run with `mocha test/integration`.
78 |
79 | **End-to-end browser tests**, see https://github.com/hotosm/oam-browser
80 | The frontend code is pinned against a specific version of this API, so it is necessary to ensure that this pinning is still reliable and also, if the version here is bumped, to note if that new version is compatible (if not then the frontend will need updating). These tests require the frontend code itself, generally you will not need to run them locally, they will be run by Travis on every commit to Github.
81 |
82 | ## Transcoding using AWS Batch
83 |
84 | [AWS Batch](https://aws.amazon.com/batch/) can be used for transcoding; this enables use of elastic resources to process large quantities of imagery without requiring the API to run on an instance scaled for imagery ingestion. To enable it, set the following environment variables:
85 |
86 | ```bash
87 | USE_BATCH=true
88 | # Job Definition name
89 | AWS_BATCH_JD_NAME=oam-transcode
90 | # Job Queue name
91 | AWS_BATCH_JQ_NAME=oam-transcoding
92 | ```
93 |
94 | Configuring Batch is out of scope for this document, although a sample job definition looks like this:
95 |
96 | ```json
97 | {
98 | "jobDefinitionName": "oam-transcode",
99 | "type": "container",
100 | "parameters": {},
101 | "retryStrategy": {
102 | "attempts": 2
103 | },
104 | "containerProperties": {
105 | "image": "quay.io/mojodna/marblecutter-tools",
106 | "vcpus": 1,
107 | "memory": 3000,
108 | "command": [
109 | "process.sh",
110 | "Ref::input",
111 | "Ref::output",
112 | "Ref::callback_url"
113 | ],
114 | "jobRoleArn": "arn:aws:iam:::role/oam-transcoder",
115 | "volumes": [],
116 | "environment": [
117 | {
118 | "name": "EFS_HOST",
119 | "value": ".efs.us-east-1.amazonaws.com"
120 | }
121 | ],
122 | "mountPoints": [],
123 | "privileged": true,
124 | "ulimits": []
125 | }
126 | }
127 | ```
128 |
129 | In this sample, an [Amazon Elastic File System (EFS)](https://aws.amazon.com/efs/) volume is mapped into the container through the `EFS_HOST` environment variable. This allows Batch jobs to handle imagery that outstrips available temporary storage on underlying instances (22GB at this writing, shared between all running tasks). If you expect to transcode smaller imagery (or don't need to support concurrent large uploads), this can be omitted.
130 |
131 | The `oam-transcoder` role needs to have been created ahead of time with appropriate access to both the upload (`UPLOAD_BUCKET`) and storage (`OIN_BUCKET`) buckets.
132 |
133 | The user / role used when running the API itself (typically an instance role if running on AWS) requires permission to submit Batch jobs, specified as:
134 |
135 | ```json
136 | {
137 | "Version": "2012-10-17",
138 | "Statement": [
139 | {
140 | "Effect": "Allow",
141 | "Action": [
142 | "batch:SubmitJob"
143 | ],
144 | "Resource": "*"
145 | }
146 | ]
147 | }
148 | ```
149 |
150 | ## Contributing
151 |
152 | Contributions are very welcome. Please see [CONTRIBUTING.md](./CONTRIBUTING.md).
153 |
154 | ## License
The OAM Catalog API is licensed under the **BSD 3-Clause License**; see the [LICENSE](LICENSE) file for more details.
156 |
--------------------------------------------------------------------------------
/controllers/meta.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var _ = require('lodash');
4 | var request = require('request');
5 | var parse = require('wellknown');
6 | var bboxPolygon = require('@turf/bbox-polygon').default;
7 | var Boom = require('boom');
8 |
9 | var Meta = require('../models/meta.js');
10 |
/**
 * Query the Meta model. Implements every search protocol supported by the
 * /meta endpoint: bounding box, acquisition-date range, resolution (gsd)
 * range, tiled-only filtering, and custom sorting.
 *
 * @param {Object} payload - Raw query parameters and their values
 * @param {number} page - 1-indexed page number
 * @param {number} limit - Maximum number of records per page
 * @param {recordsCallback} cb - Called with (err, records, count)
 */
module.exports.query = function (payload, page, limit, cb) {
  // Translate `bbox=w,s,e,n` into a $geoIntersects query; a malformed
  // bbox is left untouched (and will simply match nothing sensible).
  if (_.has(payload, 'bbox')) {
    var bboxPattern = /(-?\d+(?:\.\d*)?,-?\d+(?:\.\d*)?,-?\d+(?:\.\d*)?),-?\d+(?:\.\d*)?/;
    if (bboxPattern.test(payload.bbox)) {
      var coords = payload.bbox.split(',').map(parseFloat);
      payload.geojson = {
        $geoIntersects: { $geometry: bboxPolygon(coords).geometry }
      };
      payload = _.omit(payload, 'bbox');
    }
  }

  // Translate acquisition-date bounds; unparseable dates are ignored but
  // the raw parameter is always stripped from the Mongo query.
  if (_.has(payload, 'acquisition_from')) {
    if (!isNaN(new Date(payload.acquisition_from).getTime())) {
      payload.acquisition_start = { $gte: new Date(payload.acquisition_from) };
    }
    payload = _.omit(payload, 'acquisition_from');
  }
  if (_.has(payload, 'acquisition_to')) {
    if (!isNaN(new Date(payload.acquisition_to).getTime())) {
      payload.acquisition_end = { $lte: new Date(payload.acquisition_to) };
    }
    payload = _.omit(payload, 'acquisition_to');
  }

  // Translate ground-sample-distance (resolution) bounds; either bound
  // may appear on its own.
  var hasGsdFrom = _.has(payload, 'gsd_from');
  var hasGsdTo = _.has(payload, 'gsd_to');
  if (hasGsdFrom || hasGsdTo) {
    payload.gsd = {};
    if (hasGsdFrom) {
      payload.gsd.$gte = payload.gsd_from;
    }
    if (hasGsdTo) {
      payload.gsd.$lte = payload.gsd_to;
    }
    payload = _.omit(payload, ['gsd_from', 'gsd_to']);
  }

  // Only return imagery with an associated tiled endpoint (either on the
  // properties or as a custom TMS).
  if (_.has(payload, 'has_tiled')) {
    payload['$or'] = [
      {'properties.tms': { $exists: true }},
      {'custom_tms': { $exists: true }}
    ];
    payload = _.omit(payload, 'has_tiled');
  }

  // Default sort: highest resolution (lowest gsd) and newest imagery
  // first. A custom sort applies only when BOTH `sort` and `order_by`
  // are present; either one alone is simply stripped from the query.
  var sort = { gsd: 1, acquisition_end: -1 };
  if (_.has(payload, 'sort') && _.has(payload, 'order_by')) {
    sort = {};
    sort[payload.order_by] = (payload.sort === 'asc') ? 1 : -1;
  }
  payload = _.omit(payload, ['sort', 'order_by']);

  var skip = limit * (page - 1);

  // Count the total matches first, then fetch the requested page.
  Meta.count(payload, function (err, count) {
    if (err) {
      return cb(err, null, null);
    }
    Meta
      .find(payload, null, { skip: skip, limit: limit })
      .populate({ path: 'user', select: ['_id', 'name'] })
      .sort(sort)
      .exec(function (err, records) {
        cb(err, records, count);
      });
  });
};
119 |
/**
 * Add Meta information from a provided URI. Reads the remote JSON meta
 * file and upserts its content into the Meta model, keyed on `uuid`.
 *
 * @param {String} remoteUri - a URI to the remote meta JSON file
 * @param {Date} lastModified - when the remote file was last modified
 * @param {Date} lastSystemUpdate - when our index last ran
 * @param {Callback} cb - called with (err) on failure, (null, message)
 *   when a record was added/updated, or with no arguments when there was
 *   nothing to do
 */
module.exports.addRemoteMeta = function (remoteUri, lastModified, lastSystemUpdate, cb) {
  // Check if the meta data is already added
  Meta.findOne({meta_uri: remoteUri}, function (err, meta) {
    if (err) {
      return cb(err);
    }

    // if the meta file doesn't exist then add, if the meta file is more recent
    // than our last update, then update
    if (meta == null || lastModified > lastSystemUpdate) {
      return request({
        json: true,
        uri: remoteUri
      }, function (err, response, payload) {
        if (err) {
          return cb(err);
        }

        // clean up metadata that conflicts with Mongo in the staging bucket
        delete payload.__v;
        delete payload._id;

        if (response.statusCode === 200 && payload != null) {
          if (payload.uuid == null) {
            // not OIN metadata
            // TODO specify oin-metadata (or something) with a version number
            return cb();
          }

          // Remember where this metadata came from if the file itself
          // doesn't say.
          payload.meta_uri = payload.meta_uri || remoteUri;

          // create a geojson object from footprint (WKT, via wellknown's
          // parse) and bbox
          // TODO: Put in a Mongoose middleware hook
          if (payload.footprint != null && payload.geojson == null) {
            payload.geojson = parse(payload.footprint);
            payload.geojson.bbox = payload.bbox;
          }

          // Upsert keyed on uuid so re-indexing the same image updates it
          // in place rather than duplicating records.
          var query = { uuid: payload.uuid };
          var options = { upsert: true, new: true, select: { uuid: 1 } };
          return Meta.findOneAndUpdate(query, payload, options, function (err, record) {
            if (err) {
              return cb(err);
            }

            var status = (meta === null) ? ' added' : ' updated';
            var message = record.uuid + status + '!';
            console.log('[meta]', message);
            return cb(null, message);
          });
        }

        // Non-200 response or empty body: nothing to index.
        return cb();
      });
    }

    // Already indexed and unchanged since our last update.
    return cb();
  });
};
187 |
// Middleware to check if the current user has permission to access
// the requested object. Injects the object at `request.app` so that another
// DB call doesn't need to be made again.
module.exports.fetchRequestedObject = function (request, reply) {
  var metaId = request.params.id;
  Meta.findOne({_id: metaId}, function (err, record) {
    // Bug fix: check for a DB error before inspecting the record.
    // Previously the instanceof check ran first, so a DB failure (record
    // undefined) was misreported as a 404 instead of a 500.
    if (err) {
      reply(Boom.badImplementation(err.message));
      return;
    }
    if (!(record instanceof Meta)) {
      reply(Boom.notFound('The requested imagery does not exist.'));
      return;
    }
    request.app.requestedObject = record;
    reply();
  });
};
206 |
--------------------------------------------------------------------------------
/routes/user.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var _ = require('lodash');
4 | var area = require('@turf/area').default;
5 | var bboxPolygon = require('@turf/bbox-polygon').default;
6 | var Boom = require('boom');
7 | var intersect = require('@turf/intersect').default;
8 | var Joi = require('joi');
9 | var merc = new (require('@mapbox/sphericalmercator'))();
10 | var union = require('@turf/union').default;
11 |
12 | var User = require('../models/user');
13 | var Meta = require('../models/meta');
14 |
15 | module.exports = [
16 | {
17 | method: 'GET',
18 | path: '/user',
19 | config: {
20 | auth: 'session'
21 | },
22 | handler: function (request, reply) {
23 | User.findOne({
24 | session_id: request.auth.credentials.session_id
25 | }).then(function (user) {
26 | // TODO: Add `.to_json()` to all API-expressable models.
27 | return _.pick(user, [
28 | '_id',
29 | 'name',
30 | 'website',
31 | 'bio',
32 | 'contact_email',
33 | 'profile_pic_uri'
34 | ]);
35 | }).then(function (user) {
36 | Meta.find({user: user._id}).then(function (images) {
37 | user.images = images;
38 | reply(user);
39 | });
40 | }).catch(function (err) {
41 | reply(Boom.badImplementation(err));
42 | });
43 | }
44 | },
45 |
46 | {
47 | method: 'PUT',
48 | path: '/user',
49 | config: {
50 | auth: 'session',
51 | validate: {
52 | params: {
53 | name: Joi.string().min(3).max(30),
54 | website: Joi.string().uri(),
55 | bio: Joi.string().min(1).max(300)
56 | }
57 | }
58 | },
59 | handler: function (request, reply) {
60 | User.findOne({
61 | session_id: request.auth.credentials.session_id
62 | }).then(function (user) {
63 | user = Object.assign(user, request.payload);
64 | user.save(function (err) {
65 | if (err) throw new Error('Error saving user: ', err);
66 | reply(null).code(204);
67 | });
68 | }).catch(function (err) {
69 | reply(Boom.badImplementation(err));
70 | });
71 | }
72 | },
73 |
74 | {
75 | method: 'GET',
76 | path: '/user/{id}',
77 | handler: function (request, reply) {
78 | User.findOne({
79 | _id: request.params.id
80 | }).then(function (user) {
81 | return _.pick(user, [
82 | '_id',
83 | 'name',
84 | 'website',
85 | 'bio',
86 | 'profile_pic_uri'
87 | ]);
88 | }).then(function (user) {
89 | Meta.find({user: request.params.id}).then(function (images) {
90 | user.images = images;
91 | reply(user);
92 | });
93 | }).catch(function (err) {
94 | reply(Boom.badImplementation(err));
95 | });
96 | }
97 | },
98 |
99 | {
100 | method: 'GET',
101 | path: '/user/{user}/catalog.json',
102 | config: {
103 | tags: ['disablePlugins']
104 | },
105 | handler: (request, reply) => {
106 | const { user } = request.params;
107 |
108 | return Promise.all([
109 | User.findOne({
110 | _id: user
111 | }, {
112 | name: 1,
113 | website: 1
114 | }),
115 | Meta.find({
116 | user
117 | }, {
118 | bbox: 1, gsd: 1
119 | })
120 | ])
121 | .then(([user, images]) => {
122 | if (images.length === 0) {
123 | return reply(Boom.notFound());
124 | }
125 |
126 | let bounds = [-180, 180, -90, 90];
127 | let approximateZoom = 0;
128 |
129 | if (images.length > 0) {
130 | approximateZoom = Math.floor(images
131 | .map(meta => Math.ceil(Math.log2(2 * Math.PI * 6378137 / (meta.gsd * 256))))
132 | .reduce((a, b) => a + b) / images.length);
133 |
134 | bounds = images.reduce((bbox, meta) =>
135 | [
136 | Math.min(bbox[0], meta.bbox[0]),
137 | Math.min(bbox[1], meta.bbox[1]),
138 | Math.max(bbox[2], meta.bbox[2]),
139 | Math.max(bbox[3], meta.bbox[3])
140 | ], [Infinity, Infinity, -Infinity, -Infinity]);
141 | }
142 |
143 | return reply({
144 | name: user.name,
145 | bounds: bounds,
146 | center: [
147 | (bounds[0] + bounds[2]) / 2,
148 | (bounds[1] + bounds[3]) / 2,
149 | approximateZoom - 3
150 | ],
151 | maxzoom: approximateZoom + 3,
152 | minzoom: approximateZoom - 10
153 | });
154 | })
155 | .catch(err => reply(Boom.badImplementation(err.message)));
156 | }
157 | },
158 |
  /**
   * GET /user/{user}/{z}/{x}/{y}.json — for a single web-mercator tile,
   * list the user's images intersecting it, pruned to the minimal set of
   * sources needed to cover the tile (best resolution/newest first).
   */
  {
    method: 'GET',
    path: '/user/{user}/{z}/{x}/{y}.json',
    config: {
      tags: ['disablePlugins']
    },
    handler: (request, reply) => {
      const { user, z, x, y } = request.params;

      // Tile bounds as a GeoJSON polygon for the $geoIntersects query.
      const bbox = merc.bbox(x, y, z);
      const { geometry } = bboxPolygon(bbox);

      return Meta.find({
        user,
        geojson: {
          $geoIntersects: {
            $geometry: geometry
          }
        }
      }, {
        acquisition_end: 1,
        geojson: 1,
        gsd: 1,
        title: 1,
        uuid: 1
      }, {
        // Best resolution first, then most recently acquired.
        sort: {
          gsd: 1,
          acquisition_end: -1
        }
      }).then(images => {
        if (images.length === 0) {
          return reply(Boom.notFound());
        }

        // for filtering; more readable than embedding everything into reduce()
        let tileArea = area(geometry);
        let totalArea = 0;
        let totalOverlap = null;
        let filled = false;

        // sort by overlap
        const sources = images
          // calculate overlap with the target tile
          // NOTE(review): intersect() can return null for geometries that
          // only touch the tile boundary; area(null) below would then
          // throw and surface as a 500 — confirm this case cannot occur.
          .map(image => Object.assign(image, {
            overlap: intersect(geometry, image.geojson)
          }))
          // sort by overlap
          .sort((a, b) => area(b.overlap) - area(a.overlap))
          // filter unnecessary sources
          .filter(x => {
            if (filled) {
              // already full
              return false;
            }

            // Running union of the accepted overlaps; a source is kept
            // only if it grows the covered area.
            const newOverlap = totalOverlap == null ? x.overlap : union(totalOverlap, x.overlap);
            const newArea = area(newOverlap);

            if (newArea > totalArea) {
              // this source contributes
              if (newArea === tileArea) {
                // now full
                filled = true;
              }

              totalOverlap = newOverlap;
              totalArea = newArea;
              return true;
            }

            return false;
          });

        // Shape consumed by the mosaic/tiling service.
        return reply(sources
          .map(meta => ({
            url: meta.uuid,
            name: meta.title,
            acquired_at: meta.acquisition_end,
            resolution: meta.gsd,
            recipes: {
              imagery: true
            }
          })));
      })
      .catch(err => {
        console.warn(err.stack);
        return reply(Boom.badImplementation(err.message));
      });
    }
  }
];
251 |
--------------------------------------------------------------------------------
/test/specs/test_meta_update.js:
--------------------------------------------------------------------------------
1 | /* eslint camelcase: 0 */
2 | const Hapi = require('hapi');
3 | const proxyquire = require('proxyquire').noCallThru();
4 | const chai = require('chai');
5 | const sinon = require('sinon');
6 | const sinonChai = require('sinon-chai');
7 | const authentication = require('../../plugins/authentication.js');
8 | const meta = require('../fixtures/metadata.json');
9 | const geojson = require('../fixtures/geojson.json');
10 |
const expect = chai.expect;
chai.should();
chai.use(sinonChai);
// Shared sinon sandbox, restored before each test to reset call counts.
const sandbox = sinon.sandbox.create();

// Fixture identifiers used to build the upload-status URL under test.
const ObjectId = 'ObjectId';
const apiEndpoint = 'apiEndpoint';
const id = 'id';
const sceneIdx = 'sceneIdx';
const imageId = 'imageId';
const url = `http://oam.com/uploads/${id}/${sceneIdx}/${imageId}`;
// The image document the stubbed collection.findOne resolves with.
const image = {
  metadata: meta,
  user_id: 'user_id'
};
26 |
// Build the proxyquire stubs for routes/uploads.js: a fake mongo
// collection (findOne resolves the fixture image, updateOne succeeds), a
// minimal ObjectID stand-in, and a stubbed config.
const buildStubs = () => {
  const findOne = sandbox.stub().resolves(image);
  const updateOne = sandbox.stub().resolves(true);
  const collection = sandbox.stub().returns({ findOne, updateOne });

  // Remembers the id it was constructed with (falling back to a fixed
  // placeholder) and renders it via toString; always reports valid.
  function ObjectID (oid) {
    this.id = oid || ObjectId;
  }
  ObjectID.prototype.toString = function toString () {
    return this.id;
  };
  ObjectID.isValid = () => true;

  const stubs = {
    'mongoose': {
      connection: { collection }
    },
    'mongodb': {
      ObjectID
    },
    '../config': {
      apiEndpoint
    }
  };
  return { stubs, collection, findOne, updateOne };
};
59 |
// Load the uploads routes with the given stubs injected, register the
// auth plugin, and resolve with a Hapi server ready for inject().
const getServer = (stubs) => {
  const server = new Hapi.Server();
  server.connection({ port: 4000 });
  const uploads = proxyquire('../../routes/uploads.js', stubs);
  return server.register(authentication).then(() => {
    server.route(uploads);
    return server;
  });
};
69 |
// Exercises POST /uploads/{id}/{sceneIdx}/{imageId} status updates in
// routes/uploads.js against the stubbed mongoose/mongodb collaborators
// built by buildStubs() above.
describe('Updating image metadata', () => {
  beforeEach(() => {
    // Reset stubs/spies so call counts don't leak between tests.
    sandbox.restore();
  });

  it('Sets status as processsing', () => {
    const {
      stubs,
      collection,
      updateOne
    } = buildStubs();

    const options = {
      method: 'POST',
      url,
      payload: {
        status: 'processing'
      }
    };
    return getServer(stubs)
      .then((server) => {
        return server.inject(options).then((res) => {
          expect(collection).to.have.been.calledWith('images');
          expect(updateOne.firstCall.args[0]._id.toString()).to.equal(imageId);
          expect(updateOne.firstCall.args[1].$set.status).to.equal('processing');
          expect(updateOne.firstCall.args[1].$currentDate.startedAt).to.be.true;
        });
      });
  });

  it('Sets status as errored', () => {
    const {
      stubs,
      collection,
      updateOne
    } = buildStubs();

    // An incoming 'failed' status is recorded as 'errored' with a stop
    // timestamp.
    const options = {
      method: 'POST',
      url,
      payload: {
        status: 'failed'
      }
    };
    return getServer(stubs)
      .then((server) => {
        return server.inject(options).then((res) => {
          expect(collection).to.have.been.calledWith('images');
          expect(updateOne.firstCall.args[0]._id.toString()).to.equal(imageId);
          expect(updateOne.firstCall.args[1].$set.status).to.equal('errored');
          expect(updateOne.firstCall.args[1].$currentDate.stoppedAt).to.be.true;
        });
      });
  });

  it('Adds upload status message', () => {
    const {
      stubs,
      collection,
      updateOne
    } = buildStubs();

    const options = {
      method: 'POST',
      url,
      payload: {
        message: 'message'
      }
    };
    return getServer(stubs)
      .then((server) => {
        return server.inject(options).then((res) => {
          expect(collection).to.have.been.calledWith('images');
          expect(updateOne.firstCall.args[0]._id.toString()).to.equal(imageId);
          expect(updateOne.firstCall.args[1].$push.messages).to.equal('message');
        });
      });
  });

  it('Updates metadata when upload processing finishes', () => {
    const {
      stubs,
      findOne,
      updateOne
    } = buildStubs();

    // A geojson payload marks the upload finished and creates a Meta
    // record which is then synced to OIN.
    const oamSync = sandbox.stub().resolves(true);
    const meta = {
      oamSync
    };
    const create = sandbox.stub().resolves(meta);
    const Meta = {
      create
    };
    stubs['../models/meta'] = Meta;

    const options = {
      method: 'POST',
      url,
      payload: geojson
    };
    return getServer(stubs)
      .then((server) => {
        return server.inject(options).then((res) => {
          expect(updateOne.firstCall.args[0]._id.toString()).to.equal(imageId);
          expect(updateOne.firstCall.args[1].$set.status).to.equal('finished');
          expect(updateOne.firstCall.args[1].$currentDate.stoppedAt).to.be.true;

          expect(findOne.firstCall.args[0]._id.toString()).to.equal(imageId);
          expect(create.firstCall.args[0].user).to.equal(image.user_id);
          expect(oamSync).to.have.been.called;

          expect(res.statusCode).to.equal(200);
        });
      });
  });

  it('Retries metadata update after removing dupicate vertices', () => {
    const { stubs } = buildStubs();

    const oamSync = sandbox.stub().resolves(true);
    const meta = {
      oamSync
    };
    // First create attempt rejects with Mongo error code 16755, which the
    // route treats as invalid geometry (per this stub setup: it strips
    // duplicate vertices and retries); the retry succeeds.
    const create = sandbox.stub();
    create.onFirstCall().rejects({
      code: 16755
    });
    create.onSecondCall().resolves(meta);

    const Meta = {
      create
    };
    stubs['../models/meta'] = Meta;

    const removeDuplicateVertices = sinon.stub();
    stubs['../services/removeDuplicateVertices'] = removeDuplicateVertices;

    const options = {
      method: 'POST',
      url,
      payload: geojson
    };
    return getServer(stubs)
      .then((server) => {
        return server.inject(options).then((res) => {
          expect(create.firstCall.args[0].user).to.equal(image.user_id);
          expect(create.secondCall.args[0].user).to.equal(image.user_id);
          expect(removeDuplicateVertices).to.have.been.calledOnce;
          expect(oamSync).to.have.been.calledOnce;

          expect(res.statusCode).to.equal(200);
        });
      });
  });

  it('Propagates other mongo errors', () => {
    const { stubs } = buildStubs();

    // Any other Mongo error code should surface as a 500, not be retried.
    const create = sandbox.stub();
    create.onFirstCall().rejects({
      code: 0
    });

    const Meta = {
      create
    };
    stubs['../models/meta'] = Meta;

    const options = {
      method: 'POST',
      url,
      payload: geojson
    };
    return getServer(stubs)
      .then((server) => {
        return server.inject(options).then((res) => {
          expect(res.statusCode).to.equal(500);
        });
      });
  });
});
252 |
--------------------------------------------------------------------------------
/routes/meta.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var Boom = require('boom');
4 |
5 | var Meta = require('../models/meta');
6 | var metaController = require('../controllers/meta.js');
7 | var userController = require('../controllers/user.js');
8 |
9 | module.exports = [
10 | /**
11 | * @api {get} /meta List all images' metadata
12 | * @apiGroup Meta
13 | * @apiDescription Main endpoint to find data within the catalog
14 | *
15 | * @apiParam {string} [bbox] Bounding box to search within. Format `?bbox=[lon_min],[lat_min],[lon_max],[lat_max]`.
16 | * @apiParam {string} [title] Limit results by `title`.
17 | * @apiParam {string} [provider] Limit results by `provider`.
18 | * @apiParam {number} [gsd_from] Find results greater than a certain resolution. Can be used independently of `gsd_to`.
19 | * @apiParam {number} [gsd_to] Find results with lower than a certain resolution. Can be used independently of `gsd_from`.
20 | * @apiParam {date} [acquisition_from] Show results after a certain date. Can be used independently of `acquisition_to`.
21 | * @apiParam {date} [acquisition_to] Show results before a certain date. Can be used independently of `acquisition_from`.
22 | * @apiParam {boolean} [has_tiled] Return only images with associated tiled images.
23 | * @apiParam {string} [sort=desc] The sort order, asc or desc. Must be used with `order_by`.
24 | * @apiParam {string} [order_by=gsd & date] Field to sort by. Must be used with `sort`.
25 | * @apiParam {number} [limit=100] Change the number of results returned, max is 100.
26 | * @apiParam {number} [page=1] Paginate through results.
27 | * @apiParam {number} [skip] Number of records to skip.
28 | *
29 | * @apiExample {curl} Simple example:
30 | * curl 'https://oam-catalog.herokuapp.com/meta?has_tiled&gsd_to=10'
31 | *
32 | * @apiExample {curl} Using bbox:
33 | * curl 'https://oam-catalog.herokuapp.com/meta?bbox=-66.15966796875,46.45678142812658,-65.63232421875,46.126556302418514&gsd_from=20&acquisition_from=2014-01-01&limit=100'
34 | *
35 | * @apiUse metaSuccess
36 | *
37 | * @apiUse metaSuccessExample
38 | *
39 | */
40 | {
41 | method: 'GET',
42 | path: '/meta',
43 | handler: function (request, reply) {
44 | var payload = {};
45 |
46 | if (request.query) {
47 | payload = request.query;
48 | }
49 |
50 | metaController.query(payload, request.page, request.limit, function (err, records, count) {
51 | if (err) {
52 | console.error(err);
53 | return reply(err.message);
54 | }
55 |
56 | request.count = count;
57 | return reply(records);
58 | });
59 | }
60 | },
61 |
62 | /**
63 | * @api {get} /meta/:id Get an image's metadata
64 | * @apiGroup Meta
65 | * @apiDescription Display data for an individual image
66 | *
67 | * @apiParam {string} [id] The id of the image.
68 | *
69 | * @apiUse metaSuccess
70 | *
71 | * @apiUse metaSuccessExample
72 | */
  {
    method: 'GET',
    path: '/meta/{id}',
    handler: function (request, reply) {
      // Look up a single image's metadata by its Mongo _id.
      var metaId = request.params.id;

      Meta.findOne({_id: metaId}, function (err, record) {
        if (err) {
          console.error(err);
          return reply(Boom.badImplementation(err.message));
        }
        // NOTE(review): a missing record replies with an empty 200 body
        // rather than a 404 — confirm whether clients rely on this before
        // changing it.
        return reply(record);
      });
    }
  },
88 |
89 | /**
90 | * @api {put} /meta/:id Update an image's metadata
91 | * @apiGroup Meta
92 | * @apiDescription Update data for an individual image
93 | *
94 | * @apiParam {string} [id] The id of the image.
95 | *
96 | * @apiSuccess (204) PageUpdated.
97 | */
98 | {
99 | method: 'PUT',
100 | path: '/meta/{id}',
101 | config: {
102 | auth: 'session',
103 | pre: [
104 | {method: metaController.fetchRequestedObject},
105 | {method: userController.isOwnerOfRequestedObject}
106 | ]
107 | },
108 | handler: function (request, reply) {
109 | let meta = request.app.requestedObject;
110 | meta.oamUpdate(request.payload, function (err, _result) {
111 | if (err) {
112 | console.error(err);
113 | reply(Boom.badImplementation(err));
114 | return;
115 | }
116 | reply(null).code(204);
117 | });
118 | }
119 | },
120 |
121 | /**
122 | * @api {delete} /meta/:id Delete an image
123 | * @apiGroup Meta
124 | * @apiDescription Delete an image
125 | *
126 | * @apiParam {string} [id] The id of the image.
127 | *
128 | * @apiSuccess (204) PageUpdated.
129 | */
130 | {
131 | method: 'DELETE',
132 | path: '/meta/{id}',
133 | config: {
134 | auth: 'session',
135 | pre: [
136 | {method: metaController.fetchRequestedObject},
137 | {method: userController.isOwnerOfRequestedObject}
138 | ]
139 | },
140 | handler: function (request, reply) {
141 | let meta = request.app.requestedObject;
142 | meta.oamDelete(function (err, _result) {
143 | if (err) {
144 | console.error(err);
145 | reply(Boom.badImplementation(err));
146 | return;
147 | }
148 | reply(null).code(204);
149 | });
150 | }
151 | }
152 | ];
153 |
154 | // -----------------------------------------------------------------------------
155 | // Meta success return values
156 | // -----------------------------------------------------------------------------
157 | /**
158 | * @apiDefine metaSuccess
159 | * @apiSuccess {string} _id Unique internal ID
160 | * @apiSuccess {url} uuid Image source
161 | * @apiSuccess {string} title Name of image
162 | * @apiSuccess {string} projection Image projection information
163 | * @apiSuccess {string} footprint Image footprint
164 | * @apiSuccess {number} gsd Spatial resolution of image (in meters)
165 | * @apiSuccess {number} file_size File size of image (in bytes)
166 | * @apiSuccess {date} acquisition_start Start of image capture
167 | * @apiSuccess {date} acquisition_end End of image capture
168 | * @apiSuccess {string} platform Recording platform of image (UAV, satellite, etc)
169 | * @apiSuccess {string} provider Imagery provider
170 | * @apiSuccess {string} contact Imagery contact point
171 | * @apiSuccess {object} properties Optional metadata about the image
172 | * @apiSuccess {url} meta_uri URL of metadata information
173 | * @apiSuccess {string} geojson GeoJSON information for image
174 | * @apiSuccess {string} bbox Bounding box of image
175 | */
176 |
177 | // -----------------------------------------------------------------------------
178 | // Meta success example
179 | // -----------------------------------------------------------------------------
180 | /**
181 | * @apiDefine metaSuccessExample
182 | * @apiSuccessExample {json} Success Response:
183 | * HTTP/1.1 200 OK
184 | * {
185 | * "_id": "556f7a49ac00a903002fb016",
186 | * "uuid": "http://hotosm-oam.s3.amazonaws.com/2015-04-20_dar_river_merged_transparent_mosaic_group1.tif",
187 | * "title": "2015-04-20_dar_river_merged_transparent_mosaic_group1.tif",
188 | * "projection": "PROJCS[\"WGS84/UTMzone37S\",GEOGCS[\"WGS84\",DATUM[\"WGS_1984\",SPHEROID[\"WGS84\",6378137,298.257223563,AUTHORITY[\"EPSG\",\"7030\"]],AUTHORITY[\"EPSG\",\"6326\"]],PRIMEM[\"Greenwich\",0],UNIT[\"degree\",0.0174532925199433],AUTHORITY[\"EPSG\",\"4326\"]],PROJECTION[\"Transverse_Mercator\"],PARAMETER[\"latitude_of_origin\",0],PARAMETER[\"central_meridian\",39],PARAMETER[\"scale_factor\",0.9996],PARAMETER[\"false_easting\",500000],PARAMETER[\"false_northing\",10000000],UNIT[\"metre\",1,AUTHORITY[\"EPSG\",\"9001\"]],AUTHORITY[\"EPSG\",\"32737\"]]",
189 | * "footprint": "POLYGON((39.24013333333333 -6.755633333333333,39.26116944444444 -6.755622222222223,39.261183333333335 -6.776669444444444,39.24014444444445 -6.776680555555555,39.24013333333333 -6.755633333333333))",
190 | * "gsd": 0.04069,
191 | * "file_size": 2121158626,
192 | * "acquisition_start": "2015-04-20T00:00:00.000Z",
193 | * "acquisition_end": "2015-04-21T00:00:00.000Z",
194 | * "platform": "UAV",
195 | * "provider": "",
196 | * "contact": "",
197 | * "properties": {
198 | * "tms": "",
199 | * "thumbnail": ""
200 | * },
201 | * "meta_uri": "",
202 | * "geojson": {},
203 | * "bbox": []
204 | * }
205 | */
206 |
--------------------------------------------------------------------------------
/catalog-worker.js:
--------------------------------------------------------------------------------
1 | /**
2 | * This background process polls S3 buckets for new imagery
3 | */
4 |
5 | "use strict";
6 | var path = require("path");
7 |
8 | console.log("Starting catalog worker...");
9 |
10 | require("dotenv").config({
11 | path: path.resolve(process.cwd(), process.env.DOT_ENV_FILENAME || ".env"),
12 | });
13 |
14 | var _ = require("lodash");
15 | var S3 = require("aws-sdk/clients/s3");
16 | var async = require("async");
17 | var config = require("./config");
18 | var Conn = require("./services/db");
19 | var analytics = require("./controllers/analytics");
20 | var meta = require("./controllers/meta");
21 | var Meta = require("./models/meta");
22 | // Replace mongoose's deprecated promise library (mpromise) with bluebird
23 |
24 | var mongoose = require("mongoose");
25 | mongoose.Promise = require("bluebird");
26 | var request = require("request");
27 | var cron = require("node-cron");
28 | var { Client: PgClient } = require("pg");
29 |
30 | var db = new Conn();
31 | db.start();
32 |
// Minimal node-style callback that logs the error (when present) and then
// always logs the message.
var consoleLog = function (err, msg) {
  if (err) console.log(err);
  console.log(msg);
};
39 |
/**
 * Get the list of buckets from the master register.
 *
 * Falls back to the single configured OIN bucket when no register URL is
 * configured.
 *
 * @param {function} cb - Node-style callback invoked with (err, buckets),
 *   where buckets is an array of bucket location objects.
 */
var getBucketList = function (cb) {
  if (typeof config.oinRegisterUrl === "undefined") {
    // No register configured; index only the single configured bucket.
    return cb(null, [{ type: "s3", bucket_name: config.oinBucket }]);
  }

  request.get(
    {
      json: true,
      uri: config.oinRegisterUrl,
    },
    function (err, res, remoteData) {
      if (err) {
        return cb(err);
      }

      if (res.statusCode !== 200) {
        // Bug fix: previously this path only logged and never invoked the
        // callback, so the caller hung forever on a bad register response.
        return cb(new Error("Unable to get register list."));
      }

      // Each register node may expose several bucket locations.
      var buckets = _.flatten(
        _.map(remoteData.nodes, function (node) {
          return node.locations;
        })
      );
      cb(null, buckets);
    }
  );
};
72 |
/**
 * Runs the readBuckets tasks in parallel and save analytics data when done
 *
 * Two phases: first the per-bucket listing tasks run (each yields an array of
 * per-object indexing tasks), then the flattened per-object tasks run, each
 * wrapped with async.retryable. Finally image/sensor/provider counts are
 * written to the analytics collection.
 *
 * @param {Array} tasks - The array of bucket read functions to be run in parallel
 */
var readBuckets = function (tasks) {
  console.info("--- Started indexing all buckets ---");
  async.parallelLimit(
    tasks,
    4,
    // Results is an [[tasks]]
    function (err, results) {
      if (err) {
        return console.error(err);
      }
      results = _.flatten(results);
      results = results.map(function (task) {
        // Wrap so transient failures are retried when executed below.
        return async.retryable(task);
      });
      async.parallelLimit(results, 5, function (err, results) {
        if (err) {
          return console.error(err);
        }
        console.info("--- Finished indexing all buckets ---");
        // Get image, sensor, and provider counts and save to analytics collection
        return Promise.all([
          Meta.count(),
          Meta.distinct("properties.sensor"),
          Meta.distinct("provider"),
        ])
          .then(function (res) {
            var counts = {};
            counts.image_count = res[0];
            counts.sensor_count = res[1].length;
            counts.provider_count = res[2].length;
            analytics.addAnalyticsRecord(counts, function (err) {
              // Catch error in record addition
              if (err) {
                console.error(err);
              }
              console.info("--- Added new analytics record ---");
            });
            // Catch error in db query promises
          })
          .catch(function (err) {
            return console.error(err);
          });
      });
    }
  );
};
124 |
// Reads a single S3 bucket listing and builds one indexing task for every
// "*_meta.json" object found. The tasks are handed to `done` for later
// execution; listing errors are reported via both `errCb` and `done`.
var readBucket = function (bucket, lastSystemUpdate, errCb, done) {
  console.info("--- Reading from bucket: " + bucket.bucket_name + " ---");

  const listParams = {
    Bucket: bucket.bucket_name,
  };

  // Only the main OIN bucket is restricted to the configured key prefix.
  if (bucket.bucket_name === config.oinBucket) {
    listParams.Prefix = config.oinBucketPrefix;
  }

  new S3().listObjects(listParams, function (err, data) {
    if (err) {
      errCb(err);
      return done(err);
    }

    const tasks = [];
    for (const item of data.Contents) {
      if (!item.Key.includes("_meta.json")) continue;
      // LastModified lets addRemoteMeta decide whether the metadata needs
      // re-indexing since the last system update.
      const lastModified = item.LastModified;
      const url = `https://${config.s3PublicDomain}/${bucket.bucket_name}/${item.Key}`;
      tasks.push(function (taskDone) {
        meta.addRemoteMeta(url, lastModified, lastSystemUpdate, taskDone);
      });
    }
    done(null, tasks);
  });
};
160 |
// Entry point for one indexing pass: fetch the bucket register, build the
// per-bucket read tasks and run them.
var getListAndReadBuckets = function () {
  // The last system update time lets each task skip unchanged metadata.
  analytics.getLastUpdateTime(function (err, lastSystemUpdate) {
    if (err) {
      return console.error(err);
    }
    console.info("Last system update time:", lastSystemUpdate);

    getBucketList(function (err, buckets) {
      if (err) {
        return console.error(err.stack);
      }

      // One task per bucket; unknown bucket types are reported and skipped.
      var tasks = _.map(buckets, function (bucket) {
        return function (done) {
          if (bucket.type !== "s3") {
            return console.error("Unknown bucket type: " + bucket.type);
          }
          readBucket(bucket, lastSystemUpdate, consoleLog, done);
        };
      });

      readBuckets(tasks);
    });
  });
};
191 |
// Kick it all off
// A full catalog indexing pass runs on the configured cron schedule.
cron.schedule(config.cronTime, function () {
  console.log("Running a catalog worker (cron time: " + config.cronTime + ")");
  getListAndReadBuckets();
});

// Postgres connection settings for the mosaic sync below; PG_CRON_TIME
// defaults to every minute.
const {
  PGHOST,
  PGPORT,
  PGUSER,
  PGPASSWORD,
  PGDATABASE,
  PG_CRON_TIME = "* * * * *",
} = process.env;

// The mosaic sync is only enabled when every Postgres variable is set.
const isPgEnabled = [PGHOST, PGUSER, PGPASSWORD, PGDATABASE, PGPORT].every(
  Boolean
);
210 |
// Shared client, established lazily on the first cron run.
let pgConnection;

/**
 * Lazily create (and memoize) a single Postgres client connection.
 *
 * @returns {Promise<PgClient>} the shared client (connected on success).
 */
async function pgCreateConnection() {
  if (pgConnection) {
    return pgConnection;
  }

  const connection = new PgClient({
    user: PGUSER,
    password: PGPASSWORD,
    database: PGDATABASE,
    // Bug fix: the port was hard-coded to 5432 even though PGPORT is read
    // from the environment (and required by isPgEnabled above).
    port: Number(PGPORT),
    host: PGHOST,
    ssl: true,
  });

  try {
    await connection.connect();
  } catch (error) {
    // Bug fix: do not cache a client that failed to connect — previously the
    // dead client was memoized and reused forever. Returning it uncached
    // lets the next cron run retry with a fresh connection.
    console.error(error);
    return connection;
  }

  pgConnection = connection;
  return pgConnection;
}
235 |
// This is a task scheduled by cron run that copies all images metadata from
// mongodb in postgres. It is required to run mosaic server that relies on
// postgres db with postgis extension.
if (isPgEnabled) {
  cron.schedule(PG_CRON_TIME, async function () {
    // Load every metadata record from mongo (promisified exec callback).
    const records = await new Promise((resolve, reject) => {
      Meta.find({}, null, {}).exec((err, records) => {
        if (err) {
          reject(err);
          return;
        }

        resolve(records);
      });
    });

    const pgConnection = await pgCreateConnection();

    // NOTE(review): "oamMosacLayerId" spelling matches the config key as
    // used here — confirm against config.js before renaming.
    const mosaicLayerId = config.oamMosacLayerId;

    // Full refresh inside one transaction: delete the layer's features,
    // re-insert all records, commit; roll back on any failure.
    try {
      await pgConnection.query("begin");

      // NOTE(review): mosaicLayerId is interpolated into the SQL rather than
      // bound as a parameter. It comes from config (not user input), but a
      // parameterized query would be safer.
      await pgConnection.query(
        `delete from layers_features where layer_id = (select id from layers where public_id = '${mosaicLayerId}')`
      );

      // TODO: there should be a better way to do bulk insert
      const queryText = `insert into public.layers_features (feature_id, layer_id, properties, geom, last_updated, zoom) values ($1, (select id from layers where public_id = '${mosaicLayerId}'), $2, ST_Transform(ST_GeomFromGeoJSON($3), 4326), now(), 999)`;
      for (const record of records) {
        const queryValues = [
          record._id,
          // Flatten selected top-level fields into the stored properties.
          JSON.stringify({
            ...record.properties,
            gsd: record.gsd,
            uuid: record.uuid,
            uploaded_at: record.uploaded_at,
            acquisition_start: record.acquisition_start,
            acquisition_end: record.acquisition_end,
          }),
          JSON.stringify(record.geojson),
        ];

        await pgConnection.query(queryText, queryValues);
      }

      await pgConnection.query("commit");
    } catch (err) {
      console.error(err);
      await pgConnection.query("rollback");
    }
  });
} else {
  console.warn(
    "The Postgres credentials not defined, skip mosaic index updating"
  );
}
--------------------------------------------------------------------------------
/test/specs/test_meta.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var connection = require('mongoose').connection;
4 | var expect = require('chai').expect;
5 | var request = require('request');
6 | var wktParse = require('wellknown');
7 | var AWS = require('aws-sdk');
8 |
9 | var config = require('../../config');
10 | var Meta = require('../../models/meta');
11 | var meta = require('../fixtures/meta_db_objects.json');
12 | var commonHelper = require('../helper');
13 |
14 | require('./helper');
15 |
16 | describe('Meta endpoint', function () {
17 | let savedUser = {};
18 | before(function (done) {
19 | commonHelper.createUser({
20 | facebook_id: 123,
21 | session_id: null
22 | }, function (user) {
23 | savedUser = user;
24 | Meta.create(meta).then(function (results) {
25 | results.forEach(function (result) {
26 | result.user = user;
27 | // TODO: Put in a Mongoose middleware hook
28 | result.geojson = wktParse(result.footprint);
29 | result.geojson.bbox = result.bbox;
30 | result.save();
31 | });
32 | done();
33 | });
34 | });
35 | });
36 |
37 | it('list meta', function (done) {
38 | request(config.apiEndpoint + '/meta/', function (err, response, body) {
39 | if (err) {
40 | console.log(err);
41 | }
42 | var res = JSON.parse(body);
43 | expect(res.results.length).to.equal(2);
44 | done();
45 | });
46 | });
47 |
48 | it('includes user data', function (done) {
49 | request(config.apiEndpoint + '/meta/', function (err, response, body) {
50 | if (err) {
51 | console.log(err);
52 | }
53 | var res = JSON.parse(body);
54 | const user = res.results[0].user;
55 | expect(Object.keys(user).length).to.eq(2);
56 | expect(user._id).to.equal(savedUser._id.toString());
57 | expect(user.name).to.equal('Tester');
58 | done();
59 | });
60 | });
61 |
62 | it('search meta', function (done) {
63 | request(config.apiEndpoint + '/meta/?title=some_image2.tif', function (err, response, body) {
64 | if (err) {
65 | console.log(err);
66 | }
67 | var res = JSON.parse(body);
68 | expect(res.results.length).to.equal(1);
69 | expect(res.results[0].uuid).to.equal(meta[1].uuid);
70 | expect(res.results[0].platform).to.equal('drone');
71 | });
72 |
73 | request(config.apiEndpoint + '/meta/?platform=drone', function (err, response, body) {
74 | if (err) {
75 | console.log(err);
76 | }
77 | var res = JSON.parse(body);
78 | expect(res.results.length).to.equal(2);
79 | done();
80 | });
81 | });
82 |
83 | it('search by bounding box', function (done) {
84 | request(config.apiEndpoint + '/meta/?bbox=8.26171875,57.87981645527841,42.03369140625,62.32920841458002', function (err, response, body) {
85 | if (err) {
86 | console.log(err);
87 | }
88 | var res = JSON.parse(body);
89 | expect(res.results.length).to.equal(1);
90 | expect(res.results[0].uuid).to.equal(meta[0].uuid);
91 | done();
92 | });
93 | });
94 |
95 | it('search by resolution', function (done) {
96 | request(config.apiEndpoint + '/meta/?gsd_from=1&gsd_to=4', function (err, response, body) {
97 | if (err) {
98 | console.log(err);
99 | }
100 | var res = JSON.parse(body);
101 | expect(res.results.length).to.equal(1);
102 | expect(res.results[0].uuid).to.equal(meta[1].uuid);
103 | });
104 |
105 | request(config.apiEndpoint + '/meta/?gsd_from=20&gsd_to=100', function (err, response, body) {
106 | if (err) {
107 | console.log(err);
108 | }
109 | var res = JSON.parse(body);
110 | expect(res.results.length).to.equal(0);
111 | done();
112 | });
113 | });
114 |
115 | it('search if tms is provided', function (done) {
116 | request(config.apiEndpoint + '/meta/?has_tiled', function (err, response, body) {
117 | if (err) {
118 | console.log(err);
119 | }
120 | var res = JSON.parse(body);
121 | expect(res.results.length).to.equal(1);
122 | expect(res.results[0].uuid).to.equal(meta[1].uuid);
123 | done();
124 | });
125 | });
126 |
127 | it('search date', function (done) {
128 | request(
129 | config.apiEndpoint + '/meta/?acquisition_from=2015-04-10&acquisition_to=2015-05-01',
130 | function (err, response, body) {
131 | if (err) {
132 | console.log(err);
133 | }
134 | var res = JSON.parse(body);
135 | expect(res.results.length).to.equal(1);
136 | expect(res.results[0].uuid).to.equal(meta[0].uuid);
137 | }
138 | );
139 |
140 | request(
141 | config.apiEndpoint + '/meta/?acquisition_from=2015-01-01&acquisition_to=2015-05-01',
142 | function (err, response, body) {
143 | if (err) {
144 | console.log(err);
145 | }
146 | var res = JSON.parse(body);
147 | expect(res.results.length).to.equal(2);
148 | expect(res.results[0].uuid).to.equal(meta[0].uuid);
149 | done();
150 | }
151 | );
152 | });
153 |
154 | it('order', function (done) {
155 | request(
156 | config.apiEndpoint + '/meta/?order_by=acquisition_start&sort=asc',
157 | function (err, response, body) {
158 | if (err) {
159 | console.log(err);
160 | }
161 | var res = JSON.parse(body);
162 | expect(res.results.length).to.equal(2);
163 | expect(res.results[0].uuid).to.equal(meta[1].uuid);
164 | }
165 | );
166 |
167 | request(
168 | config.apiEndpoint + '/meta/?order_by=acquisition_start&sort=desc',
169 | function (err, response, body) {
170 | if (err) {
171 | console.log(err);
172 | }
173 | var res = JSON.parse(body);
174 | expect(res.results.length).to.equal(2);
175 | expect(res.results[0].uuid).to.equal(meta[0].uuid);
176 | done();
177 | }
178 | );
179 | });
180 |
181 | it('limit and page order', function (done) {
182 | request(
183 | config.apiEndpoint + '/meta/?limit=1&page=1',
184 | function (err, response, body) {
185 | if (err) {
186 | console.log(err);
187 | }
188 | var res = JSON.parse(body);
189 | expect(res.results.length).to.equal(1);
190 | expect(res.results[0].uuid).to.equal(meta[0].uuid);
191 | }
192 | );
193 |
194 | request(
195 | config.apiEndpoint + '/meta/?limit=1&page=2',
196 | function (err, response, body) {
197 | if (err) {
198 | console.log(err);
199 | }
200 | var res = JSON.parse(body);
201 | expect(res.results.length).to.equal(1);
202 | expect(res.results[0].uuid).to.equal(meta[1].uuid);
203 | done();
204 | }
205 | );
206 | });
207 |
208 | context('Updating', function () {
209 | var existingUser;
210 | var existingMeta;
211 |
212 | beforeEach(function (done) {
213 | connection.db.dropDatabase();
214 |
215 | commonHelper.createUser({
216 | facebook_id: 123,
217 | session_id: null
218 | }, function (user) {
219 | existingUser = user;
220 | let metaToSave = meta[0];
221 | metaToSave.user = existingUser;
222 | Meta.create(metaToSave).then(function (savedMeta) {
223 | existingMeta = savedMeta;
224 | done();
225 | });
226 | });
227 | });
228 |
229 | context('Wrong user', function () {
230 | var otherUser;
231 |
232 | beforeEach(function (done) {
233 | commonHelper.createUser({
234 | facebook_id: 124,
235 | session_id: null
236 | }, function (user) {
237 | otherUser = user;
238 | done();
239 | });
240 | });
241 |
242 | it('should not let a non-owner update imagery', function (done) {
243 | var options = {
244 | url: config.apiEndpoint + '/meta/' + existingMeta.id,
245 | jar: commonHelper.cookieJar,
246 | json: true
247 | };
248 |
249 | commonHelper.logUserIn(otherUser, function (httpResponse, body) {
250 | request.put(options, function (_err, httpResponse, body) {
251 | expect(httpResponse.statusCode).to.equal(403);
252 | expect(body.message).to.include('does not have permission');
253 | done();
254 | });
255 | });
256 | });
257 | });
258 |
259 | it('should update imagery', function (done) {
260 | var options = {
261 | url: config.apiEndpoint + '/meta/' + existingMeta.id,
262 | jar: commonHelper.cookieJar,
263 | json: {
264 | title: 'A different title',
265 | properties: {
266 | sensor: 'A different sensor'
267 | }
268 | }
269 | };
270 |
271 | commonHelper.logUserIn(existingUser, function (httpResponse, body) {
272 | request.put(options, function (_err, httpResponse, body) {
273 | expect(httpResponse.statusCode).to.equal(204);
274 | expect(AWS.S3.prototype.getObject.callCount).to.eq(0);
275 | expect(AWS.S3.prototype.putObject.callCount).to.eq(0);
276 | Meta.findOne({_id: existingMeta.id}, function (_err, result) {
277 | expect(result.title).to.eq('A different title');
278 | expect(result.properties.sensor).to.eq('A different sensor');
279 | done();
280 | });
281 | });
282 | });
283 | });
284 |
285 | it('should delete imagery', function (done) {
286 | var options = {
287 | url: config.apiEndpoint + '/meta/' + existingMeta.id,
288 | jar: commonHelper.cookieJar
289 | };
290 |
291 | commonHelper.logUserIn(existingUser, function (httpResponse, body) {
292 | request.delete(options, function (_err, httpResponse, body) {
293 | expect(httpResponse.statusCode).to.equal(204);
294 | Meta.findOne({_id: existingMeta.id}, function (_err, result) {
295 | expect(result).to.eq(null);
296 | done();
297 | });
298 | });
299 | });
300 | });
301 |
302 | context('Syncing to S3', function () {
303 | let metaInOINBucket;
304 |
305 | beforeEach(function (done) {
306 | let metaToSave = meta[1];
307 | metaToSave.meta_uri = `https://example.com/${config.oinBucket}/123_metadata.json`;
308 | Meta.create(metaToSave).then(function (savedMeta) {
309 | metaInOINBucket = savedMeta;
310 | done();
311 | });
312 | });
313 |
314 | it('should try to sync to S3 when updating imagery', function (done) {
315 | let newDetails = {title: 'I hope my S3 file is synced'};
316 | metaInOINBucket.oamUpdate(newDetails, function () {
317 | expect(AWS.S3.prototype.getObject.callCount).to.eq(1);
318 | expect(AWS.S3.prototype.putObject.callCount).to.eq(1);
319 | let metadataString = AWS.S3.prototype.putObject.args[0][0].Body.toString('utf8');
320 | let metadataJSON = JSON.parse(metadataString);
321 | expect(metadataJSON).to.deep.equal(newDetails);
322 | done();
323 | });
324 | });
325 |
326 | it('should try to delete from S3 when deleting imagery', function (done) {
327 | metaInOINBucket.oamDelete(function () {
328 | expect(AWS.S3.prototype.listObjects.callCount).to.eq(1);
329 | expect(AWS.S3.prototype.deleteObjects.callCount).to.eq(1);
330 | done();
331 | });
332 | });
333 | });
334 | });
335 | });
336 |
--------------------------------------------------------------------------------
/routes/uploads.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var bbox = require('@turf/bbox');
4 | var envelope = require('@turf/envelope');
5 | var getGeom = require('@turf/invariant').getGeom;
6 | var db = require('mongoose').connection;
7 | var ObjectID = require('mongodb').ObjectID;
8 | var queue = require('queue-async');
9 | var Boom = require('boom');
10 | var Joi = require('joi');
11 | var S3 = require('aws-sdk/clients/s3');
12 | var crypto = require('crypto');
13 | var wellknown = require('wellknown');
14 | var Meta = require('../models/meta');
15 | var config = require('../config');
16 | var transcoder = require('../services/transcoder');
17 | const metaValidations = require('../models/metaValidations.js');
18 | const removeDuplicateVertices = require('../services/removeDuplicateVertices');
19 |
// Sendgrid mail client initialised with the configured API key.
var sendgrid = require('sendgrid')(config.sendgridApiKey);
// Joi validation schema for incoming upload scenes (see models/metaValidations).
const uploadSchema = metaValidations.getSceneValidations();
22 |
/**
 * Persist one placeholder image document per URL in a scene.
 *
 * Each document starts in status 'initial' with the scene's metadata copied
 * onto it.
 *
 * @returns {Promise<ObjectID[]>} the ids of the inserted documents.
 */
function insertImages (scene, name, email, userID) {
  const docs = scene.urls.map(function (url) {
    return {
      _id: new ObjectID(),
      url: url,
      status: 'initial',
      user_id: userID,
      messages: [],
      metadata: {
        acquisition_end: scene.acquisition_end,
        acquisition_start: scene.acquisition_start,
        contact: `${name},${email}`,
        platform: scene.platform,
        provider: scene.provider,
        properties: {
          license: scene.license,
          sensor: scene.sensor
        },
        title: scene.title
      }
    };
  });

  return db.collection('images')
    .insertMany(docs)
    .then(() => docs.map((doc) => doc._id));
}
49 |
// Replace a scene's image-id list with the full image documents from mongo,
// stripping internal fields (tags, license) before the scene is returned.
function includeImages (db, scene, callback) {
  const query = { _id: { $in: scene.images } };
  db.collection('images').find(query).toArray(function (err, images) {
    scene.images = images;
    delete scene.tags;
    delete scene.license;
    callback(err, scene);
  });
}
61 |
// HMAC-SHA256 of `value` keyed by `key`, returned as a raw Buffer digest.
function hmac (key, value) {
  const mac = crypto.createHmac('sha256', key);
  mac.update(value);
  return mac.digest();
}
65 |
// HMAC-SHA256 of `value` keyed by `key`, hex-encoded.
function hexhmac (key, value) {
  const mac = crypto.createHmac('sha256', key);
  return mac.update(value).digest('hex');
}
69 |
70 | module.exports = [
71 | /**
72 | * @api {get} /uploads List uploads of currently authenticated user.
73 | * @apiGroup uploads
74 | * @apiSuccess {Object[]} results
75 | * @apiUse uploadStatusSuccess
76 | * @apiPermission Token
77 | */
  {
    method: 'GET',
    path: '/uploads',
    config: {
      auth: 'session'
    },
    handler: function (request, reply) {
      // List every upload owned by the authenticated user, hydrating each
      // scene's image ids into full image documents before replying.
      var user = request.auth.credentials.id;
      db.collection('uploads').find({ user: user })
        .toArray(function (err, uploads) {
          if (err) { return reply(Boom.wrap(err)); }
          var q = queue();
          uploads.forEach(function (upload) {
            upload.scenes.forEach(function (scene) {
              // includeImages mutates the scene in place.
              q.defer(includeImages, db, scene);
            });
          });

          q.awaitAll(function (err) {
            if (err) { return reply(Boom.wrap(err)); }
            reply({ results: uploads });
          });
        });
    }
  },
  {
    method: 'GET',
    path: '/signupload',
    config: {
      auth: 'session',
      tags: ['disablePlugins']
    },
    handler: function (request, reply) {
      // AWS Signature V4 signing-key derivation chain:
      // 'AWS4' + secret -> date -> region -> service -> 'aws4_request',
      // then sign the caller-supplied string-to-sign and reply the hex.
      const timestamp = request.query.datetime.substr(0, 8);
      const date = hmac('AWS4' + config.awsSecret, timestamp);
      const region = hmac(date, config.awsRegion);
      const service = hmac(region, 's3');
      const signing = hmac(service, 'aws4_request');
      reply(hexhmac(signing, request.query.to_sign));
    }
  },
  /**
   * @api {post} /uploads/url Get presigned URL for upload to S3
   * @apiParam {Object} payload Parameters sent as object resolvable from request.payload
   * @apiParam {string} payload.name The name of the file to be uploaded
   * @apiParam {string} payload.type The content type of the file to be uploaded
   * @apiUse uploadStatusSuccess
   */
  {
    method: 'POST',
    path: '/uploads/url',
    config: {
      auth: 'session'
    },
    handler: function (request, reply) {
      // Generate a presigned S3 PUT URL (60 second expiry) for the named file.
      var payload = request.payload;
      var s3 = new S3();
      var params = {
        Bucket: config.uploadBucket,
        Key: payload.name,
        ContentType: payload.type,
        Expires: 60
      };
      s3.getSignedUrl('putObject', params, function (err, url) {
        if (err) {
          console.log(err);
          // NOTE(review): errors reply HTTP 200 with {code: 500} in the
          // body — confirm whether clients depend on this before changing.
          return reply({code: 500, url: null});
        } else {
          return reply({code: 200, url: url});
        }
      });
    }
  },
152 | /**
153 | * @api {get} /uploads/:id Get the status of a given upload
154 | * @apiGroup uploads
155 | * @apiParam {String} id The id of the upload
156 | * @apiUse uploadStatusSuccess
157 | */
  {
    method: 'GET',
    path: '/uploads/{id}',
    handler: function (request, reply) {
      // Fetch one upload by id, hydrating each scene's images before replying.
      if (!ObjectID.isValid(request.params.id)) {
        return reply(Boom.badRequest('Invalid id: ' + request.params.id));
      }
      db.collection('uploads').findOne({
        _id: new ObjectID(request.params.id)
      })
      .then(function (upload) {
        if (upload == null) {
          return reply(Boom.notFound('The requested upload does not exist'));
        }

        var q = queue();
        upload.scenes.forEach(function (scene) {
          // includeImages mutates the scene in place.
          q.defer(includeImages, db, scene);
        });

        q.awaitAll(function (err) {
          if (err) { return reply(Boom.wrap(err)); }
          reply(upload);
        });
      })
      .catch(function (err) { reply(Boom.wrap(err)); });
    }
  },

  /**
   * @api {post} /uploads/:id/:sceneIdx/:imageId Update imagery metadata
   * @apiGroup uploads
   * @apiParam {String} id The id of the upload
   * @apiUse uploadStatusSuccess
   */
  {
    method: 'POST',
    path: '/uploads/{id}/{sceneIdx}/{imageId}',
    config: {
      payload: {
        allow: 'application/json',
        output: 'data',
        parse: true
      }
    },
    handler: function (request, reply) {
      // Record a status/message update for one image of an upload; when the
      // new status is 'finished', also persist the image's final metadata.
      if (!ObjectID.isValid(request.params.id)) {
        return reply(Boom.badRequest('Invalid upload id: ' + request.params.id));
      }

      if (!ObjectID.isValid(request.params.imageId)) {
        return reply(Boom.badRequest('Invalid image id: ' + request.params.imageId));
      }

      var imageId = new ObjectID(request.params.imageId);

      // Status and message updates run concurrently; the resolved status
      // value decides whether the metadata update is needed.
      const status = updateUploadStatus(request, imageId);
      const message = updateUploadMessage(request, imageId);
      return Promise.all([status, message])
        .then((values) => {
          let promise = Promise.resolve(true);
          if (values[0] === 'finished') {
            promise = updateUploadMetadata(request, imageId);
          }
          return promise;
        })
        .then(reply)
        .catch(error => reply(Boom.wrap(error)));
    }
  },
228 |
229 | /**
230 | * @api {post} /uploads Add an upload to the queue
231 | * @apiGroup uploads
232 | * @apiPermission Token
233 | *
234 | * @apiParam {Object} contactInfo
 * @apiParam {string} contactInfo.name
 * @apiParam {string} contactInfo.email
237 | * @apiParam {Object[]} scenes
238 | * @apiParam {Object} scenes.metadata The OAM metadata
239 | * @apiParam {string[]} scenes.urls The image URLs
240 | *
241 | * @apiExample {js} Example post
242 | * {
243 | * "scenes": [
244 | * {
245 | * "contact": {
246 | * "name": "Sat E Lyte",
247 | * "email": "foo@bar.com"
248 | * },
249 | * "title": "A scene title",
250 | * "platform": "UAV",
251 | * "provider": "Drones R Us",
252 | * "sensor": "DroneModel01",
253 | * "acquisition_start": "2015-04-01T00:00:00.000",
254 | * "acquisition_end": "2015-04-30T00:00:00.000",
255 | * "urls": [
256 | * "http://dron.es/image1.tif",
257 | * "http://dron.es/image2.tif",
 *          "http://dron.es/image3.tif"
259 | * ]
260 | * },
261 | * {
262 | * "contact": {
263 | * "name": "Someone Else",
264 | * "email": "birds@eye.view.com"
265 | * },
266 | * "title": "Another title",
267 | * "platform": "satellite",
268 | * "provider": "Satellites R Us",
269 | * "sensor": "SATELLITE_I",
270 | * "acquisition_start": "2015-04-01T00:00:00.000",
271 | * "acquisition_end": "2015-04-30T00:00:00.000",
272 | * "urls": [
273 | * "http://satellit.es/image1.tif",
 *          "http://satellit.es/image2.tif"
275 | * ]
276 | * }
277 | * ]
278 | * }
279 | */
280 | {
281 | method: 'POST',
282 | path: '/uploads',
283 | config: {
284 | auth: 'session',
285 | payload: {
286 | allow: 'application/json',
287 | output: 'data',
288 | parse: true
289 | }
290 | },
291 | handler: function (request, reply) {
292 | const { error: validationError } = Joi.validate(request.payload,
293 | uploadSchema);
294 | if (!validationError) {
295 | return processUpload(request.payload, request, reply)
296 | .then((upload) => {
297 | reply(upload);
298 | })
299 | .catch((err) => {
300 | reply(Boom.wrap(err));
301 | });
302 | } else {
303 | request.log(['info'], validationError);
304 | reply(Boom.badRequest(validationError));
305 | }
306 | }
307 | },
308 | {
309 | method: 'POST',
310 | path: '/dronedeploy',
311 | config: {
312 | auth: 'jwt',
313 | payload: {
314 | allow: 'application/json',
315 | output: 'data',
316 | parse: true
317 | }
318 | },
319 | handler: function (request, reply) {
320 | const {
321 | acquisition_start,
322 | acquisition_end,
323 | sensor,
324 | provider,
325 | tags,
326 | title
327 | } = request.query;
328 |
329 | const scene = {
330 | contact: {
331 | name: request.auth.credentials.name,
332 | email: request.auth.credentials.contact_email
333 | },
334 | acquisition_start,
335 | acquisition_end,
336 | sensor,
337 | provider,
338 | tags,
339 | title,
340 | tms: null,
341 | urls: [request.payload.download_path],
342 | license: 'CC-BY 4.0',
343 | platform: 'uav'
344 | };
345 |
346 | const data = { scenes: [scene] };
347 | const { error: validationError } = Joi.validate(data, uploadSchema);
348 |
349 | if (!validationError) {
350 | return processUpload(data, request, reply)
351 | .then((upload) => {
352 | reply(upload);
353 | })
354 | .catch((err) => {
355 | reply(Boom.wrap(err));
356 | });
357 | } else {
358 | request.log(['info'], validationError);
359 | reply(Boom.badRequest(validationError));
360 | }
361 | }
362 | }
363 | ];
364 |
/**
 * Persist final metadata for a successfully transcoded image.
 *
 * Loads the image document, merges the transcoder-reported properties from
 * `request.payload` into its metadata, creates a public Meta record
 * (retrying once with de-duplicated vertices when the store rejects the
 * geometry with error code 16755), writes the metadata back onto the image
 * document, and finally syncs the record to the OAM catalog.
 *
 * @param {Object} request Hapi request carrying the transcoder callback
 *   payload (`properties`, GeoJSON geometry) and upload path params.
 * @param {ObjectID} imageId _id of the image document to update.
 * @returns {Promise<boolean>} resolves true once the catalog sync is done.
 */
function updateUploadMetadata (request, imageId) {
  return db.collection('images').findOne({
    _id: imageId
  })
  .then(image => {
    const meta = image.metadata;
    const geojson = getGeom(request.payload);
    const boundbox = bbox(geojson);
    meta.user = image.user_id;
    // Expose the s3:// object through its public https endpoint.
    meta.uuid = request.payload.properties.url.replace(/^s3:\/\/([^/]+)\//, `https://$1.${config.s3PublicDomain}/`);
    meta.geojson = geojson;
    meta.geojson.bbox = boundbox;
    meta.bbox = meta.geojson.bbox;
    meta.footprint = wellknown.stringify(envelope(meta.geojson));
    meta.gsd = request.payload.properties.resolution_in_meters;
    meta.file_size = request.payload.properties.size;
    meta.projection = request.payload.properties.projection;
    meta.meta_uri = meta.uuid.replace(/\.tif$/, '_meta.json');
    meta.uploaded_at = new Date();
    meta.properties = Object.assign(meta.properties, request.payload.properties);
    meta.properties.thumbnail = meta.properties.thumbnail.replace(/^s3:\/\/([^/]+)\//, `https://$1.${config.s3PublicDomain}/`);
    meta.properties.tms = config.useTitiler
      ? `${config.tilerBaseUrl}/cog/tiles/WebMercatorQuad/{z}/{x}/{y}@1x?url=${encodeURIComponent(meta.uuid)}`
      : `${config.tilerBaseUrl}/${request.params.id}/${request.params.sceneIdx}/${request.params.imageId}/{z}/{x}/{y}`;
    meta.properties.wmts = `${config.tilerBaseUrl}/${request.params.id}/${request.params.sceneIdx}/${request.params.imageId}/wmts`;

    meta.properties.tilejson = `${config.tilerBaseUrl}/cog/tilejson.json?url=${encodeURIComponent(meta.uuid)}`;

    // remove duplicated properties
    delete meta.properties.projection;
    delete meta.properties.size;
    return Meta.create(meta)
      .catch((error) => {
        // 16755: geo key can't be indexed — typically duplicate vertices.
        if (error.code === 16755) {
          // Mutates request.payload's geojson in place.
          removeDuplicateVertices(request.payload);
          meta.geojson = getGeom(request.payload);
          return Meta.create(meta);
        } else {
          throw error;
        }
      })
      .then(meta => {
        // Await the write-back before syncing; the original fired this
        // updateOne as a floating promise, so oamSync() and the caller's
        // resolution could race ahead of the metadata write.
        return db.collection('images').updateOne({
          _id: imageId
        }, {
          $set: {
            metadata: meta
          }
        })
        .then(() => meta);
      })
      .then(meta => meta.oamSync())
      .then(() => true);
  });
}
424 |
/**
 * Translate a transcoder callback payload into an image status update.
 *
 * Resolves to 'finished' only when the payload carries final `properties`
 * (the signal that metadata should now be persisted); every other case
 * resolves to 'notFinished'.
 *
 * The original used three independent `if` blocks that reassigned a
 * shared `promise`, so a payload matching more than one condition kicked
 * off several conflicting updateOne calls with all but the last left
 * floating. The exclusive branches below issue exactly one write, checked
 * in the same precedence the last-assignment-wins logic produced
 * (properties > processing > failed), so resolved values are unchanged.
 *
 * @param {Object} request Hapi request with the transcoder payload.
 * @param {ObjectID} imageId _id of the image document to update.
 * @returns {Promise<string>} 'finished' or 'notFinished'.
 */
function updateUploadStatus (request, imageId) {
  const notFinished = 'notFinished';
  const finished = 'finished';

  if (request.payload.properties) {
    return db.collection('images').updateOne({
      _id: imageId
    }, {
      $set: {
        status: 'finished'
      },
      $currentDate: {
        stoppedAt: true
      }
    })
    .then(() => finished);
  }
  if (request.payload.status === 'processing') {
    return db.collection('images').updateOne({
      _id: imageId
    }, {
      $set: {
        status: 'processing'
      },
      $currentDate: {
        startedAt: true
      }
    })
    .then(() => notFinished);
  }
  if (request.payload.status === 'failed') {
    return db.collection('images').updateOne({
      _id: imageId
    }, {
      $set: {
        status: 'errored'
      },
      $currentDate: {
        stoppedAt: true
      }
    })
    .then(() => notFinished);
  }
  return Promise.resolve(notFinished);
}
470 |
/**
 * Append the transcoder's progress message (if any) to the image's
 * message log. Resolves true when the payload carries no message.
 *
 * @param {Object} request Hapi request with the transcoder payload.
 * @param {ObjectID} imageId _id of the image document to update.
 * @returns {Promise} updateOne result, or true when nothing to record.
 */
function updateUploadMessage (request, imageId) {
  const { status, message } = request.payload;
  if (message == null) {
    return Promise.resolve(true);
  }
  return db.collection('images').updateOne(
    { _id: imageId },
    { $push: { messages: { status, message } } }
  );
}
487 |
/**
 * Notify an uploader by email that their upload was received.
 * Adapts sendgrid's node-style callback API to a Promise.
 *
 * @param {String} address Recipient email address.
 * @param {ObjectID} uploadId Interpolated into the notification text.
 * @returns {Promise} resolves with sendgrid's response json.
 */
function sendEmail (address, uploadId) {
  return new Promise(function (resolve, reject) {
    sendgrid.send({
      to: address,
      from: config.sendgridFrom,
      subject: config.emailNotification.subject,
      text: config.emailNotification.text.replace('{UPLOAD_ID}', uploadId)
    }, function (err, json) {
      if (err) { return reject(err); }
      resolve(json);
    });
  });
}
505 |
/**
 * Create an upload record, insert placeholder image documents for every
 * scene, fire a best-effort notification email, and enqueue each source
 * image for transcoding.
 *
 * @param {Object} data Validated upload payload ({ scenes: [...] }).
 * @param {Object} request Hapi request (for credentials and logging).
 * @param {Function} reply Accepted for signature compatibility; unused.
 * @returns {Promise<{upload: ObjectID}>} resolves once all images are queued.
 */
function processUpload (data, request, reply) {
  const uploadId = new ObjectID();
  const credentials = request.auth.credentials;
  const upload = Object.assign({}, data, {
    _id: uploadId,
    user: credentials._id,
    createdAt: new Date()
  });

  // One batch of placeholder image documents per scene; contact details
  // fall back to the authenticated user when the scene carries none.
  const insertImagesAll = Promise.all(upload.scenes.map((scene) => {
    const contact = scene.contact || {
      name: credentials.name,
      email: credentials.contact_email
    };
    return insertImages(scene, contact.name, contact.email, credentials._id);
  }));

  // Persist the upload with each scene pointing at its image ids.
  const uploadPromise = insertImagesAll.then((sceneImageIds) => {
    const scenes = upload.scenes.map((scene, sceneIndex) =>
      Object.assign({}, scene, { images: sceneImageIds[sceneIndex].slice() }));
    return db.collection('uploads').insertOne(
      Object.assign({}, upload, { scenes }));
  });

  // Best-effort notification: fired without awaiting; failures are
  // logged, never fatal to the upload.
  sendEmail(credentials.contact_email, uploadId)
    .then((json) => request.log(['debug', 'email'], json))
    .catch((error) => request.log(['error', 'email'], error));

  // Queue every image for transcoding once both the image docs and the
  // upload record exist; the callback URL routes progress back here.
  const transcoderPromisesAll = Promise.all([uploadPromise, insertImagesAll])
    .then((results) => {
      const sceneImageIds = results[1];
      const queued = [];
      upload.scenes.forEach((scene, sceneIndex) => {
        sceneImageIds[sceneIndex].forEach((imageId, urlIndex) => {
          const key = [uploadId, sceneIndex, imageId].join('/');
          queued.push(transcoder.queueImage(
            scene.urls[urlIndex],
            key,
            `${config.apiEndpoint}/uploads/${uploadId}/${sceneIndex}/${imageId}`));
        });
      });
      return Promise.all(queued);
    });

  return transcoderPromisesAll.then(() => ({ upload: upload._id }));
}
568 | /**
569 | * @apiDefine uploadStatusSuccess
570 | * @apiSuccess {Object[]} results.scenes
571 | * @apiSuccess {Object} results.scenes.contact Contact person for this scene
572 | * @apiSuccess {String} results.scenes.contact.name
573 | * @apiSuccess {String} results.scenes.contact.email
574 | * @apiSuccess {String} results.scenes.title Scene title
575 | * @apiSuccess {String="satellite","aircraft","UAV","balloon","kite"} results.scenes.platform
576 | * @apiSuccess {String} results.scenes.provider Imagery provider
577 | * @apiSuccess {String} results.scenes.sensor Sensor/device
578 | * @apiSuccess {String} results.scenes.acquisition_start Date and time of imagery acquisition
579 | * @apiSuccess {String} results.scenes.acquisition_end Date and time of imagery acquisition
580 | * @apiSuccess {Object[]} results.scenes.images Array of images in this scene
581 | * @apiSuccess {String} results.scenes.images.url
582 | * @apiSuccess {String="initial","processing","finished","errored"} results.scenes.images.status
583 | * @apiSuccess {String} results.scenes.images.error
584 | * @apiSuccess {String[]} results.scenes.images.messages
585 | * @apiSuccess {String} results.scenes.images.startedAt Date and time the processing started
586 | * @apiSuccess {String} results.scenes.images.stoppedAt Date and time the processing stopped
587 | */
588 |
--------------------------------------------------------------------------------