├── .babelrc ├── .circleci └── config.yml ├── .dockerignore ├── .eslintrc ├── .gitignore ├── .nvmrc ├── Dockerfile ├── LICENSE ├── README.md ├── app ├── config.js ├── config │ ├── base.js │ └── offline.js ├── db │ ├── index.js │ └── structure.js ├── index.js ├── lib │ └── ogr2osm │ │ ├── Readme.md │ │ ├── default_translation.py │ │ ├── geom.py │ │ └── ogr2osm.py ├── plugins │ ├── hapi-paginate.js │ └── rra-osm-p2p-server.js ├── routes │ ├── projects--create.js │ ├── projects--delete.js │ ├── projects--files-delete.js │ ├── projects--files-download.js │ ├── projects--finish-setup.js │ ├── projects--get.js │ ├── projects--rah-export.js │ ├── projects--source-data.js │ ├── projects--tiles.js │ ├── projects--update.js │ ├── root.js │ ├── scenarios--create.js │ ├── scenarios--delete.js │ ├── scenarios--files-delete.js │ ├── scenarios--files-download.js │ ├── scenarios--gen-results.js │ ├── scenarios--get.js │ ├── scenarios--poi.js │ ├── scenarios--results.js │ ├── scenarios--source-data.js │ ├── scenarios--tiles.js │ ├── scenarios--update.js │ └── wbcatalog-source-data.js ├── s3 │ ├── index.js │ ├── structure.js │ └── utils.js ├── services │ ├── export-road-network │ │ ├── export-road-network.js │ │ └── index.js │ ├── plugins.js │ ├── project-setup │ │ ├── admin-bounds.js │ │ ├── common.js │ │ ├── index.js │ │ ├── origins.js │ │ ├── poi.js │ │ ├── profile.js │ │ ├── project-setup.js │ │ └── road-network.js │ ├── rra-osm-p2p.js │ ├── scenario-create │ │ ├── index.js │ │ └── scenario-create.js │ └── server.js └── utils │ ├── app-logger.js │ ├── aws-task-runner.js │ ├── aws.js │ ├── default.profile.lua │ ├── default.profile.template.js │ ├── errors.js │ ├── operation.js │ ├── osrm-profile.js │ ├── overpass.js │ ├── service-runner.js │ ├── utils.js │ ├── vector-tiles.js │ └── wbcatalog.js ├── docker-compose-dev.yml ├── docker-compose-test.yml ├── docker-compose.yml ├── index.js ├── install.sh ├── package.json ├── setup ├── create-test-db.sh ├── fixtures │ └── fixtures.js ├── index.js ├── setup-extensions.sh └── setup.js ├── test ├── test-projects-files.js ├── test-projects-source-data.js ├── test-projects.js ├── test-rah-export.js ├── test-result-gen.js ├── test-root.js ├── test-scenario-source-data.js ├── test-scenarios-create.js ├── test-scenarios-files.js ├── test-scenarios-poi.js ├── test-scenarios-results.js ├── test-scenarios.js ├── test-services-project-setup.js ├── test-utils-operations.js ├── test-wbcatalog-source-data.js ├── test.env └── utils │ ├── data-sergipe │ ├── README.md │ ├── admin-boundaries.geojson │ ├── osm-p2p-db │ │ ├── index │ │ │ ├── 000042.ldb │ │ │ ├── 000043.ldb │ │ │ ├── 000044.ldb │ │ │ ├── 000045.ldb │ │ │ ├── 000051.ldb │ │ │ ├── 000063.ldb │ │ │ ├── 000064.ldb │ │ │ ├── 000065.ldb │ │ │ ├── 000066.ldb │ │ │ ├── 000067.ldb │ │ │ ├── CURRENT │ │ │ ├── LOCK │ │ │ ├── LOG │ │ │ ├── LOG.old │ │ │ └── MANIFEST-000004 │ │ ├── kdb │ │ └── log │ │ │ ├── 000005.ldb │ │ │ ├── CURRENT │ │ │ ├── LOCK │ │ │ ├── LOG │ │ │ ├── LOG.old │ │ │ └── MANIFEST-000004 │ ├── poi-townhalls.geojson │ ├── profile.lua │ ├── results-p2000-s2000.csv │ ├── results-p2000-s2000.geojson │ ├── results-p2000-s2000.json │ ├── results.csv │ ├── road-network.osm │ └── villages.geojson │ ├── data.js │ ├── road-network-small.osm │ ├── test-file │ ├── test-file-scenario-1200 │ └── test-file.json └── yarn.lock /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": [ "es2015" ], 3 | "plugins": [ 4 | ["transform-object-rest-spread", { "useBuiltIns": true 
}] 5 | ], 6 | "sourceMaps": "inline", 7 | "retainLines": true 8 | } 9 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | jobs: 3 | build: 4 | docker: 5 | - image: circleci/node:8 6 | 7 | working_directory: ~/repo 8 | 9 | environment: 10 | - DOCKER_IMAGE: ram-backend 11 | - DOCKER_ORG: wbtransport 12 | 13 | steps: 14 | - checkout 15 | - setup_remote_docker: 16 | docker_layer_caching: true 17 | 18 | # Download and cache dependencies 19 | - restore_cache: 20 | keys: 21 | - v1-dependencies-{{ checksum "package.json" }} 22 | # fallback to using the latest cache if no exact match is found 23 | - v1-dependencies- 24 | 25 | - run: 26 | name: Set up the test db and storage bucket 27 | command: docker-compose -f docker-compose-test.yml run ram-api yarn run setup -- --db --bucket 28 | 29 | - run: 30 | name: Run tests 31 | command: docker-compose -f docker-compose-test.yml run ram-api yarn test-no-env 32 | 33 | - save_cache: 34 | paths: 35 | - node_modules 36 | key: v1-dependencies-{{ checksum "package.json" }} 37 | 38 | - run: 39 | name: Lint 40 | command: | 41 | yarn install 42 | yarn lint 43 | 44 | deploy-stage: 45 | docker: 46 | - image: circleci/node:8 47 | 48 | working_directory: ~/repo 49 | 50 | environment: 51 | - DOCKER_IMAGE: ram-backend 52 | - DOCKER_ORG: wbtransport 53 | 54 | steps: 55 | - checkout 56 | - setup_remote_docker: 57 | docker_layer_caching: true 58 | 59 | - run: 60 | name: Build Docker image for dev version 61 | command: | 62 | echo "Building Docker image" 63 | docker build -t ${DOCKER_IMAGE} . 64 | docker login -u $DOCKER_USERNAME -p $DOCKER_PASSWD 65 | 66 | echo "Pushing image to Docker Hub as :latest-dev" 67 | docker tag ${DOCKER_IMAGE} ${DOCKER_ORG}/${DOCKER_IMAGE}:latest-dev 68 | docker push ${DOCKER_ORG}/${DOCKER_IMAGE}:latest-dev 69 | 70 | # Also publish a tag with the CIRCLE_SHA1 so dev versions can 71 | # be redeployed to AWS. 72 | CODE=$(echo $CIRCLE_SHA1 | cut -c 1-8) 73 | echo "Pushing image to Docker Hub as :dev-${CODE}" 74 | docker tag ${DOCKER_IMAGE} ${DOCKER_ORG}/${DOCKER_IMAGE}:dev-${CODE} 75 | docker push ${DOCKER_ORG}/${DOCKER_IMAGE}:dev-${CODE} 76 | 77 | deploy-prod: 78 | docker: 79 | - image: circleci/node:8 80 | 81 | working_directory: ~/repo 82 | 83 | environment: 84 | - DOCKER_IMAGE: ram-backend 85 | - DOCKER_ORG: wbtransport 86 | 87 | steps: 88 | - checkout 89 | - setup_remote_docker: 90 | docker_layer_caching: true 91 | 92 | - add_ssh_keys: 93 | fingerprints: 94 | - "18:04:a0:3c:f9:2f:6c:c8:46:3c:6e:f0:be:56:23:19" 95 | 96 | - run: 97 | name: Build Docker image for stable version 98 | command: | 99 | # Grab version from package.json and prepend with v (v0.5.0) 100 | VERSION=v$(grep -m1 version package.json | awk -F: '{ print $2 }' | sed 's/[", ]//g') 101 | 102 | # Attempt to add a git tag based on version in package.json. If 103 | # the tag already exists, git will fail and stop the build. 104 | if ! git tag ${VERSION} master 105 | then 106 | echo >&2 "Failed to tag a new release, skipping build. Did you update the version in package.json?" 107 | exit 1 108 | else 109 | # Push tag to Github 110 | git push origin ${VERSION} 111 | 112 | echo Building Docker image 113 | docker build -t ${DOCKER_IMAGE} . 
114 | docker login -u $DOCKER_USERNAME -p $DOCKER_PASSWD 115 | 116 | echo Pushing image to Docker Hub with ${VERSION} tag 117 | docker tag ${DOCKER_IMAGE} ${DOCKER_ORG}/${DOCKER_IMAGE}:${VERSION} 118 | docker push ${DOCKER_ORG}/${DOCKER_IMAGE}:${VERSION} 119 | 120 | echo Pushing image to Docker Hub with latest tag 121 | docker tag ${DOCKER_IMAGE} ${DOCKER_ORG}/${DOCKER_IMAGE}:latest 122 | docker push ${DOCKER_ORG}/${DOCKER_IMAGE}:latest 123 | fi 124 | 125 | workflows: 126 | version: 2 127 | build-deploy: 128 | jobs: 129 | - build 130 | - deploy-stage: 131 | requires: 132 | - build 133 | filters: 134 | branches: 135 | only: develop 136 | - deploy-prod: 137 | requires: 138 | - build 139 | filters: 140 | branches: 141 | only: master 142 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .resources 2 | node_modules 3 | osm-p2p-dbs 4 | *.log -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": ["standard"], 3 | "env": { 4 | "es6": true, 5 | "mocha": true 6 | }, 7 | "parserOptions": { 8 | "ecmaVersion": 2017 9 | }, 10 | "rules": { 11 | "semi": [2, "always"], 12 | "no-extra-semi": 2, 13 | "semi-spacing": [2, { "before": false, "after": true }] 14 | } 15 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ################################################ 2 | ############### .gitignore ################## 3 | ################################################ 4 | # 5 | # This file is only relevant if you are using git. 6 | # 7 | # Files which match the splat patterns below will 8 | # be ignored by git. This keeps random crap 9 | # and sensitive credentials from being uploaded to 10 | # your repository. It allows you to configure your 11 | # app for your machine without accidentally 12 | # committing settings which will smash the local 13 | # settings of other developers on your team. 14 | # 15 | # Some reasonable defaults are included below, 16 | # but, of course, you should modify/extend/prune 17 | # to fit your needs! 18 | ################################################ 19 | 20 | 21 | ################################################ 22 | # Local Configuration 23 | # 24 | # Explicitly ignore files which contain: 25 | # 26 | # 1. Sensitive information you'd rather not push to 27 | # your git repository. 28 | # e.g., your personal API keys or passwords. 29 | # 30 | # 2. Environment-specific configuration 31 | # Basically, anything that would be annoying 32 | # to have to change every time you do a 33 | # `git pull` 34 | # e.g., your local development database, or 35 | # the S3 bucket you're using for file uploads 36 | # in development. 37 | # 38 | ################################################ 39 | 40 | app/config/local.js 41 | .env.sample 42 | 43 | 44 | ################################################ 45 | # Dependencies 46 | # 47 | # When releasing a production app, you may 48 | # consider including your node_modules and 49 | # bower_components directory in your git repo, 50 | # but during development, it's best to exclude it, 51 | # since different developers may be working on 52 | # different kernels, where dependencies would 53 | # need to be recompiled anyway.
54 | # 55 | # 56 | ################################################ 57 | 58 | node_modules 59 | 60 | 61 | ################################################ 62 | # Node.js / NPM 63 | # 64 | # Common files generated by Node, NPM, and the 65 | # related ecosystem. 66 | ################################################ 67 | 68 | lib-cov 69 | *.seed 70 | *.log 71 | *.out 72 | *.pid 73 | npm-debug.log 74 | 75 | 76 | ################################################ 77 | # Apidocs 78 | # 79 | # Common files generated by apidocs and other docs 80 | ################################################ 81 | 82 | 83 | 84 | ################################################ 85 | # Miscellaneous 86 | # 87 | # Common files generated by text editors, 88 | # operating systems, file systems, etc. 89 | ################################################ 90 | 91 | *~ 92 | *# 93 | .DS_STORE 94 | .netbeans 95 | nbproject 96 | .idea 97 | .resources 98 | .node_history 99 | temp 100 | tmp 101 | .tmp 102 | dist 103 | .nyc_output 104 | ecs-task-definition-generated.yml 105 | *.pyc 106 | .vscode 107 | 108 | # OSM P2P db 109 | osm-p2p-dbs 110 | osm-p2p-dbs-test 111 | -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | v8 -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:16.04 2 | ADD . /dist 3 | WORKDIR /dist 4 | RUN bash install.sh 5 | RUN yarn install --unsafe-perm 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2018 World Bank 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /app/config.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | const _ = require('lodash'); 3 | 4 | // Empty template as base. 5 | var config = require('./config/base'); 6 | 7 | // local config overrides when present. 8 | try { 9 | _.merge(config, require('./config/local')); 10 | } catch (e) { 11 | // Local file is not mandatory. 
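// A minimal, hypothetical app/config/local.js (the file is gitignored;
// its shape mirrors app/config/base.js and the keys used below in
// app/config.js):
//
// module.exports = {
//   instanceId: 'local-dev',
//   db: 'postgresql://ram:ram@localhost:5432/ram',
//   debug: true
// };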
12 | } 13 | 14 | // In an offline setup, the other config files are ignored 15 | if (process.env.DS_ENV === 'offline') { 16 | config = require('./config/offline'); 17 | } 18 | 19 | // Check if an instance id was defined. 20 | config.instanceId = process.env.INSTANCE_ID || config.instanceId; 21 | 22 | if (!config.instanceId) throw new Error('The RAM instance id was not defined. Set one with INSTANCE_ID'); 23 | 24 | if (!config.instanceId.match(/^[a-z0-9-_.]+$/)) throw new Error('Instance id invalid. Use only lowercase alphanumeric characters and _ - .'); 25 | 26 | // Overrides by ENV variables. 27 | config.db = process.env.DB_URI || config.db; 28 | 29 | config.debug = process.env.DEBUG !== undefined ? (process.env.DEBUG.toLowerCase() === 'true') : config.debug; 30 | config.connection.port = process.env.PORT || config.connection.port; 31 | config.connection.host = process.env.HOST || config.connection.host; 32 | 33 | config.osmP2PDir = process.env.OSM_P2P_DIR || config.osmP2PDir; 34 | 35 | config.storage.host = process.env.STORAGE_HOST || config.storage.host; 36 | config.storage.port = parseInt(process.env.STORAGE_PORT) || config.storage.port; 37 | config.storage.engine = process.env.STORAGE_ENGINE || config.storage.engine; 38 | config.storage.accessKey = process.env.STORAGE_ACCESS_KEY || config.storage.accessKey; 39 | config.storage.secretKey = process.env.STORAGE_SECRET_KEY || config.storage.secretKey; 40 | config.storage.bucket = process.env.STORAGE_BUCKET || config.storage.bucket; 41 | config.storage.region = process.env.STORAGE_REGION || config.storage.region; 42 | 43 | config.analysisProcess.service = process.env.ANL_SERVICE || config.analysisProcess.service; 44 | config.analysisProcess.container = process.env.ANL_CONTAINER || config.analysisProcess.container; 45 | config.analysisProcess.db = process.env.ANL_DB || config.analysisProcess.db; 46 | config.analysisProcess.storageHost = process.env.ANL_STORAGE_HOST || config.analysisProcess.storageHost; 47 | config.analysisProcess.storagePort = process.env.ANL_STORAGE_PORT || config.analysisProcess.storagePort; 48 | 49 | config.vtProcess.service = process.env.VT_SERVICE || config.vtProcess.service; 50 | config.vtProcess.container = process.env.VT_CONTAINER || config.vtProcess.container; 51 | config.vtProcess.storageHost = process.env.VT_STORAGE_HOST || config.vtProcess.storageHost; 52 | config.vtProcess.storagePort = process.env.VT_STORAGE_PORT || config.vtProcess.storagePort; 53 | 54 | config.rahExport.ghRepo = process.env.RAH_GH_REPO || config.rahExport.ghRepo; 55 | config.rahExport.ghToken = process.env.RAH_GH_TOKEN || config.rahExport.ghToken; 56 | config.rahExport.ghPath = process.env.RAH_GH_PATH || config.rahExport.ghPath; 57 | config.rahExport.committerName = process.env.RAH_CNAME || config.rahExport.committerName; 58 | config.rahExport.committerEmail = process.env.RAH_CEMAIL || config.rahExport.committerEmail; 59 | config.rahExport.authorName = process.env.RAH_ANAME || config.rahExport.authorName; 60 | config.rahExport.authorEmail = process.env.RAH_AEMAIL || config.rahExport.authorEmail; 61 | 62 | config.roadNetEditMax = process.env.ROAD_NET_EDIT_MAX || config.roadNetEditMax; 63 | 64 | config.baseDir = __dirname; 65 | 66 | module.exports = config; 67 | -------------------------------------------------------------------------------- /app/config/base.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | module.exports = { 3 | connection: { 4 | host: '0.0.0.0', 5 | port: 4000 6 | }, 
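// Most of the values below default to null and are expected to come from
// app/config/local.js or from the environment-variable overrides applied
// in app/config.js.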
7 | instanceId: null, 8 | db: null, 9 | osmP2PDir: null, 10 | storage: { 11 | host: null, 12 | port: null, 13 | engine: 's3', 14 | accessKey: null, 15 | secretKey: null, 16 | bucket: null, 17 | region: null 18 | }, 19 | analysisProcess: { 20 | service: null, 21 | container: 'wbtransport/ram-analysis:v0.1.0', 22 | db: null, 23 | storageHost: null, 24 | storagePort: null 25 | }, 26 | vtProcess: { 27 | service: null, 28 | container: null, 29 | storageHost: null, 30 | storagePort: null 31 | }, 32 | rahExport: { 33 | ghRepo: null, 34 | ghPath: null, 35 | ghToken: null, 36 | committerName: null, 37 | committerEmail: null, 38 | authorName: null, 39 | authorEmail: null 40 | }, 41 | roadNetEditMax: 20 * Math.pow(1024, 2) // 20MB 42 | }; 43 | -------------------------------------------------------------------------------- /app/config/offline.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | environment: 'offline', 3 | debug: false, 4 | instanceId: 'offline-ram', 5 | connection: { 6 | host: '0.0.0.0', 7 | port: 4000 8 | }, 9 | auth: { 10 | strategy: 'none' 11 | }, 12 | db: 'postgresql://ram:ram@172.99.99.10:5432/ram', 13 | osmP2PDir: `${__dirname}/../../osm-p2p-dbs`, 14 | storage: { 15 | host: '172.99.99.15', 16 | port: 9000, 17 | engine: 'minio', 18 | accessKey: 'minio', 19 | secretKey: 'miniostorageengine', 20 | bucket: 'ram', 21 | region: 'us-east-1' 22 | }, 23 | analysisProcess: { 24 | service: 'docker', 25 | container: 'wbtransport/ram-analysis:v0.1.0', 26 | db: 'postgresql://ram:ram@172.99.99.10:5432/ram', 27 | storageHost: '172.99.99.15', 28 | storagePort: 9000 29 | }, 30 | vtProcess: { 31 | service: 'docker', 32 | container: 'wbtransport/ram-vt:v0.1.0', 33 | storageHost: '172.99.99.15', 34 | storagePort: 9000 35 | }, 36 | roadNetEditMax: 20 * Math.pow(1024, 2) // 20MB 37 | }; 38 | -------------------------------------------------------------------------------- /app/db/index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import knex from 'knex'; 3 | 4 | import config from '../config'; 5 | 6 | export default knex({ 7 | client: 'pg', 8 | connection: config.db 9 | }); 10 | -------------------------------------------------------------------------------- /app/index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | require('dotenv').config(); 3 | 4 | import config from './config'; 5 | import initServer from './services/server'; 6 | import { getAWSInstanceCredentials } from './utils/aws'; 7 | 8 | var options = { 9 | connection: config.connection 10 | }; 11 | 12 | async function main () { 13 | // If we're using a S3 storage engine but no accessKey and secretKey are set 14 | // up, we assume that it is being run from a EC2 instance and will try to 15 | // get the credentials through the url. 16 | const { engine, accessKey, secretKey } = config.storage; 17 | if (engine === 's3' && !accessKey && !secretKey) { 18 | console.log('AWS access key and secret not set. Will try to get them.'); 19 | try { 20 | // Try to get the credentials on start just to see if everything is ok. 
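// (Presumably via the EC2 instance metadata endpoint; the imported
// getAWSInstanceCredentials in utils/aws.js does the actual lookup.)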
21 | await getAWSInstanceCredentials('', true); 22 | console.log('AWS credentials successfully fetched.'); 23 | } catch (err) { 24 | console.log(err); 25 | console.log('Is this running on a EC2 instance?'); 26 | process.exit(1); 27 | } 28 | } 29 | 30 | // Start API server 31 | initServer(options, (err, server) => { 32 | if (err) throw err; 33 | server.start(() => { 34 | // Started. 35 | }); 36 | }); 37 | } 38 | 39 | main(); 40 | -------------------------------------------------------------------------------- /app/lib/ogr2osm/Readme.md: -------------------------------------------------------------------------------- 1 | ogr2osm.py 2 | ========== 3 | 4 | A tool for converting ogr-readable files like shapefiles into .osm data 5 | 6 | 7 | Installation 8 | ------------ 9 | 10 | ogr2osm requires gdal with python bindings. Depending on the file formats 11 | you want to read you may have to compile it yourself but there should be no 12 | issues with shapefiles. On Ubuntu you can run `sudo apt-get install -y python-gdal python-lxml` to get 13 | the software you need. 14 | 15 | It also makes use of lxml. Although it should fall back to builtin XML implementations seamlessly these are less likely to be tested and will most likely run much slower. 16 | 17 | To install ogr2osm and download the default translations the following command 18 | can be used: 19 | 20 | git clone --recursive https://github.com/pnorman/ogr2osm 21 | 22 | To update 23 | 24 | cd ogr2osm 25 | git pull 26 | git submodule update 27 | 28 | About 29 | ----- 30 | 31 | This version of ogr2osm is based on 32 | [Andrew Guertin's version for UVM](https://github.com/andrewguertin/ogr2osm) 33 | which is in turn based on Ivan Ortega's version from the OSM SVN server. 34 | 35 | ogr2osm will read any data source that ogr can read and handle reprojection for 36 | you. It takes a python file to translate external data source tags into OSM 37 | tags, allowing you to use complicated logic. If no translation is specified it 38 | will use an identity translation, carrying all tags from the source to the .osm 39 | output. 40 | 41 | Import Cautions 42 | --------------- 43 | Anyone planning an import into OpenStreetMap should read and review the import 44 | guidelines located [on the wiki](http://wiki.openstreetmap.org/wiki/Import/Guidelines). 45 | When writing your translation file you should look at other examples and 46 | carefully consider each external data source tag to see if it should be 47 | converted to an OSM tag. 48 | 49 | Usage 50 | ----- 51 | 52 | Usage: ogr2osm.py SRCFILE 53 | 54 | Options: 55 | -h, --help show this help message and exit 56 | -t TRANSLATION, --translation=TRANSLATION 57 | Select the attribute-tags translation method. See the 58 | translations/ directory for valid values. 59 | -o OUTPUT, --output=OUTPUT 60 | Set destination .osm file name and location. 61 | -e EPSG_CODE, --epsg=EPSG_CODE 62 | EPSG code of source file. Do not include the 'EPSG:' 63 | prefix. If specified, overrides projection from source 64 | metadata if it exists. 65 | -p PROJ4_STRING, --proj4=PROJ4_STRING 66 | PROJ.4 string. If specified, overrides projection from 67 | source metadata if it exists. 68 | -v, --verbose 69 | -d, --debug-tags Output the tags for every feature parsed. 70 | -f, --force Force overwrite of output file. 71 | --encoding=ENCODING Encoding of the source file. 
If specified, overrides 72 | the default of utf-8 73 | --significant-digits=SIGNIFICANTDIGITS 74 | Number of decimal places for coordinates 75 | --rounding-digits=ROUNDINGDIGITS 76 | Number of decimal places for rounding 77 | --no-memory-copy Do not make an in-memory working copy 78 | --no-upload-false Omit upload=false from the completed file to surpress 79 | JOSM warnings when uploading. 80 | --id=ID ID to start counting from for the output file. 81 | Defaults to 0. 82 | -------------------------------------------------------------------------------- /app/lib/ogr2osm/default_translation.py: -------------------------------------------------------------------------------- 1 | ''' 2 | The default translation file removes all the attributes 3 | with empty values 4 | ''' 5 | 6 | def filterTags(attrs): 7 | if not attrs: return 8 | 9 | tags = {} 10 | 11 | for k,v in attrs.iteritems(): 12 | if v: 13 | tags.update({k: v}) 14 | 15 | return tags -------------------------------------------------------------------------------- /app/lib/ogr2osm/geom.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 2012-2013 Paul Norman 4 | # 5 | # Released under the MIT license: http://opensource.org/licenses/mit-license.php 6 | 7 | # Classes 8 | class Geometry(object): 9 | elementIdCounter = 0 10 | elementIdCounterIncr = -1 11 | geometries = [] 12 | def __init__(self): 13 | self.id = getNewID() 14 | self.parents = set() 15 | Geometry.geometries.append(self) 16 | def replacejwithi(self, i, j): 17 | pass 18 | def addparent(self, parent): 19 | self.parents.add(parent) 20 | def removeparent(self, parent, shoulddestroy=True): 21 | self.parents.discard(parent) 22 | if shoulddestroy and len(self.parents) == 0: 23 | Geometry.geometries.remove(self) 24 | 25 | # Helper function to get a new ID 26 | def getNewID(): 27 | Geometry.elementIdCounter += Geometry.elementIdCounterIncr 28 | return Geometry.elementIdCounter 29 | 30 | class Point(Geometry): 31 | def __init__(self, x, y): 32 | Geometry.__init__(self) 33 | self.x = x 34 | self.y = y 35 | def replacejwithi(self, i, j): 36 | pass 37 | 38 | class Way(Geometry): 39 | def __init__(self): 40 | Geometry.__init__(self) 41 | self.points = [] 42 | def replacejwithi(self, i, j): 43 | self.points = [i if x == j else x for x in self.points] 44 | j.removeparent(self) 45 | i.addparent(self) 46 | 47 | class Relation(Geometry): 48 | def __init__(self): 49 | Geometry.__init__(self) 50 | self.members = [] 51 | def replacejwithi(self, i, j): 52 | self.members = [(i, x[1]) if x[0] == j else x for x in self.members] 53 | j.removeparent(self) 54 | i.addparent(self) 55 | 56 | class Feature(object): 57 | features = [] 58 | def __init__(self): 59 | self.geometry = None 60 | self.tags = {} 61 | Feature.features.append(self) 62 | def replacejwithi(self, i, j): 63 | if self.geometry == j: 64 | self.geometry = i 65 | j.removeparent(self) 66 | i.addparent(self) 67 | -------------------------------------------------------------------------------- /app/plugins/hapi-paginate.js: -------------------------------------------------------------------------------- 1 | /* global require, exports, module */ 2 | 'use strict'; 3 | 4 | var _ = require('lodash'); 5 | 6 | exports.register = function (server, options, next) { 7 | var defaultPage = 1; 8 | var defaultLimit = options.limit || 100; 9 | var name = options.name || 'meta'; 10 | var results = options.results || 'results'; 11 | var routes = options.routes || ['*']; 12 | var 
excludeFormats = options.excludeFormats || []; 13 | var requestLimit = defaultLimit; 14 | var requestPage = defaultPage; 15 | 16 | server.ext('onPreHandler', function (request, reply) { 17 | if (_.has(request.query, 'page')) { 18 | requestPage = _.parseInt(request.query.page); 19 | request.query = _.omit(request.query, 'page'); 20 | } else { 21 | requestPage = defaultPage; 22 | } 23 | 24 | if (_.has(request.query, 'limit')) { 25 | requestLimit = _.parseInt(request.query.limit); 26 | request.query = _.omit(request.query, 'limit'); 27 | } else { 28 | requestLimit = defaultLimit; 29 | } 30 | 31 | request.page = requestPage; 32 | request.limit = requestLimit; 33 | 34 | return reply.continue(); 35 | }); 36 | 37 | server.ext('onPreResponse', function (request, reply) { 38 | var meta = { 39 | page: request.page, 40 | limit: request.limit 41 | }; 42 | 43 | if (_.has(request, 'count')) { 44 | meta.found = request.count; 45 | } 46 | 47 | // Make sure the route matches and we're not excluded based on format 48 | const { path, method } = request.route; 49 | const whitelistRoute = routes[0] === '*' || !!routes.find(o => o.route === path && o.methods.indexOf(method.toUpperCase()) !== -1); 50 | if (whitelistRoute && excludeFormats.indexOf(request.query.format) === -1) { 51 | if (_.has(request.response.source, name)) { 52 | request.response.source[name] = _.merge(request.response.source[name], meta); 53 | } else { 54 | // Because we want to add meta to top of the source, we have to go through all this hassle 55 | var temp = request.response.source; 56 | request.response.source = {}; 57 | request.response.source[name] = meta; 58 | request.response.source[results] = temp; 59 | } 60 | } else { 61 | // Remove any previous meta content since we don't want it in this case 62 | // if (_.has(request.response.source, name)) { 63 | // delete request.response.source[name].page; 64 | // delete request.response.source[name].limit; 65 | // delete request.response.source[name].found; 66 | // } 67 | } 68 | 69 | return reply.continue(); 70 | }); 71 | 72 | next(); 73 | }; 74 | 75 | exports.register.attributes = { 76 | 'name': 'hapi-paginate', 77 | 'version': '0.4.0', 78 | 'description': 'A pagination plugin for Hapi' 79 | }; 80 | -------------------------------------------------------------------------------- /app/plugins/rra-osm-p2p-server.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | import Boom from 'boom'; 4 | 5 | import { getRouter } from '../services/rra-osm-p2p'; 6 | import db from '../db'; 7 | 8 | const rraOsmRoute = { 9 | path: '/projects/{projId}/scenarios/{scId}/osm/{path*}', 10 | method: '*', 11 | config: { 12 | validate: { 13 | params: { 14 | projId: Joi.number(), 15 | scId: Joi.number(), 16 | path: Joi.string() 17 | } 18 | }, 19 | // Ensure that the payload is still a stream so the osm-p2p-server 20 | // can handle it. 21 | payload: { 22 | output: 'stream', 23 | maxBytes: 1000 * 1024 * 1024 24 | } 25 | }, 26 | handler: (request, reply) => { 27 | const { projId, scId, path } = request.params; 28 | const router = getRouter(projId, scId); 29 | 30 | let req = request.raw.req; 31 | let res = request.raw.res; 32 | 33 | let qs = req.url.match(/\?(.*)+/); 34 | qs = qs ? 
qs[0] : ''; 35 | 36 | if (router.match(request.method, `/api/0.6/${path}`)) { 37 | res.setHeader('Access-Control-Allow-Origin', '*'); 38 | res.setHeader('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept'); 39 | } 40 | 41 | req.url = `/api/0.6/${path}${qs}`; 42 | 43 | const handleIt = () => { 44 | if (!router.handle(req, res)) { 45 | return reply(Boom.notFound()); 46 | } 47 | }; 48 | 49 | if (path.match(/changeset\/[0-9]+\/upload/)) { 50 | // Update the database with the road generation time. 51 | db('scenarios_settings') 52 | .update({value: (new Date())}) 53 | .where('scenario_id', scId) 54 | .where('key', 'rn_updated_at') 55 | .then(() => handleIt()); 56 | } else { 57 | handleIt(); 58 | } 59 | } 60 | }; 61 | 62 | exports.register = function (server, options, next) { 63 | server.route(rraOsmRoute); 64 | next(); 65 | }; 66 | 67 | exports.register.attributes = { 68 | 'name': 'rra-osm-p2p-server', 69 | 'version': '0.1.0', 70 | 'description': 'RRA connection to osm-p2p-server' 71 | }; 72 | -------------------------------------------------------------------------------- /app/routes/projects--create.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | import Boom from 'boom'; 4 | 5 | import db from '../db/'; 6 | 7 | module.exports = [ 8 | { 9 | path: '/projects', 10 | method: 'POST', 11 | config: { 12 | validate: { 13 | payload: { 14 | name: Joi.string().required(), 15 | description: Joi.string() 16 | } 17 | } 18 | }, 19 | handler: (request, reply) => { 20 | const now = new Date(); 21 | const data = request.payload; 22 | const base = { 23 | status: 'pending', 24 | created_at: now, 25 | updated_at: now 26 | }; 27 | 28 | db('projects') 29 | .returning('*') 30 | .insert(Object.assign({}, data, base)) 31 | .then(projectData => { 32 | projectData = projectData[0]; 33 | // Create first scenario. This is needed to store the related files. 
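// It is created with master: true below; scenarios--delete.js refuses to
// delete a project's master scenario.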
34 | return db('scenarios') 35 | .returning('*') 36 | .insert({ 37 | name: 'Main scenario', 38 | project_id: projectData.id, 39 | status: 'pending', 40 | master: true, 41 | created_at: now, 42 | updated_at: now 43 | }) 44 | .then(scenarioData => { 45 | scenarioData = scenarioData[0]; 46 | return db.batchInsert('scenarios_settings', [ 47 | { 48 | scenario_id: scenarioData.id, 49 | key: 'res_gen_at', 50 | value: 0, 51 | created_at: now, 52 | updated_at: now 53 | }, 54 | { 55 | scenario_id: scenarioData.id, 56 | key: 'rn_updated_at', 57 | value: 0, 58 | created_at: now, 59 | updated_at: now 60 | } 61 | ]); 62 | }) 63 | .then(() => reply(projectData)) 64 | .catch(err => reply(Boom.badImplementation(err))); 65 | }) 66 | .catch(err => { 67 | if (err.constraint === 'projects_name_unique') { 68 | return reply(Boom.conflict(`Project name already in use: ${data.name}`)); 69 | } 70 | console.error(err); 71 | return reply(Boom.badImplementation(err)); 72 | }); 73 | } 74 | } 75 | ]; 76 | -------------------------------------------------------------------------------- /app/routes/projects--delete.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | 4 | import db from '../db/'; 5 | import { removeDir as removeS3Dir } from '../s3/utils'; 6 | import { ProjectNotFoundError, getBoomResponseForError } from '../utils/errors'; 7 | 8 | module.exports = [ 9 | { 10 | path: '/projects/{projId}', 11 | method: 'DELETE', 12 | config: { 13 | validate: { 14 | params: { 15 | projId: Joi.number() 16 | } 17 | } 18 | }, 19 | handler: (request, reply) => { 20 | const id = request.params.projId; 21 | db.transaction(trx => { 22 | return trx.select('id').from('scenarios').where('project_id', id) 23 | .then(scenarios => { 24 | // Let the dir be removed in the background. 25 | scenarios.forEach(s => removeS3Dir(`scenario-${s.id}/`)); 26 | }) 27 | // Delete the project. Everything else will follow due to 28 | // cascade delete. 29 | // - project files 30 | // - scenario 31 | // - scenario files 32 | // - operations 33 | // - operation logs 34 | .then(() => trx 35 | .delete() 36 | .from('projects') 37 | .where('id', id) 38 | .then(res => { 39 | if (res <= 0) { 40 | throw new ProjectNotFoundError(); 41 | } 42 | }) 43 | ) 44 | .then(() => { 45 | // Let the dir be removed in the background. 
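// NOTE: deliberately not awaited; if removeS3Dir rejects, the request
// still succeeds and the failure surfaces only as an unhandled promise
// rejection.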
46 | removeS3Dir(`project-${id}/`); 47 | }); 48 | }) 49 | .then(() => reply({statusCode: 200, message: 'Project deleted'})) 50 | .catch(err => reply(getBoomResponseForError(err))); 51 | } 52 | } 53 | ]; 54 | -------------------------------------------------------------------------------- /app/routes/projects--files-delete.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | 4 | import db from '../db/'; 5 | import { removeFile } from '../s3/utils'; 6 | import { ProjectNotFoundError, FileNotFoundError, ProjectStatusError, getBoomResponseForError } from '../utils/errors'; 7 | 8 | module.exports = [ 9 | { 10 | path: '/projects/{projId}/files/{fileId}', 11 | method: 'DELETE', 12 | config: { 13 | validate: { 14 | params: { 15 | projId: Joi.number(), 16 | fileId: Joi.number() 17 | } 18 | } 19 | }, 20 | handler: (request, reply) => { 21 | db('projects') 22 | .select('projects.id', 23 | 'projects.status', 24 | 'projects_files.path as file_path', 25 | 'projects_files.id as file_id') 26 | .leftJoin('projects_files', function () { 27 | this.on('projects.id', '=', 'projects_files.project_id') 28 | .andOn(db.raw('projects_files.id = :fileId', {fileId: request.params.fileId})); 29 | }) 30 | .where('projects.id', request.params.projId) 31 | .then(res => { 32 | if (!res.length) throw new ProjectNotFoundError(); 33 | let data = res[0]; 34 | if (data.status !== 'pending') throw new ProjectStatusError('Project no longer in the setup phase. Files can not be removed'); 35 | if (data.file_id === null) throw new FileNotFoundError(); 36 | 37 | return db('projects_files') 38 | .where('id', data.file_id) 39 | .del() 40 | .then(() => removeFile(data.file_path)); 41 | }) 42 | .then(() => reply({statusCode: 200, message: 'File deleted'})) 43 | .catch(err => reply(getBoomResponseForError(err))); 44 | } 45 | } 46 | ]; 47 | -------------------------------------------------------------------------------- /app/routes/projects--files-download.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | import Boom from 'boom'; 4 | 5 | import db from '../db/'; 6 | import { getFile } from '../s3/utils'; 7 | import { FileNotFoundError, getBoomResponseForError } from '../utils/errors'; 8 | 9 | module.exports = [ 10 | { 11 | path: '/projects/{projId}/files/{fileId}', 12 | method: 'GET', 13 | config: { 14 | validate: { 15 | params: { 16 | projId: Joi.number(), 17 | fileId: Joi.number() 18 | }, 19 | query: { 20 | download: Joi.boolean().truthy('true').falsy('false') 21 | } 22 | } 23 | }, 24 | handler: (request, reply) => { 25 | if (!request.query.download) { 26 | return reply(Boom.notImplemented('Query parameter "download" missing')); 27 | } 28 | 29 | db('projects_files') 30 | .select('*') 31 | .where('id', request.params.fileId) 32 | .where('project_id', request.params.projId) 33 | .then(res => { 34 | if (!res.length) throw new FileNotFoundError(); 35 | return res[0]; 36 | }) 37 | .then(file => { 38 | return getFile(file.path) 39 | .then(dataStream => { 40 | let mime; 41 | switch (file.type) { 42 | case 'profile': 43 | mime = 'text/x-lua'; 44 | break; 45 | case 'origins': 46 | case 'admin-bounds': 47 | mime = 'application/json'; 48 | break; 49 | } 50 | 51 | reply(dataStream) 52 | .type(mime) 53 | .header('Content-Disposition', `attachment; filename=${file.name}`); 54 | }); 55 | }) 56 | .catch(err => reply(getBoomResponseForError(err))); 57 | } 58 | } 59 | ]; 60 | 
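The download route above streams the stored object back with an explicit mime type and a Content-Disposition header, so any plain HTTP client can consume it. A minimal usage sketch using only Node core modules (the port is the default from app/config/base.js; the project and file ids are made-up values for illustration):

'use strict';
const http = require('http');
const fs = require('fs');

// Fetch file 2 of project 1 and save it to disk. The route replies
// 501 Not Implemented unless download=true is passed.
http.get('http://localhost:4000/projects/1/files/2?download=true', res => {
  if (res.statusCode !== 200) {
    console.error(`Download failed: HTTP ${res.statusCode}`);
    res.resume(); // drain the response so the socket is released
    return;
  }
  res.pipe(fs.createWriteStream('project-file'));
});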
-------------------------------------------------------------------------------- /app/routes/projects--finish-setup.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | import Promise from 'bluebird'; 4 | 5 | import db from '../db/'; 6 | import { DataConflictError, getBoomResponseForError } from '../utils/errors'; 7 | import { getProject } from './projects--get'; 8 | import Operation from '../utils/operation'; 9 | import ServiceRunner from '../utils/service-runner'; 10 | import { closeDatabase } from '../services/rra-osm-p2p'; 11 | 12 | module.exports = [ 13 | { 14 | path: '/projects/{projId}/finish-setup', 15 | method: 'POST', 16 | config: { 17 | validate: { 18 | params: { 19 | projId: Joi.number() 20 | }, 21 | payload: { 22 | scenarioName: Joi.string().required(), 23 | scenarioDescription: Joi.string() 24 | } 25 | } 26 | }, 27 | handler: (request, reply) => { 28 | getProject(request.params.projId) 29 | .then(project => { 30 | if (project.status !== 'pending') { 31 | throw new DataConflictError('Project setup already completed'); 32 | } 33 | if (!project.readyToEndSetup) { 34 | throw new DataConflictError('Project preconditions to finish setup not met'); 35 | } 36 | }) 37 | .then(() => db('scenarios') 38 | .select('*') 39 | .where('project_id', request.params.projId) 40 | .where('master', true) 41 | .first() 42 | ) 43 | .then(scenario => { 44 | let projId = scenario.project_id; 45 | let scId = scenario.id; 46 | let {scenarioName, scenarioDescription} = request.payload; 47 | 48 | return db.transaction(function (trx) { 49 | return Promise.all([ 50 | trx('projects') 51 | .update({ 52 | updated_at: (new Date()) 53 | }) 54 | .where('id', projId), 55 | trx('scenarios') 56 | .update({ 57 | name: scenarioName, 58 | description: typeof scenarioDescription === 'undefined' ? '' : scenarioDescription, 59 | updated_at: (new Date()) 60 | }) 61 | .where('id', scId) 62 | ]); 63 | }) 64 | .then(() => startOperation(projId, scId) 65 | .then(op => concludeProjectSetup(projId, scId, op.getId())) 66 | ); 67 | }) 68 | .then(() => reply({statusCode: 200, message: 'Project setup finish started'})) 69 | .catch(err => reply(getBoomResponseForError(err))); 70 | } 71 | } 72 | ]; 73 | 74 | function startOperation (projId, scId) { 75 | let op = new Operation(db); 76 | return op.loadByData('project-setup-finish', projId, scId) 77 | .then(op => { 78 | if (op.isStarted()) { 79 | throw new DataConflictError('Project finish setup already in progress'); 80 | } 81 | }, err => { 82 | // In this case if the operation doesn't exist is not a problem. 83 | if (err.message.match(/not exist/)) { return; } 84 | throw err; 85 | }) 86 | .then(() => { 87 | let op = new Operation(db); 88 | return op.start('project-setup-finish', projId, scId) 89 | .then(() => op.log('start', {message: 'Operation started'})); 90 | }); 91 | } 92 | 93 | function concludeProjectSetup (projId, scId, opId, cb) { 94 | // In test mode we don't want to start the generation. 95 | // It will be tested in the appropriate place. 
96 | if (process.env.DS_ENV === 'test') { return; } 97 | 98 | closeDatabase(projId, scId).then(() => { 99 | console.log(`p${projId} s${scId}`, 'concludeProjectSetup'); 100 | let service = new ServiceRunner('project-setup', {projId, scId, opId}); 101 | 102 | service.on('complete', err => { 103 | console.log(`p${projId} s${scId}`, 'concludeProjectSetup complete'); 104 | if (err) { 105 | // The operation may not have finished if the error took place outside 106 | // the promise, or if the error was due to a wrong db connection. 107 | let op = new Operation(db); 108 | op.loadById(opId) 109 | .then(op => { 110 | if (!op.isCompleted()) { 111 | return op.finish('error', {error: err.message}); 112 | } 113 | }); 114 | } 115 | }) 116 | .start(); 117 | }); 118 | } 119 | -------------------------------------------------------------------------------- /app/routes/projects--get.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | import Promise from 'bluebird'; 4 | 5 | import db from '../db/'; 6 | import { ProjectNotFoundError, getBoomResponseForError } from '../utils/errors'; 7 | import { getSourceData, getOperationData } from '../utils/utils'; 8 | 9 | export default [ 10 | { 11 | path: '/projects', 12 | method: 'GET', 13 | handler: async (request, reply) => { 14 | let {page, limit} = request; 15 | let offset = (page - 1) * limit; 16 | 17 | try { 18 | let [{count}, projects] = await Promise.all([ 19 | db('projects').count('id').first(), 20 | db.select('*').from('projects').orderBy('created_at').offset(offset).limit(limit) 21 | ]); 22 | projects = await Promise.map(projects, p => attachProjectSourceData(p).then(p => attachScenarioCount(p))); 23 | request.count = parseInt(count); 24 | reply(projects); 25 | } catch (error) { 26 | reply(getBoomResponseForError(error)); 27 | } 28 | } 29 | }, 30 | { 31 | path: '/projects/{projId}', 32 | method: 'GET', 33 | config: { 34 | validate: { 35 | params: { 36 | projId: Joi.number() 37 | } 38 | } 39 | }, 40 | handler: async(request, reply) => { 41 | try { 42 | const project = await getProject(request.params.projId); 43 | reply(project); 44 | } catch (error) { 45 | reply(getBoomResponseForError(error)); 46 | } 47 | } 48 | } 49 | ]; 50 | 51 | function attachProjectSourceData (project) { 52 | return getSourceData(db, 'project', project.id) 53 | .then(sourceData => { 54 | project.sourceData = sourceData; 55 | return project; 56 | }); 57 | } 58 | 59 | function attachScenarioCount (project) { 60 | return db('scenarios') 61 | .count('id') 62 | .where('project_id', project.id) 63 | .then(count => { 64 | project.scenarioCount = parseInt(count[0].count); 65 | return project; 66 | }); 67 | } 68 | 69 | function getProject (id) { 70 | return db.select('*') 71 | .from('projects') 72 | .where('id', id) 73 | .first() 74 | .then(project => { 75 | if (!project) throw new ProjectNotFoundError(); 76 | return project; 77 | }) 78 | .then(project => attachProjectSourceData(project)) 79 | .then(project => attachFinishSetupOperation(project)) 80 | .then(project => { 81 | // GetId of first scenario. 82 | return db('scenarios') 83 | .select('id') 84 | .where('project_id', project.id) 85 | .where('master', true) 86 | .first() 87 | .then(scenario => getSourceData(db, 'scenario', scenario.id)) 88 | .then(scenarioSourceData => { 89 | let sources = Object.assign({}, project.sourceData, scenarioSourceData); 90 | 91 | // Check if all sources are valid. 92 | // If source is osm is OK. 
93 | // If it is a file, there has to be at least one. 94 | project.readyToEndSetup = Object.keys(sources) 95 | .every(k => { 96 | let src = sources[k]; 97 | if (src.type === null) return false; 98 | if (src.type === 'file') return src.files.length >= 1; 99 | return true; 100 | }); 101 | 102 | return project; 103 | }); 104 | }) 105 | .then(project => attachScenarioCount(project)); 106 | } 107 | 108 | function attachFinishSetupOperation (project) { 109 | return db('scenarios') 110 | .select('id') 111 | .where('project_id', project.id) 112 | .where('master', true) 113 | .first() 114 | .then(scenario => getOperationData(db, 'project-setup-finish', scenario.id)) 115 | .then(opData => { 116 | project.finish_setup = opData; 117 | return project; 118 | }); 119 | } 120 | 121 | module.exports.getProject = getProject; 122 | -------------------------------------------------------------------------------- /app/routes/projects--tiles.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | 4 | import db from '../db/'; 5 | import { ProjectNotFoundError, getBoomResponseForError } from '../utils/errors'; 6 | import { getFile } from '../s3/utils'; 7 | 8 | module.exports = [ 9 | { 10 | path: '/projects/{projId}/tiles/{type}/{z}/{x}/{y}', 11 | method: 'GET', 12 | config: { 13 | validate: { 14 | params: { 15 | projId: Joi.number().required(), 16 | type: Joi.string().valid('admin-bounds'), 17 | z: Joi.number().required(), 18 | x: Joi.number().required(), 19 | y: Joi.number().required() 20 | } 21 | } 22 | }, 23 | handler: (request, reply) => { 24 | const { projId, type, z, x, y } = request.params; 25 | 26 | return db.select('*') 27 | .from('projects') 28 | .where('id', request.params.projId) 29 | .first() 30 | .then(project => { 31 | if (!project) throw new ProjectNotFoundError(); 32 | }) 33 | .then(() => getFile(`project-${projId}/tiles/${type}/${z}/${x}/${y}.pbf`)) 34 | .then(file => { 35 | reply(file) 36 | .type('application/octet-stream') 37 | .header('Content-Encoding', 'gzip'); 38 | }) 39 | .catch(err => reply(getBoomResponseForError(err))); 40 | } 41 | } 42 | ]; 43 | -------------------------------------------------------------------------------- /app/routes/projects--update.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | 4 | import db from '../db/'; 5 | 6 | import { ProjectNotFoundError, DataConflictError, getBoomResponseForError } from '../utils/errors'; 7 | 8 | module.exports = [ 9 | { 10 | path: '/projects/{projId}', 11 | method: 'PATCH', 12 | config: { 13 | validate: { 14 | params: { 15 | projId: Joi.number() 16 | }, 17 | payload: { 18 | name: Joi.string(), 19 | description: Joi.alternatives().try(Joi.valid(null), Joi.string()) 20 | } 21 | } 22 | }, 23 | handler: (request, reply) => { 24 | const data = request.payload; 25 | let update = { 26 | updated_at: (new Date()) 27 | }; 28 | 29 | typeof data.name !== 'undefined' && (update.name = data.name); 30 | typeof data.description !== 'undefined' && (update.description = data.description); 31 | 32 | db('projects') 33 | .returning('*') 34 | .update(update) 35 | .where('id', request.params.projId) 36 | .then(projects => { 37 | if (!projects.length) throw new ProjectNotFoundError(); 38 | return projects[0]; 39 | }) 40 | .then(project => reply(project)) 41 | .catch(err => { 42 | if (err.constraint === 'projects_name_unique') { 43 | throw new DataConflictError(`Project name already in use: 
${data.name}`); 44 | } 45 | throw err; 46 | }) 47 | .catch(err => reply(getBoomResponseForError(err))); 48 | } 49 | } 50 | ]; 51 | -------------------------------------------------------------------------------- /app/routes/root.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = [ 4 | { 5 | path: '/', 6 | method: 'GET', 7 | config: { 8 | auth: false 9 | }, 10 | handler: (request, reply) => { 11 | reply({ 12 | statusCode: 200, 13 | message: 'In the beginning the Universe was created. This has made a lot of people very upset and been widely regarded as a bad move.' 14 | }); 15 | } 16 | } 17 | ]; 18 | -------------------------------------------------------------------------------- /app/routes/scenarios--delete.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | 4 | import db from '../db/'; 5 | import { removeDir as removeS3Dir } from '../s3/utils'; 6 | import { MasterScenarioError, ScenarioNotFoundError, getBoomResponseForError } from '../utils/errors'; 7 | 8 | module.exports = [ 9 | { 10 | path: '/projects/{projId}/scenarios/{scId}', 11 | method: 'DELETE', 12 | config: { 13 | validate: { 14 | params: { 15 | projId: Joi.number(), 16 | scId: Joi.number() 17 | } 18 | } 19 | }, 20 | handler: (request, reply) => { 21 | const {projId, scId} = request.params; 22 | 23 | // Check for the master scenario. That one can't be deleted. 24 | db('scenarios') 25 | .select('*') 26 | .where('id', scId) 27 | .where('project_id', projId) 28 | .then(res => { 29 | if (!res.length) { 30 | throw new ScenarioNotFoundError(); 31 | } 32 | if (res[0].master) { 33 | throw new MasterScenarioError('The master scenario of a project can not be deleted'); 34 | } 35 | }) 36 | .then(() => db.transaction(trx => { 37 | return trx 38 | .select('*') 39 | .from('scenarios_files') 40 | .where('scenario_id', scId) 41 | .where('project_id', projId) 42 | // Delete the scenario. Everything else will follow due to 43 | // cascade delete. 44 | // - scenario files 45 | // - operations 46 | // - operation logs 47 | .then(() => trx.delete().from('scenarios').where('id', scId).where('project_id', projId)) 48 | .then(res => { 49 | if (res <= 0) { 50 | throw new ScenarioNotFoundError(); 51 | } 52 | }) 53 | .then(() => db('projects').update({updated_at: (new Date())}).where('id', request.params.projId)) 54 | .then(() => { 55 | // Let the dir be removed in the background. 
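// Same fire-and-forget removal as in projects--delete.js: the promise is
// not awaited, so a failed removal cannot fail the request.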
56 | removeS3Dir(`scenario-${scId}/`); 57 | }); 58 | })) 59 | .then(() => reply({statusCode: 200, message: 'Scenario deleted'})) 60 | .catch(err => reply(getBoomResponseForError(err))); 61 | } 62 | } 63 | ]; 64 | -------------------------------------------------------------------------------- /app/routes/scenarios--files-delete.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | 4 | import db from '../db/'; 5 | import { removeFile } from '../s3/utils'; 6 | import { 7 | ProjectNotFoundError, 8 | ScenarioNotFoundError, 9 | FileNotFoundError, 10 | ProjectStatusError, 11 | getBoomResponseForError 12 | } from '../utils/errors'; 13 | 14 | module.exports = [ 15 | { 16 | path: '/projects/{projId}/scenarios/{scId}/files/{fileId}', 17 | method: 'DELETE', 18 | config: { 19 | validate: { 20 | params: { 21 | projId: Joi.number(), 22 | scId: Joi.number(), 23 | fileId: Joi.number() 24 | } 25 | } 26 | }, 27 | handler: (request, reply) => { 28 | db('scenarios') 29 | .select('scenarios.id', 30 | 'projects.status as project_status', 31 | 'projects.id as project_id', 32 | 'scenarios_files.path as file_path', 33 | 'scenarios_files.id as file_id') 34 | .leftJoin('projects', function () { 35 | this.on('projects.id', '=', 'scenarios.project_id') 36 | .andOn(db.raw('projects.id = :projId', {projId: request.params.projId})); 37 | }) 38 | .leftJoin('scenarios_files', function () { 39 | this.on('scenarios.id', '=', 'scenarios_files.scenario_id') 40 | .andOn(db.raw('scenarios_files.id = :fileId', {fileId: request.params.fileId})); 41 | }) 42 | .where('scenarios.id', request.params.scId) 43 | .then(res => { 44 | if (!res.length) throw new ScenarioNotFoundError(); 45 | let data = res[0]; 46 | if (data.project_id === null) throw new ProjectNotFoundError(); 47 | if (data.project_status !== 'pending') throw new ProjectStatusError('Project no longer in the setup phase. 
Files can not be removed'); 48 | if (data.file_id === null) throw new FileNotFoundError(); 49 | 50 | return db('scenarios_files') 51 | .where('id', data.file_id) 52 | .del() 53 | .then(() => removeFile(data.file_path)); 54 | }) 55 | .then(() => db('projects').update({updated_at: (new Date())}).where('id', request.params.projId)) 56 | .then(() => reply({statusCode: 200, message: 'File deleted'})) 57 | .catch(err => reply(getBoomResponseForError(err))); 58 | } 59 | } 60 | ]; 61 | -------------------------------------------------------------------------------- /app/routes/scenarios--files-download.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | import Boom from 'boom'; 4 | 5 | import db from '../db/'; 6 | import { getFile } from '../s3/utils'; 7 | import { FileNotFoundError, getBoomResponseForError } from '../utils/errors'; 8 | 9 | module.exports = [ 10 | { 11 | path: '/projects/{projId}/scenarios/{scId}/files/{fileId}', 12 | method: 'GET', 13 | config: { 14 | validate: { 15 | params: { 16 | projId: Joi.number(), 17 | scId: Joi.number(), 18 | fileId: Joi.number() 19 | }, 20 | query: { 21 | download: Joi.boolean().truthy('true').falsy('false') 22 | } 23 | } 24 | }, 25 | handler: (request, reply) => { 26 | if (!request.query.download) { 27 | return reply(Boom.notImplemented('Query parameter "download" missing')); 28 | } 29 | 30 | db('scenarios_files') 31 | .select('*') 32 | .where('id', request.params.fileId) 33 | .where('project_id', request.params.projId) 34 | .where('scenario_id', request.params.scId) 35 | .then(res => { 36 | if (!res.length) throw new FileNotFoundError(); 37 | return res[0]; 38 | }) 39 | .then(file => { 40 | return getFile(file.path) 41 | .then(dataStream => { 42 | let mime; 43 | switch (file.type) { 44 | case 'poi': 45 | mime = 'application/json'; 46 | break; 47 | case 'road-network': 48 | mime = 'application/xml'; 49 | break; 50 | } 51 | 52 | reply(dataStream) 53 | .type(mime) 54 | .header('Content-Disposition', `attachment; filename=${file.name}`); 55 | }); 56 | }) 57 | .catch(err => reply(getBoomResponseForError(err))); 58 | } 59 | } 60 | ]; 61 | -------------------------------------------------------------------------------- /app/routes/scenarios--get.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | import Promise from 'bluebird'; 4 | 5 | import db from '../db/'; 6 | import { ScenarioNotFoundError, ProjectNotFoundError, getBoomResponseForError } from '../utils/errors'; 7 | import { getSourceData, getOperationData } from '../utils/utils'; 8 | 9 | const routeSingleScenarioConfig = { 10 | validate: { 11 | params: { 12 | projId: Joi.number(), 13 | scId: Joi.number() 14 | } 15 | } 16 | }; 17 | 18 | export default [ 19 | { 20 | path: '/projects/{projId}/scenarios', 21 | method: 'GET', 22 | config: { 23 | validate: { 24 | params: { 25 | projId: Joi.number() 26 | } 27 | } 28 | }, 29 | handler: async (request, reply) => { 30 | let {page, limit} = request; 31 | let offset = (page - 1) * limit; 32 | 33 | try { 34 | let [{count}, scenarios] = await Promise.all([ 35 | db('scenarios').where('project_id', request.params.projId).count('id').first(), 36 | db.select('id').from('scenarios').where('project_id', request.params.projId).orderBy('created_at').offset(offset).limit(limit) 37 | ]); 38 | scenarios = await Promise.map(scenarios, s => loadScenario(request.params.projId, s.id)); 39 | request.count = 
parseInt(count); 40 | reply(scenarios); 41 | } catch (error) { 42 | reply(getBoomResponseForError(error)); 43 | } 44 | } 45 | }, 46 | { 47 | path: '/projects/{projId}/scenarios/0', 48 | method: 'GET', 49 | config: routeSingleScenarioConfig, 50 | handler: async (request, reply) => { 51 | try { 52 | const masterProj = await db('scenarios') 53 | .select('id') 54 | .where('project_id', request.params.projId) 55 | .where('master', true) 56 | .first(); 57 | 58 | if (!masterProj) throw new ProjectNotFoundError(); 59 | 60 | // Fake scenario load. 61 | request.params.scId = masterProj.id; 62 | singleScenarioHandler(request, reply); 63 | } catch (error) { 64 | reply(getBoomResponseForError(error)); 65 | } 66 | } 67 | }, 68 | { 69 | path: '/projects/{projId}/scenarios/{scId}', 70 | method: 'GET', 71 | config: routeSingleScenarioConfig, 72 | handler: singleScenarioHandler 73 | } 74 | ]; 75 | 76 | function singleScenarioHandler (request, reply) { 77 | return loadScenario(request.params.projId, request.params.scId) 78 | .then(scenario => reply(scenario)) 79 | .catch(err => reply(getBoomResponseForError(err))); 80 | } 81 | 82 | export function loadScenario (projId, scId) { 83 | return db.select('*') 84 | .from('scenarios') 85 | .where('id', scId) 86 | .where('project_id', projId) 87 | .orderBy('created_at') 88 | .first() 89 | .then(scenario => { 90 | if (!scenario) throw new ScenarioNotFoundError(); 91 | return scenario; 92 | }) 93 | .then(scenario => attachAdminAreas(scenario)) 94 | .then(scenario => attachScenarioSettings(scenario)) 95 | .then(scenario => attachScenarioSourceData(scenario)) 96 | .then(scenario => attachOperation('generate-analysis', 'gen_analysis', scenario)) 97 | .then(scenario => attachOperation('scenario-create', 'scen_create', scenario)); 98 | } 99 | 100 | function attachScenarioSettings (scenario) { 101 | return db.select('key', 'value') 102 | .from('scenarios_settings') 103 | // Admin areas are handled differently because the name has to be 104 | // fetched as well. 105 | .whereNotIn('key', ['admin_areas']) 106 | .where('scenario_id', scenario.id) 107 | .then(data => { 108 | scenario.data = {}; 109 | data.forEach(o => { 110 | scenario.data[o.key] = parseType(o.value); 111 | }); 112 | return scenario; 113 | }); 114 | } 115 | 116 | function parseType (val) { 117 | // Quick and dirty way to parse type. 118 | // Using JSON.parse will parse every value except strings. So if the parsing 119 | // fails assume it's a string and carry on. 120 | try { 121 | return JSON.parse(val); 122 | } catch (e) { 123 | return val; 124 | } 125 | } 126 | 127 | function attachScenarioSourceData (scenario) { 128 | return getSourceData(db, 'scenario', scenario.id) 129 | .then(sourceData => { 130 | scenario.sourceData = sourceData; 131 | return scenario; 132 | }); 133 | } 134 | 135 | function attachAdminAreas (scenario) { 136 | return Promise.all([ 137 | // Get admin areas. 138 | db('projects_aa') 139 | .select('id', 'name', 'type') 140 | .where('project_id', scenario.project_id), 141 | // Get selected ids. 142 | db('scenarios_settings') 143 | .select('value') 144 | .where('key', 'admin_areas') 145 | .where('scenario_id', scenario.id) 146 | .first() 147 | ]) 148 | .then(data => { 149 | let [aa, selected] = data; 150 | 151 | if (!aa.length) { 152 | scenario.admin_areas = null; 153 | } else { 154 | selected = selected ? JSON.parse(selected.value) : []; 155 | 156 | // Mark selected as selected. 
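// (i.e. flag each admin area with whether its id appears in the
// scenario's admin_areas setting, then sort ascending by id.)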
157 | aa = aa.map(o => { 158 | o.selected = selected.indexOf(o.id) !== -1; 159 | return o; 160 | }).sort((a, b) => a.id - b.id); 161 | scenario.admin_areas = aa; 162 | } 163 | 164 | return scenario; 165 | }); 166 | } 167 | 168 | function attachOperation (opName, prop, scenario) { 169 | return getOperationData(db, opName, scenario.id) 170 | .then(opData => { 171 | scenario[prop] = opData; 172 | return scenario; 173 | }); 174 | } 175 | -------------------------------------------------------------------------------- /app/routes/scenarios--poi.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | import centerOfMass from '@turf/center-of-mass'; 4 | 5 | import db from '../db/'; 6 | import { getJSONFileContents } from '../s3/utils'; 7 | import { FileNotFoundError, getBoomResponseForError } from '../utils/errors'; 8 | 9 | export default [ 10 | { 11 | path: '/projects/{projId}/scenarios/{scId}/poi', 12 | method: 'GET', 13 | config: { 14 | validate: { 15 | params: { 16 | projId: Joi.number(), 17 | scId: Joi.number() 18 | }, 19 | query: { 20 | type: Joi.string().required() 21 | } 22 | } 23 | }, 24 | handler: async (request, reply) => { 25 | const { projId, scId } = request.params; 26 | const { type } = request.query; 27 | 28 | try { 29 | const fauxFeature = await getFauxPoiFeature(projId, scId, type); 30 | return reply(fauxFeature); 31 | } catch (error) { 32 | return reply(getBoomResponseForError(error)); 33 | } 34 | } 35 | } 36 | ]; 37 | 38 | /** 39 | * Returns a compressed version of the POI features to conserve bandwidth. The 40 | * response should be hydrated to GeoJSON on the client. 41 | * 42 | * @param {number} projId Project id 43 | * @param {number} scId Scenario id 44 | * @param {string} type Type of the POI 45 | * 46 | * @returns {Array} Compressed POI features 47 | */ 48 | export async function getFauxPoiFeature (projId, scId, type) { 49 | const poiFile = await db('scenarios_files') 50 | .select('*') 51 | .where('project_id', projId) 52 | .where('scenario_id', scId) 53 | .where('type', 'poi') 54 | .where('subtype', type) 55 | .first(); 56 | 57 | if (!poiFile) throw new FileNotFoundError('Poi type not found'); 58 | const poi = await getJSONFileContents(poiFile.path); 59 | 60 | let response = []; 61 | poi.features.forEach((feat, idx) => { 62 | let coords = feat.geometry.type !== 'Point' 63 | ? centerOfMass(feat).geometry.coordinates 64 | : feat.geometry.coordinates; 65 | 66 | // The response will be converted to a feature on the client. 67 | // This reduces the response size significantly. 68 | response.push({ 69 | // Feature id. 70 | i: idx, 71 | // Coordinates.
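// A sketch of the expected client-side hydration (hypothetical helper):
//   res.map(o => ({type: 'Feature', properties: {id: o.i}, geometry: {type: 'Point', coordinates: o.c}}))
// Coordinates are truncated to 5 decimal places (~1 m at the equator).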
72 | c: [Math.trunc(coords[0] * 1e5) / 1e5, Math.trunc(coords[1] * 1e5) / 1e5] 73 | }); 74 | }); 75 | 76 | return response; 77 | } 78 | -------------------------------------------------------------------------------- /app/routes/scenarios--tiles.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | import Boom from 'boom'; 4 | 5 | import db from '../db/'; 6 | import { ScenarioNotFoundError, getBoomResponseForError } from '../utils/errors'; 7 | import { getFile } from '../s3/utils'; 8 | 9 | module.exports = [ 10 | { 11 | path: '/projects/{projId}/scenarios/{scId}/tiles/{type}/{z}/{x}/{y}', 12 | method: 'GET', 13 | config: { 14 | validate: { 15 | params: { 16 | projId: Joi.number().required(), 17 | scId: Joi.number().required(), 18 | type: Joi.string().valid('road-network'), 19 | z: Joi.number().required(), 20 | x: Joi.number().required(), 21 | y: Joi.number().required() 22 | } 23 | } 24 | }, 25 | handler: (request, reply) => { 26 | const { scId, type, z, x, y } = request.params; 27 | 28 | return db.select('*') 29 | .from('scenarios') 30 | .where('id', scId) 31 | .first() 32 | .then(scenario => { 33 | if (!scenario) throw new ScenarioNotFoundError(); 34 | }) 35 | .then(() => getFile(`scenario-${scId}/tiles/${type}/${z}/${x}/${y}.pbf`)) 36 | .then(file => { 37 | reply(file) 38 | .type('application/octet-stream') 39 | .header('Content-Encoding', 'gzip'); 40 | }) 41 | .catch(err => { 42 | // Specific override: a missing object means the tile does not exist. 43 | if (err.code === 'NoSuchKey') { 44 | return reply(Boom.notFound('Tile not found')); 45 | } 46 | throw err; 47 | }) 48 | .catch(err => reply(getBoomResponseForError(err))); 49 | } 50 | } 51 | ]; 52 | -------------------------------------------------------------------------------- /app/routes/scenarios--update.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | import Promise from 'bluebird'; 4 | 5 | import db from '../db/'; 6 | import { loadScenario } from './scenarios--get'; 7 | import { ScenarioNotFoundError, DataConflictError, getBoomResponseForError } from '../utils/errors'; 8 | 9 | module.exports = [ 10 | { 11 | path: '/projects/{projId}/scenarios/{scId}', 12 | method: 'PATCH', 13 | config: { 14 | validate: { 15 | params: { 16 | projId: Joi.number(), 17 | scId: Joi.number() 18 | }, 19 | payload: { 20 | name: Joi.string(), 21 | description: Joi.alternatives().try(Joi.valid(null), Joi.string()), 22 | selectedAdminAreas: Joi.array() 23 | } 24 | } 25 | }, 26 | handler: (request, reply) => { 27 | const data = request.payload; 28 | let update = { 29 | updated_at: (new Date()) 30 | }; 31 | 32 | typeof data.name !== 'undefined' && (update.name = data.name); 33 | typeof data.description !== 'undefined' && (update.description = data.description); 34 | 35 | let executor = Promise.resolve(update); 36 | 37 | if (typeof data.selectedAdminAreas !== 'undefined') { 38 | // Get all the admin area ids to validate the selection. 39 | executor = db('projects_aa') 40 | .select('id') 41 | .where('project_id', request.params.projId) 42 | .then(aa => aa.filter(o => data.selectedAdminAreas 43 | .indexOf(o.id) !== -1) 44 | .map(o => o.id) 45 | ) 46 | // Store the selected admin areas in the settings table as an array.
47 | .then(adminAreas => db('scenarios_settings') 48 | .update({ value: JSON.stringify(adminAreas) }) 49 | .where('key', 'admin_areas') 50 | .where('scenario_id', request.params.scId) 51 | ); 52 | } 53 | 54 | executor 55 | .then(() => db('scenarios') 56 | .returning('id') 57 | .update(update) 58 | .where('id', request.params.scId) 59 | .where('project_id', request.params.projId) 60 | ) 61 | .then(scenarios => { 62 | if (!scenarios.length) throw new ScenarioNotFoundError(); 63 | return scenarios[0]; 64 | }) 65 | .then(scenarioId => loadScenario(request.params.projId, scenarioId)) 66 | .then(scenario => db('projects').update({updated_at: (new Date())}).where('id', request.params.projId).then(() => scenario)) 67 | .then(scenario => reply(scenario)) 68 | .catch(err => { 69 | if (err.constraint === 'scenarios_project_id_name_unique') { 70 | throw new DataConflictError(`Scenario name already in use for this project: ${data.name}`); 71 | } 72 | throw err; 73 | }) 74 | .catch(err => reply(getBoomResponseForError(err))); 75 | } 76 | } 77 | ]; 78 | -------------------------------------------------------------------------------- /app/routes/wbcatalog-source-data.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Joi from 'joi'; 3 | import fetch from 'node-fetch'; 4 | import Promise from 'bluebird'; 5 | import _ from 'lodash'; 6 | import https from 'https'; 7 | 8 | import db from '../db/'; 9 | import { getBoomResponseForError } from '../utils/errors'; 10 | 11 | // Number of days the data is considered valid. 12 | export const CACHE_DAYS = 7; 13 | 14 | // https://github.com/WorldBank-Transport/ram-backend/issues/214#issuecomment-394736868 15 | const SOURCE_TO_TAG_ID = { 16 | // ram-origins 17 | origins: 1426, 18 | // ram-profile 19 | profile: -1, 20 | // ram-admin 21 | admin: 1413, 22 | // ram-poi 23 | poi: 1425, 24 | // ram-rn 25 | // 'road-network': 1412 26 | 'road-network': -1 // Disabled temporarily 27 | }; 28 | 29 | // Allow unauthorized requests. 30 | // https://github.com/WorldBank-Transport/ram-backend/issues/223 31 | const httpsAgent = new https.Agent({ 32 | rejectUnauthorized: false 33 | }); 34 | 35 | /** 36 | * Checks if the mimetype is valid according to the source name. 37 | * 38 | * @param {string} sourceName Name of the source being validated. 39 | * @param {string} mimetype Mime type to validate. 40 | * 41 | * @returns {boolean} Whether or not the mimetype is valid. 42 | */ 43 | function isValidMimetypeForSource (sourceName, mimetype) { 44 | if ((sourceName === 'poi' || sourceName === 'admin' || sourceName === 'origins') && mimetype === 'GeoJSON') return true; 45 | 46 | return false; 47 | } 48 | 49 | /** 50 | * Check if a given source has data and is not expired. 51 | * 52 | * @param {string} sourceName (origins | profile | admin | poi | road-network) 53 | */ 54 | export function checkValidSource (sourceName) { 55 | // To check whether the data has expired, check if any record is returned. 56 | // Since all data is imported at the same time, it is enough to 57 | // check one record. 58 | // Using cacheDays as an identifier instead of a value to avoid the 59 | // "could not determine data type of parameter" error.
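// The subquery computes `created_at + CACHE_DAYS days` for the source's
// records and the outer query only returns rows while that expiry date is
// still in the future, so an empty result means the cache must be rebuilt.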
60 | return db.raw(` 61 | SELECT wbcatalog_resources.id, exp.expire_at 62 | FROM wbcatalog_resources, ( 63 | SELECT created_at + interval ':cacheDays:' day as expire_at 64 | FROM wbcatalog_resources 65 | WHERE wbcatalog_resources.type = :type 66 | ) exp 67 | WHERE exp.expire_at > now() AND wbcatalog_resources.type = :type 68 | `, {cacheDays: CACHE_DAYS.toString(), type: sourceName}) 69 | .then(data => !!data.rows.length); 70 | // .then(data => false); 71 | } 72 | 73 | /** 74 | * Fetches the resource information for a given resourceId 75 | * 76 | * @param {string} sourceName Name of the source. 77 | * @param {string} resourceId The id of the resource 78 | * 79 | * @returns {object} The resource information 80 | */ 81 | async function fetchResourceData (sourceName, resourceId) { 82 | try { 83 | const {result: {url, name, mimetype}} = await fetch(`https://datacatalog.worldbank.org/api/3/action/resource_show?id=${resourceId}`, {agent: httpsAgent}) 84 | .then(res => res.json()); 85 | 86 | // If there's no url, file is not valid. 87 | if (!url) throw new Error('Resource file missing url'); 88 | 89 | // Validate mimetype based on sourceName. 90 | if (!isValidMimetypeForSource(sourceName, mimetype)) throw new Error(`Invalid mimetype for source: ${sourceName} - ${mimetype}`); 91 | 92 | return {id: resourceId, name, url}; 93 | } catch (error) { 94 | console.log('Error fetching resource data for', resourceId, error); 95 | console.log('Error handled ^'); 96 | // Invalidate source in case of any error. 97 | return {id: null}; 98 | } 99 | } 100 | 101 | /** 102 | * Fetch data for a given source from the wb catalog. 103 | * 104 | * 105 | * @param {string} sourceName (origins | profile | admin | poi | road-network) 106 | */ 107 | export async function fetchCatalogData (sourceName) { 108 | const tagId = SOURCE_TO_TAG_ID[sourceName]; 109 | 110 | const datasets = await fetch(`https://datacatalog.worldbank.org/search-service/search_api/datasets?filter[field_tags]=${tagId}&fields=title,nid,field_resources`, {agent: httpsAgent}) 111 | .then(res => res.json()); 112 | 113 | // Build concurrent tasks. 114 | // Using lodash's reduce because `datasets.result` is an object. 115 | const tasks = _.reduce(datasets.result, (acc, dataset) => { 116 | // Ensure there are resources. 117 | const res = _.get(dataset, 'field_resources.und', []); 118 | return acc.concat(_.reduce(res, (_acc, r) => { 119 | return r.target_id 120 | ? _acc.concat(async () => { 121 | const data = await fetchResourceData(sourceName, r.target_id); 122 | return { 123 | ...data, 124 | name: `${dataset.title} - ${data.name}` 125 | }; 126 | }) 127 | : _acc; 128 | }, [])); 129 | }, []); 130 | 131 | // Execute tasks. 132 | const files = await Promise.map(tasks, task => task(), {concurrency: 5}); 133 | 134 | // Remove the invalid results. 135 | return files.filter(f => !!f.id); 136 | } 137 | 138 | /** 139 | * Removes old data from the database and stores the wb catalog data 140 | * for caching purposes. 
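 * @example
 * // Hypothetical refresh of the origins cache:
 * fetchCatalogData('origins').then(data => buildCache('origins', data));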
141 | * 142 | * 143 | * @param {string} sourceName (origins | profile | admin | poi | road-network) 144 | * @param {array} catalogData Data from the WB catalog as returned by fetchCatalogData() 145 | * 146 | * @see fetchCatalogData 147 | */ 148 | export function buildCache (sourceName, catalogData) { 149 | const data = catalogData.map(o => ({ 150 | type: sourceName, 151 | name: o.name, 152 | resource_id: o.id, 153 | resource_url: o.url 154 | })); 155 | 156 | return db('wbcatalog_resources') 157 | .where('type', sourceName) 158 | .del() 159 | .then(() => db.batchInsert('wbcatalog_resources', data)); 160 | } 161 | 162 | /** 163 | * Gets the data for a given source from the database. 164 | * 165 | * @param {string} sourceName (origins | profile | admin | poi | road-network) 166 | * 167 | */ 168 | export function getResourcesFromDb (sourceName) { 169 | return db.select('resource_id', 'name') 170 | .from('wbcatalog_resources') 171 | .where('type', sourceName) 172 | .orderBy('name'); 173 | } 174 | 175 | /** 176 | * Hapi handler for the wbcatalog-source-data endpoints. 177 | */ 178 | async function wbCatalogHandler (request, reply) { 179 | const {sourceName} = request.payload; 180 | 181 | try { 182 | const hasData = await checkValidSource(sourceName); 183 | if (!hasData) { 184 | const catalogData = await fetchCatalogData(sourceName); 185 | await buildCache(sourceName, catalogData); 186 | } 187 | const data = await getResourcesFromDb(sourceName); 188 | return reply(data); 189 | } catch (err) { 190 | return reply(getBoomResponseForError(err)); 191 | } 192 | } 193 | 194 | export default [ 195 | { 196 | path: '/projects/wbcatalog-source-data', 197 | method: 'POST', 198 | config: { 199 | validate: { 200 | payload: { 201 | sourceName: Joi.string().valid('origins', 'profile', 'admin').required() 202 | } 203 | } 204 | }, 205 | handler: wbCatalogHandler 206 | }, 207 | { 208 | path: '/scenarios/wbcatalog-source-data', 209 | method: 'POST', 210 | config: { 211 | validate: { 212 | payload: { 213 | sourceName: Joi.string().valid('poi', 'road-network').required() 214 | } 215 | } 216 | }, 217 | handler: wbCatalogHandler 218 | } 219 | ]; 220 | -------------------------------------------------------------------------------- /app/s3/index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import * as Minio from 'minio'; 3 | import Http from 'http'; 4 | import Https from 'https'; 5 | 6 | import config from '../config'; 7 | import { getAWSInstanceCredentials } from '../utils/aws'; 8 | 9 | const { host, port, engine, accessKey, secretKey } = config.storage; 10 | 11 | export const bucket = config.storage.bucket; 12 | export const region = config.storage.region; 13 | 14 | /** 15 | * Initializes the minio s3 client depending on the engine and credentials 16 | * source in use. Needs to be a promise because it may rely on asynchronously 17 | * fetched credentials.
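 * Call sites therefore await the factory before issuing any request,
 * e.g. `const s3 = await S3();` as the helpers in app/s3/structure.js do.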
18 | * 19 | * @returns Minio Client 20 | */ 21 | export default async function S3 () { 22 | let minioClient; 23 | let agent; 24 | 25 | switch (engine) { 26 | case 'minio': 27 | minioClient = new Minio.Client({ 28 | endPoint: host, 29 | port: port, 30 | secure: false, 31 | accessKey: accessKey, 32 | secretKey: secretKey 33 | }); 34 | agent = Http.globalAgent; 35 | break; 36 | case 's3': 37 | let credentials; 38 | if (!accessKey && !secretKey) { 39 | // If we're using an S3 storage engine but no accessKey and secretKey 40 | // are set up, we assume that it is being run from an EC2 instance and 41 | // will try to get the credentials through the instance url. We're not 42 | // throwing any error if it fails because that is checked on startup. 43 | // See app/index.js 44 | const AWSInstanceCredentials = await getAWSInstanceCredentials(); 45 | credentials = { 46 | accessKey: AWSInstanceCredentials.accessKey, 47 | secretKey: AWSInstanceCredentials.secretKey, 48 | sessionToken: AWSInstanceCredentials.sessionToken 49 | }; 50 | } else { 51 | credentials = { accessKey, secretKey }; 52 | } 53 | 54 | minioClient = new Minio.Client({ 55 | endPoint: 's3.amazonaws.com', 56 | ...credentials 57 | }); 58 | agent = Https.globalAgent; 59 | break; 60 | default: 61 | throw new Error('Invalid storage engine. Use s3 or minio'); 62 | } 63 | 64 | // Temp fix for https://github.com/minio/minio-js/issues/641 65 | minioClient.agent = agent; 66 | 67 | return minioClient; 68 | } 69 | -------------------------------------------------------------------------------- /app/s3/structure.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Promise from 'bluebird'; 3 | 4 | import S3, { bucket, region } from './'; 5 | import config from '../config'; 6 | 7 | const DEBUG = config.debug; 8 | const BUCKET = bucket; 9 | const REGION = region; 10 | 11 | export async function listObjects (bucket, objPrefix = '') { 12 | const s3 = await S3(); 13 | return new Promise((resolve, reject) => { 14 | var objects = []; 15 | var stream = s3.listObjectsV2(bucket, objPrefix, true); 16 | stream.on('data', obj => { 17 | objects.push(obj); 18 | }); 19 | stream.on('error', err => { 20 | return reject(err); 21 | }); 22 | stream.on('end', () => { 23 | return resolve(objects); 24 | }); 25 | }); 26 | } 27 | 28 | export async function bucketExists (bucket) { 29 | const s3 = await S3(); 30 | return new Promise((resolve, reject) => { 31 | s3.bucketExists(bucket, err => { 32 | if (err) { 33 | return err.code === 'NoSuchBucket' || err.code === 'NotFound' 34 | ?
resolve(false) 35 | : reject(err); 36 | } 37 | return resolve(true); 38 | }); 39 | }); 40 | } 41 | 42 | export async function emptyBucket (bucket, objPrefix = '') { 43 | try { 44 | const objects = await listObjects(bucket, objPrefix); 45 | return Promise.map(objects, o => removeObject(bucket, o.name), { concurrency: 10 }); 46 | } catch (err) { 47 | if (err.code === 'NoSuchBucket') { 48 | return []; 49 | } 50 | throw err; 51 | } 52 | } 53 | 54 | export function destroyBucket (bucket) { 55 | return emptyBucket(bucket) 56 | .then(() => removeBucket(bucket)); 57 | } 58 | 59 | export async function createBucket (bucket, region) { 60 | const s3 = await S3(); 61 | return new Promise((resolve, reject) => { 62 | s3.makeBucket(bucket, region, err => { 63 | if (err) { 64 | if (err.code === 'BucketAlreadyOwnedByYou') { 65 | DEBUG && console.log(`Bucket ${bucket} already exists`); 66 | } else { 67 | return reject(err); 68 | } 69 | } 70 | DEBUG && console.log(`Bucket ${bucket} created`); 71 | return resolve({bucket, region}); 72 | }); 73 | }); 74 | } 75 | 76 | export async function setupStructure () { 77 | await destroyBucket(BUCKET); 78 | return createBucket(BUCKET, REGION); 79 | } 80 | 81 | export async function removeObject (bucket, name) { 82 | const s3 = await S3(); 83 | return new Promise((resolve, reject) => { 84 | s3.removeObject(bucket, name, err => { 85 | if (err) { 86 | return reject(err); 87 | } 88 | return resolve(); 89 | }); 90 | }); 91 | } 92 | 93 | async function removeBucket (bucket) { 94 | const s3 = await S3(); 95 | return new Promise((resolve, reject) => { 96 | s3.removeBucket(bucket, err => { 97 | if (err) { 98 | if (err.code === 'NoSuchBucket') { 99 | DEBUG && console.log(`Bucket ${bucket} does not exist. Skipping deletion`); 100 | } else { 101 | return reject(err); 102 | } 103 | } 104 | DEBUG && console.log(`Bucket ${bucket} deleted`); 105 | return resolve(); 106 | }); 107 | }); 108 | } 109 | 110 | export async function putObjectFromFile (bucket, name, filepath) { 111 | const s3 = await S3(); 112 | return new Promise((resolve, reject) => { 113 | s3.fPutObject(bucket, name, filepath, 'application/octet-stream', (err, etag) => { 114 | if (err) { 115 | return reject(err); 116 | } 117 | return resolve(etag); 118 | }); 119 | }); 120 | } 121 | 122 | export async function putObject (bucket, file, stream) { 123 | const s3 = await S3(); 124 | return new Promise((resolve, reject) => { 125 | s3.putObject(bucket, file, stream, (err, etag) => { 126 | if (err) return reject(err); 127 | return resolve(etag); 128 | }); 129 | }); 130 | } 131 | -------------------------------------------------------------------------------- /app/s3/utils.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import fs from 'fs-extra'; 3 | import Promise from 'bluebird'; 4 | 5 | import S3, { bucket } from './'; 6 | import { removeObject, putObjectFromFile, listObjects, emptyBucket, putObject } from './structure'; 7 | 8 | const readFile = Promise.promisify(fs.readFile); 9 | 10 | export async function getPresignedUrl (file) { 11 | const s3 = await S3(); 12 | return new Promise((resolve, reject) => { 13 | s3.presignedPutObject(bucket, file, 24 * 60 * 60, (err, presignedUrl) => { 14 | if (err) { 15 | return reject(err); 16 | } 17 | return resolve(presignedUrl); 18 | }); 19 | }); 20 | } 21 | 22 | // Proxy of removeObject function, assuming the bucket. 
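// Usage sketch with a hypothetical key: removeFile('scenario-1/road-network_123')
// deletes that single object from the configured bucket.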
23 | export function removeFile (file) { 24 | return removeObject(bucket, file); 25 | } 26 | 27 | // Proxy of emptyBucket function, assuming the bucket. 28 | export function removeDir (dir) { 29 | return emptyBucket(bucket, dir); 30 | } 31 | 32 | // Get file. 33 | export async function getFile (file) { 34 | const s3 = await S3(); 35 | return new Promise((resolve, reject) => { 36 | s3.getObject(bucket, file, (err, dataStream) => { 37 | if (err) { 38 | return reject(err); 39 | } 40 | return resolve(dataStream); 41 | }); 42 | }); 43 | } 44 | 45 | // Get s3 file to file. 46 | export async function fGetFile (file, dest) { 47 | const s3 = await S3(); 48 | return new Promise((resolve, reject) => { 49 | s3.fGetObject(bucket, file, dest, (err) => { 50 | if (err) { 51 | return reject(err); 52 | } 53 | return resolve(dest); 54 | }); 55 | }); 56 | } 57 | 58 | // Copy file. 59 | export async function copyFile (oldFile, newFile) { 60 | const s3 = await S3(); 61 | return new Promise((resolve, reject) => { 62 | s3.copyObject(bucket, newFile, `${bucket}/${oldFile}`, null, (err, data) => { 63 | if (err) { 64 | return reject(err); 65 | } 66 | return resolve(); 67 | }); 68 | }); 69 | } 70 | 71 | // File stats. 72 | export async function getFileInfo (file) { 73 | const s3 = await S3(); 74 | return new Promise((resolve, reject) => { 75 | s3.statObject(bucket, file, (err, stat) => { 76 | if (err) { 77 | return reject(err); 78 | } 79 | return resolve(stat); 80 | }); 81 | }); 82 | } 83 | 84 | // Copy directory. 85 | export async function copyDirectory (sourceDir, destDir) { 86 | const files = await listFiles(sourceDir); 87 | return Promise.map(files, file => { 88 | const newName = file.name.replace(sourceDir, destDir); 89 | return copyFile(file.name, newName); 90 | }, { concurrency: 10 }); 91 | } 92 | 93 | // Get file content. 94 | export async function getFileContents (file) { 95 | const s3 = await S3(); 96 | return new Promise((resolve, reject) => { 97 | s3.getObject(bucket, file, (err, dataStream) => { 98 | if (err) return reject(err); 99 | 100 | var data = ''; 101 | dataStream.on('data', chunk => (data += chunk)); 102 | dataStream.on('end', () => resolve(data)); 103 | dataStream.on('error', streamErr => reject(streamErr)); 104 | }); 105 | }); 106 | } 107 | 108 | // Get file content in JSON. 109 | export async function getJSONFileContents (file) { 110 | const result = await getFileContents(file); 111 | return JSON.parse(result); 112 | } 113 | 114 | // Put object 115 | // Proxy of putObject function, assuming the bucket. 116 | export function putFileStream (file, stream) { 117 | return putObject(bucket, file, stream); 118 | } 119 | 120 | // Put file 121 | // Proxy of putObjectFromFile function, assuming the bucket. 122 | export function putFile (name, filepath) { 123 | return putObjectFromFile(bucket, name, filepath); 124 | } 125 | 126 | // List files 127 | // Proxy of listObjects function, assuming the bucket. 128 | export function listFiles (namePrefix) { 129 | return listObjects(bucket, namePrefix); 130 | } 131 | 132 | // Put directory 133 | export async function putDirectory (sourceDir, destDir) { 134 | let files = await getLocalFilesInDir(sourceDir); 135 | return Promise.map(files, file => { 136 | let newName = file.replace(sourceDir, destDir); 137 | return putFile(newName, file); 138 | }, { concurrency: 10 }); 139 | } 140 | 141 | // Local file operations.
142 | 143 | export function removeLocalFile (path, quiet = false) { 144 | return new Promise((resolve, reject) => { 145 | fs.unlink(path, err => { 146 | if (err && !quiet) { 147 | return reject(err); 148 | } 149 | return resolve(); 150 | }); 151 | }); 152 | } 153 | 154 | export async function getLocalFileContents (path) { 155 | const data = await readFile(path, 'utf8'); 156 | 157 | // https://github.com/sindresorhus/strip-bom 158 | // Catches EFBBBF (UTF-8 BOM) because the buffer-to-string 159 | // conversion translates it to FEFF (UTF-16 BOM) 160 | return data.charCodeAt(0) === 0xFEFF ? data.slice(1) : data; 161 | } 162 | 163 | export async function getLocalJSONFileContents (path) { 164 | const result = await getLocalFileContents(path); 165 | return JSON.parse(result); 166 | } 167 | 168 | export async function getLocalFilesInDir (dir) { 169 | const files = await fs.readdir(dir); 170 | 171 | return Promise.reduce(files, async (acc, file) => { 172 | const name = dir + '/' + file; 173 | const stats = await fs.stat(name); 174 | 175 | return stats.isDirectory() 176 | ? acc.concat(await getLocalFilesInDir(name)) 177 | : acc.concat(name); 178 | }, []); 179 | } 180 | 181 | export function writeFileStreamPromise (stream, path) { 182 | return new Promise((resolve, reject) => { 183 | const writeStream = fs.createWriteStream(path); 184 | writeStream.on('error', err => reject(err)); 185 | writeStream.on('finish', () => resolve(path)); 186 | stream.pipe(writeStream); 187 | }); 188 | } 189 | -------------------------------------------------------------------------------- /app/services/export-road-network/export-road-network.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import path from 'path'; 3 | import obj2osm from 'obj2osm'; 4 | import osmP2PApi from 'osm-p2p-server/api/index'; 5 | import through2 from 'through2'; 6 | import osmtogeojson from 'osmtogeojson'; 7 | 8 | import config from '../../config'; 9 | import { getDatabase } from '../rra-osm-p2p'; 10 | import db from '../../db/'; 11 | import { putFileStream, removeFile } from '../../s3/utils'; 12 | import Operation from '../../utils/operation'; 13 | import AppLogger from '../../utils/app-logger'; 14 | 15 | const DEBUG = config.debug; 16 | let appLogger = AppLogger({ output: DEBUG }); 17 | let logger; 18 | 19 | process.on('message', function (e) { 20 | // Capture all the errors. 21 | try { 22 | logger = appLogger.group(`p${e.projId} s${e.scId} exp-rn`); 23 | logger.log('init'); 24 | e.callback = (err) => { 25 | if (err) return process.exit(1); 26 | else process.exit(0); 27 | }; 28 | exportRoadNetwork(e); 29 | } catch (err) { 30 | process.send({type: 'error', data: err.message, stack: err.stack}); 31 | throw err; 32 | } 33 | }); 34 | 35 | // The export road network script is set up so that it runs on a different 36 | // node process using fork. This allows us to offload work from the main 37 | // server, avoiding blocking operations. 38 | 39 | /** 40 | * Exports the road network from the osm-p2p-db and converts it to osm 41 | * format to be consumed by osrm. The resulting data is uploaded directly 42 | * to the s3 bucket. 43 | * 44 | * @param {object} e Data. 45 | * e.opId Operation Id. It has to be already started. 46 | * e.projId Project Id. 47 | * e.scId Scenario Id.
48 | * e.callback 49 | */ 50 | export function exportRoadNetwork (e) { 51 | const {opId, projId, scId, callback} = e; 52 | 53 | let op = new Operation(db); 54 | op.loadById(opId) 55 | .then(op => op.log('road-network', {message: 'Updating road network and pois'})) 56 | // Load scenario poi types. 57 | .then(() => db('scenarios_files') 58 | .select('subtype') 59 | .where('type', 'poi') 60 | .where('project_id', projId) 61 | .where('scenario_id', scId) 62 | .then(types => types.map(o => o.subtype)) 63 | ) 64 | .then(poiTypes => { 65 | const bbox = [-180, -90, 180, 90]; 66 | const toOsmOptions = { 67 | bounds: {minlon: bbox[0], minlat: bbox[1], maxlon: bbox[2], maxlat: bbox[3]} 68 | }; 69 | const osmDb = getDatabase(projId, scId); 70 | const formatTransform = obj2osm(toOsmOptions); 71 | 72 | formatTransform.on('error', (err) => { 73 | throw err; 74 | }); 75 | 76 | logger && logger.log('Exporting data from osm-p2p'); 77 | 78 | let stream = osmP2PApi(osmDb).getMap(bbox, {order: 'type'}) 79 | .pipe(processOSMP2PExport()); 80 | 81 | // Extract the POI into a promise and continue with the road network. 82 | let splitting = collectPOIs(stream, poiTypes); 83 | 84 | stream = splitting.stream.pipe(formatTransform); 85 | 86 | function processRN () { 87 | const fileName = `road-network_${Date.now()}`; 88 | const filePath = `scenario-${scId}/${fileName}`; 89 | 90 | return putFileStream(filePath, stream) 91 | // Get previous file. 92 | .then(() => db('scenarios_files') 93 | .select('path') 94 | .where('type', 'road-network') 95 | .where('project_id', projId) 96 | .where('scenario_id', scId) 97 | .first() 98 | ) 99 | // Delete from storage. 100 | .then(file => removeFile(file.path)) 101 | // Add entry to the database 102 | .then(() => db('scenarios_files') 103 | .update({ 104 | name: fileName, 105 | path: filePath, 106 | updated_at: (new Date()) 107 | }) 108 | .where('type', 'road-network') 109 | .where('project_id', projId) 110 | .where('scenario_id', scId) 111 | ); 112 | } 113 | 114 | function processPOI () { 115 | return splitting.deferred 116 | // Convert to Feature Collection from Overpass style nodes. 117 | .then(data => { 118 | let fc = osmtogeojson({elements: data}); 119 | // Group features by its ram_poi_type. 120 | let groups = fc.features.reduce((acc, feat) => { 121 | let type = feat.properties.ram_poi_type; 122 | if (!acc[type]) { 123 | acc[type] = { 124 | type: 'FeatureCollection', 125 | features: [] 126 | }; 127 | } 128 | acc[type].features.push(feat); 129 | return acc; 130 | }, {}); 131 | 132 | return groups; 133 | }) 134 | .then(groups => Promise.all(Object.keys(groups).map(key => { 135 | const fileName = `poi_${key}_${Date.now()}`; 136 | const filePath = `scenario-${scId}/${fileName}`; 137 | 138 | let data = JSON.stringify(groups[key]); 139 | 140 | return putFileStream(filePath, data) 141 | // Get previous file. 142 | .then(() => db('scenarios_files') 143 | .select('id', 'path') 144 | .where('type', 'poi') 145 | .where('subtype', key) 146 | .where('project_id', projId) 147 | .where('scenario_id', scId) 148 | .first() 149 | ) 150 | // Delete from storage. 
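// Note: a previous file row is assumed to always exist at this point;
// `file` would be undefined (and the removal would fail) otherwise.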
151 | .then(file => removeFile(file.path) 152 | .then(() => file.id) 153 | ) 154 | // Add entry to the database 155 | .then(id => db('scenarios_files') 156 | .update({ 157 | name: fileName, 158 | path: filePath, 159 | updated_at: (new Date()) 160 | }) 161 | .where('type', 'poi') 162 | .where('id', id) 163 | .where('project_id', projId) 164 | .where('scenario_id', scId) 165 | ); 166 | }))); 167 | } 168 | 169 | return processRN() 170 | .then(() => processPOI()) 171 | .then(() => logger && logger.log('Exporting data from osm-p2p... done')); 172 | }) 173 | // Note: There's no need to close the osm-p2p-db because when the process 174 | // terminates the connection is automatically closed. 175 | .then(() => { 176 | logger && logger.log('process complete'); 177 | DEBUG && appLogger && appLogger.toFile(path.resolve(__dirname, `../../../export-road-network_p${projId}s${scId}.log`)); 178 | callback(); 179 | }) 180 | .catch(err => { 181 | logger && logger.log('error', err); 182 | DEBUG && appLogger && appLogger.toFile(path.resolve(__dirname, `../../../export-road-network_p${projId}s${scId}.log`)); 183 | return op.finish('error', {error: err.message}) 184 | .then(() => callback(err.message), () => callback(err.message)); 185 | }); 186 | } 187 | 188 | /** 189 | * Clean data exported from osm-p2p-db so it can be used by osrm. 190 | * Deletes attributes: version, timestamp, changeset. 191 | * Assigns new ids to nodes and ways. 192 | * Requires that nodes appear before ways. 193 | * 194 | * @return Stream transform function 195 | */ 196 | function processOSMP2PExport () { 197 | let c = 0; 198 | const newId = () => ++c; 199 | let ids = {}; 200 | 201 | return through2.obj((data, enc, cb) => { 202 | delete data.version; 203 | delete data.timestamp; 204 | delete data.changeset; 205 | 206 | if (!ids[data.id]) ids[data.id] = newId(); 207 | 208 | data.id = ids[data.id]; 209 | 210 | if (data.nodes) data.nodes = data.nodes.map(n => ids[n]); 211 | 212 | cb(null, data); 213 | }); 214 | } 215 | 216 | function collectPOIs (stream, poiTypes) { 217 | let rn = []; 218 | let pois = []; 219 | let nodeStack = {}; 220 | 221 | // Create a sort of deferred. 222 | // This promise will collect the POI and return them for 223 | // later processing. 224 | let _resolve; 225 | const deferred = new Promise((resolve) => { 226 | _resolve = resolve; 227 | }); 228 | 229 | let dbgSkipped = 0; 230 | 231 | const write = (data, enc, next) => { 232 | if (data.type === 'node') { 233 | if (data.tags && data.tags.amenity) { 234 | // Discard nodes with ram_poi_type different than what was uploaded. 235 | if (data.tags.ram_poi_type && poiTypes.indexOf(data.tags.ram_poi_type) !== -1) { 236 | pois.push(data); 237 | } else { 238 | dbgSkipped++; 239 | } 240 | } else { 241 | nodeStack[data.id] = data; 242 | } 243 | } else if (data.type === 'way') { 244 | if (data.tags && data.tags.amenity) { 245 | // Discard ways with ram_poi_type different than what was uploaded. 
246 | if (data.tags.ram_poi_type && poiTypes.indexOf(data.tags.ram_poi_type) !== -1) { 247 | pois.push(data); 248 | data.nodes.forEach(n => { 249 | if (nodeStack[n]) { 250 | pois.push(nodeStack[n]); 251 | delete nodeStack[n]; 252 | } 253 | }); 254 | } else { 255 | dbgSkipped++; 256 | } 257 | } else { 258 | rn.push(data); 259 | data.nodes.forEach(n => { 260 | if (nodeStack[n]) { 261 | rn.push(nodeStack[n]); 262 | delete nodeStack[n]; 263 | } 264 | }); 265 | } 266 | } 267 | next(); 268 | }; 269 | 270 | const end = function (next) { 271 | DEBUG && console.log('collectPOIs', 'missing/invalid ram_poi_type', dbgSkipped); 272 | // Sort nodes before ways, as the output format requires. 273 | pois.sort((a, b) => (a.type === 'node' ? 0 : 1) - (b.type === 'node' ? 0 : 1)); 274 | setImmediate(() => _resolve(pois)); 275 | 276 | rn.sort((a, b) => (a.type === 'node' ? 0 : 1) - (b.type === 'node' ? 0 : 1)); 277 | rn.forEach(o => this.push(o)); 278 | next(); 279 | }; 280 | 281 | stream = stream.pipe(through2.obj(write, end)); 282 | return {stream, deferred}; 283 | } 284 | -------------------------------------------------------------------------------- /app/services/export-road-network/index.js: -------------------------------------------------------------------------------- 1 | // only ES5 is allowed in this file 2 | require('babel-register')({ 3 | presets: [ 'es2015' ] 4 | }); 5 | 6 | // load the script 7 | require('./export-road-network'); 8 | 9 | // 10 | // The export road network script is set up so that it runs on a different node 11 | // process using fork. This allows us to offload work from the main server, 12 | // avoiding blocking operations. 13 | // 14 | -------------------------------------------------------------------------------- /app/services/plugins.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import jwksRsa from 'jwks-rsa'; 3 | import config from '../config'; 4 | 5 | module.exports = function (hapiServer, cb) { 6 | hapiServer.register(require('hapi-auth-jwt2'), err => { 7 | if (err) return cb(err); 8 | 9 | if (config.auth && config.auth.strategy === 'jwt') { 10 | hapiServer.auth.strategy('jwt', 'jwt', true, { 11 | complete: true, 12 | key: jwksRsa.hapiJwt2Key({ 13 | cache: true, 14 | rateLimit: true, 15 | jwksRequestsPerMinute: 5, 16 | jwksUri: `${config.auth.issuer}.well-known/jwks.json` 17 | }), 18 | verifyOptions: { 19 | audience: config.auth.audience, 20 | issuer: config.auth.issuer, 21 | algorithms: ['RS256'] 22 | }, 23 | validateFunc: (decoded, request, callback) => { 24 | if (decoded && decoded.sub) { 25 | return callback(null, true); 26 | } 27 | return callback(null, false); 28 | } 29 | }); 30 | } 31 | hapiServer.register([ 32 | // RRA OSM P2P Server 33 | { 34 | register: require('../plugins/rra-osm-p2p-server') 35 | }, 36 | 37 | // Good console. 38 | { 39 | register: require('good'), 40 | options: { 41 | reporters: { 42 | console: [ 43 | { 44 | module: 'good-squeeze', 45 | name: 'Squeeze', 46 | args: [{ 47 | response: '*', 48 | log: '*' 49 | }] 50 | }, 51 | { 52 | module: 'good-console' 53 | }, 'stdout'] 54 | } 55 | } 56 | }, 57 | 58 | // Route loader 59 | { 60 | register: require('hapi-router'), 61 | options: { 62 | routes: 'app/routes/*.js' 63 | } 64 | }, 65 | 66 | // Pagination 67 | { 68 | register: require('../plugins/hapi-paginate'), 69 | options: { 70 | limit: 100, 71 | routes: [ 72 | {route: '/projects', methods: 'GET'}, 73 | {route: '/projects/{projId}/scenarios', methods: 'GET'}, 74 | {route: '/projects/{projId}/scenarios/{scId}/results/raw', methods: 'GET'} 75 | ] 76 | } 77 | } 78 | // Plugin registration done.
79 | ], (err) => cb(err)); 80 | }); 81 | }; 82 | -------------------------------------------------------------------------------- /app/services/project-setup/admin-bounds.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import bbox from '@turf/bbox'; 3 | import _ from 'lodash'; 4 | import Promise from 'bluebird'; 5 | 6 | import db from '../../db/'; 7 | import { getPropInsensitive } from '../../utils/utils'; 8 | import { createAdminBoundsVT } from '../../utils/vector-tiles'; 9 | import { 10 | getJSONFileContents, 11 | putFileStream, 12 | removeFile 13 | } from '../../s3/utils'; 14 | import { downloadWbCatalogProjectFile } from '../../utils/wbcatalog'; 15 | 16 | /** 17 | * Processes the Admin boundaries depending on the source. 18 | * 19 | * Admin bounds 20 | * Catalog: 21 | * - Download from server 22 | * - Cleanup and store in DB 23 | * - Create vector tiles 24 | * File: 25 | * - Cleanup and store in DB 26 | * - Create vector tiles 27 | * 28 | * @param {number} projId Project id 29 | * @param {number} scId Scenario id 30 | * @param {object} options Additional parameters 31 | * @param {object} options.op Operation instance 32 | * @param {object} options.emitter Emitter to coordinate execution 33 | * @param {object} options.logger Output logger 34 | */ 35 | export default async function (projId, scId, {op, emitter, logger}) { 36 | logger && logger.log('process admin areas'); 37 | await op.log('process:admin-bounds', {message: 'Processing admin areas'}); 38 | 39 | const source = await db('projects_source_data') 40 | .select('*') 41 | .where('project_id', projId) 42 | .where('name', 'admin-bounds') 43 | .first(); 44 | 45 | if (source.type === 'wbcatalog') { 46 | await downloadWbCatalogProjectFile(projId, source, logger); 47 | } 48 | 49 | // The remaining process is the same for both sources. 50 | // Get the file data. 51 | const adminBoundsData = await db('projects_files') 52 | .select('*') 53 | .where('project_id', projId) 54 | .where('type', 'admin-bounds') 55 | .first(); 56 | 57 | const adminBoundsFc = await getJSONFileContents(adminBoundsData.path); 58 | 59 | if (!adminBoundsFc.features) { 60 | throw new Error('Invalid administrative boundaries file'); 61 | } 62 | 63 | const filteredAA = { 64 | 'type': 'FeatureCollection', 65 | 'features': adminBoundsFc.features 66 | .filter(o => !!o.properties[getPropInsensitive(o.properties, 'name')] && o.geometry.type !== 'Point') 67 | .map(o => { 68 | // Normalize name prop. 69 | o.properties.name = o.properties[getPropInsensitive(o.properties, 'name')]; 70 | return o; 71 | }) 72 | }; 73 | 74 | // Clean the tables so any remnants of previous attempts are removed. 75 | // This avoids primary keys collisions. 76 | await Promise.all([ 77 | db('projects_aa') 78 | .where('project_id', projId) 79 | .del(), 80 | db('scenarios_settings') 81 | .where('scenario_id', scId) 82 | .where('key', 'admin_areas') 83 | .del() 84 | ]); 85 | 86 | // Populate DB with admin areas. 
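// The transaction below does three things atomically: stores the project
// bbox, batch-inserts the admin areas, and seeds the scenario's (empty)
// admin area selection.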
87 | await db.transaction(function (trx) { 88 | let adminAreas = _(filteredAA.features) 89 | .sortBy(o => _.kebabCase(o.properties.name)) 90 | .map(o => { 91 | return { 92 | name: o.properties.name, 93 | type: o.properties.type || 'Admin Area', 94 | geometry: JSON.stringify(o.geometry.coordinates), 95 | project_id: projId 96 | }; 97 | }) 98 | .value(); 99 | 100 | let adminAreasBbox = bbox(filteredAA); 101 | 102 | return Promise.all([ 103 | trx('projects') 104 | .update({ 105 | bbox: JSON.stringify(adminAreasBbox), 106 | updated_at: (new Date()) 107 | }) 108 | .where('id', projId), 109 | 110 | trx.batchInsert('projects_aa', adminAreas) 111 | .returning('id'), 112 | 113 | trx('scenarios_settings') 114 | .insert({ 115 | scenario_id: scId, 116 | key: 'admin_areas', 117 | value: '[]', 118 | created_at: (new Date()), 119 | updated_at: (new Date()) 120 | }) 121 | // The empty selection is updated later through the scenario PATCH endpoint. 122 | ]); 123 | }); 124 | 125 | // Update the admin bounds file with the filtered features. 126 | // A clean file is needed for the VT generation. 127 | const fc = { 128 | 'type': 'FeatureCollection', 129 | 'features': filteredAA.features.map(o => ({ 130 | type: 'Feature', 131 | properties: { 132 | name: o.properties.name, 133 | type: o.properties.type || 'admin-area', 134 | project_id: projId 135 | }, 136 | geometry: o.geometry 137 | })) 138 | }; 139 | 140 | const fileName = `admin-bounds_${Date.now()}`; 141 | const filePath = `project-${projId}/${fileName}`; 142 | 143 | // Get the current file so it can be removed after the new one is stored. 144 | const fileData = await db('projects_files') 145 | .select('*') 146 | .where('project_id', projId) 147 | .where('type', 'admin-bounds') 148 | .first(); 149 | 150 | await putFileStream(filePath, JSON.stringify(fc)); 151 | 152 | await db('projects_files') 153 | .update({ 154 | name: fileName, 155 | path: filePath, 156 | updated_at: (new Date()) 157 | }) 158 | .where('id', fileData.id); 159 | 160 | await removeFile(fileData.path); 161 | 162 | // Emit data for other processes to use. 163 | emitter.emit('admin-bounds:data', adminBoundsFc); 164 | 165 | if (process.env.DS_ENV !== 'test') { 166 | await createAdminBoundsVT(projId, scId, op, filePath).promise; 167 | } 168 | } 169 | -------------------------------------------------------------------------------- /app/services/project-setup/common.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Promise from 'bluebird'; 3 | import EventEmitter from 'events'; 4 | 5 | /** 6 | * Special emitter that tracks the events that already happened and resolves 7 | * them immediately if that's the case. Basically it listens to the past and 8 | * to the future. 9 | */ 10 | export class ProjectEventEmitter extends EventEmitter { 11 | constructor () { 12 | super(); 13 | this.emitted = {}; 14 | } 15 | 16 | /** 17 | * Overrides the emit method to store the events emitted and their results. 18 | */ 19 | emit (...args) { 20 | // Store the results of emitted events. 21 | const [event, ...results] = args; 22 | this.emitted[event] = results; 23 | super.emit(...args); 24 | } 25 | 26 | /** 27 | * Resolves a promise once all the events fired once. 28 | * The promise is resolved with an object keyed by the event name containing 29 | * the result of each event. The results of each event will be an array, even
31 | * As soon as the method is called, it checks if the events were already emitted 32 | * and if so, resolves them immediately, preventing "waiting for Godot". 33 | * 34 | * @example 35 | * waitForEvents('event1', 'event2') 36 | * { 37 | * 'event1': result, 38 | * 'event2': result2 39 | * } 40 | * 41 | * Note: 42 | * The event listeners are removed once triggered but non-triggered events 43 | * will persist, possibly causing unwanted side effects. If there's no need 44 | * to wait for events anymore, they have to be removed manually. 45 | * 46 | * Note2: 47 | * For the scope of this script the above is not an issue because all the 48 | * events are cleared once the process exits (on error or success), therefore 49 | * there's no risk that lingering events contaminate different executions. 50 | * 51 | * @param {...string} events Events to listen for 52 | * 53 | * @returns promise 54 | */ 55 | async waitForEvents (...events) { 56 | return new Promise((resolve) => { 57 | let completed = 0; 58 | let results = {}; 59 | events.forEach(e => { 60 | // Was the event emitted already? 61 | if (this.emitted[e]) { 62 | results[e] = this.emitted[e]; 63 | if (++completed === events.length) resolve(results); 64 | } else { 65 | // Set up a once listener that gets all the passed arguments. 66 | this.once(e, (...args) => { 67 | results[e] = args; 68 | if (++completed === events.length) resolve(results); 69 | }); 70 | } 71 | }); 72 | }); 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /app/services/project-setup/index.js: -------------------------------------------------------------------------------- 1 | // only ES5 is allowed in this file 2 | require('babel-register')({ 3 | presets: [ 'es2015' ] 4 | }); 5 | 6 | // load the script 7 | require('./project-setup'); 8 | 9 | // 10 | // The project setup script is set up so that it runs on a different node 11 | // process using fork. This allows us to offload work from the main server, 12 | // avoiding blocking operations. 13 | // 14 | -------------------------------------------------------------------------------- /app/services/project-setup/origins.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import centerOfMass from '@turf/center-of-mass'; 3 | 4 | import db from '../../db/'; 5 | import { getPropInsensitive } from '../../utils/utils'; 6 | import { 7 | getJSONFileContents 8 | } from '../../s3/utils'; 9 | import { downloadWbCatalogProjectFile } from '../../utils/wbcatalog'; 10 | 11 | /** 12 | * Processes the Origins depending on the source.
13 | * 14 | * Origins 15 | * Catalog: 16 | * - Download from server 17 | * - Cleanup and store in DB 18 | * File: 19 | * - Cleanup and store in DB 20 | * 21 | * @param {number} projId Project id 22 | * @param {object} options Additional parameters 23 | * @param {object} options.op Operation instance 24 | * @param {object} options.logger Output logger 25 | */ 26 | export default async function (projId, {op, logger}) { 27 | logger && logger.log('process origins'); 28 | await op.log('process:origins', {message: 'Processing origins'}); 29 | 30 | const source = await db('projects_source_data') 31 | .select('*') 32 | .where('project_id', projId) 33 | .where('name', 'origins') 34 | .first(); 35 | 36 | let originsData; 37 | if (source.type === 'wbcatalog') { 38 | originsData = await downloadWbCatalogProjectFile(projId, source, logger); 39 | } 40 | 41 | if (source.type === 'file') { 42 | originsData = await db('projects_files') 43 | .select('*') 44 | .where('project_id', projId) 45 | .where('type', 'origins') 46 | .first(); 47 | } 48 | 49 | // Clean the tables so any remnants of previous attempts are removed. 50 | // This avoids primary keys collisions. 51 | await db('projects_origins') 52 | .where('project_id', projId) 53 | .del(); 54 | 55 | const indicators = originsData.data.indicators; 56 | const neededProps = indicators.map(o => o.key); 57 | 58 | const originsFC = await getJSONFileContents(originsData.path); 59 | 60 | logger && logger.log('origins before filter', originsFC.features.length); 61 | const features = originsFC.features.filter(feat => { 62 | const props = Object.keys(feat.properties); 63 | return neededProps.every(o => props.indexOf(o) !== -1); 64 | }); 65 | logger && logger.log('origins after filter', features.length); 66 | 67 | const originsIndicators = []; 68 | const origins = features.map(feat => { 69 | const coordinates = feat.geometry.type === 'Point' 70 | ? feat.geometry.coordinates 71 | : centerOfMass(feat).geometry.coordinates; 72 | 73 | // Will be flattened later. 74 | // The array is constructed in this way so we can match the index of the 75 | // results array and attribute the correct id. 76 | const featureIndicators = indicators.map(ind => ({ 77 | key: ind.key, 78 | label: ind.label, 79 | value: parseInt(feat.properties[ind.key]) 80 | })); 81 | originsIndicators.push(featureIndicators); 82 | 83 | return { 84 | project_id: projId, 85 | name: feat.properties[getPropInsensitive(feat.properties, 'name')] || 'N/A', 86 | coordinates: JSON.stringify(coordinates) 87 | }; 88 | }); 89 | 90 | await db.transaction(async function (trx) { 91 | const ids = await trx.batchInsert('projects_origins', origins) 92 | .returning('id'); 93 | 94 | // Add ids to the originsIndicators and flatten the array in the process. 
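// e.g. [[{key: 'population', value: 50}], [{key: 'population', value: 80}]]
// with ids [5, 6] becomes
// [{key: 'population', value: 50, origin_id: 5}, {key: 'population', value: 80, origin_id: 6}]
// (hypothetical values; label omitted for brevity).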
95 | let flat = []; 96 | originsIndicators.forEach((resInd, resIdx) => { 97 | resInd.forEach(ind => { 98 | ind.origin_id = ids[resIdx]; 99 | flat.push(ind); 100 | }); 101 | }); 102 | 103 | await trx.batchInsert('projects_origins_indicators', flat); 104 | }); 105 | } 106 | -------------------------------------------------------------------------------- /app/services/project-setup/poi.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import Promise from 'bluebird'; 3 | import centerOfMass from '@turf/center-of-mass'; 4 | 5 | import db from '../../db/'; 6 | import { 7 | getJSONFileContents, 8 | putFileStream 9 | } from '../../s3/utils'; 10 | import { importPOI } from '../rra-osm-p2p'; 11 | import * as overpass from '../../utils/overpass'; 12 | import { downloadWbCatalogPoiFile } from '../../utils/wbcatalog'; 13 | 14 | /** 15 | * Processes the POIs depending on the source. 16 | * 17 | * Points of interest: 18 | * Catalog: 19 | * - Download from server 20 | * - Import into osm-p2p ** 21 | * OSM: 22 | * - Import from overpass * 23 | * - Import into osm-p2p ** 24 | * File: 25 | * - Import into osm-p2p ** 26 | * 27 | * @param {number} projId Project id 28 | * @param {number} scId Scenario id 29 | * @param {object} options Additional parameters 30 | * @param {object} options.op Operation instance 31 | * @param {object} options.emitter Emitter to coordinate execution 32 | * @param {object} options.logger Output logger 33 | * @param {object} options.appLogger Main output logger to create additional 34 | * logger groups 35 | */ 36 | export default async function (projId, scId, {op, emitter, logger, appLogger}) { 37 | logger && logger.log('process points of interest'); 38 | await op.log('process:poi', {message: 'Processing points of interest'}); 39 | 40 | const source = await db('scenarios_source_data') 41 | .select('*') 42 | .where('scenario_id', scId) 43 | .where('name', 'poi') 44 | .first(); 45 | 46 | // Contains the info about the files as stored in the database. 47 | let fileData; 48 | // Contains the poi data keyed by POI type. 49 | let poisData = {}; 50 | if (source.type === 'wbcatalog') { 51 | fileData = await downloadWbCatalogPoiFile(projId, scId, source, logger); 52 | } 53 | 54 | if (source.type === 'file') { 55 | fileData = await db('scenarios_files') 56 | .select('*') 57 | .where('project_id', projId) 58 | .where('scenario_id', scId) 59 | .where('type', 'poi'); 60 | } 61 | 62 | // Load the data into poisData, keying it by type. 63 | if (source.type === 'wbcatalog' || source.type === 'file') { 64 | const filesContent = await Promise.map(fileData, file => getJSONFileContents(file.path)); 65 | fileData.forEach((f, idx) => { poisData[f.subtype] = filesContent[idx]; }); 66 | } 67 | 68 | if (source.type === 'osm') { 69 | logger && logger.log('poi is waiting for events (admin-bounds:data)...'); 70 | // If importing from OSM we need to wait for the admin bounds. 71 | const result = await emitter.waitForEvents('admin-bounds:data'); 72 | logger && logger.log('poi is waiting for events (admin-bounds:data)... done'); 73 | const [adminBoundsFc] = result['admin-bounds:data']; 74 | poisData = await importOSMPOIs(projId, scId, overpass.fcBbox(adminBoundsFc), source.data.osmPoiTypes, op, logger); 75 | } 76 | 77 | // Wait for the road network to know if editing is enabled or not.
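// waitForEvents resolves immediately if road-network.js already emitted the
// event (see ProjectEventEmitter in ./common.js).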
78 | logger && logger.log('poi is waiting for events (road-network:active-editing)...'); 79 | const result = await emitter.waitForEvents('road-network:active-editing'); 80 | logger && logger.log('poi is waiting for events (road-network:active-editing)... done'); 81 | const [allowImport] = result['road-network:active-editing']; 82 | 83 | if (allowImport) { 84 | // Merge all feature collection together. 85 | // Add a property to keep track of the poi type. 86 | let fc = { 87 | type: 'FeatureCollection', 88 | features: Object.keys(poisData).reduce((acc, key) => { 89 | const features = poisData[key].features.map(feat => { 90 | return { 91 | ...feat, 92 | properties: { 93 | ...feat.properties, 94 | ram_poi_type: key 95 | }, 96 | geometry: feat.geometry.type !== 'Point' 97 | ? centerOfMass(feat).geometry 98 | : feat.geometry 99 | }; 100 | }); 101 | return acc.concat(features); 102 | }, []) 103 | }; 104 | 105 | const poiLogger = appLogger.group(`p${projId} s${scId} poi import`); 106 | poiLogger && poiLogger.log('process poi'); 107 | return importPOI(projId, scId, op, fc, poiLogger); 108 | } 109 | } 110 | 111 | async function importOSMPOIs (projId, scId, bbox, poiTypes, op, logger) { 112 | logger && logger.log('Importing pois from overpass for bbox (S,W,N,E):', bbox); 113 | logger && logger.log('POI types:', poiTypes); 114 | 115 | await op.log('process:poi', {message: 'Importing poi from OSM'}); 116 | 117 | // Clean the tables so any remnants of previous attempts are removed. 118 | // This avoids primary keys collisions and duplication. 119 | await db('scenarios_files') 120 | .where('project_id', projId) 121 | .where('scenario_id', scId) 122 | .where('type', 'poi') 123 | .del(); 124 | 125 | let osmGeoJSON; 126 | try { 127 | osmGeoJSON = await overpass.importPOI(bbox, poiTypes); 128 | } catch (err) { 129 | // Just to log error 130 | logger && logger.log('Error importing from overpass', err.message); 131 | throw err; 132 | } 133 | 134 | logger && logger.log('Got POIS. Saving to S3 and db'); 135 | 136 | let dbInsertions = []; 137 | let fileUploadPromises = []; 138 | let emptyPOI = []; 139 | 140 | Object.keys(osmGeoJSON).forEach(poiType => { 141 | // Filter out pois without anything 142 | if (osmGeoJSON[poiType].features.length) { 143 | const fileName = `poi_${poiType}_${Date.now()}`; 144 | const filePath = `scenario-${scId}/${fileName}`; 145 | 146 | // Prepare for db insertion. 147 | dbInsertions.push({ 148 | name: fileName, 149 | type: 'poi', 150 | subtype: poiType, 151 | path: filePath, 152 | project_id: projId, 153 | scenario_id: scId, 154 | created_at: (new Date()), 155 | updated_at: (new Date()) 156 | }); 157 | 158 | // Save each poi type to S3. 159 | // Store as function to avoid immediate execution. 160 | fileUploadPromises.push(() => putFileStream(filePath, JSON.stringify(osmGeoJSON[poiType]))); 161 | } else { 162 | emptyPOI.push(poiType); 163 | } 164 | }); 165 | 166 | if (emptyPOI.length) { 167 | logger && logger.log(`No POI were returned for [${emptyPOI.join(', ')}]`); 168 | throw new Error(`No POI were returned for [${emptyPOI.join(', ')}]`); 169 | } 170 | 171 | // Save to database. 172 | const promises = fileUploadPromises.concat(() => db.batchInsert('scenarios_files', dbInsertions)); 173 | 174 | // Using promise.map to take advantage of concurrency. 
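// Each entry is a thunk (a function wrapping the real call), so the uploads
// and the db insert only start when Promise.map schedules them.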
175 | await Promise.map(promises, p => p(), {concurrency: 3}); 176 | 177 | return osmGeoJSON; 178 | } 179 | -------------------------------------------------------------------------------- /app/services/project-setup/profile.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import db from '../../db/'; 3 | import { 4 | putFileStream 5 | } from '../../s3/utils'; 6 | import { downloadWbCatalogProjectFile } from '../../utils/wbcatalog'; 7 | import { getOSRMProfileDefaultSpeedSettings, renderProfileFile } from '../../utils/osrm-profile'; 8 | 9 | /** 10 | * Processes the Profile depending on the source. 11 | * 12 | * Profile: 13 | * Catalog: 14 | * - Download from server 15 | * Default: 16 | * - Copy default profile 17 | * File: 18 | * - No action 19 | * 20 | * @param {number} projId Project id 21 | * @param {object} options Additional parameters 22 | * @param {object} options.logger Output logger 23 | */ 24 | export default async function (projId, {logger}) { 25 | logger && logger.log('process profile'); 26 | 27 | const source = await db('projects_source_data') 28 | .select('*') 29 | .where('project_id', projId) 30 | .where('name', 'profile') 31 | .first(); 32 | 33 | if (source.type === 'wbcatalog') { 34 | return downloadWbCatalogProjectFile(projId, source, logger); 35 | } 36 | 37 | if (source.type === 'default') { 38 | // Generate default profile. 39 | const fileName = `profile_${Date.now()}`; 40 | const filePath = `project-${projId}/${fileName}`; 41 | 42 | const defaultSettings = getOSRMProfileDefaultSpeedSettings(); 43 | 44 | // Update source data. 45 | await db('projects_source_data') 46 | .update({ 47 | data: { settings: defaultSettings } 48 | }) 49 | .where('id', source.id); 50 | 51 | const profile = renderProfileFile(defaultSettings); 52 | await putFileStream(filePath, profile); 53 | return db('projects_files') 54 | .insert({ 55 | name: fileName, 56 | type: 'profile', 57 | path: filePath, 58 | project_id: projId, 59 | created_at: (new Date()), 60 | updated_at: (new Date()) 61 | }); 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /app/services/project-setup/project-setup.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import path from 'path'; 3 | import Promise from 'bluebird'; 4 | 5 | import config from '../../config'; 6 | import db from '../../db/'; 7 | import Operation from '../../utils/operation'; 8 | import AppLogger from '../../utils/app-logger'; 9 | 10 | import processAdminBounds from './admin-bounds'; 11 | import processRoadNetwork from './road-network'; 12 | import processProfile from './profile'; 13 | import processOrigins from './origins'; 14 | import processPoi from './poi'; 15 | import { ProjectEventEmitter } from './common'; 16 | 17 | const DEBUG = config.debug; 18 | let appLogger = AppLogger({ output: DEBUG }); 19 | let logger; 20 | 21 | // Emitter to manage execution order. 22 | const projectSetupEmitter = new ProjectEventEmitter(); 23 | 24 | process.on('message', function (e) { 25 | // Capture all the errors. 
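// Errors are reported to the parent process over the fork IPC channel
// before the worker is crashed by the rethrow.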
26 | try {
27 | logger = appLogger.group(`p${e.projId} s${e.scId} proj-setup`);
28 | logger.log('init');
29 | e.callback = (err) => {
30 | if (err) return process.exit(1);
31 | else process.exit(0);
32 | };
33 | concludeProjectSetup(e);
34 | } catch (err) {
35 | process.send({type: 'error', data: err.message, stack: err.stack});
36 | throw err;
37 | }
38 | });
39 | 
40 | // The project setup script is set up so that it runs on a different node
41 | // process using fork. This allows us to offload work from the main server,
42 | // avoiding blocking operations.
43 | 
44 | /**
45 | * Finishes the project setup by processing all the needed files.
46 | * The type of processing done to each file depends on the source and
47 | * different sources have different processing dependencies as outlined below:
48 | *
49 | * Road Network:
50 | * Catalog:
51 | * - Download from server
52 | * - Set editable setting
53 | * - Import into osm-p2p (depends on size)
54 | * - Create vector tiles
55 | * OSM:
56 | * - Import from overpass *
57 | * - Set editable setting
58 | * - Import into osm-p2p (depends on size)
59 | * - Create vector tiles
60 | * File:
61 | * - Set editable setting
62 | * - Import into osm-p2p (depends on size)
63 | * - Create vector tiles
64 | *
65 | * Profile:
66 | * Catalog:
67 | * - Download from server
68 | * Default:
69 | * - Copy default profile
70 | * File:
71 | * - No action
72 | *
73 | * Admin bounds
74 | * Catalog:
75 | * - Download from server
76 | * - Cleanup and store in DB
77 | * - Create vector tiles
78 | * File:
79 | * - Cleanup and store in DB
80 | * - Create vector tiles
81 | *
82 | * Origins
83 | * Catalog:
84 | * - Download from server
85 | * - Cleanup and store in DB
86 | * File:
87 | * - Cleanup and store in DB
88 | *
89 | * Points of interest:
90 | * Catalog:
91 | * - Download from server
92 | * - Import into osm-p2p **
93 | * OSM:
94 | * - Import from overpass *
95 | * - Import into osm-p2p **
96 | * File:
97 | * - Import into osm-p2p **
98 | *
99 | * Notes:
100 | * * Depends on the admin bounds bounding box
101 | * ** Depends on the RN editable setting
102 | *
103 | * Since the execution order depends a lot on the source, all the processing
104 | * is started simultaneously, but then the processes wait for each other using
105 | * events. Once a process reaches a point where it needs data from another,
106 | * it will trigger an emitter.waitForEvents(events...) call that will only
107 | * resolve once all the events have fired.
108 | *
109 | * @param {object} e Data.
110 | * e.opId Operation Id. It has to be already started.
111 | * e.projId Project Id.
112 | * e.scId Scenario Id.
113 | * e.callback
114 | */
115 | export async function concludeProjectSetup (e) {
116 | const {opId, projId, scId, callback} = e;
117 | 
118 | const op = new Operation(db);
119 | await op.loadById(opId);
120 | 
121 | try {
122 | await Promise.all([
123 | processAdminBounds(projId, scId, {op, emitter: projectSetupEmitter, logger}),
124 | processProfile(projId, {logger}),
125 | processOrigins(projId, {op, logger}),
126 | processRoadNetwork(projId, scId, {op, emitter: projectSetupEmitter, logger, appLogger}),
127 | processPoi(projId, scId, {op, emitter: projectSetupEmitter, logger, appLogger})
128 | ]);
129 | 
130 | // Update dates.
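// Both updates run in a single knex transaction so the project and its
// scenario flip to 'active' atomically: if either update fails, the
// transaction rolls back and neither row is changed.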
131 | await db.transaction(function (trx) {
132 | return Promise.all([
133 | trx('projects')
134 | .update({updated_at: (new Date()), status: 'active'})
135 | .where('id', projId),
136 | trx('scenarios')
137 | .update({updated_at: (new Date()), status: 'active'})
138 | .where('id', scId)
139 | ]);
140 | });
141 | 
142 | // Finish operation.
143 | await op.log('success', {message: 'Operation complete'});
144 | await op.finish();
145 | 
146 | logger && logger.log('process complete');
147 | DEBUG && appLogger && appLogger.toFile(path.resolve(__dirname, `../../../project-setup_p${projId}s${scId}.log`));
148 | callback();
149 | } catch (err) {
150 | console.log('err', err);
151 | logger && logger.log('error', err);
152 | DEBUG && appLogger && appLogger.toFile(path.resolve(__dirname, `../../../project-setup_p${projId}s${scId}.log`));
153 | try {
154 | await op.finish('error', {error: err.message});
155 | } catch (e) { /* no-action */ }
156 | callback(err);
157 | }
158 | }
159 | 
-------------------------------------------------------------------------------- /app/services/project-setup/road-network.js: --------------------------------------------------------------------------------
1 | 'use strict';
2 | 
3 | import config from '../../config';
4 | import db from '../../db/';
5 | import { setScenarioSetting } from '../../utils/utils';
6 | import { createRoadNetworkVT } from '../../utils/vector-tiles';
7 | import {
8 | getFileInfo,
9 | getFileContents,
10 | putFileStream
11 | } from '../../s3/utils';
12 | import { importRoadNetwork, removeDatabase } from '../rra-osm-p2p';
13 | import * as overpass from '../../utils/overpass';
14 | import { downloadWbCatalogScenarioFile } from '../../utils/wbcatalog';
15 | 
16 | /**
17 | * Processes the road network depending on the source.
18 | *
19 | * Road Network:
20 | * Catalog:
21 | * - Download from server
22 | * - Set editable setting
23 | * - Import into osm-p2p (depends on size)
24 | * - Create vector tiles
25 | * OSM:
26 | * - Import from overpass *
27 | * - Set editable setting
28 | * - Import into osm-p2p (depends on size)
29 | * - Create vector tiles
30 | * File:
31 | * - Set editable setting
32 | * - Import into osm-p2p (depends on size)
33 | * - Create vector tiles
34 | *
35 | * @param {number} projId Project id
36 | * @param {number} scId Scenario id
37 | * @param {object} options Additional parameters
38 | * @param {object} options.op Operation instance
39 | * @param {object} options.emitter Emitter to coordinate execution
40 | * @param {object} options.logger Output logger
41 | * @param {object} options.appLogger Main output logger to create additional
42 | * logger groups
43 | */
44 | export default async function (projId, scId, {op, emitter, logger, appLogger}) {
45 | logger && logger.log('process roadnetwork');
46 | await op.log('process:road-network', {message: 'Processing road network'});
47 | 
48 | const source = await db('scenarios_source_data')
49 | .select('*')
50 | .where('scenario_id', scId)
51 | .where('name', 'road-network')
52 | .first();
53 | 
54 | let fileData;
55 | if (source.type === 'wbcatalog') {
56 | fileData = await downloadWbCatalogScenarioFile(projId, scId, source, logger);
57 | }
58 | 
59 | if (source.type === 'osm') {
60 | logger && logger.log('road-network is waiting for events (admin-bounds:data)...');
61 | // If importing from OSM we need to wait for the admin bounds.
62 | const result = await emitter.waitForEvents('admin-bounds:data');
63 | logger && logger.log('road-network is waiting for events (admin-bounds:data)... 
done'); 64 | const [adminBoundsFc] = result['admin-bounds:data']; 65 | fileData = await importOSMRoadNetwork(projId, scId, overpass.fcBbox(adminBoundsFc), op, logger); 66 | } 67 | 68 | if (source.type === 'file') { 69 | fileData = await db('scenarios_files') 70 | .select('*') 71 | .where('project_id', projId) 72 | .where('scenario_id', scId) 73 | .where('type', 'road-network') 74 | .first(); 75 | } 76 | 77 | const fileInfo = await getFileInfo(fileData.path); 78 | 79 | // Remove the osm-p2p database. 80 | // Since the road network is handled before the pois it will take care 81 | // of doing the cleanup. 82 | await removeDatabase(projId, scId); 83 | 84 | // Disable road network editing if size over threshold. 85 | const allowImport = fileInfo.size < config.roadNetEditMax; 86 | await setScenarioSetting(db, scId, 'rn_active_editing', allowImport); 87 | 88 | if (allowImport) { 89 | const roadNetwork = await getFileContents(fileData.path); 90 | let rnLogger = appLogger.group(`p${projId} s${scId} rn import`); 91 | rnLogger && rnLogger.log('process road network'); 92 | await importRoadNetwork(projId, scId, op, roadNetwork, rnLogger); 93 | } 94 | 95 | // Emit after importing to avoid concurrency. 96 | emitter.emit('road-network:active-editing', allowImport); 97 | 98 | if (process.env.DS_ENV !== 'test') { 99 | await createRoadNetworkVT(projId, scId, op, fileData.path).promise; 100 | } 101 | } 102 | 103 | async function importOSMRoadNetwork (projId, scId, bbox, op, logger) { 104 | logger && logger.log('Importing road network from overpass for bbox (S,W,N,E):', bbox); 105 | 106 | await op.log('process:road-network', {message: 'Importing road network from OSM'}); 107 | 108 | // Clean the tables so any remnants of previous attempts are removed. 109 | // This avoids primary keys collisions and duplication. 110 | await db('scenarios_files') 111 | .where('project_id', projId) 112 | .where('scenario_id', scId) 113 | .where('type', 'road-network') 114 | .del(); 115 | 116 | let osmData; 117 | try { 118 | osmData = await overpass.importRoadNetwork(bbox); 119 | } catch (err) { 120 | // Just to log error 121 | logger && logger.log('Error importing from overpass', err.message); 122 | throw err; 123 | } 124 | 125 | logger && logger.log('Got road network. Saving to S3 and db'); 126 | // Insert file into DB. 
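// The record follows the same convention as the other scenario files (see
// the poi import above): a `road-network_<timestamp>` name stored under the
// `scenario-<id>/` prefix in the storage bucket.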
127 | let fileName = `road-network_${Date.now()}`; 128 | let filePath = `scenario-${scId}/${fileName}`; 129 | let data = { 130 | name: fileName, 131 | type: 'road-network', 132 | path: filePath, 133 | project_id: projId, 134 | scenario_id: scId, 135 | created_at: (new Date()), 136 | updated_at: (new Date()) 137 | }; 138 | 139 | await putFileStream(filePath, osmData); 140 | await db('scenarios_files').insert(data); 141 | return data; 142 | } 143 | -------------------------------------------------------------------------------- /app/services/rra-osm-p2p.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import fs from 'fs-extra'; 3 | import Promise from 'bluebird'; 4 | import os from 'os'; 5 | import cp from 'child_process'; 6 | import path from 'path'; 7 | import osmdb from 'osm-p2p'; 8 | import osmrouter from 'osm-p2p-server'; 9 | import importer from 'osm-p2p-import'; 10 | 11 | import config from '../config'; 12 | 13 | var dbConnections = {}; 14 | 15 | function getDatabaseName (projId, scId) { 16 | return `p${projId}s${scId}`; 17 | } 18 | 19 | function getDatabaseBaseDir () { 20 | return config.osmP2PDir; 21 | } 22 | 23 | export function getRouter (projId, scId) { 24 | return osmrouter(getDatabase(projId, scId)); 25 | } 26 | 27 | export function getDatabase (projId, scId) { 28 | let baseDir = getDatabaseBaseDir(); 29 | let dbName = getDatabaseName(projId, scId); 30 | 31 | // Create a connection if one is not found. 32 | if (!dbConnections[dbName]) { 33 | dbConnections[dbName] = osmdb(`${baseDir}/${dbName}`); 34 | } 35 | 36 | return dbConnections[dbName]; 37 | } 38 | 39 | export function closeDatabase (projId, scId) { 40 | return new Promise((resolve, reject) => { 41 | let dbName = getDatabaseName(projId, scId); 42 | let db = dbConnections[dbName]; 43 | 44 | // If there's no db stored means that no connection was open for this 45 | // db on the current process. 46 | if (!db) { 47 | return resolve(); 48 | } 49 | 50 | let pending = 3; 51 | const done = () => { 52 | if (--pending === 0) { 53 | delete dbConnections[dbName]; 54 | resolve(); 55 | } 56 | }; 57 | 58 | // Close all the connections. 
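// An osm-p2p database is backed by three stores -- the main document db, the
// append-only log, and the kdb spatial index -- and each closes
// asynchronously. The `pending` countdown resolves the promise only after
// all three close callbacks have fired.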
59 | db.db.close(done); 60 | db.log.db.close(done); 61 | db.kdb.kdb.store.close(done); 62 | }); 63 | } 64 | 65 | export function cloneDatabase (srcProjId, srcScId, destProjId, destScId) { 66 | return new Promise((resolve, reject) => { 67 | let baseDir = getDatabaseBaseDir(); 68 | let srcDbName = getDatabaseName(srcProjId, srcScId); 69 | let destDbName = getDatabaseName(destProjId, destScId); 70 | 71 | fs.copy(`${baseDir}/${srcDbName}`, `${baseDir}/${destDbName}`, {overwrite: false, errorOnExist: true}, err => { 72 | if (err) return reject(err); 73 | return resolve(); 74 | }); 75 | }); 76 | } 77 | 78 | export function removeDatabase (projId, scId) { 79 | return new Promise((resolve, reject) => { 80 | let baseDir = getDatabaseBaseDir(); 81 | let dbName = getDatabaseName(projId, scId); 82 | 83 | fs.remove(`${baseDir}/${dbName}`, err => { 84 | if (err) return reject(err); 85 | 86 | delete dbConnections[dbName]; 87 | return resolve(); 88 | }); 89 | }); 90 | } 91 | 92 | export async function importRoadNetwork (projId, scId, op, roadNetwork, logger) { 93 | const importPromise = Promise.promisify(importer); 94 | const basePath = path.resolve(os.tmpdir(), `road-networkP${projId}S${scId}`); 95 | const osmDb = getDatabase(projId, scId); 96 | 97 | await op.log('process:road-network', {message: 'Road network processing started'}); 98 | try { 99 | await convertToOSMXml(roadNetwork, 'osm', basePath, logger); 100 | } catch (error) { 101 | if (error.message.match(/'list' object has no attribute 'addparent'/)) { 102 | throw new Error('Road network format is not valid.'); 103 | } 104 | throw error; 105 | } 106 | 107 | logger && logger.log('Importing changeset into osm-p2p...'); 108 | 109 | const xml = fs.createReadStream(`${basePath}.osm`); 110 | await importPromise(osmDb, xml); 111 | 112 | // Note: There's no need to close the osm-p2p-db because when the process 113 | // terminates the connection is automatically closed. 114 | return op.log('process:road-network', {message: 'Road network processing finished'}); 115 | } 116 | 117 | export async function importPOI (projId, scId, op, poiFc, logger) { 118 | const importPromise = Promise.promisify(importer); 119 | const basePath = path.resolve(os.tmpdir(), `poiP${projId}S${scId}`); 120 | const osmDb = getDatabase(projId, scId); 121 | 122 | await op.log('process:poi', {message: 'Poi processing started'}); 123 | try { 124 | await convertToOSMXml(JSON.stringify(poiFc), 'geojson', basePath, logger); 125 | } catch (error) { 126 | console.log('value', JSON.stringify(poiFc)); 127 | if (error.message.match(/'list' object has no attribute 'addparent'/)) { 128 | throw new Error('Poi files data is not ram compliant.'); 129 | } 130 | throw error; 131 | } 132 | 133 | logger && logger.log('Importing changeset into osm-p2p...'); 134 | 135 | const xml = fs.createReadStream(`${basePath}.osm`); 136 | await importPromise(osmDb, xml); 137 | 138 | // Note: There's no need to close the osm-p2p-db because when the process 139 | // terminates the connection is automatically closed. 140 | return op.log('process:poi', {message: 'Poi processing finished'}); 141 | } 142 | 143 | function convertToOSMXml (data, dataType, basePath, logger) { 144 | // Create an OSM Change file and store it in system /tmp folder. 145 | return new Promise(async (resolve, reject) => { 146 | logger && logger.log('Creating changeset file...'); 147 | // OGR reads from a file 148 | await fs.writeFile(`${basePath}.${dataType}`, data); 149 | 150 | // Use ogr2osm with: 151 | // -t - a custom translation file. 
Default only removes empty values 152 | // -o - to specify output file 153 | // -f - to force overwrite 154 | const cmd = path.resolve(__dirname, '../lib/ogr2osm/ogr2osm.py'); 155 | const args = [ 156 | cmd, 157 | `${basePath}.${dataType}`, 158 | '-t', './app/lib/ogr2osm/default_translation.py', 159 | '-o', `${basePath}.osm`, 160 | '-f' 161 | ]; 162 | 163 | const conversionProcess = cp.spawn('python', args); 164 | let processError = ''; 165 | conversionProcess.stderr.on('data', err => { 166 | processError += err.toString(); 167 | }); 168 | conversionProcess.on('close', code => { 169 | if (code !== 0) { 170 | const err = processError || `Unknown error. Code ${code}`; 171 | return reject(new Error(err)); 172 | } 173 | logger && logger.log('Creating changeset file... done'); 174 | return resolve(); 175 | }); 176 | }); 177 | } 178 | -------------------------------------------------------------------------------- /app/services/scenario-create/index.js: -------------------------------------------------------------------------------- 1 | // only ES5 is allowed in this file 2 | require('babel-register')({ 3 | presets: [ 'es2015' ] 4 | }); 5 | 6 | // load the server 7 | require('./scenario-create'); 8 | 9 | // 10 | // The scenario create script is setup so that it run on a different node process 11 | // using fork. This allows us to offload the main server not causing blocking 12 | // operations. 13 | // 14 | -------------------------------------------------------------------------------- /app/services/server.js: -------------------------------------------------------------------------------- 1 | import Hapi from 'hapi'; 2 | import config from '../config'; 3 | 4 | module.exports = function (options, callback) { 5 | var server = {}; 6 | 7 | server.options = options; 8 | 9 | server.hapi = new Hapi.Server({ 10 | connections: { 11 | routes: { 12 | cors: true 13 | } 14 | }, 15 | debug: config.debug && process.env.DS_ENV !== 'test' ? { 16 | log: [ 'error' ], 17 | request: [ 'error' ] 18 | } : false 19 | }); 20 | 21 | server.hapi.connection(server.options.connection); 22 | 23 | // Bootstrap Hapi Server Plugins, passes the server object to the plugins. 24 | require('./plugins')(server.hapi, function (err) { 25 | if (err) callback(err); 26 | 27 | server.start = function (cb) { 28 | server.hapi.log(['info'], 'Database connected'); 29 | server.hapi.start(function () { 30 | server.hapi.log(['info'], 'Server running at:' + server.hapi.info.uri); 31 | if (cb && typeof cb === 'function') { 32 | cb(null); 33 | } 34 | }); 35 | }; 36 | 37 | callback(null, server); 38 | }); 39 | }; 40 | -------------------------------------------------------------------------------- /app/utils/app-logger.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import fs from 'fs'; 3 | 4 | export default function AppLogger (options) { 5 | options = Object.assign({}, { 6 | output: false 7 | }, options); 8 | 9 | let chrono = []; 10 | let history = { 11 | main: [] 12 | }; 13 | 14 | let lastTime = null; 15 | 16 | const getTimeDiff = () => { 17 | let prev = lastTime; 18 | lastTime = Date.now(); 19 | if (!prev) { 20 | return '--'; 21 | } else { 22 | let diff = (lastTime - prev) / 1000; 23 | return `+${diff}`; 24 | } 25 | }; 26 | 27 | const getLogTime = () => { 28 | let d = new Date(); 29 | let h = d.getHours(); 30 | h = h < 10 ? `0${h}` : h; 31 | let m = d.getMinutes(); 32 | m = m < 10 ? `0${m}` : m; 33 | let s = d.getSeconds(); 34 | s = s < 10 ? 
`0${s}` : s;
35 | let ml = d.getMilliseconds();
36 | ml = ml < 10 ? `00${ml}` : ml < 100 ? `0${ml}` : ml;
37 | return `${h}:${m}:${s}.${ml}`;
38 | };
39 | 
40 | const _log = (group, ...args) => {
41 | if (!history[group]) history[group] = [];
42 | let t = getLogTime();
43 | let d = getTimeDiff();
44 | history[group].push([`[${t} ${d}]`, ...args]);
45 | chrono.push([`[${t} ${d}]`, `[${group}]`, ...args]);
46 | options.output && console.log(`[${t} ${d}]`, `[${group}]`, ...args);
47 | };
48 | 
49 | const _dump = (group) => {
50 | options.output && console.log('--- --- ---');
51 | options.output && console.log(`[${group}]`);
52 | options.output && history[group].forEach(o => console.log(...o));
53 | options.output && console.log('--- --- ---');
54 | };
55 | 
56 | return {
57 | getLogTime,
58 | group: (name) => ({
59 | getLogTime,
60 | log: (...args) => _log(name, ...args),
61 | dump: () => _dump(name)
62 | }),
63 | log: (...args) => _log('main', ...args),
64 | dump: () => {
65 | options.output && chrono.forEach(o => console.log(...o));
66 | },
67 | dumpGroups: () => {
68 | Object.keys(history).forEach(g => _dump(g));
69 | },
70 | toFile: (path) => {
71 | let data = chrono.map(o => o.join(' ')).join('\n');
72 | fs.writeFileSync(path, data);
73 | }
74 | };
75 | }
76 | 
-------------------------------------------------------------------------------- /app/utils/aws.js: --------------------------------------------------------------------------------
1 | 'use strict';
2 | import fetch from 'node-fetch';
3 | 
4 | /**
5 | * NOTE: This file is duplicated on some services. Be sure to update all of them
6 | * - ram-analysis
7 | * - ram-vt
8 | * - ram-backend
9 | */
10 | 
11 | /**
12 | * Cache for the credentials.
13 | */
14 | let AWSInstanceCredentialsCache = {
15 | accessKey: null,
16 | secretKey: null,
17 | sessionToken: null,
18 | expireTime: null
19 | };
20 | 
21 | /**
22 | * Fetches the instance credentials for a given role name.
23 | * The instance needs to belong to the given role.
24 | *
25 | * @param {string} roleName The role name to use when fetching the credentials
26 | *
27 | * @throws Error if any of the requests fail.
28 | */
29 | export async function fetchAWSInstanceCredentials (roleName) {
30 | // When inside a container in an ec2 instance (or when using fargate), the ecs
31 | // agent adds a variable with the credentials url. If it is available use that.
32 | // Docs at: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-iam-roles.html
33 | const relUrl = process.env.AWS_CONTAINER_CREDENTIALS_RELATIVE_URI;
34 | let accessCredUrl = '';
35 | if (relUrl) {
36 | accessCredUrl = `http://169.254.170.2${relUrl}`;
37 | } else {
38 | // If we're inside an ec2 machine just use the regular url and fetch the
39 | // role if it was not provided.
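// 169.254.169.254 is the EC2 instance metadata service. Listing the
// security-credentials path without a role name returns the name of the
// role attached to the instance, which is then appended to the url to
// fetch the actual keys.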
40 | const awsIAMUrl = 'http://169.254.169.254/latest/meta-data/iam/security-credentials/'; 41 | if (!roleName) { 42 | const roleRes = await fetch(awsIAMUrl, { timeout: 2000 }); 43 | if (roleRes.status >= 400) throw new Error('Unable to fetch role name'); 44 | roleName = await roleRes.text(); 45 | } 46 | accessCredUrl = `${awsIAMUrl}${roleName}`; 47 | } 48 | 49 | const accessRes = await fetch(accessCredUrl, { timeout: 2000 }); 50 | if (accessRes.status >= 400) throw new Error('Unable to fetch access credentials'); 51 | const accessCredentials = await accessRes.json(); 52 | 53 | return { 54 | accessKey: accessCredentials.AccessKeyId, 55 | secretKey: accessCredentials.SecretAccessKey, 56 | sessionToken: accessCredentials.Token, 57 | // Set the expiration back 30min to give some margin. 58 | expireTime: (new Date(accessCredentials.Expiration)).getTime() - 1800 * 1000 59 | }; 60 | } 61 | 62 | /** 63 | * Gets the credentials from cache unless they are expired. 64 | * 65 | * @see fetchAWSInstanceCredentials() 66 | * 67 | * @param {string} roleName The role name to use when fetching the credentials. 68 | * @param {bool} force Force fetching new credentials. Defaults to false. 69 | */ 70 | export async function getAWSInstanceCredentials (roleName, force = false) { 71 | if (force) return fetchAWSInstanceCredentials(roleName); 72 | 73 | if (Date.now() >= AWSInstanceCredentialsCache.expireTime) { 74 | // Fetch new credentials. 75 | AWSInstanceCredentialsCache = await fetchAWSInstanceCredentials(roleName); 76 | } 77 | 78 | return AWSInstanceCredentialsCache; 79 | } 80 | -------------------------------------------------------------------------------- /app/utils/errors.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import ExtendableError from 'es6-error'; 3 | import Boom from 'boom'; 4 | 5 | export class ProjectNotFoundError extends ExtendableError { 6 | constructor (message = 'Project not found', extra) { 7 | super(message); 8 | this.extra = extra; 9 | } 10 | } 11 | 12 | export class ScenarioNotFoundError extends ExtendableError { 13 | constructor (message = 'Scenario not found', extra) { 14 | super(message); 15 | this.extra = extra; 16 | } 17 | } 18 | 19 | export class FileNotFoundError extends ExtendableError { 20 | constructor (message = 'File not found', extra) { 21 | super(message); 22 | this.extra = extra; 23 | } 24 | } 25 | 26 | export class FileExistsError extends ExtendableError { 27 | constructor (message = 'File already exists', extra) { 28 | super(message); 29 | this.extra = extra; 30 | } 31 | } 32 | 33 | export class ProjectStatusError extends ExtendableError { 34 | constructor (message, extra) { 35 | super(message); 36 | this.extra = extra; 37 | } 38 | } 39 | 40 | export class DataConflictError extends ExtendableError { 41 | constructor (message, extra) { 42 | super(message); 43 | this.extra = extra; 44 | } 45 | } 46 | 47 | export class MasterScenarioError extends ExtendableError { 48 | constructor (message, extra) { 49 | super(message); 50 | this.extra = extra; 51 | } 52 | } 53 | 54 | export class DataValidationError extends ExtendableError { 55 | constructor (message, extra) { 56 | super(message); 57 | this.extra = extra; 58 | } 59 | } 60 | 61 | export class DisabledServiceError extends ExtendableError { 62 | constructor (message, extra) { 63 | super(message); 64 | this.extra = extra; 65 | } 66 | } 67 | 68 | /** 69 | * Gets the appropriate Boom response for the given error. Can be passed 70 | * directly to the reply interface. 
71 | * This function is especially useful when working with try/catch blocks that can
72 | * throw multiple errors.
73 | *
74 | * @param {Error} error Error object
75 | *
76 | * @returns Boom response
77 | */
78 | export function getBoomResponseForError (error) {
79 | // Check for known error types.
80 | if (error instanceof FileNotFoundError) return Boom.notFound(error.message);
81 | if (error instanceof FileExistsError) return Boom.conflict(error.message);
82 | if (error instanceof ProjectNotFoundError) return Boom.notFound(error.message);
83 | if (error instanceof ScenarioNotFoundError) return Boom.notFound(error.message);
84 | if (error instanceof MasterScenarioError) return Boom.conflict(error.message);
85 | if (error instanceof ProjectStatusError) return Boom.badRequest(error.message);
86 | if (error instanceof DataConflictError) return Boom.conflict(error.message);
87 | if (error instanceof DataValidationError) return Boom.badRequest(error.message);
88 | 
89 | // Check for known error codes.
90 | if (error.code === 'NoSuchKey') return Boom.notFound('File not found in storage bucket');
91 | 
92 | // Default handling.
93 | console.log('error', error);
94 | return Boom.badImplementation(error);
95 | }
96 | 
-------------------------------------------------------------------------------- /app/utils/osrm-profile.js: --------------------------------------------------------------------------------
1 | 'use strict';
2 | import renderProfile from './default.profile.template';
3 | import * as luafmt from 'lua-fmt';
4 | 
5 | export function toLua (element) {
6 | let properties = [];
7 | 
8 | // Array
9 | if (Array.isArray(element)) {
10 | properties = element.map(value => `'${value}'`);
11 | return `{\n${properties.join(',\n')}\n}`;
12 | 
13 | // Object
14 | } else if (typeof element === 'object') {
15 | Object.keys(element).forEach(key => {
16 | // Ensure correct indentation.
17 | const lua = toLua(element[key]).toString().replace(/\n/gm, '\n ');
18 | properties.push(` ["${key}"] = ${lua}`);
19 | });
20 | return `{\n${properties.join(',\n')}\n}`;
21 | 
22 | // String
23 | } else if (typeof element === 'string') {
24 | return `"${element}"`;
25 | 
26 | // Other
27 | } else {
28 | return element;
29 | }
30 | }
31 | 
32 | // How the profile edit works:
33 | // Sections are groups of speeds that can be edited (ex: "Surface Speeds",
34 | // "Tracktype Speeds", "Smoothness Speeds", etc). These are required by the api
35 | // and defined on the server alongside the labels.
36 | // A section can have the `multi` flag set, which means that two levels are
37 | // allowed.
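// For reference, an illustrative example of what `toLua` (above) produces
// for a flat settings object:
//   toLua({ primary: 65, secondary: 55 })
//   // => '{\n ["primary"] = 65,\n ["secondary"] = 55\n}'
// Arrays become lists of quoted values, nested objects are rendered
// recursively with extra indentation, and numbers pass through unchanged.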
38 | 39 | export function getOSRMProfileDefaultSpeedMeta () { 40 | return [ 41 | { key: 'speed_profile', label: 'Speeds' }, 42 | { key: 'surface_speeds', label: 'Surface Speeds' }, 43 | { key: 'tracktype_speeds', label: 'Tracktype Speeds' }, 44 | { key: 'smoothness_speeds', label: 'Smoothness Speeds' }, 45 | { key: 'maxspeed_table_default', label: 'Maxspeed Default' }, 46 | { key: 'maxspeed_table', label: 'Maxspeed' } 47 | ]; 48 | } 49 | 50 | export function getOSRMProfileDefaultSpeedSettings () { 51 | return { 52 | speed_profile: { 53 | Expressway: 120, 54 | National: 80, 55 | Provincial: 60, 56 | Township: 20, 57 | County: 20, 58 | Rural: 20, 59 | motorway: 90, 60 | motorway_link: 45, 61 | trunk: 85, 62 | trunk_link: 40, 63 | primary: 65, 64 | primary_link: 30, 65 | secondary: 55, 66 | secondary_link: 25, 67 | tertiary: 40, 68 | tertiary_link: 20, 69 | unclassified: 25, 70 | residential: 25, 71 | living_street: 10, 72 | service: 15, 73 | ferry: 5, 74 | movable: 5, 75 | shuttle_train: 10, 76 | default: 10 77 | }, 78 | surface_speeds: { 79 | cement: 80, 80 | compacted: 80, 81 | fine_gravel: 80, 82 | paving_stones: 60, 83 | metal: 60, 84 | bricks: 60, 85 | grass: 40, 86 | wood: 40, 87 | sett: 40, 88 | grass_paver: 40, 89 | gravel: 40, 90 | unpaved: 40, 91 | ground: 40, 92 | dirt: 40, 93 | pebblestone: 40, 94 | tartan: 40, 95 | cobblestone: 30, 96 | clay: 30, 97 | earth: 20, 98 | stone: 20, 99 | rocky: 20, 100 | sand: 20, 101 | mud: 10 102 | }, 103 | tracktype_speeds: { 104 | grade1: 60, 105 | grade2: 40, 106 | grade3: 30, 107 | grade4: 25, 108 | grade5: 20 109 | }, 110 | smoothness_speeds: { 111 | intermediate: 80, 112 | bad: 40, 113 | very_bad: 20, 114 | horrible: 10, 115 | very_horrible: 5, 116 | impassable: 0 117 | }, 118 | maxspeed_table_default: { 119 | urban: 50, 120 | rural: 90, 121 | trunk: 110, 122 | motorway: 130 123 | }, 124 | maxspeed_table: { 125 | 'at:rural': 100, 126 | 'at:trunk': 100, 127 | 'be:motorway': 120, 128 | 'by:urban': 60, 129 | 'by:motorway': 110, 130 | 'ch:rural': 80, 131 | 'ch:trunk': 100, 132 | 'ch:motorway': 120, 133 | 'cz:trunk': 0, 134 | 'cz:motorway': 0, 135 | 'de:living_street': 7, 136 | 'de:rural': 100, 137 | 'de:motorway': 0, 138 | 'dk:rural': 80, 139 | 'fr:rural': 80, 140 | 'gb:nsl_single': 96.54, 141 | 'gb:nsl_dual': 112.63, 142 | 'gb:motorway': 112.63, 143 | 'nl:rural': 80, 144 | 'nl:trunk': 100, 145 | 'no:rural': 80, 146 | 'no:motorway': 110, 147 | 'pl:rural': 100, 148 | 'pl:trunk': 120, 149 | 'pl:motorway': 140, 150 | 'ro:trunk': 100, 151 | 'ru:living_street': 20, 152 | 'ru:urban': 60, 153 | 'ru:motorway': 110, 154 | 'uk:nsl_single': 96.54, 155 | 'uk:nsl_dual': 112.63, 156 | 'uk:motorway': 112.63, 157 | 'za:urban': 60, 158 | 'za:rural': 100, 159 | 'none': 140 160 | } 161 | }; 162 | } 163 | 164 | export function renderProfileFile (settings) { 165 | const data = {}; 166 | for (const key in settings) { 167 | const element = settings[key]; 168 | data[key] = toLua(element); 169 | } 170 | 171 | return luafmt.formatText(renderProfile(data)); 172 | } 173 | -------------------------------------------------------------------------------- /app/utils/overpass.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import rp from 'request-promise'; 3 | import promiseRetry from 'promise-retry'; 4 | import bbox from '@turf/bbox'; 5 | import osmtogeojson from 'osmtogeojson'; 6 | 7 | /** 8 | * Queries Overpass and returns the data as a string. 
9 | * @param {string} format The response format (xml or json)
10 | * @param {string} query The Overpass QL query
11 | */
12 | export function query (format, query) {
13 | return promiseRetry((retry, number) => {
14 | console.log('Fetching data from Overpass... Attempt number:', number);
15 | return rp(`https://overpass.kumi.systems/api/interpreter?data=[out:${format}][timeout:900][maxsize:2000000000];${query}`)
16 | .catch(err => {
17 | // API calls to Overpass are rate limited. Retry if statusCode is 429
18 | if (err.statusCode === 429) {
19 | retry(err);
20 | } else if (err.statusCode === 504) {
21 | throw new Error('Overpass server is busy. Try again later.');
22 | }
23 | throw err;
24 | });
25 | });
26 | }
27 | 
28 | /**
29 | * Accepts an array with a bbox in [minX, minY, maxX, maxY] and
30 | * returns an Overpass bbox.
31 | *
32 | * @param {Array} An array with the bounding box [minX, minY, maxX, maxY]
33 | *
34 | * @return {String} A string with the bbox (S,W,N,E)
35 | */
36 | export function convertBbox (bbox) {
37 | return `${bbox[1]},${bbox[0]},${bbox[3]},${bbox[2]}`;
38 | }
39 | 
40 | /**
41 | * Accepts a feature collection and computes the Overpass bbox.
42 | *
43 | * @param {Object} Feature Collection
44 | *
45 | * @return {String} A string with the bbox (S,W,N,E)
46 | */
47 | export function fcBbox (fc) {
48 | return convertBbox(bbox(fc));
49 | }
50 | 
51 | function handleOverpassSilentError (osmData) {
52 | let remark = null;
53 | 
54 | // Handle response in xml format.
55 | if (typeof osmData === 'string') {
56 | let remarkTest = osmData.match('<remark>(.*)</remark>');
57 | 
58 | if (remarkTest) {
59 | remark = remarkTest[1];
60 | }
61 | 
62 | // Handle response in json format.
63 | } else {
64 | if (osmData.remark) {
65 | remark = osmData.remark;
66 | }
67 | }
68 | 
69 | if (remark) {
70 | if (remark.match('Query run out of memory') || remark.match('Query timed out in')) {
71 | throw new Error('Area is too complex to import from OSM');
72 | }
73 | 
74 | throw new Error(remark);
75 | }
76 | 
77 | return osmData;
78 | }
79 | 
80 | export function importRoadNetwork (bbox) {
81 | let ql = `(
82 | way["highway"]["highway"!~"^footway$|^path$|^bridleway$|^steps$|^pedestrian$"](${bbox});
83 | >;
84 | ); out body;`;
85 | 
86 | return query('xml', ql)
87 | .then(handleOverpassSilentError);
88 | }
89 | 
90 | export function importPOI (bbox, poiTypes) {
91 | // Pois selected
92 | let poiGroupsSelected = osmPOIGroups.filter(o => poiTypes.indexOf(o.key) !== -1);
93 | 
94 | // Flatten the queries.
95 | let queries = poiGroupsSelected.reduce((acc, val) => acc.concat(val.queries), []);
96 | 
97 | // Compute the queries. (Transform from object to string)
98 | queries = queries.map(q => {
99 | let val = q.values.map(v => `^${v}$`).join('|');
100 | return `"${q.key}"~"${val}"`;
101 | });
102 | 
103 | let ql = `(
104 | ${queries.map(q => (`
105 | node[${q}](${bbox});
106 | way[${q}](${bbox});
107 | `)).join('')}
108 | >;
109 | ); out body;`;
110 | 
111 | // Query will look something like:
112 | // (
113 | // node["amenity"~"^clinic$|^doctors$|^hospital$"](-11.89,-38.313,-10.5333431,-37.1525399);
114 | // way["amenity"~"^clinic$|^doctors$|^hospital$"](-11.89,-38.313,-10.5333431,-37.1525399);
115 | // >;
116 | // ); out body;
117 | 
118 | return query('json', ql)
119 | .then(osmData => JSON.parse(osmData))
120 | .then(handleOverpassSilentError)
121 | .then(osmJSON => osmtogeojson(osmJSON, { flatProperties: true }))
122 | .then(osmGeo => {
123 | // Prepare the response object with a feature collection per POI type.
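// One (possibly empty) feature collection is created per requested POI type;
// each feature is then pushed into every group whose query matches one of
// its properties (see isFeatureInGroup below), so a feature can appear in
// more than one collection.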
124 | let poiFCs = {}; 125 | poiGroupsSelected.forEach(group => { 126 | poiFCs[group.key] = { 127 | type: 'FeatureCollection', 128 | features: [] 129 | }; 130 | }); 131 | 132 | // Group the feature by poi key 133 | osmGeo.features.forEach(feat => { 134 | poiGroupsSelected.forEach(group => { 135 | if (isFeatureInGroup(feat, group)) { 136 | poiFCs[group.key].features.push(feat); 137 | } 138 | }); 139 | }); 140 | 141 | return poiFCs; 142 | }); 143 | } 144 | 145 | function isFeatureInGroup (feat, group) { 146 | // If the feature has any of the properties used to query it then it belongs 147 | // to the group. 148 | return group.queries.some(query => { 149 | let prop = feat.properties[query.key]; 150 | return prop && query.values.indexOf(prop) !== -1; 151 | }); 152 | } 153 | 154 | export const osmPOIGroups = [ 155 | { 156 | key: 'health', 157 | queries: [ 158 | // Will be converted into: 159 | // '"amenity"~"^clinic$|^doctors$|^hospital$"' 160 | { 161 | key: 'amenity', 162 | values: ['clinic', 'doctors', 'hospital'] 163 | } 164 | ] 165 | }, 166 | { 167 | key: 'education', 168 | queries: [ 169 | { 170 | key: 'amenity', 171 | values: ['college', 'kindergarten', 'school', 'university'] 172 | } 173 | ] 174 | }, 175 | { 176 | key: 'financial', 177 | queries: [ 178 | { 179 | key: 'amenity', 180 | values: ['atm', 'bank', 'bureau_de_change'] 181 | } 182 | ] 183 | } 184 | ]; 185 | -------------------------------------------------------------------------------- /app/utils/service-runner.js: -------------------------------------------------------------------------------- 1 | import EventEmitter from 'events'; 2 | import { fork } from 'child_process'; 3 | import path from 'path'; 4 | import config from '../config'; 5 | const DEBUG = config.debug; 6 | 7 | export default class ServiceRunner extends EventEmitter { 8 | constructor (name, data) { 9 | super(); 10 | this.name = name; 11 | this.data = data || {}; 12 | this.running = false; 13 | this.killed = false; 14 | this.theProcess = null; 15 | } 16 | 17 | start () { 18 | // Set an unused port number. 19 | // process.execArgv.push('--debug=' + (12345)); 20 | // process.execArgv.push('--inspect'); 21 | // Ensure the process can allocate the needed ram. 22 | process.execArgv.push('--max_old_space_size=4096'); 23 | let servicePath = path.resolve(__dirname, `../services/${this.name}/index.js`); 24 | this.theProcess = fork(servicePath); 25 | let processError = null; 26 | 27 | this.theProcess.on('message', function (msg) { 28 | DEBUG && console.log(`ServiceRunner [${this.name}] [message]`, 'msg', msg); 29 | switch (msg.type) { 30 | case 'error': 31 | processError = msg; 32 | break; 33 | } 34 | this.emit('message', msg); 35 | }); 36 | 37 | this.theProcess.on('close', (code, signal) => { 38 | DEBUG && console.log(`ServiceRunner [${this.name}] [close]`, 'code, signal', code, signal); 39 | }); 40 | 41 | this.theProcess.on('error', (err) => { 42 | DEBUG && console.log(`ServiceRunner [${this.name}] [error]`, 'err', err); 43 | }); 44 | 45 | this.theProcess.on('exit', (code, signal) => { 46 | DEBUG && console.log(`ServiceRunner [${this.name}] [exit]`, 'code, signal', code, signal); 47 | this.running = false; 48 | if (code !== 0) { 49 | processError = processError || `Unknown error. Code ${code}`; 50 | if (this.killed) { 51 | processError = 'Process manually terminated'; 52 | } else if (code === null) { 53 | // When code is null the process was terminated. It didn't exit on 54 | // its own. 55 | // Very likely to be out of memory error. 
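// (In Node, `code` is null when the child was killed by a signal -- e.g.
// SIGKILL from the kernel's OOM killer -- in which case `signal` is set
// instead of `code`.)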
56 | processError = 'Process terminated by system';
57 | }
58 | this.emit('complete', new Error(processError));
59 | } else {
60 | this.emit('complete');
61 | }
62 | });
63 | 
64 | this.theProcess.send(this.data);
65 | this.running = true;
66 | }
67 | 
68 | kill () {
69 | if (this.running && this.theProcess) {
70 | this.running = false;
71 | this.killed = true;
72 | this.theProcess.kill();
73 | }
74 | }
75 | }
76 | 
-------------------------------------------------------------------------------- /app/utils/utils.js: --------------------------------------------------------------------------------
1 | import multiparty from 'multiparty';
2 | import Promise from 'bluebird';
3 | 
4 | export function parseFormData (req) {
5 | var form = new multiparty.Form();
6 | return new Promise((resolve, reject) => {
7 | form.parse(req, (err, fields, files) => {
8 | if (err) {
9 | return reject(err);
10 | }
11 | return resolve({ fields, files });
12 | });
13 | });
14 | }
15 | 
16 | // Same as an array map, but nulls and undefined are filtered out.
17 | export function mapValid (arr, iterator) {
18 | let holder = [];
19 | arr.forEach((o, i) => {
20 | let r = iterator(o, i, arr);
21 | if (r !== null && typeof r !== 'undefined') {
22 | holder.push(r);
23 | }
24 | });
25 | 
26 | return holder;
27 | }
28 | 
29 | export function getSourceData (db, contentType, id) {
30 | let sourceDataQ;
31 | let filesQ;
32 | let structure;
33 | 
34 | switch (contentType) {
35 | case 'project':
36 | sourceDataQ = db('projects_source_data')
37 | .select('*')
38 | .where('project_id', id);
39 | 
40 | filesQ = db('projects_files')
41 | .select('id', 'name', 'type', 'data', 'path', 'created_at')
42 | .where('project_id', id);
43 | 
44 | structure = {
45 | profile: {
46 | type: null,
47 | files: [],
48 | wbCatalogOptions: {}
49 | },
50 | 'admin-bounds': {
51 | type: null,
52 | files: [],
53 | wbCatalogOptions: {}
54 | },
55 | origins: {
56 | type: null,
57 | files: [],
58 | wbCatalogOptions: {}
59 | }
60 | };
61 | break;
62 | case 'scenario':
63 | sourceDataQ = db('scenarios_source_data')
64 | .select('*')
65 | .where('scenario_id', id);
66 | 
67 | filesQ = db('scenarios_files')
68 | .select('id', 'name', 'type', 'subtype', 'path', 'created_at')
69 | .where('scenario_id', id);
70 | 
71 | structure = {
72 | 'road-network': {
73 | type: null,
74 | files: [],
75 | osmOptions: {},
76 | wbCatalogOptions: {}
77 | },
78 | poi: {
79 | type: null,
80 | files: [],
81 | osmOptions: {},
82 | wbCatalogOptions: {}
83 | }
84 | };
85 | break;
86 | default:
87 | throw new Error('Unknown content type: ' + contentType);
88 | }
89 | 
90 | return sourceDataQ
91 | .then(sources => {
92 | let filesFetchTypes = [];
93 | 
94 | sources.forEach(s => {
95 | structure[s.name].type = s.type;
96 | if (s.type === 'osm') {
97 | // Never going to happen for projects, just scenarios.
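// 'osm' sources only exist for scenario data (road-network and poi); project
// sources are limited to the file, default, and wbcatalog types.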
98 | structure[s.name].osmOptions = s.data; 99 | } else if (s.type === 'file' || s.type === 'default') { 100 | filesFetchTypes.push(s.name); 101 | } else if (s.type === 'wbcatalog') { 102 | filesFetchTypes.push(s.name); 103 | structure[s.name].wbCatalogOptions = s.data; 104 | } else if (s.type !== 'default') { 105 | throw new Error('Unknown source type: ' + s.type); 106 | } 107 | }); 108 | 109 | if (!filesFetchTypes.length) { 110 | return structure; 111 | } 112 | 113 | return filesQ 114 | .whereIn('type', filesFetchTypes) 115 | .then(files => { 116 | files.forEach(f => { structure[f.type].files.push(f); }); 117 | return structure; 118 | }); 119 | }); 120 | } 121 | 122 | export function getOperationData (db, opName, id) { 123 | return db.select('*') 124 | .from('operations') 125 | .where('operations.scenario_id', id) 126 | .where('operations.name', opName) 127 | .orderBy('created_at', 'desc') 128 | .first() 129 | .then(op => { 130 | if (!op) { 131 | return null; 132 | } 133 | 134 | return db.select('*') 135 | .from('operations_logs') 136 | .where('operation_id', op.id) 137 | .orderBy('created_at') 138 | .then(logs => { 139 | let errored = false; 140 | if (logs.length) { 141 | errored = logs[logs.length - 1].code === 'error'; 142 | } 143 | return { 144 | id: op.id, 145 | status: op.status, 146 | created_at: op.created_at, 147 | updated_at: op.updated_at, 148 | errored, 149 | logs: logs.map(l => ({ 150 | id: l.id, 151 | code: l.code, 152 | data: l.data, 153 | created_at: l.created_at 154 | })) 155 | }; 156 | }); 157 | }); 158 | } 159 | 160 | export function setScenarioSetting (db, scId, key, value) { 161 | // Check if setting exists. 162 | return db('scenarios_settings') 163 | .select('key') 164 | .where('scenario_id', scId) 165 | .where('key', key) 166 | .first() 167 | .then(setting => { 168 | // Update. 169 | if (setting) { 170 | return db('scenarios_settings') 171 | .update({ 172 | value, 173 | updated_at: (new Date()) 174 | }) 175 | .where('scenario_id', scId) 176 | .where('key', key); 177 | 178 | // Insert new. 179 | } else { 180 | return db('scenarios_settings') 181 | .insert({ 182 | scenario_id: scId, 183 | key, 184 | value, 185 | created_at: (new Date()), 186 | updated_at: (new Date()) 187 | }); 188 | } 189 | }); 190 | } 191 | 192 | export function getScenarioSetting (db, scId, key) { 193 | // Check if setting exists. 194 | return db('scenarios_settings') 195 | .select('value') 196 | .where('scenario_id', scId) 197 | .where('key', key) 198 | .first() 199 | .then(setting => { 200 | if (setting) { 201 | try { 202 | // Convert objects, booleans, and integers. 203 | return JSON.parse(setting.value); 204 | } catch (e) { 205 | // Fallback to strings 206 | return setting.value; 207 | } 208 | } else { 209 | return null; 210 | } 211 | }); 212 | } 213 | 214 | export function getPropInsensitive (object, prop) { 215 | // prop can be written in caps or any variant. 216 | // prop, PROP, Prop, PrOp 217 | // Search for the first match an return it. 218 | // If not found return prop. 
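// Illustrative examples (note that `prop` is expected in lowercase):
//   getPropInsensitive({ Name: 'Lagos' }, 'name') // => 'Name'
//   getPropInsensitive({}, 'name')                // => 'name'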
219 | return Object.keys(object).find(k => k.toLowerCase() === prop) || prop;
220 | }
221 | 
-------------------------------------------------------------------------------- /app/utils/wbcatalog.js: --------------------------------------------------------------------------------
1 | 'use strict';
2 | import path from 'path';
3 | import fs from 'fs-extra';
4 | import _ from 'lodash';
5 | import Promise from 'bluebird';
6 | import https from 'https';
7 | import os from 'os';
8 | import fetch from 'node-fetch';
9 | 
10 | import db from '../db/';
11 | import {
12 | putFile as putFileToS3,
13 | getLocalJSONFileContents
14 | } from '../s3/utils';
15 | 
16 | // Allow unauthorized requests.
17 | // https://github.com/WorldBank-Transport/ram-backend/issues/223
18 | const httpsAgent = new https.Agent({
19 | rejectUnauthorized: false
20 | });
21 | 
22 | /**
23 | * Downloads a file from the url to the given destination.
24 | * Note:
25 | * This is used to download files from the WBCatalog and therefore uses a
26 | * special https agent that doesn't reject unauthorized certs. See above.
27 | *
28 | * @param {string} url Source url
29 | * @param {string} dest Destination path
30 | */
31 | function downloadFile (url, dest) {
32 | return new Promise((resolve, reject) => {
33 | fetch(url, {agent: httpsAgent})
34 | .then(res => {
35 | const stream = fs.createWriteStream(dest);
36 | res.body.pipe(stream);
37 | stream.on('finish', () => resolve(dest));
38 | stream.on('error', (e) => reject(e));
39 | })
40 | .catch(reject);
41 | });
42 | }
43 | 
44 | /**
45 | * Download a file from the WB Catalog and store it in the database.
46 | *
47 | * @param {number} projId Project id
48 | * @param {number} scId Scenario id
49 | * @param {object} source Source object
50 | * @param {object} logger Output logger
51 | */
52 | async function downloadWbCatalogFile (projId, scId, source, logger) {
53 | // Figure out what we're dealing with from the source name:
54 | const what = {
55 | 'poi': 'poi', // Special case because of multiple files.
56 | 'road-network': 'scenarios',
57 | 'profile': 'projects',
58 | 'origins': 'projects',
59 | 'admin-bounds': 'projects'
60 | }[source.name];
61 | 
62 | // Clean the tables so any remnants of previous attempts are removed.
63 | // This avoids primary key collisions and duplication.
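// Project-level sources clear `projects_files`, while scenario-level sources
// (including poi) clear `scenarios_files` for this project/scenario pair.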
64 | switch (what) {
65 | case 'projects':
66 | await db('projects_files')
67 | .where('project_id', projId)
68 | .where('type', source.name)
69 | .del();
70 | break;
71 | case 'scenarios':
72 | case 'poi':
73 | await db('scenarios_files')
74 | .where('project_id', projId)
75 | .where('scenario_id', scId)
76 | .where('type', source.name)
77 | .del();
78 | break;
79 | }
80 | 
81 | return Promise.map(source.data.resources, async (wbCatRes, idx, len) => {
82 | logger && logger.log(`Download from wbcatalog - ${source.name} (${idx + 1} of ${len})...`);
83 | const {key, label} = wbCatRes;
84 | const wbCatalogRes = await db('wbcatalog_resources')
85 | .select('*')
86 | .where('resource_id', key)
87 | .first();
88 | 
89 | let tempPath;
90 | switch (what) {
91 | case 'projects':
92 | tempPath = path.resolve(os.tmpdir(), `p${projId}--${source.name}${path.extname(wbCatalogRes.resource_url)}`);
93 | break;
94 | case 'scenarios':
95 | tempPath = path.resolve(os.tmpdir(), `p${projId}-s${scId}--${source.name}${path.extname(wbCatalogRes.resource_url)}`);
96 | break;
97 | case 'poi':
98 | tempPath = path.resolve(os.tmpdir(), `p${projId}-s${scId}--${source.name}-${label}${path.extname(wbCatalogRes.resource_url)}`);
99 | break;
100 | }
101 | 
102 | await downloadFile(wbCatalogRes.resource_url, tempPath);
103 | logger && logger.log(`Download from wbcatalog - ${source.name} (${idx + 1} of ${len})... done`);
104 | 
105 | let fileName;
106 | let filePath;
107 | switch (what) {
108 | case 'projects':
109 | fileName = `${source.name}_${Date.now()}`;
110 | filePath = `project-${projId}/${fileName}`;
111 | break;
112 | case 'scenarios':
113 | fileName = `${source.name}_${Date.now()}`;
114 | filePath = `scenario-${scId}/${fileName}`;
115 | break;
116 | case 'poi':
117 | fileName = `${source.name}_${label}_${Date.now()}`;
118 | filePath = `scenario-${scId}/${fileName}`;
119 | break;
120 | }
121 | 
122 | logger && logger.log(`Upload wbcatalog file to storage - ${source.name} (${idx + 1} of ${len})...`);
123 | await putFileToS3(filePath, tempPath);
124 | 
125 | let data = {
126 | name: fileName,
127 | type: source.name,
128 | path: filePath,
129 | project_id: projId,
130 | created_at: (new Date()),
131 | updated_at: (new Date())
132 | };
133 | 
134 | // When using a wbcatalog file for the origins, figure out which of the
135 | // properties are numbers and use those as indicators.
136 | if (source.name === 'origins') {
137 | const originsFileData = await getLocalJSONFileContents(tempPath);
138 | const feat = originsFileData.features[0];
139 | const featPropKeys = Object.keys(feat.properties).filter(p => {
140 | const val = feat.properties[p];
141 | const type = typeof val;
142 | return ((type === 'number' || type === 'string') && val !== '') ? !isNaN(Number(val)) : false;
143 | });
144 | 
145 | if (!featPropKeys.length) {
146 | throw new Error('Unable to find valid population estimates on WB Catalog source for Population data');
147 | }
148 | 
149 | // Add the indicator information to the data to store.
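// `indicators` stores key/label pairs derived from the numeric properties,
// and `availableInd` keeps the raw list of those property keys.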
150 | data.data = { 151 | indicators: featPropKeys.map(p => ({key: p, label: p})), 152 | availableInd: featPropKeys 153 | }; 154 | } 155 | 156 | switch (what) { 157 | case 'projects': 158 | await db('projects_files').insert(data); 159 | break; 160 | case 'scenarios': 161 | data.scenario_id = scId; 162 | await db('scenarios_files').insert(data); 163 | break; 164 | case 'poi': 165 | data.scenario_id = scId; 166 | data.subtype = label; 167 | await db('scenarios_files').insert(data); 168 | break; 169 | } 170 | 171 | logger && logger.log(`Upload wbcatalog file to storage - ${source.name} (${idx + 1} of ${len})... done`); 172 | 173 | return data; 174 | }, {concurrency: 3}); 175 | } 176 | 177 | /** 178 | * Download a file from the WB Catalog for Project files 179 | * 180 | * @param {number} projId Project id 181 | * @param {object} source Source object 182 | * @param {object} logger Output logger 183 | * 184 | * @see downloadWbCatalogFile 185 | */ 186 | export async function downloadWbCatalogProjectFile (projId, source, logger) { 187 | source = _.cloneDeep(source); 188 | // Ensure that there is only one resource for these type of files. 189 | source.data.resources = [source.data.resources[0]]; 190 | const files = await downloadWbCatalogFile(projId, null, source, logger); 191 | return files[0]; 192 | } 193 | 194 | /** 195 | * Download a file from the WB Catalog for Scenario files 196 | * 197 | * @param {number} projId Project id 198 | * @param {number} scId Scenario id 199 | * @param {object} source Source object 200 | * @param {object} logger Output logger 201 | * 202 | * @see downloadWbCatalogFile 203 | */ 204 | export async function downloadWbCatalogScenarioFile (projId, scId, source, logger) { 205 | source = _.cloneDeep(source); 206 | // Ensure that there is only one resource for these type of files. 207 | source.data.resources = [source.data.resources[0]]; 208 | const files = await downloadWbCatalogFile(projId, scId, source, logger); 209 | return files[0]; 210 | } 211 | 212 | /** 213 | * Download a file from the WB Catalog for the POI source. 
214 | * 215 | * @param {number} projId Project id 216 | * @param {number} scId Scenario id 217 | * @param {object} source Source object 218 | * @param {object} logger Output logger 219 | * 220 | * @see downloadWbCatalogFile 221 | */ 222 | export async function downloadWbCatalogPoiFile (projId, scId, source, logger) { 223 | return downloadWbCatalogFile(projId, scId, source, logger); 224 | } 225 | -------------------------------------------------------------------------------- /docker-compose-dev.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | networks: 3 | ram: 4 | external: 5 | name: ram 6 | services: 7 | ram-postgis: 8 | image: mdillon/postgis:9.6 9 | ports: 10 | - "5432:5432" 11 | environment: 12 | POSTGRES_PASSWORD: ram 13 | POSTGRES_USER: ram 14 | POSTGRES_DB: ram 15 | volumes: 16 | - ./setup/setup-extensions.sh:/docker-entrypoint-initdb.d/setup-extensions.sh 17 | - ./setup/create-test-db.sh:/docker-entrypoint-initdb.d/create-test-db.sh 18 | networks: 19 | - ram 20 | ram-minio: 21 | image: "minio/minio" 22 | ports: 23 | - "9000:9000" 24 | environment: 25 | MINIO_ACCESS_KEY: minio 26 | MINIO_SECRET_KEY: miniostorageengine 27 | command: server /export 28 | networks: 29 | - ram -------------------------------------------------------------------------------- /docker-compose-test.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | services: 3 | ram-postgis: 4 | image: mdillon/postgis:9.6 5 | ports: 6 | - 5432 7 | environment: 8 | POSTGRES_PASSWORD: ramtest 9 | POSTGRES_USER: ramtest 10 | POSTGRES_DB: ramtest 11 | ram-minio: 12 | image: minio/minio 13 | ports: 14 | - 9000 15 | environment: 16 | MINIO_ACCESS_KEY: minio 17 | MINIO_SECRET_KEY: miniostorageengine 18 | # volumes: 19 | # - /guts/projects/ram/minio-stuff/export:/export 20 | # - /guts/projects/ram/minio-stuff/config:/root/.minio 21 | command: server /export 22 | ram-api: 23 | build: ./ 24 | ports: 25 | - 4000 26 | environment: 27 | INSTANCE_ID: ci-test 28 | DB_URI: postgresql://ramtest:ramtest@ram-postgis:5432/ramtest 29 | STORAGE_PORT: 9000 30 | STORAGE_ENGINE: minio 31 | STORAGE_ACCESS_KEY: minio 32 | STORAGE_SECRET_KEY: miniostorageengine 33 | STORAGE_BUCKET: ram-test 34 | STORAGE_REGION: us-east-1 35 | STORAGE_HOST: ram-minio 36 | OSM_P2P_DIR: ./osm-p2p-dbs-test 37 | RAH_GH_REPO: ram/ram 38 | RAH_GH_TOKEN: --redacted-- 39 | RAH_GH_PATH: data 40 | links: 41 | - ram-postgis 42 | - ram-minio -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | # docker network rm ram 2 | # docker network create --driver=bridge --subnet=172.99.99.0/24 --gateway=172.99.99.1 ram 3 | version: '2' 4 | 5 | networks: 6 | ram: 7 | external: 8 | name: ram 9 | 10 | services: 11 | ram-postgis: 12 | image: mdillon/postgis:9.6 13 | container_name: ram-postgis 14 | environment: 15 | POSTGRES_PASSWORD: ram 16 | POSTGRES_USER: ram 17 | POSTGRES_DB: ram 18 | volumes: 19 | - ./setup/setup-extensions.sh:/docker-entrypoint-initdb.d/setup-extensions.sh 20 | ports: 21 | - 5432:5432 22 | networks: 23 | ram: 24 | ipv4_address: 172.99.99.10 25 | 26 | ram-minio: 27 | image: minio/minio 28 | container_name: ram-minio 29 | ports: 30 | - 9000:9000 31 | networks: 32 | ram: 33 | ipv4_address: 172.99.99.15 34 | environment: 35 | MINIO_ACCESS_KEY: minio 36 | MINIO_SECRET_KEY: miniostorageengine 37 | command: server /export 38 | 39 | 
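  # ram-api mounts the host's Docker socket below, presumably so the API can
  # talk to the host daemon and run work in sibling containers rather than
  # nested ones.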
ram-api: 40 | build: ./ 41 | container_name: ram-api 42 | ports: 43 | - 4000:4000 44 | networks: 45 | - ram 46 | volumes: 47 | - /var/run/docker.sock:/var/run/docker.sock 48 | command: yarn start 49 | environment: 50 | DS_ENV: offline 51 | 52 | ram-frontend: 53 | image: wbtransport/ram-frontend:latest 54 | container_name: ram-frontend 55 | ports: 56 | - 8080:80 57 | networks: 58 | - ram 59 | 60 | ram-id: 61 | image: wbtransport/ram-id:latest 62 | container_name: ram-id 63 | ports: 64 | - 8000:80 65 | networks: 66 | - ram -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | // only ES5 is allowed in this file 2 | require('babel-register')(); 3 | 4 | // load the server 5 | require('./app'); 6 | -------------------------------------------------------------------------------- /install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | echo 'Installing the dependencies' 4 | apt-get -qq update 5 | apt-get -qq install -y \ 6 | curl \ 7 | build-essential \ 8 | apt-transport-https \ 9 | ca-certificates \ 10 | software-properties-common 11 | 12 | # install nodejs 13 | curl -sL https://deb.nodesource.com/setup_8.x | bash - 14 | apt-get -qq install -y nodejs 15 | 16 | # add Docker repository 17 | curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add - 18 | add-apt-repository \ 19 | "deb [arch=amd64] https://download.docker.com/linux/ubuntu \ 20 | $(lsb_release -cs) \ 21 | stable" 22 | 23 | # install yarn repository 24 | curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - 25 | echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list 26 | 27 | apt-get -qq update 28 | apt-get -qq install -y yarn 29 | apt-get -qq install -y docker-ce 30 | 31 | # allow Docker to be used without sudo 32 | groupadd docker 33 | usermod -aG docker $USER 34 | 35 | # install Hyper 36 | curl -sL https://hyper-install.s3.amazonaws.com/hyper-linux-x86_64.tar.gz | tar xzf - 37 | chmod +x hyper 38 | mv ./hyper /usr/local/bin 39 | 40 | # install dependencies for ogr2osm 41 | apt-get -qq install -y python-gdal python-lxml -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ram-backend", 3 | "version": "0.8.5", 4 | "description": "The backend for RAM, a World Bank project to assess rural road accessibility", 5 | "repository": { 6 | "type": "git", 7 | "url": "https://github.com/WorldBank-Transport/ram-backend" 8 | }, 9 | "author": { 10 | "name": "Development Seed", 11 | "url": "https://developmentseed.org" 12 | }, 13 | "license": "MIT", 14 | "bugs": { 15 | "url": "https://github.com/WorldBank-Transport/ram-backend/issues" 16 | }, 17 | "homepage": "https://github.com/WorldBank-Transport/ram-backend", 18 | "main": "index.js", 19 | "scripts": { 20 | "setup": "DEBUG=true node setup/", 21 | "lint": "eslint app/ test/ --ext .js", 22 | "test-no-env": "DS_ENV=test DEBUG=false mocha test/test-* --require babel-register", 23 | "test": ". 
test/test.env && yarn test-no-env", 24 | "start": "node index.js", 25 | "nodemon": "nodemon --watch app index.js", 26 | "postinstall": "[ -f app/config/local.js ] || echo 'module.exports = {};' > app/config/local.js" 27 | }, 28 | "engines": { 29 | "node": "8.x.x" 30 | }, 31 | "dependencies": { 32 | "@octokit/rest": "^15.9.2", 33 | "@turf/bbox": "^4.0.2", 34 | "@turf/center-of-mass": "^6.0.1", 35 | "aws-sdk": "^2.372.0", 36 | "babel-plugin-transform-object-rest-spread": "^6.26.0", 37 | "babel-preset-es2015": "^6.18.0", 38 | "babel-register": "^6.23.0", 39 | "bluebird": "^3.4.7", 40 | "boom": "^4.2.0", 41 | "dotenv": "^4.0.0", 42 | "es6-error": "^4.0.2", 43 | "fs-extra": "^2.1.2", 44 | "good": "^7.0.2", 45 | "good-console": "^6.1.3", 46 | "good-squeeze": "^5.0.0", 47 | "hapi": "^16.1.0", 48 | "hapi-auth-jwt2": "^7.3.0", 49 | "hapi-router": "^3.5.0", 50 | "joi": "^10.0.0", 51 | "js-yaml": "^3.12.0", 52 | "jwks-rsa": "^1.1.1", 53 | "knex": "0.12.7", 54 | "lodash": "^4.17.0", 55 | "lua-fmt": "^2.6.0", 56 | "minio": "^3.2.0", 57 | "multiparty": "^4.1.3", 58 | "node-fetch": "^2.1.2", 59 | "node-zip": "^1.1.1", 60 | "obj2osm": "^2.0.1", 61 | "osm-p2p": "^2.0.0", 62 | "osm-p2p-import": "^3.0.4", 63 | "osm-p2p-server": "^2.3.2", 64 | "osm2json": "^2.1.0", 65 | "osmtogeojson": "^3.0.0-beta.2", 66 | "pg": "^6.1.2", 67 | "promise-retry": "^1.1.1", 68 | "request": "^2.79.0", 69 | "request-promise": "^4.2.1", 70 | "temp-dir": "^1.0.0" 71 | }, 72 | "devDependencies": { 73 | "chai": "^3.5.0", 74 | "eslint": "^3.0.1", 75 | "eslint-config-standard": "^6.0.0-beta", 76 | "eslint-plugin-promise": "^3.3.0", 77 | "eslint-plugin-standard": "^2.0.0", 78 | "form-data": "^2.1.4", 79 | "inject-then": "^2.0.8", 80 | "mocha": "^3.2.0", 81 | "mockdate": "^2.0.1", 82 | "nodemon": "^1.11.0", 83 | "stream-to-promise": "^2.2.0" 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /setup/create-test-db.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" <<-EOSQL 5 | CREATE DATABASE ramtest; 6 | CREATE ROLE ramtest WITH LOGIN PASSWORD 'ramtest'; 7 | GRANT ALL PRIVILEGES ON DATABASE "ramtest" TO ramtest; 8 | EOSQL -------------------------------------------------------------------------------- /setup/fixtures/fixtures.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import { fixMeUp } from '../../test/utils/data'; 3 | 4 | export function addData () { 5 | console.log('Importing data'); 6 | return fixMeUp(); 7 | } 8 | -------------------------------------------------------------------------------- /setup/index.js: -------------------------------------------------------------------------------- 1 | // only ES5 is allowed in this file 2 | require('babel-register')(); 3 | 4 | require('./setup'); 5 | -------------------------------------------------------------------------------- /setup/setup-extensions.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" <<-EOSQL 5 | CREATE EXTENSION IF NOT EXISTS unaccent 6 | EOSQL -------------------------------------------------------------------------------- /setup/setup.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import db from '../app/db/'; 3 | import { setupStructure as setupDb } from 
'../app/db/structure'; 4 | import { setupStructure as setupS3, bucketExists } from '../app/s3/structure'; 5 | import { bucket } from '../app/s3'; 6 | import { addData } from './fixtures/fixtures'; 7 | 8 | const arg = (a) => process.argv.indexOf(a) !== -1; 9 | 10 | async function checkDangerousDbOp () { 11 | const exists = await db.schema.hasTable('scenarios'); 12 | if (exists && !arg('--force-override')) { 13 | console.log('ERROR: Database is not empty.'); 14 | console.log('Use --force-override if you want to delete everything.'); 15 | process.exit(1); 16 | } 17 | } 18 | 19 | async function checkDangerousS3Op () { 20 | const exists = await bucketExists(bucket); 21 | if (exists && !arg('--force-override')) { 22 | console.log('ERROR: Bucket already exists.'); 23 | console.log('Use --force-override if you want to delete everything.'); 24 | process.exit(1); 25 | } 26 | } 27 | 28 | async function main (params) { 29 | try { 30 | if (arg('--help') || arg('-h') || (!arg('--data') && !arg('--db') && !arg('--bucket'))) { 31 | console.log('Options:'); 32 | console.log(' --data', ' Sets up database and data fixtures.'); 33 | console.log(' --db', ' Sets up database without data fixtures.'); 34 | console.log(' --bucket', ' Sets up bucket for file storage.'); 35 | console.log(''); 36 | console.log(' --force-override', ' Use to override safe data check.'); 37 | console.log(' WARNING: All data will be lost'); 38 | console.log(''); 39 | process.exit(0); 40 | } 41 | 42 | if (arg('--data')) { 43 | await checkDangerousDbOp(); 44 | await setupDb(); 45 | await checkDangerousS3Op(); 46 | await setupS3(); 47 | await addData(); 48 | } else { 49 | if (arg('--db')) { 50 | await checkDangerousDbOp(); 51 | await setupDb(); 52 | } 53 | 54 | if (arg('--bucket')) { 55 | await checkDangerousS3Op(); 56 | await setupS3(); 57 | } 58 | } 59 | 60 | console.log('done'); 61 | process.exit(0); 62 | } catch (error) { 63 | console.log(error); 64 | process.exit(1); 65 | } 66 | } 67 | 68 | main(); 69 | -------------------------------------------------------------------------------- /test/test-result-gen.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import { assert } from 'chai'; 3 | 4 | import initServer from '../app/services/server'; 5 | import { setupStructure as setupDdStructure } from '../app/db/structure'; 6 | import { setupStructure as setupStorageStructure } from '../app/s3/structure'; 7 | import { fixMeUp, getSelectedAdminAreas } from './utils/data'; 8 | import db from '../app/db'; 9 | import Operation from '../app/utils/operation'; 10 | 11 | var options = { 12 | connection: {port: 2000, host: '0.0.0.0'} 13 | }; 14 | 15 | var instance; 16 | before(function (done) { 17 | initServer(options, function (_, server) { 18 | instance = server.hapi; 19 | instance.register(require('inject-then'), function (err) { 20 | if (err) throw err; 21 | 22 | done(); 23 | }); 24 | }); 25 | }); 26 | 27 | describe('Result generation', function () { 28 | before('Before - Result generation', function () { 29 | this.timeout(5000); 30 | return setupDdStructure() 31 | .then(() => setupStorageStructure()) 32 | .then(() => fixMeUp()); 33 | }); 34 | 35 | describe('POST /projects/{projId}/scenarios/{scId}/generate', function () { 36 | it('should return not found when getting non existent project', function () { 37 | return instance.injectThen({ 38 | method: 'POST', 39 | url: '/projects/300/scenarios/300/generate' 40 | }).then(res => { 41 | assert.equal(res.statusCode, 404, 'Status code is 404'); 42 
| assert.equal(res.result.message, 'Project not found'); 43 | }); 44 | }); 45 | 46 | it('should return not found when getting non existent scenario', function () { 47 | return instance.injectThen({ 48 | method: 'POST', 49 | url: '/projects/2000/scenarios/300/generate' 50 | }).then(res => { 51 | assert.equal(res.statusCode, 404, 'Status code is 404'); 52 | assert.equal(res.result.message, 'Scenario not found'); 53 | }); 54 | }); 55 | 56 | it('should return error when the project setup is not complete', function () { 57 | return instance.injectThen({ 58 | method: 'POST', 59 | url: '/projects/1000/scenarios/1000/generate' 60 | }).then(res => { 61 | assert.equal(res.statusCode, 409, 'Status code is 409'); 62 | assert.equal(res.result.message, 'Project setup not completed'); 63 | }); 64 | }); 65 | 66 | it('should return error when no admin areas are selected', function () { 67 | // Modify db entry. 68 | return db('scenarios_settings') 69 | .update({ 70 | value: '[]' 71 | }) 72 | .where('scenario_id', 2000) 73 | .where('key', 'admin_areas') 74 | .then(() => instance.injectThen({ 75 | method: 'POST', 76 | url: '/projects/2000/scenarios/2000/generate' 77 | })) 78 | .then(res => { 79 | assert.equal(res.statusCode, 409, 'Status code is 409'); 80 | assert.equal(res.result.message, 'No admin areas selected'); 81 | }) 82 | // Set admin areas back to original. 83 | .then(() => db('scenarios_settings') 84 | .update({value: JSON.stringify(getSelectedAdminAreas(2000))}) 85 | .where('scenario_id', 2000) 86 | .where('key', 'admin_areas') 87 | ); 88 | }); 89 | 90 | it('should remove old results and start generation', function () { 91 | // Insert some dummy files to ensure they're deleted. 92 | return db.batchInsert('scenarios_files', [ 93 | { 94 | 'name': 'results', 95 | 'type': 'results-json', 96 | 'path': 'scenario-2000/results_000000', 97 | 'project_id': 2000, 98 | 'scenario_id': 2000, 99 | 'created_at': '2017-02-01T12:00:03.000Z', 100 | 'updated_at': '2017-02-01T12:00:03.000Z' 101 | }, 102 | { 103 | 'name': 'results-all', 104 | 'type': 'results-csv', 105 | 'path': 'scenario-2000/results-all_000000', 106 | 'project_id': 2000, 107 | 'scenario_id': 2000, 108 | 'created_at': '2017-02-01T12:00:03.000Z', 109 | 'updated_at': '2017-02-01T12:00:03.000Z' 110 | } 111 | ]) 112 | .then(() => instance.injectThen({ 113 | method: 'POST', 114 | url: '/projects/2000/scenarios/2000/generate' 115 | })) 116 | .then(res => { 117 | assert.equal(res.statusCode, 200, 'Status code is 200'); 118 | assert.equal(res.result.message, 'Result generation started'); 119 | }) 120 | // Check the files table. 121 | .then(() => db('scenarios_files') 122 | .select('*') 123 | .where('scenario_id', 2000) 124 | .whereIn('type', ['results-json', 'results-csv']) 125 | ) 126 | .then(files => { 127 | assert.lengthOf(files, 0, 'Scenario results is empty'); 128 | }) 129 | // Check the operations table. 130 | .then(() => db('operations') 131 | .select('*') 132 | .where('scenario_id', 2000) 133 | .where('project_id', 2000) 134 | .where('name', 'generate-analysis') 135 | ) 136 | .then(op => { 137 | // Operation is only started when generation starts. 
138 | assert.equal(op.length, 0); 139 | }); 140 | }); 141 | 142 | it('should throw error if the results generation is already running', function () { 143 | const op = new Operation(db); 144 | return op.start('generate-analysis', 2000, 2000) 145 | .then(() => op.log('start', {message: 'Operation started'})) 146 | .then(() => instance.injectThen({ 147 | method: 'POST', 148 | url: '/projects/2000/scenarios/2000/generate' 149 | })) 150 | .then(res => { 151 | assert.equal(res.statusCode, 409, 'Status code is 409'); 152 | assert.equal(res.result.message, 'Result generation already running'); 153 | }); 154 | }); 155 | 156 | after(function () { 157 | // Clean operations table for project/scenario 2000 158 | return db('operations') 159 | .where('project_id', 2000) 160 | .where('scenario_id', 2000) 161 | .del(); 162 | }); 163 | }); 164 | 165 | describe('DELETE /projects/{projId}/scenarios/{scId}/generate', function () { 166 | it('should return conflict when getting non existent project', function () { 167 | return instance.injectThen({ 168 | method: 'DELETE', 169 | url: '/projects/300/scenarios/300/generate' 170 | }).then(res => { 171 | assert.equal(res.statusCode, 409, 'Status code is 409'); 172 | assert.equal(res.result.message, 'Result generation not running'); 173 | }); 174 | }); 175 | 176 | it('should return conflict when getting non existent scenario', function () { 177 | return instance.injectThen({ 178 | method: 'DELETE', 179 | url: '/projects/2000/scenarios/300/generate' 180 | }).then(res => { 181 | assert.equal(res.statusCode, 409, 'Status code is 409'); 182 | assert.equal(res.result.message, 'Result generation not running'); 183 | }); 184 | }); 185 | 186 | it('should stop the operation and add an error log', function () { 187 | // There needs to be an ongoing operation for the abort request to act on. 188 | // Operation is fully tested in another file so it's safe to use. 189 | let op = new Operation(db); 190 | return op.start('generate-analysis', 2000, 2000) 191 | .then(() => op.log('start', {message: 'Operation started'})) 192 | .then(() => instance.injectThen({ 193 | method: 'DELETE', 194 | url: '/projects/2000/scenarios/2000/generate' 195 | })) 196 | .then(res => { 197 | assert.equal(res.statusCode, 200, 'Status code is 200'); 198 | assert.equal(res.result.message, 'Result generation aborted'); 199 | }) 200 | // Check the operations table. 201 | .then(() => db('operations') 202 | .select('*') 203 | .where('scenario_id', 2000) 204 | .where('project_id', 2000) 205 | .where('name', 'generate-analysis') 206 | .orderBy('id') 207 | .first() 208 | .then(op => { 209 | assert.equal(op.status, 'complete'); 210 | return op.id; 211 | }) 212 | ) 213 | // Check the operations_logs table.
214 | .then(opId => db('operations_logs') 215 | .select('*') 216 | .where('operation_id', opId) 217 | .then(opLogs => { 218 | assert.deepEqual(opLogs[0].data, { message: 'Operation started' }); 219 | assert.deepEqual(opLogs[1].data, { error: 'Operation aborted' }); 220 | }) 221 | ); 222 | }); 223 | }); 224 | }); 225 | -------------------------------------------------------------------------------- /test/test-root.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import { assert } from 'chai'; 3 | 4 | import initServer from '../app/services/server'; 5 | 6 | var options = { 7 | connection: {port: 2000, host: '0.0.0.0'} 8 | }; 9 | 10 | var instance; 11 | before(function (done) { 12 | initServer(options, function (_, server) { 13 | instance = server.hapi; 14 | instance.register(require('inject-then'), function (err) { 15 | if (err) throw err; 16 | 17 | done(); 18 | }); 19 | }); 20 | }); 21 | 22 | describe('root', function () { 23 | describe('endpoint /', function () { 24 | it('should have statusCode 200', function (done) { 25 | instance.injectThen({ 26 | method: 'GET', 27 | url: '/' 28 | }).then(res => { 29 | assert.equal(res.statusCode, 200, 'Status code is 200'); 30 | done(); 31 | }); 32 | }); 33 | }); 34 | }); 35 | -------------------------------------------------------------------------------- /test/test-scenarios-poi.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import { assert } from 'chai'; 3 | 4 | import initServer from '../app/services/server'; 5 | import { setupStructure as setupDdStructure } from '../app/db/structure'; 6 | import { setupStructure as setupStorageStructure } from '../app/s3/structure'; 7 | import { fixMeUp } from './utils/data'; 8 | 9 | var options = { 10 | connection: {port: 2000, host: '0.0.0.0'} 11 | }; 12 | 13 | var instance; 14 | before(function (done) { 15 | initServer(options, function (_, server) { 16 | instance = server.hapi; 17 | instance.register(require('inject-then'), function (err) { 18 | if (err) throw err; 19 | 20 | done(); 21 | }); 22 | }); 23 | }); 24 | 25 | describe('Scenarios Poi', function () { 26 | before('Before - Scenarios', function () { 27 | this.timeout(5000); 28 | return setupDdStructure() 29 | .then(() => setupStorageStructure()) 30 | .then(() => fixMeUp()); 31 | }); 32 | 33 | describe('GET /projects/{projId}/scenarios/{scId}/poi', function () { 34 | it('should return error when type is missing', function () { 35 | return instance.injectThen({ 36 | method: 'GET', 37 | url: '/projects/2000/scenarios/2000/poi' 38 | }).then(res => { 39 | assert.equal(res.statusCode, 400, 'Status code is 400'); 40 | assert.match(res.result.message, /["type" is required]/); 41 | }); 42 | }); 43 | 44 | it('should return not found for invalid type', function () { 45 | return instance.injectThen({ 46 | method: 'GET', 47 | url: '/projects/2000/scenarios/2000/poi?type=invalid' 48 | }).then(res => { 49 | assert.equal(res.statusCode, 404, 'Status code is 404'); 50 | }); 51 | }); 52 | 53 | it('should return the correct data', function () { 54 | return instance.injectThen({ 55 | method: 'GET', 56 | url: '/projects/2000/scenarios/2000/poi?type=pointOfInterest' 57 | }).then(res => { 58 | assert.equal(res.statusCode, 200, 'Status code is 200'); 59 | let data = res.result; 60 | assert.deepEqual(data, [ 61 | { i: 0, c: [ -37.50811, -11.52502 ] }, 62 | { i: 1, c: [ -37.62598, -11.14786 ] }, 63 | { i: 2, c: [ -38.00331, -11.18805 ] }, 64 | { i: 3, c: [ 
-37.67609, -11.19296 ] }, 65 | { i: 4, c: [ -37.65658, -11.38247 ] }, 66 | { i: 5, c: [ -37.78638, -11.27597 ] } 67 | ]); 68 | }); 69 | }); 70 | }); 71 | }); 72 | -------------------------------------------------------------------------------- /test/test-scenarios-results.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import { assert } from 'chai'; 3 | import _ from 'lodash'; 4 | 5 | import initServer from '../app/services/server'; 6 | import { setupStructure as setupDdStructure } from '../app/db/structure'; 7 | import { setupStructure as setupStorageStructure } from '../app/s3/structure'; 8 | import { fixMeUp } from './utils/data'; 9 | 10 | var options = { 11 | connection: {port: 2000, host: '0.0.0.0'} 12 | }; 13 | 14 | var instance; 15 | before(function (done) { 16 | initServer(options, function (_, server) { 17 | instance = server.hapi; 18 | instance.register(require('inject-then'), function (err) { 19 | if (err) throw err; 20 | 21 | done(); 22 | }); 23 | }); 24 | }); 25 | 26 | describe('Scenario results', function () { 27 | before('Before - Scenario results', function () { 28 | this.timeout(5000); 29 | return setupDdStructure() 30 | .then(() => setupStorageStructure()) 31 | .then(() => fixMeUp()); 32 | }); 33 | 34 | describe('GET /projects/{projId}/scenarios/{scId}/results/geo', function () { 35 | it('should return the correct results for a scenario, type school', function () { 36 | return instance.injectThen({ 37 | method: 'GET', 38 | url: '/projects/2000/scenarios/2000/results/geo?poiType=school&popInd=population' 39 | }).then(res => { 40 | assert.equal(res.statusCode, 200, 'Status code is 200'); 41 | // It is not important for the query result to be sorted, but we need 42 | // to ensure order for the tests. 43 | let origins = _.sortBy(res.result, 'i'); 44 | assert.equal(origins.length, 3); 45 | assert.deepEqual(origins[0], { 46 | 'n': 'Paripiranga', 47 | 'i': 200001, 48 | 'e': 5000, 49 | 'p': 29459, 50 | 'pn': 0.6, 51 | 'c': [-37.86215, -10.68289] 52 | }); 53 | }); 54 | }); 55 | it('should return the correct results for a scenario, type church', function () { 56 | return instance.injectThen({ 57 | method: 'GET', 58 | url: '/projects/2000/scenarios/2000/results/geo?poiType=church&popInd=population' 59 | }).then(res => { 60 | assert.equal(res.statusCode, 200, 'Status code is 200'); 61 | // It is not important for the query result to be sorted, but we need 62 | // to ensure order for the tests.
63 | let origins = _.sortBy(res.result, 'i'); 64 | assert.equal(origins.length, 2); 65 | assert.equal(origins[1].e, 350000); 66 | }); 67 | }); 68 | it('should return an error for unknown POI types', function () { 69 | return instance.injectThen({ 70 | method: 'GET', 71 | url: '/projects/2000/scenarios/2000/results/geo?poiType=mockery&popInd=population' 72 | }).then(res => { 73 | assert.equal(res.statusCode, 400, 'Status code is 400'); 74 | assert.equal(res.result.message, '"poiType" must be one of [church, school]'); 75 | }); 76 | }); 77 | }); 78 | }); 79 | -------------------------------------------------------------------------------- /test/test-wbcatalog-source-data.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import { assert } from 'chai'; 3 | 4 | import initServer from '../app/services/server'; 5 | import { setupStructure as setupDdStructure } from '../app/db/structure'; 6 | import { setupStructure as setupStorageStructure } from '../app/s3/structure'; 7 | import db from '../app/db'; 8 | 9 | import { checkValidSource, buildCache, getResourcesFromDb, CACHE_DAYS } from '../app/routes/wbcatalog-source-data'; 10 | 11 | var options = { 12 | connection: {port: 2000, host: '0.0.0.0'} 13 | }; 14 | 15 | var instance; 16 | before(function (done) { 17 | initServer(options, function (_, server) { 18 | instance = server.hapi; 19 | instance.register(require('inject-then'), function (err) { 20 | if (err) throw err; 21 | 22 | done(); 23 | }); 24 | }); 25 | }); 26 | 27 | describe('Wb Catalog Source Data', function () { 28 | before('Before - Wb Catalog Source Data', function () { 29 | this.timeout(5000); 30 | return setupDdStructure() 31 | .then(() => setupStorageStructure()) 32 | .then(() => db.batchInsert('wbcatalog_resources', [ 33 | { 34 | id: 1000, 35 | type: 'profile', 36 | name: 'Profile source name', 37 | resource_id: 'profile-id-1000', 38 | resource_url: 'http://example.com/profile.file', 39 | created_at: new Date() 40 | }, 41 | { 42 | id: 1001, 43 | type: 'profile', 44 | name: 'Profile source name 2', 45 | resource_id: 'profile-id-1001', 46 | resource_url: 'http://example.com/profile2.file', 47 | created_at: new Date() 48 | }, 49 | { 50 | id: 2000, 51 | type: 'admin', 52 | name: 'Admin source name', 53 | resource_id: 'admin-id-2000', 54 | resource_url: 'http://example.com/admin.file', 55 | // Make expired. 
56 | created_at: new Date(Date.now() - (CACHE_DAYS + 1) * 86400 * 1000) 57 | } 58 | ])); 59 | }); 60 | 61 | describe('POST /projects/wbcatalog-source-data', function () { 62 | it('should error when the source name is missing', function () { 63 | return instance.injectThen({ 64 | method: 'POST', 65 | url: '/projects/wbcatalog-source-data', 66 | payload: { 67 | } 68 | }).then(res => { 69 | assert.equal(res.statusCode, 400, 'Status code is 400'); 70 | assert.match(res.result.message, /child "sourceName" fails because \["sourceName" is required\]/); 71 | }); 72 | }); 73 | 74 | it('should error when the source name is invalid', function () { 75 | return instance.injectThen({ 76 | method: 'POST', 77 | url: '/projects/wbcatalog-source-data', 78 | payload: { 79 | sourceName: 'invalid' 80 | } 81 | }).then(res => { 82 | assert.equal(res.statusCode, 400, 'Status code is 400'); 83 | assert.match(res.result.message, /child "sourceName" fails because \["sourceName" must be one of \[origins, profile, admin\]\]/); 84 | }); 85 | }); 86 | 87 | it('should not find a valid source when source is expired', function () { 88 | return checkValidSource('admin') 89 | .then(isValid => assert.isFalse(isValid)); 90 | }); 91 | 92 | it('should not find a valid source when source has no data', function () { 93 | return checkValidSource('origins') 94 | .then(isValid => assert.isFalse(isValid)); 95 | }); 96 | 97 | it('should find a valid source when source has valid data', function () { 98 | return checkValidSource('profile') 99 | .then(isValid => assert.isTrue(isValid)); 100 | }); 101 | 102 | it('should return the correct resources from the database', function () { 103 | return getResourcesFromDb('profile') 104 | .then(data => { 105 | assert.deepEqual(data, [ 106 | { 107 | name: 'Profile source name', 108 | resource_id: 'profile-id-1000' 109 | }, 110 | { 111 | name: 'Profile source name 2', 112 | resource_id: 'profile-id-1001' 113 | } 114 | ]); 115 | }); 116 | }); 117 | 118 | it('should build cache from new data', function () { 119 | return buildCache('profile', [{ 120 | id: 'profile-id-1003', 121 | name: 'The new profile', 122 | url: 'http://example.com/new-profile.file' 123 | }]) 124 | // getResourcesFromDb was tested previously. 
125 | .then(() => getResourcesFromDb('profile')) 126 | .then(data => { 127 | assert.deepEqual(data, [ 128 | { 129 | name: 'The new profile', 130 | resource_id: 'profile-id-1003' 131 | } 132 | ]); 133 | }); 134 | }); 135 | }); 136 | 137 | describe('POST /scenarios/wbcatalog-source-data', function () { 138 | it('should error when the source name is missing', function () { 139 | return instance.injectThen({ 140 | method: 'POST', 141 | url: '/scenarios/wbcatalog-source-data', 142 | payload: { 143 | } 144 | }).then(res => { 145 | assert.equal(res.statusCode, 400, 'Status code is 400'); 146 | assert.match(res.result.message, /child "sourceName" fails because \["sourceName" is required\]/); 147 | }); 148 | }); 149 | 150 | it('should error when the source name is invalid', function () { 151 | return instance.injectThen({ 152 | method: 'POST', 153 | url: '/scenarios/wbcatalog-source-data', 154 | payload: { 155 | sourceName: 'invalid' 156 | } 157 | }).then(res => { 158 | assert.equal(res.statusCode, 400, 'Status code is 400'); 159 | assert.match(res.result.message, /child "sourceName" fails because \["sourceName" must be one of \[poi, road-network\]\]/); 160 | }); 161 | }); 162 | }); 163 | }); 164 | -------------------------------------------------------------------------------- /test/test.env: -------------------------------------------------------------------------------- 1 | export DS_ENV='test' 2 | export DB_URI='postgresql://ramtest:ramtest@localhost:5432/ramtest' 3 | export STORAGE_PORT='9000' 4 | export STORAGE_ENGINE='minio' 5 | export STORAGE_ACCESS_KEY='minio' 6 | export STORAGE_SECRET_KEY='miniostorageengine' 7 | export STORAGE_BUCKET='ram-test' 8 | export STORAGE_REGION='us-east-1' 9 | export STORAGE_HOST='localhost' 10 | export OSM_P2P_DIR='./osm-p2p-dbs-test' 11 | export ROAD_NET_EDIT_MAX=20971520 # 20MB 12 | export RAH_GH_REPO='ram/ram' 13 | export RAH_GH_TOKEN='--redacted--' 14 | export RAH_GH_PATH='data' 15 | -------------------------------------------------------------------------------- /test/utils/data-sergipe/README.md: -------------------------------------------------------------------------------- 1 | # Overpass queries 2 | 3 | Brief notes on how the test data was generated. These queries fetch OSM data for Sergipe, Brazil, and prepare it for use as sample data in RRA. 4 | Note that the bbox order is s,w,n,e. 5 | 6 | To replicate this, you may need to install osmtogeojson: `npm install osmtogeojson -g`.
7 | 8 | ## Admin boundaries 9 | 10 | ``` 11 | wget "http://overpass-api.de/api/interpreter?data=[out:xml];(node['place'~'town|hamlet'](-11.58632,-38.29284,-10.68085,-37.5););out body;" -O admin-boundaries.osm 12 | ``` 13 | 14 | Then `$ osmtogeojson admin-boundaries.osm > admin-boundaries.geojson` 15 | 16 | ## Village centers 17 | 18 | ``` 19 | wget "http://overpass-api.de/api/interpreter?data=[out:xml];(node['place'~'town|hamlet'](-11.58632,-38.29284,-10.68085,-37.5););out body;" -O villages.osm 20 | ``` 21 | 22 | Then `$ osmtogeojson villages.osm > villages.geojson` 23 | 24 | ## POI - Townhalls 25 | 26 | ``` 27 | wget "http://overpass-api.de/api/interpreter?data=[out:xml];(node['amenity'~'townhall'](-11.58632,-38.29284,-10.68085,-37.5););out body;" -O poi-townhalls.osm 28 | ``` 29 | 30 | Then `$ osmtogeojson poi-townhalls.osm > poi-townhalls.geojson` 31 | 32 | ## Road network 33 | 34 | ``` 35 | wget "http://overpass-api.de/api/interpreter?data=[out:xml];(way['highway'](-11.58632,-38.29284,-10.68085,-37.5););out body;" -O road-network.osm 36 | ``` -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/index/000042.ldb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WorldBank-Transport/ram-backend/5d8f0130763b465f0e1a9e58aa98bfa5673f1f61/test/utils/data-sergipe/osm-p2p-db/index/000042.ldb -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/index/000043.ldb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WorldBank-Transport/ram-backend/5d8f0130763b465f0e1a9e58aa98bfa5673f1f61/test/utils/data-sergipe/osm-p2p-db/index/000043.ldb -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/index/000044.ldb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WorldBank-Transport/ram-backend/5d8f0130763b465f0e1a9e58aa98bfa5673f1f61/test/utils/data-sergipe/osm-p2p-db/index/000044.ldb -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/index/000045.ldb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WorldBank-Transport/ram-backend/5d8f0130763b465f0e1a9e58aa98bfa5673f1f61/test/utils/data-sergipe/osm-p2p-db/index/000045.ldb -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/index/000051.ldb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WorldBank-Transport/ram-backend/5d8f0130763b465f0e1a9e58aa98bfa5673f1f61/test/utils/data-sergipe/osm-p2p-db/index/000051.ldb -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/index/000063.ldb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WorldBank-Transport/ram-backend/5d8f0130763b465f0e1a9e58aa98bfa5673f1f61/test/utils/data-sergipe/osm-p2p-db/index/000063.ldb -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/index/000064.ldb: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/WorldBank-Transport/ram-backend/5d8f0130763b465f0e1a9e58aa98bfa5673f1f61/test/utils/data-sergipe/osm-p2p-db/index/000064.ldb -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/index/000065.ldb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WorldBank-Transport/ram-backend/5d8f0130763b465f0e1a9e58aa98bfa5673f1f61/test/utils/data-sergipe/osm-p2p-db/index/000065.ldb -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/index/000066.ldb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WorldBank-Transport/ram-backend/5d8f0130763b465f0e1a9e58aa98bfa5673f1f61/test/utils/data-sergipe/osm-p2p-db/index/000066.ldb -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/index/000067.ldb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WorldBank-Transport/ram-backend/5d8f0130763b465f0e1a9e58aa98bfa5673f1f61/test/utils/data-sergipe/osm-p2p-db/index/000067.ldb -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/index/CURRENT: -------------------------------------------------------------------------------- 1 | MANIFEST-000004 2 | -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/index/LOCK: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WorldBank-Transport/ram-backend/5d8f0130763b465f0e1a9e58aa98bfa5673f1f61/test/utils/data-sergipe/osm-p2p-db/index/LOCK -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/index/LOG.old: -------------------------------------------------------------------------------- 1 | 2017/05/02-16:53:01.325630 7f12a99b3700 Delete type=3 #1 2 | -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/index/MANIFEST-000004: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WorldBank-Transport/ram-backend/5d8f0130763b465f0e1a9e58aa98bfa5673f1f61/test/utils/data-sergipe/osm-p2p-db/index/MANIFEST-000004 -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/kdb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WorldBank-Transport/ram-backend/5d8f0130763b465f0e1a9e58aa98bfa5673f1f61/test/utils/data-sergipe/osm-p2p-db/kdb -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/log/000005.ldb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WorldBank-Transport/ram-backend/5d8f0130763b465f0e1a9e58aa98bfa5673f1f61/test/utils/data-sergipe/osm-p2p-db/log/000005.ldb -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/log/CURRENT: -------------------------------------------------------------------------------- 1 | MANIFEST-000004 2 | 
-------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/log/LOCK: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WorldBank-Transport/ram-backend/5d8f0130763b465f0e1a9e58aa98bfa5673f1f61/test/utils/data-sergipe/osm-p2p-db/log/LOCK -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/log/LOG: -------------------------------------------------------------------------------- 1 | 2017/05/02-16:55:03.109029 7f3b2154a700 Recovering log #3 2 | 2017/05/02-16:55:03.490319 7f3b2154a700 Level-0 table #5: started 3 | 2017/05/02-16:55:03.817393 7f3b2154a700 Level-0 table #5: 25109657 bytes OK 4 | 2017/05/02-16:55:03.824480 7f3b2154a700 Delete type=3 #2 5 | 2017/05/02-16:55:03.824518 7f3b2154a700 Delete type=0 #3 6 | -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/log/LOG.old: -------------------------------------------------------------------------------- 1 | 2017/05/02-16:53:01.325630 7f12a91b2700 Delete type=3 #1 2 | -------------------------------------------------------------------------------- /test/utils/data-sergipe/osm-p2p-db/log/MANIFEST-000004: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WorldBank-Transport/ram-backend/5d8f0130763b465f0e1a9e58aa98bfa5673f1f61/test/utils/data-sergipe/osm-p2p-db/log/MANIFEST-000004 -------------------------------------------------------------------------------- /test/utils/data-sergipe/poi-townhalls.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [ 4 | { 5 | "type": "Feature", 6 | "id": "node/3390484067", 7 | "properties": { 8 | "amenity": "townhall", 9 | "name": "Prefeitura Municipal", 10 | "id": "node/3390484067" 11 | }, 12 | "geometry": { 13 | "type": "Point", 14 | "coordinates": [ 15 | -37.5081188, 16 | -11.5250279 17 | ] 18 | } 19 | }, 20 | { 21 | "type": "Feature", 22 | "id": "node/3670839442", 23 | "properties": { 24 | "amenity": "townhall", 25 | "name": "Prefeitura Municipal de Boquim", 26 | "id": "node/3670839442" 27 | }, 28 | "geometry": { 29 | "type": "Point", 30 | "coordinates": [ 31 | -37.6259847, 32 | -11.1478675 33 | ] 34 | } 35 | }, 36 | { 37 | "type": "Feature", 38 | "id": "node/3841705716", 39 | "properties": { 40 | "amenity": "townhall", 41 | "name": "Prefeitura Municipal de Tobias Barreto", 42 | "id": "node/3841705716" 43 | }, 44 | "geometry": { 45 | "type": "Point", 46 | "coordinates": [ 47 | -38.0033176, 48 | -11.1880577 49 | ] 50 | } 51 | }, 52 | { 53 | "type": "Feature", 54 | "id": "node/3844026945", 55 | "properties": { 56 | "amenity": "townhall", 57 | "name": "Prefeitura Municipal de Pedrinhas", 58 | "id": "node/3844026945" 59 | }, 60 | "geometry": { 61 | "type": "Point", 62 | "coordinates": [ 63 | -37.6760995, 64 | -11.1929695 65 | ] 66 | } 67 | }, 68 | { 69 | "type": "Feature", 70 | "id": "node/3845957608", 71 | "properties": { 72 | "amenity": "townhall", 73 | "name": "Prefeitura Municipal de Umbaúba", 74 | "id": "node/3845957608" 75 | }, 76 | "geometry": { 77 | "type": "Point", 78 | "coordinates": [ 79 | -37.6565803, 80 | -11.3824781 81 | ] 82 | } 83 | }, 84 | { 85 | "type": "Feature", 86 | "id": "node/3870177452", 87 | "properties": { 88 | "amenity": "townhall", 89 | "name": "Prefeitura Municipal de 
Itabiananinha", 90 | "id": "node/3870177452" 91 | }, 92 | "geometry": { 93 | "type": "Point", 94 | "coordinates": [ 95 | -37.786388, 96 | -11.2759715 97 | ] 98 | } 99 | } 100 | ] 101 | } 102 | -------------------------------------------------------------------------------- /test/utils/data-sergipe/results-p2000-s2000.csv: -------------------------------------------------------------------------------- 1 | id,name,population,lat,lon,poi.school,poi.church 2 | 200001,Paripiranga,29459,-10.6828923,-37.8621523,5000,3500 3 | 200002,Jandaíra,10997,-11.5628009,-37.7820255,54700, 4 | 200003,Tobias Barreto,48733,-11.188034,-38.0034554,0,350000 -------------------------------------------------------------------------------- /test/utils/data-sergipe/results-p2000-s2000.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [ 4 | { 5 | "type": "Feature", 6 | "properties": { 7 | "id": 200001, 8 | "name": "Paripiranga", 9 | "pop": 29459, 10 | "eta": { 11 | "school": 5000, 12 | "church": 3500 13 | } 14 | }, 15 | "geometry": { 16 | "type": "Point", 17 | "coordinates": [ 18 | -10.6828923, 19 | -37.8621523 20 | ] 21 | } 22 | }, 23 | { 24 | "type": "Feature", 25 | "properties": { 26 | "id": 200002, 27 | "name": "Jandaíra", 28 | "pop": 10997, 29 | "eta": { 30 | "school": 54700 31 | } 32 | }, 33 | "geometry": { 34 | "type": "Point", 35 | "coordinates": [ 36 | -11.5628009, 37 | -37.7820255 38 | ] 39 | } 40 | }, 41 | { 42 | "type": "Feature", 43 | "properties": { 44 | "id": 200003, 45 | "name": "Tobias Barreto", 46 | "pop": 48733, 47 | "eta": { 48 | "school": 0, 49 | "church": 350000 50 | } 51 | }, 52 | "geometry": { 53 | "type": "Point", 54 | "coordinates": [ 55 | -11.188034, 56 | -38.0034554 57 | ] 58 | } 59 | } 60 | ] 61 | } -------------------------------------------------------------------------------- /test/utils/data-sergipe/results-p2000-s2000.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "id": 200001, 4 | "name": "Arauá", 5 | "results": [ 6 | { 7 | "id": 200001, 8 | "name": "Paripiranga", 9 | "population": 29459, 10 | "lat": -10.6828923, 11 | "lon": -37.8621523, 12 | "poi": { 13 | "school": 5000, 14 | "church": 3500 15 | } 16 | }, 17 | { 18 | "id": 200002, 19 | "name": "Jandaíra", 20 | "population": 10997, 21 | "lat": -11.5628009, 22 | "lon": -37.7820255, 23 | "poi": { 24 | "school": 54700 25 | } 26 | }, 27 | { 28 | "id": 200003, 29 | "name": "Tobias Barreto", 30 | "population": 48733, 31 | "lat": -11.188034, 32 | "lon": -38.0034554, 33 | "poi": { 34 | "school": 0, 35 | "church": 350000 36 | } 37 | } 38 | ] 39 | } 40 | ] -------------------------------------------------------------------------------- /test/utils/data-sergipe/results.csv: -------------------------------------------------------------------------------- 1 | id,name,population,lat,lon,poi.school,poi.church 2 | 200001,Paripiranga,29459,-37.8621523,-10.6828923,5000,3500 3 | 200002,Jandaíra,10997,-37.7820255,-11.5628009,54700, 4 | 200003,Tobias Barreto,48733,-38.0034554,-11.188034,0,350000 -------------------------------------------------------------------------------- /test/utils/road-network-small.osm: -------------------------------------------------------------------------------- 1 | 2 | 3 | The data included in this document is from www.openstreetmap.org. The data is made available under ODbL. 
4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /test/utils/test-file: -------------------------------------------------------------------------------- 1 | I'm a poor file from a poor filesystem. -------------------------------------------------------------------------------- /test/utils/test-file-scenario-1200: -------------------------------------------------------------------------------- 1 | test file for scenario 1200 -------------------------------------------------------------------------------- /test/utils/test-file.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "I'm a poor JSON file from a poor filesystem." 3 | } --------------------------------------------------------------------------------
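
Taken together, the files above imply a simple local development loop: `docker-compose-dev.yml` provides PostGIS and Minio (with `setup-extensions.sh` and `create-test-db.sh` mounted as init scripts), `setup/setup.js` prepares the database and bucket, and the `package.json` scripts run the API and the test suite. The shell sketch below strings those pieces together; it assumes Docker, Node 8.x, and Yarn are installed, and every command is taken from the files shown above.

```sh
# Network expected by docker-compose.yml (command from its header comment).
docker network create --driver=bridge --subnet=172.99.99.0/24 --gateway=172.99.99.1 ram

# Start PostGIS and Minio for development. The mounted init scripts also
# create the `ramtest` database and role used by the test suite.
docker-compose -f docker-compose-dev.yml up -d

# Install dependencies; the postinstall hook creates app/config/local.js if missing.
yarn install

# Create the database structure and the storage bucket (flags from setup/setup.js;
# use --data instead to also load fixtures, --force-override to wipe and redo).
yarn setup --db --bucket

# Run the test suite; `yarn test` sources test/test.env before invoking mocha.
yarn test

# Run the API server (index.js registers babel and loads ./app).
yarn start
```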