├── utils ├── plan-cerberus-agl.sh ├── plan-cerberus-sat.sh ├── plan-delfi.sh ├── plan_diverse_sat.sh └── plans_to_json.py ├── samples ├── curl_client.sh ├── problem1.pddl ├── domain1.pddl ├── python_client_topq.py ├── js_client.js ├── python_client.py └── sample_swagger_doc.json ├── conf.js ├── storage └── nfs.js ├── app.masterppp.joint.js ├── requirements.txt ├── package.json ├── LICENSE ├── config └── default-unified.json ├── README.md ├── apibase.masterppp.json ├── Dockerfile.onestage ├── Dockerfile.twostage └── app.masterppp.common.js /utils/plan-cerberus-agl.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # $1 domain 4 | # $2 problem 5 | # $3 plan file 6 | # $4 json file 7 | 8 | LOG_FILE=run.log 9 | SOURCE="$( dirname "${BASH_SOURCE[0]}" )" 10 | 11 | $SOURCE/plan-agl.py $1 $2 $3 > $LOG_FILE 12 | $SOURCE/../plans_to_json.py --domain $1 --problem $2 --plans-folder . --plan-file $3 --json-file $4 13 | -------------------------------------------------------------------------------- /utils/plan-cerberus-sat.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # $1 domain 4 | # $2 problem 5 | # $3 plan file 6 | # $4 json file 7 | 8 | LOG_FILE=run.log 9 | SOURCE="$( dirname "${BASH_SOURCE[0]}" )" 10 | 11 | $SOURCE/plan-sat.py $1 $2 $3 > $LOG_FILE 12 | $SOURCE/../plans_to_json.py --domain $1 --problem $2 --plans-folder . --plan-file $3 --json-file $4 13 | -------------------------------------------------------------------------------- /samples/curl_client.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | domain=`sed 's/;/\n;/g' $1 | sed '/^;/d' | tr -d '\n'` 4 | problem=`sed 's/;/\n;/g' $2 | sed '/^;/d' | tr -d '\n'` 5 | body="{\"domain\": \"$domain\", \"problem\": \"$problem\", \"numplans\":$3}" 6 | basebody=`echo $body` 7 | curl -d "$basebody" -H "Content-Type: application/json" http://localhost:4501/planners/topk 8 | -------------------------------------------------------------------------------- /utils/plan-delfi.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # $1 domain 4 | # $2 problem 5 | # $3 plan file 6 | # $4 json file 7 | 8 | LOG_FILE=run.log 9 | SOURCE="$( dirname "${BASH_SOURCE[0]}" )" 10 | 11 | $SOURCE/plan-ipc.py --image-from-lifted-task $1 $2 $3 > $LOG_FILE 12 | $SOURCE/../plans_to_json.py --domain $1 --problem $2 --plans-folder . 
--plan-file $3 --json-file $4 13 | -------------------------------------------------------------------------------- /conf.js: -------------------------------------------------------------------------------- 1 | var nconf = require('nconf'); 2 | const path = require('path'); 3 | const fs = require('mz/fs'); 4 | 5 | nconf.use("memory"); 6 | nconf.argv(); 7 | nconf.env({ separator: "__" }); 8 | var conffile = (nconf.get("conf") || "default-unified.json"); 9 | var confpath = path.join(__dirname, "config", conffile); 10 | if (fs.existsSync(confpath) && fs.statSync(confpath).isFile()) { 11 | nconf.file(confpath); 12 | } 13 | 14 | module.exports = nconf; -------------------------------------------------------------------------------- /storage/nfs.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs-extra'); 2 | var path = require('path'); 3 | 4 | exports.store = function(filepath, destfname, args) { 5 | let destpath = path.join(args.path, destfname); 6 | return fs.copy(filepath, destpath).then(() => { 7 | console.log("Copied file " + filepath + " to " + args.path); 8 | return true; // make sure to return constant promises this way 9 | }).catch(store_err => { 10 | console.error("Error storing " + filepath + " to " + destpath + " with technique nfs."); 11 | }); 12 | } -------------------------------------------------------------------------------- /app.masterppp.joint.js: -------------------------------------------------------------------------------- 1 | const mppCommon = require('./app.masterppp.common'); 2 | const _ = require('lodash'); 3 | 4 | var apibase = mppCommon.coreApiBase(); 5 | const logger = mppCommon.getLogger("default"); 6 | var app = mppCommon.setUpExpress(logger); 7 | var plannersConfig = mppCommon.getEnabledCategories(logger); 8 | 9 | mppCommon.setUpWorkerPlanners(plannersConfig, apibase, app, logger); 10 | 11 | function getPlannerPosts(plannersConfig, task) { 12 | return _.map(_.keys(plannersConfig), function(pc) { 13 | return mppCommon.plannerSelectorPromise(pc, plannersConfig[pc], logger, plannersConfig, task); 14 | }); 15 | } 16 | 17 | mppCommon.addMainTask(plannersConfig, apibase, app, logger, getPlannerPosts); 18 | 19 | mppCommon.startServer(apibase, app, logger); -------------------------------------------------------------------------------- /samples/problem1.pddl: -------------------------------------------------------------------------------- 1 | (define (problem strips-gripper-x-2) 2 | (:domain gripper-strips) 3 | (:objects rooma roomb ball6 ball5 ball4 ball3 ball2 ball1 left right) 4 | (:init (room rooma) 5 | (room roomb) 6 | (ball ball6) 7 | (ball ball5) 8 | (ball ball4) 9 | (ball ball3) 10 | (ball ball2) 11 | (ball ball1) 12 | (at-robby rooma) 13 | (free left) 14 | (free right) 15 | (at ball6 rooma) 16 | (at ball5 rooma) 17 | (at ball4 rooma) 18 | (at ball3 rooma) 19 | (at ball2 rooma) 20 | (at ball1 rooma) 21 | (gripper left) 22 | (gripper right)) 23 | (:goal (and (at ball6 roomb) 24 | (at ball5 roomb) 25 | (at ball4 roomb) 26 | (at ball3 roomb) 27 | (at ball2 roomb) 28 | (at ball1 roomb)))) -------------------------------------------------------------------------------- /utils/plan_diverse_sat.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # $1 domain 4 | # $2 problem 5 | # $3 number of plans (k) 6 | # $4 output 7 | 8 | num_plans=$(( 2*$3 )) 9 | 10 | SOURCE="$( dirname "${BASH_SOURCE[0]}" )" 11 | $SOURCE/plan.py --planner diverse --domain $1 
--problem $2 --number-of-plans $num_plans --use-local-folder --clean-local-folder 12 | 13 | PLANSDIR=$(pwd)/found_plans 14 | num_plans=`ls -1q $PLANSDIR/sas_plan.* | wc -l` 15 | 16 | SCORE="subset(compute_stability_metric=true,aggregator_metric=avg,plans_as_multisets=false,plans_subset_size=$3,exact_method=false,dump_plans=true)" 17 | domain="$(cd "$(dirname "$1")"; pwd)/$(basename "$1")" 18 | problem="$(cd "$(dirname "$2")"; pwd)/$(basename "$2")" 19 | 20 | (mkdir -p $PLANSDIR/done && cd $PLANSDIR/done && $DIVERSE_SCORE_COMPUTATION_PATH/fast-downward.py $domain $problem --diversity-score $SCORE --internal-plan-files-path $PLANSDIR --internal-num-plans-to-read $num_plans) 21 | 22 | $SOURCE/../plans_to_json.py --domain $1 --problem $2 --plans-folder $PLANSDIR/done --plan-file sas_plan --json-file $4 23 | -------------------------------------------------------------------------------- /samples/domain1.pddl: -------------------------------------------------------------------------------- 1 | (define (domain gripper-strips) 2 | (:predicates (room ?r) 3 | (ball ?b) 4 | (gripper ?g) 5 | (at-robby ?r) 6 | (at ?b ?r) 7 | (free ?g) 8 | (carry ?o ?g)) 9 | 10 | (:action move 11 | :parameters (?from ?to) 12 | :precondition (and (room ?from) (room ?to) (at-robby ?from)) 13 | :effect (and (at-robby ?to) 14 | (not (at-robby ?from)))) 15 | 16 | 17 | 18 | (:action pick 19 | :parameters (?obj ?room ?gripper) 20 | :precondition (and (ball ?obj) (room ?room) (gripper ?gripper) 21 | (at ?obj ?room) (at-robby ?room) (free ?gripper)) 22 | :effect (and (carry ?obj ?gripper) 23 | (not (at ?obj ?room)) 24 | (not (free ?gripper)))) 25 | 26 | 27 | (:action drop 28 | :parameters (?obj ?room ?gripper) 29 | :precondition (and (ball ?obj) (room ?room) (gripper ?gripper) 30 | (carry ?obj ?gripper) (at-robby ?room)) 31 | :effect (and (at ?obj ?room) 32 | (free ?gripper) 33 | (not (carry ?obj ?gripper))))) 34 | 35 | -------------------------------------------------------------------------------- /samples/python_client_topq.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | 3 | import sys 4 | import json 5 | 6 | def invoke_planner_service(domainfile, problemfile, k, q, category, planner_name): 7 | import requests 8 | 9 | post_url = 'http://localhost:4501/planners/' + category + '/' + planner_name 10 | with open(domainfile, "r") as d: 11 | task = {} 12 | task["domain"] = d.read() 13 | task["numplans"] = int(k) 14 | task["qualitybound"] = q 15 | with open(problemfile, "r") as p: 16 | task["problem"] = p.read() 17 | 18 | resp = requests.post(post_url, json=task) 19 | if resp.status_code != 200: 20 | raise Exception('POST %s {}'.format(resp.status_code) % post_url) 21 | 22 | return resp.json() 23 | 24 | 25 | if __name__ == "__main__": 26 | category = "topq" 27 | planner_name = "iterative-unordered-topq" 28 | # planner_name = "kstar-topq" 29 | 30 | ret = invoke_planner_service(sys.argv[1], sys.argv[2], int(sys.argv[3]), float(sys.argv[4]), category, planner_name) 31 | 32 | print(json.dumps(ret, sort_keys=True, indent=4, separators=(',', ': '))) -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | absl-py==0.10.0 2 | asn1crypto==0.24.0 3 | astor==0.8.1 4 | backports.weakref==1.0.post1 5 | cachetools==3.1.1 6 | certifi==2020.6.20 7 | chardet==3.0.4 8 | cryptography==2.1.4 9 | enum34==1.1.6 10 | funcsigs==1.0.2 11 | functools32==3.2.3.post2 12 | futures==3.3.0 13 | gast==0.2.2 14 | google-auth==1.22.1 15 | google-auth-oauthlib==0.4.1 16 | google-pasta==0.2.0 17 | grpcio==1.32.0 18 | h5py==2.10.0 19 | idna==2.6 20 | ipaddress==1.0.17 21 | Keras==2.3.1 22 | Keras-Applications==1.0.8 23 | Keras-Preprocessing==1.1.0 24 | keyring==10.6.0 25 | keyrings.alt==3.0 26 | Markdown==3.1.1 27 | mock==3.0.5 28 | numpy==1.16.6 29 | oauthlib==3.1.0 30 | opt-einsum==2.3.2 31 | Pillow==6.2.2 32 | protobuf==3.13.0 33 | pyasn1==0.4.8 34 | pyasn1-modules==0.2.8 35 | pycrypto==2.6.1 36 | pygobject==3.26.1 37 | pyxdg==0.25 38 | PyYAML==5.3.1 39 | requests==2.24.0 40 | requests-oauthlib==1.3.0 41 | rsa==4.5 42 | scipy==1.2.3 43 | SecretStorage==2.3.1 44 | six==1.11.0 45 | subprocess32==3.5.4 46 | tensorboard==2.1.0 47 | tensorflow-cpu==2.1.0 48 | tensorflow-estimator==2.1.0 49 | termcolor==1.1.0 50 | urllib3==1.25.11 51 | Werkzeug==1.0.1 52 | wrapt==1.12.1 -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "masterplan-worker", 3 | "version": "1.0.0", 4 | "description": "ai-c-masterplan planners and their APIs", 5 | "repository": { 6 | "type": "git", 7 | "url": "https://github.ibm.com/ai-c-masterplan/masterplan-worker" 8 | }, 9 | "main": "app.masterppp.joint.js", 10 | "scripts": { 11 | "test": "echo \"Error: no test specified\" && exit 1", 12 | "start": "node app.masterppp.joint.js" 13 | }, 14 | "author": "Michael Katz and Octavian Udrea", 15 | "license": "MIT", 16 | "dependencies": { 17 | "archiver-promise": "^1.0.0", 18 | "bl": "^4.0.3", 19 | "bluebird": "^3.5.1", 20 | "body-parser": "^1.18.2", 21 | "cfenv": "^1.2.4", 22 | "child-process-promise": "^2.2.1", 23 | "connect-timeout": "^1.9.0", 24 | "express": "^4.16.3", 25 | "express-http-proxy": "^1.6.2", 26 | "fs-extra": "^6.0.1", 27 | "jszip": "^3.5.0", 28 | "lodash": "^4.17.21", 29 | "log4js": "^4.3.0", 30 | "multer": "^1.4.2", 31 | "mz": "^2.7.0", 32 | "nconf": "^0.10.0", 33 | "sugar": "^2.0.4", 34 | "swagger-ui-express": "^4.0.2", 35 | 
"tmp-promise": "^1.0.4" 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 International Business Machines 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /samples/js_client.js: -------------------------------------------------------------------------------- 1 | const http = require('http'); 2 | const fs = require('fs'); 3 | 4 | var myargs = process.argv.slice(2); 5 | 6 | if(myargs.length < 2) { 7 | console.log("You must provide a domain and problem file."); 8 | process.exit(1); 9 | } 10 | 11 | const dom = fs.readFileSync(myargs[0], 'utf-8'); 12 | const prob = fs.readFileSync(myargs[1], 'utf-8'); 13 | const k = ((myargs.length > 2) ? parseInt(myargs[2]) : 5) || 5; 14 | 15 | const body = { 16 | domain: dom, 17 | problem: prob, 18 | numplans: k 19 | }; 20 | 21 | let req = http.request("http://localhost:4501/planners/topk", { 22 | method: 'POST', 23 | headers: { 24 | 'Content-Type': 'application/json' 25 | } 26 | }, res => { 27 | console.log(`STATUS: ${res.statusCode}`); 28 | console.log(`HEADERS: ${JSON.stringify(res.headers)}`); 29 | res.setEncoding('utf8'); 30 | res.on('data', (chunk) => { 31 | console.log('BODY follows:======'); 32 | console.log(JSON.stringify(JSON.parse(chunk), null, 2)); 33 | }); 34 | res.on('end', () => { 35 | console.log('No more data in response.'); 36 | }); 37 | }); 38 | 39 | req.on('error', (e) => { 40 | console.error(`problem with request: ${e.message}`); 41 | }); 42 | 43 | req.write(JSON.stringify(body)); 44 | req.end(); -------------------------------------------------------------------------------- /samples/python_client.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | 3 | import sys 4 | import json 5 | 6 | def invoke_planner_service(domainfile, problemfile, k, category, planner_name): 7 | import requests 8 | 9 | post_url = 'http://localhost:4501/planners/' + category + '/' + planner_name 10 | with open(domainfile, "r") as d: 11 | task = {} 12 | task["domain"] = d.read() 13 | task["numplans"] = int(k) 14 | with open(problemfile, "r") as p: 15 | task["problem"] = p.read() 16 | 17 | resp = requests.post(post_url, json=task) 18 | if resp.status_code != 200: 19 | raise Exception('POST %s {}'.format(resp.status_code) % post_url) 20 | 21 | return resp.json() 22 | 23 | 24 | if __name__ == "__main__": 25 | category = "topk" 26 | planner_name = "kstar-topk" 27 | planner_name = "iterative-topk" 28 | 29 | # category = "optimal" 30 | # planner_name = "delfi1" 31 | 32 | category = "satisficing" 33 | planner_name = "seq-sat-cerberus" 34 | 35 | # category = "agile" 36 | # planner_name = "seq-agl-cerberus" 37 | 38 | ret = invoke_planner_service(sys.argv[1], sys.argv[2], int(sys.argv[3]), category, planner_name) 39 | 40 | print(json.dumps(ret, sort_keys=True, indent=4, separators=(',', ': '))) -------------------------------------------------------------------------------- /utils/plans_to_json.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | import argparse 4 | 5 | import json,sys 6 | import os 7 | import glob 8 | import re 9 | 10 | 11 | def create_plan_from_file(plan_file): 12 | with open(plan_file) as f: 13 | content = f.readlines() 14 | content = [x.strip() for x in content] 15 | actions = [x[1:-1] for x in content if x.startswith("(")] 16 | ret = { 'actions' : actions } 17 | cost = [x for x in content if not x.startswith("(") and "cost" in x] 18 | ## Assuming for now only one such entry 19 | # mkatz: Assumption does not hold for SymbA, no cost reported 20 | # assert(len(cost) == 1) 21 | if len(cost) >= 1: 22 | q = re.findall(r'; cost = (\d+)', cost[0], re.M) 23 | ret['cost'] = q[0] 24 | 25 | return ret 26 | 27 | 28 | def main(args): 29 | plan_file_name = args.plan_file+"*" 30 | plan_files = glob.glob(os.path.join(args.plans_folder, plan_file_name)) 31 | 32 | unique_plans = set() 33 | plans = [] 34 | for plan_file in plan_files: 35 | plan = create_plan_from_file(plan_file) 36 | if plan is not None: 37 | actions_tuple = tuple(plan['actions']) 38 | if actions_tuple not in unique_plans: 39 | unique_plans.add(actions_tuple) 40 | plans.append(plan) 41 | 42 | with open(args.json_file, "w") as f: 43 | ## dumping plans into one json 44 | d = { "plans": plans } 45 | json.dump(d, f, indent=4, sort_keys=True) 46 | 47 | 48 | 49 | if __name__ == "__main__": 50 | parser = argparse.ArgumentParser( 51 | add_help=False) 52 | 53 | parser.add_argument("--domain") 54 | parser.add_argument("--problem") 55 | parser.add_argument("--plans-folder") 56 | parser.add_argument("--plan-file") 57 | parser.add_argument("--json-file") 58 | 59 | args = parser.parse_args() 60 | main(args) 61 | -------------------------------------------------------------------------------- /config/default-unified.json: -------------------------------------------------------------------------------- 1 | { 2 | "logging": { 3 | "appenders": { 4 | "console": { "type": "console" }, 5 | "logfileworker": { "type": "file", "filename": "logs/planning-worker.log"} 6 | }, 7 | "categories": { 8 | "default": { "appenders": ["console", "logfileworker"], "level": "debug" } 9 | } 10 | }, 11 | "server": { 12 | "port": 4501, 13 | "timeout": 
"3600000", 14 | "run_categories": "*" 15 | }, 16 | "storage": { 17 | "type": "nfs", 18 | "args": { 19 | "path": "/tmp" 20 | } 21 | }, 22 | "planners": { 23 | "optimal": { 24 | "default": "delfi1", 25 | "planners": { 26 | "delfi1": { 27 | "cmd": "plan-delfi.sh ", 28 | "basedir": "delfi" 29 | } 30 | } 31 | }, 32 | "satisficing" : { 33 | "default": "seq-sat-cerberus", 34 | "planners": { 35 | "seq-sat-cerberus": { 36 | "cmd": "plan-sat.sh ", 37 | "basedir": "cerberus" 38 | } 39 | } 40 | }, 41 | "agile" : { 42 | "default": "seq-agl-cerberus", 43 | "planners": { 44 | "seq-agl-cerberus": { 45 | "cmd": "plan-agl.sh ", 46 | "basedir": "cerberus" 47 | } 48 | } 49 | }, 50 | "topk" : { 51 | "default": "iterative-topk", 52 | "planners": { 53 | "iterative-topk": { 54 | "cmd": "plan.py --planner topk --domain --problem --number-of-plans --symmetries --use-local-folder --clean-local-folder --plans-as-json --results-file ", 55 | "basedir": "forbiditerative" 56 | }, 57 | "kstar-topk" : { 58 | "cmd": "plan_topk.sh ", 59 | "basedir": "kstar" 60 | } 61 | } 62 | }, 63 | "diverse" : { 64 | "default": "iterative-diverse", 65 | "planners": { 66 | "iterative-diverse": { 67 | "cmd": "plan_diverse_sat.sh ", 68 | "basedir": "forbiditerative" 69 | } 70 | } 71 | }, 72 | "topq" : { 73 | "default": "iterative-unordered-topq", 74 | "planners": { 75 | "iterative-unordered-topq": { 76 | "cmd": "plan.py --planner unordered_topq --domain --problem --quality-bound --symmetries --use-local-folder --clean-local-folder --plans-as-json --results-file ", 77 | "basedir": "forbiditerative" 78 | }, 79 | "kstar-topq" : { 80 | "cmd": "plan_topq.sh ", 81 | "basedir": "kstar" 82 | }, 83 | "kstar-unordered-topq" : { 84 | "cmd": "plan_unordered_topq.sh ", 85 | "basedir": "kstar" 86 | } 87 | } 88 | } 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Planning service 2 | 3 | The repository contains a docker configuration file and code that allow fetching, building, and deploying a service with the following open-source planners 4 | 1. Delfi1, the winner of the sequential optimal track of IPC2018 5 | 2. Cerberus, participant in satisficing and agile tracks of IPC2018 6 | 3. ForbidIterative suite of top-k, top-quality, and diverse planners 7 | 4. K* based top-k planner 8 | 9 | ## Build and run the service 10 | * Two-stage build via ```docker build -t -f Dockerfile.twostage .``` 11 | * Run the service locally via ```docker run -d -p 4501:4501 --env server__hostname=`hostname`:4501 --name ```. A couple of notes: 12 | * This will make the service available on your local machine on port 4501. See the Docker User Guide for manipulating port mappings and binding to specific interfaces. 13 | * The ```--env``` options are only necessary for the swagger UI available at (http://localhost:4501/api-docs) to properly bind to your physical host name and port instead of the container name. If you do not plan to use the swagger UI, you can safely skip it. 14 | 15 | ## Use the service 16 | * A swagger UI is available at http://:4501/. Please note you should use your actual host name and not localhost, as the browser will prevent calls from going through. 17 | * For a simple command line sample, run ```samples/curl_client.sh samples/domain1.pddl samples/problem1.pddl 5``` for an example. 
18 | * If you are interested in a Python client, run ```python samples/python_client.py samples/domain1.pddl samples/problem1.pddl 5``` for an example. 19 | * If you are interested in a JavaScript client (Node.js v12+ required), run ```node samples/js_client.js samples/domain1.pddl samples/problem1.pddl 5``` for an example. 20 | * You can get a different client using the Swagger specification in ```samples/sample_swagger_doc.json``` (note the actual service specification is dynamically generated based on the configuration). You can then use either [Swagger Codegen](https://github.com/swagger-api/swagger-codegen) or the online [Swagger Editor](https://editor.swagger.io) to generate a client in your language of choice; an example command is given at the end of this README. 21 | 22 | ## Using alternate storage for processed requests 23 | By default, requests received by the service will be stored to the local file system under ```/tmp```. Each request will contain a zip of the original planning task and of the returned response, 24 | as well as the console and error logs, if any. Users of the service can choose not to store their request by using the **dontstore** URL parameter. 25 | 26 | You do have the option of configuring a different type of storage for your processed requests as follows: 27 | * In ```config/default-unified.json```, change the ```type``` attribute to something that uniquely identifies your storage method. As an example, let's assume this is ```cloud```. 28 | * Define a new module called ```storage/cloud.js```. Define and implement one exported function as follows: ```exports.store = function(filepath, destfname, args) { ... }``` (a minimal sketch is given at the end of this README). The arguments are: 29 | - ```filepath``` is the local path to the file containing the request and response as a zip when this method is called. You may need to upload, copy, etc. this file depending on your storage methodology. 30 | - ```destfname``` is a suggested destination file/bucket/topic/etc. name. You can use this to retrieve the file in the future, or completely ignore it and produce your own. This is guaranteed to be unique while the service is running continuously. 31 | - ```args``` is a copy of the ```args``` object you define in ```config/default-unified.json``` under ```storage::args```. You can pass anything you like here to configure your storage service, e.g., cloud IAM credentials, URLs, tokens, etc.; your function needs to interpret this in a meaningful way. 32 | * You can completely disable storage of requests/responses by removing the ```storage``` entry in ```config/default-unified.json```. 33 | 34 | ## Logfiles 35 | By default, logfiles are output to the console (the current Dockerfile runs the service with a logging level of ```debug```), as well as to the file specified in ```config/default-unified.json``` under ```logging```. Use the ```docker logs``` command to get the latest, or set up a container log monitoring service of your choosing.
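## Example: generating a client from the Swagger specification

As mentioned under "Use the service", you can generate a client from ```samples/sample_swagger_doc.json```. The command below is only an illustration: it assumes you have downloaded the Swagger Codegen 2.x CLI jar as ```swagger-codegen-cli.jar``` and want a Python client under ```generated-client/```; pick whatever ```-l``` language and ```-o``` output directory you prefer.

```bash
# Generate a client stub from the bundled sample Swagger 2.0 specification.
java -jar swagger-codegen-cli.jar generate \
  -i samples/sample_swagger_doc.json \
  -l python \
  -o generated-client
```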
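## Example: sketch of a custom storage module

To make the storage extension point above more concrete, here is a minimal, hypothetical sketch of a ```storage/cloud.js``` module. It is not part of the repository: the ```uploadToMyCloud``` helper and the ```bucket``` argument are placeholders for whatever SDK and credentials your backend actually needs; only the exported ```store(filepath, destfname, args)``` signature follows the description above, and the overall shape mirrors ```storage/nfs.js```.

```javascript
// storage/cloud.js -- illustrative sketch only, not part of this repository.
// Assumed config/default-unified.json entry:
//   "storage": { "type": "cloud", "args": { "bucket": "my-plans" } }
var fs = require('fs-extra');

// Placeholder for a real SDK upload call (IBM COS, S3, ...); replace with your client.
function uploadToMyCloud(args, destfname, content) {
  console.log("Would upload " + content.length + " bytes as " + destfname + " to " + args.bucket);
  return Promise.resolve();
}

exports.store = function(filepath, destfname, args) {
  // Read the zipped request/response produced by the service and hand it to the backend.
  return fs.readFile(filepath).then(content => {
    return uploadToMyCloud(args, destfname, content);
  }).then(() => {
    return true; // resolve to a constant, as storage/nfs.js does
  }).catch(store_err => {
    console.error("Error storing " + filepath + " with technique cloud: " + store_err);
  });
};
```

With the ```type``` attribute set to ```cloud``` and this file saved as ```storage/cloud.js```, nothing else in the service needs to change, per the steps above.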
36 | -------------------------------------------------------------------------------- /apibase.masterppp.json: -------------------------------------------------------------------------------- 1 | { 2 | "swagger": "2.0", 3 | "info": { 4 | "version": "1.0.0", 5 | "title": "Masterplan worker", 6 | "description": "API to execute a variety of planners on a given planning task", 7 | "contact": { 8 | "name": "Michael Katz", 9 | "email": "Michael.Katz1@ibm.com" 10 | }, 11 | "license": { 12 | "name": "MIT", 13 | "url": "https://opensource.org/licenses/MIT" 14 | } 15 | }, 16 | "host": "", 17 | "basePath": "/", 18 | "schemes" : [ 19 | "http" 20 | ], 21 | "consumes": [ 22 | "application/json" 23 | ], 24 | "produces": [ 25 | "application/json" 26 | ], 27 | "definitions": { 28 | "PlanningTask": { 29 | "type": "object", 30 | "required": [ "domain", "problem"], 31 | "properties": { 32 | "domain": { 33 | "type": "string", 34 | "description": "the planning domain in PDDL" 35 | }, 36 | "problem": { 37 | "type": "string", 38 | "description": "the planning problem in PDDL" 39 | }, 40 | "numplans": { 41 | "type": "integer", 42 | "format": "int32", 43 | "description": "for planners returning multiple plans, the requested number of plans" 44 | }, 45 | "qualitybound": { 46 | "type": "float", 47 | "format": "float32", 48 | "description": "for planners bounding quality, the requested relative bound on plan quality" 49 | } 50 | } 51 | }, 52 | "PlanningResult": { 53 | "type": "object", 54 | "properties": { 55 | "planner": { 56 | "type": "string", 57 | "description": "planner used to achieve this result" 58 | }, 59 | "length": { 60 | "type": "integer", 61 | "format": "int64", 62 | "description": "length of resulting plan" 63 | }, 64 | "output": { 65 | "type": "string", 66 | "description": "planner raw output" 67 | }, 68 | "parse_status": { 69 | "type": "string", 70 | "description": "indicates whether plan parsing was ok (for compatibility only)", 71 | "enum": [ 72 | "parse_ok" 73 | ] 74 | }, 75 | "plan": { 76 | "type": "array", 77 | "description": "list of actions in the plan", 78 | "items": { 79 | "type": "string" 80 | } 81 | }, 82 | "type": { 83 | "type": "string", 84 | "description": "type of parsing - for compatibility only", 85 | "enum": [ 86 | "simple" 87 | ] 88 | }, 89 | "logPath": { 90 | "type": "string", 91 | "description": "path to a log file, optional" 92 | }, 93 | "cost": { 94 | "type": "integer", 95 | "format": "int64", 96 | "decription": "plan cost" 97 | } 98 | } 99 | }, 100 | "PlanningResultMultiple": { 101 | "allOf": [ 102 | { 103 | "$ref": "#/definitions/PlanningResult" 104 | }, 105 | { 106 | "type": "object", 107 | "properties": { 108 | "additional_results": { 109 | "type": "array", 110 | "items": { 111 | "$ref":"#/definitions/PlanningResult" 112 | } 113 | } 114 | } 115 | } 116 | ] 117 | }, 118 | "AllPlannersResult": { 119 | "type": "object", 120 | "additionalProperties": { 121 | "$ref":"#/definitions/PlanningResultMultiple" 122 | } 123 | } 124 | }, 125 | "paths": { 126 | 127 | } 128 | } -------------------------------------------------------------------------------- /Dockerfile.onestage: -------------------------------------------------------------------------------- 1 | FROM ubuntu:18.04 2 | 3 | RUN apt-get update && \ 4 | apt-get install -y locales jq vim wget curl gawk \ 5 | cmake g++ g++-multilib make python python-dev python-pip 6 | 7 | # Install basic dev tools 8 | RUN pip install --upgrade pip 9 | RUN pip install h5py keras numpy pillow scipy tensorflow-cpu subprocess32 10 | 11 | RUN curl -sL 
https://deb.nodesource.com/setup_12.x | bash && apt-get install -y nodejs build-essential 12 | 13 | # Set up environment variables 14 | RUN locale-gen en_US.UTF-8 15 | ENV LANG=en_US.UTF-8 \ 16 | CXX=g++ \ 17 | HOME=/app \ 18 | BASE_DIR=/app/planners 19 | 20 | # Create required directories 21 | RUN mkdir -p $HOME && mkdir -p $BASE_DIR 22 | WORKDIR $BASE_DIR 23 | 24 | 25 | ################################# 26 | # Download and Install Delfi IPC2018 version 27 | ################################# 28 | ENV DL_URL=https://bitbucket.org/ipc2018-classical/team23/get/ipc-2018-seq-opt.tar.gz 29 | RUN curl -SL $DL_URL | tar -xz \ 30 | && mv ipc2018-classical-team23* delfi \ 31 | && cd delfi \ 32 | && sed -i 's/-Werror//g' src/cmake_modules/FastDownwardMacros.cmake \ 33 | && python ./build.py release64 \ 34 | && cd symba \ 35 | && sed -i 's/-Werror//g' src/search/Makefile \ 36 | && ./build 37 | 38 | RUN echo 'alias delfi="python ${BASE_DIR}/delfi/plan-ipc.py --image-from-lifted-task"' >> ~/.bashrc 39 | 40 | 41 | ################################# 42 | # Download and Install Cerberus, post-IPC2018 version 43 | ################################# 44 | ENV RB_URL=https://github.com/ctpelok77/fd-red-black-postipc2018/archive/master.tar.gz 45 | RUN curl -SL $RB_URL | tar -xz \ 46 | && mv fd-red-black-postipc2018* cerberus \ 47 | && cd cerberus \ 48 | && python ./build.py -j 2 49 | RUN echo 'alias cerberus-sat="python ${BASE_DIR}/cerberus/plan-sat.py"' >> ~/.bashrc 50 | RUN echo 'alias cerberus-agl="python ${BASE_DIR}/cerberus/plan-agl.py"' >> ~/.bashrc 51 | 52 | 53 | ################################# 54 | # Download and Install ForbidIterative 55 | ################################# 56 | ENV FI_URL=https://zenodo.org/record/3246774/files/ForbidIterative.tar.gz 57 | RUN curl -SL $FI_URL | tar -xz \ 58 | && mv ForbidIterative forbiditerative \ 59 | && cd forbiditerative \ 60 | && sed -i 's/-Werror//g' src/cmake_modules/FastDownwardMacros.cmake \ 61 | && python ./build.py release64 62 | 63 | ENV DS_URL=https://zenodo.org/record/3404122/files/DiverseScore.tar.gz 64 | RUN mkdir diversescore && cd diversescore \ 65 | && curl -SL $DS_URL | tar -xz \ 66 | && python ./build.py 67 | 68 | ENV DIVERSE_FAST_DOWNWARD_PLANNER_PATH=${BASE_DIR}/cerberus 69 | ENV DIVERSE_SCORE_COMPUTATION_PATH=${BASE_DIR}/diversescore 70 | 71 | RUN echo 'alias fi-topk="python ${BASE_DIR}/forbiditerative/plan_topk.sh"' >> ~/.bashrc 72 | RUN echo 'alias fi-topq="python ${BASE_DIR}/forbiditerative/plan_unordered_topq.sh"' >> ~/.bashrc 73 | RUN echo 'alias fi-diverse="python ${BASE_DIR}/forbiditerative/plan_diverse_sat.sh"' >> ~/.bashrc 74 | 75 | 76 | ################################# 77 | # Download and Install K* 78 | ################################# 79 | ENV KSTAR_URL=https://github.com/ctpelok77/kstar/archive/master.tar.gz 80 | RUN curl -SL $KSTAR_URL | tar -xz \ 81 | && mv kstar-* kstar \ 82 | && cd kstar \ 83 | && python ./build.py release64 84 | RUN echo 'alias kstar="python ${BASE_DIR}/kstar/fast-downward.py --build release64"' >> ~/.bashrc 85 | 86 | ################################# 87 | # Setup NodeJS application dependencies 88 | ################################# 89 | COPY package.json $HOME/ 90 | 91 | WORKDIR $HOME 92 | RUN npm install 93 | ENV BLUEBIRD_DEBUG=1 94 | ENV DEBUG=* 95 | EXPOSE 4501 96 | 97 | 98 | ENV DIVERSE_FAST_DOWNWARD_PLANNER_PATH=${BASE_DIR}/cerberus 99 | ENV DIVERSE_SCORE_COMPUTATION_PATH=${BASE_DIR}/diversescore 100 | COPY utils/plans_to_json.py $BASE_DIR/ 101 | 102 | ## Creating a run script for Delfi 103 | RUN 
echo '#!/bin/bash' > $BASE_DIR/delfi/plan-delfi.sh 104 | RUN echo 'LOG_FILE=run.log' >> $BASE_DIR/delfi/plan-delfi.sh 105 | RUN echo 'SOURCE="$( dirname "${BASH_SOURCE[0]}" )"' >> $BASE_DIR/delfi/plan-delfi.sh 106 | RUN echo '$SOURCE/plan-ipc.py --image-from-lifted-task $1 $2 $3 > $LOG_FILE' >> $BASE_DIR/delfi/plan-delfi.sh 107 | RUN echo '$SOURCE/../plans_to_json.py --domain $1 --problem $2 --plans-folder . --plan-file $3 --json-file $4' >> $BASE_DIR/delfi/plan-delfi.sh 108 | RUN chmod 755 $BASE_DIR/delfi/plan-delfi.sh 109 | 110 | ## Creating a run script for Cerberus 111 | RUN echo '#!/bin/bash' > $BASE_DIR/cerberus/plan-sat.sh 112 | RUN echo 'LOG_FILE=run.log' >> $BASE_DIR/cerberus/plan-sat.sh 113 | RUN echo 'SOURCE="$( dirname "${BASH_SOURCE[0]}" )"' >> $BASE_DIR/cerberus/plan-sat.sh 114 | RUN echo '$SOURCE/plan-sat.py $1 $2 $3 > $LOG_FILE' >> $BASE_DIR/cerberus/plan-sat.sh 115 | RUN echo '$SOURCE/../plans_to_json.py --domain $1 --problem $2 --plans-folder . --plan-file $3 --json-file $4' >> $BASE_DIR/cerberus/plan-sat.sh 116 | RUN chmod 755 $BASE_DIR/cerberus/plan-sat.sh 117 | 118 | RUN echo '#!/bin/bash' > $BASE_DIR/cerberus/plan-agl.sh 119 | RUN echo 'LOG_FILE=run.log' >> $BASE_DIR/cerberus/plan-agl.sh 120 | RUN echo 'SOURCE="$( dirname "${BASH_SOURCE[0]}" )"' >> $BASE_DIR/cerberus/plan-agl.sh 121 | RUN echo '$SOURCE/plan-agl.py $1 $2 $3 > $LOG_FILE' >> $BASE_DIR/cerberus/plan-agl.sh 122 | RUN echo '$SOURCE/../plans_to_json.py --domain $1 --problem $2 --plans-folder . --plan-file $3 --json-file $4' >> $BASE_DIR/cerberus/plan-agl.sh 123 | RUN chmod 755 $BASE_DIR/cerberus/plan-agl.sh 124 | 125 | ################################# 126 | # Copy NodeJS service 127 | ################################# 128 | RUN mkdir $HOME/samples 129 | COPY samples/domain1.pddl $HOME/samples/ 130 | COPY samples/problem1.pddl $HOME/samples/ 131 | COPY config $HOME/config 132 | COPY storage $HOME/storage 133 | COPY apibase.masterppp.json $HOME/ 134 | COPY conf.js $HOME/ 135 | COPY app.masterppp.common.js $HOME/ 136 | COPY app.masterppp.joint.js $HOME/ 137 | 138 | CMD ["npm", "start"] 139 | -------------------------------------------------------------------------------- /Dockerfile.twostage: -------------------------------------------------------------------------------- 1 | FROM ubuntu:18.04 as builder 2 | 3 | RUN apt-get update && \ 4 | apt-get install -y locales jq vim wget curl gawk \ 5 | cmake g++ g++-multilib make python python-dev python-pip 6 | 7 | # Install basic dev tools 8 | RUN pip install --upgrade pip 9 | # RUN pip install h5py keras numpy pillow scipy tensorflow-cpu subprocess32 10 | 11 | # Set up environment variables 12 | RUN locale-gen en_US.UTF-8 13 | ENV LANG=en_US.UTF-8 \ 14 | CXX=g++ \ 15 | HOME=/app \ 16 | BASE_DIR=/app/planners \ 17 | DELFI_URL=https://bitbucket.org/ipc2018-classical/team23/get/ipc-2018-seq-opt.tar.gz \ 18 | RB_BUILD_COMMIT_ID=821fad1 \ 19 | FI_BUILD_COMMIT_ID=1751d5e \ 20 | DIV_SC_BUILD_COMMIT_ID=0d2c2e4 \ 21 | KSTAR_BUILD_COMMIT_ID=d78ec31 22 | 23 | 24 | 25 | 26 | # Create required directories 27 | RUN mkdir -p $HOME && mkdir -p $BASE_DIR 28 | WORKDIR $BASE_DIR 29 | 30 | ################################# 31 | # Download and Install Delfi IPC2018 version 32 | ################################# 33 | RUN curl -SL $DELFI_URL | tar -xz \ 34 | && mv ipc2018-classical-team23* delfi \ 35 | && cd delfi \ 36 | && sed -i 's/-Werror//g' src/cmake_modules/FastDownwardMacros.cmake \ 37 | && python ./build.py release64 \ 38 | && cd symba \ 39 | && sed -i 's/-Werror//g' 
src/search/Makefile \ 40 | && ./build 41 | 42 | ################################# 43 | # Download and Install Cerberus, post-IPC2018 version 44 | ################################# 45 | WORKDIR $BASE_DIR/cerberus/ 46 | 47 | # Fetch the code at the right commit ID from the Github repo 48 | RUN curl -L https://github.com/ctpelok77/fd-red-black-postipc2018/archive/${RB_BUILD_COMMIT_ID}.tar.gz | tar xz --strip=1 \ 49 | # Invoke the build script with appropriate options 50 | && python ./build.py -j4 \ 51 | # Strip the main binary to reduce size 52 | && strip --strip-all builds/release/bin/downward 53 | 54 | ################################# 55 | # Download and Install ForbidIterative 56 | ################################# 57 | 58 | WORKDIR $BASE_DIR/forbiditerative/ 59 | # Fetch the code at the right commit ID from the Github repo 60 | RUN curl -L https://github.com/IBM/forbiditerative/archive/${FI_BUILD_COMMIT_ID}.tar.gz | tar xz --strip=1 \ 61 | && python ./build.py -j4 release64 \ 62 | # Strip the main binary to reduce size 63 | && strip --strip-all builds/release64/bin/downward 64 | 65 | 66 | WORKDIR $BASE_DIR/diversescore/ 67 | # Fetch the code at the right commit ID from the Github repo 68 | RUN curl -L https://github.com/IBM/diversescore/archive/${DIV_SC_BUILD_COMMIT_ID}.tar.gz | tar xz --strip=1 \ 69 | # Invoke the build script with appropriate options 70 | && python ./build.py -j4 \ 71 | # Strip the main binary to reduce size 72 | && strip --strip-all builds/release/bin/downward 73 | 74 | 75 | ################################# 76 | # Download and Install K* 77 | ################################# 78 | WORKDIR $BASE_DIR/kstar/ 79 | # Fetch the code at the right commit ID from the Github repo 80 | RUN curl -L https://github.com/ctpelok77/kstar/archive/${KSTAR_BUILD_COMMIT_ID}.tar.gz | tar xz --strip=1 \ 81 | # Invoke the build script with appropriate options 82 | && python ./build.py -j4 release64 \ 83 | # Strip the main binary to reduce size 84 | && strip --strip-all builds/release64/bin/downward 85 | 86 | ############################################################################### 87 | ## Second stage: the image to run the planners 88 | ## 89 | ## This is the image that will be distributed, we will simply copy here 90 | ## the files that we fetched and compiled in the previous image and that 91 | ## are strictly necessary to run the planners. 92 | ## Also, installing nodejs here. 
93 | ############################################################################### 94 | 95 | FROM ubuntu:18.04 96 | 97 | # Install any package needed to *run* the planner 98 | # RUN apt-get update && apt-get install --no-install-recommends -y \ 99 | # python python-setuptools python-pip \ 100 | # && rm -rf /var/lib/apt/lists/* 101 | 102 | RUN apt-get update && \ 103 | apt-get install -y locales curl gawk \ 104 | # cmake g++ g++-multilib make \ 105 | python python-dev python-pip \ 106 | && rm -rf /var/lib/apt/lists/* 107 | 108 | # Set up environment variables 109 | RUN locale-gen en_US.UTF-8 110 | ENV LANG=en_US.UTF-8 \ 111 | CXX=g++ \ 112 | HOME=/app \ 113 | BASE_DIR=/app/planners \ 114 | DIVERSE_FAST_DOWNWARD_PLANNER_PATH=/app/planners/cerberus \ 115 | DIVERSE_SCORE_COMPUTATION_PATH=/app/planners/diversescore 116 | 117 | # Create required directories 118 | RUN mkdir -p $HOME && mkdir -p $BASE_DIR 119 | WORKDIR $BASE_DIR 120 | 121 | 122 | # Install basic dev tools 123 | RUN pip install --upgrade pip 124 | # RUN pip install h5py keras numpy pillow scipy tensorflow-cpu subprocess32 125 | COPY requirements.txt ${BASE_DIR}/ 126 | RUN pip install -r requirements.txt 127 | 128 | ## Copying Delfi planner essential files 129 | WORKDIR ${BASE_DIR}/delfi/ 130 | COPY --from=builder ${BASE_DIR}/delfi/dl_model ./dl_model 131 | COPY --from=builder ${BASE_DIR}/delfi/plan-ipc.py ${BASE_DIR}/delfi/fast-downward.py ${BASE_DIR}/delfi/create-image-from-graph.py ${BASE_DIR}/delfi/timers.py ${BASE_DIR}/delfi/symba.py ./ 132 | COPY --from=builder ${BASE_DIR}/delfi/builds/release64/bin/ ./builds/release64/bin/ 133 | COPY --from=builder ${BASE_DIR}/delfi/driver ./driver 134 | COPY --from=builder ${BASE_DIR}/delfi/symba/src/preprocess/preprocess ./symba/src/preprocess/preprocess 135 | # COPY --from=builder ${BASE_DIR}/delfi/symba/src/search/{downward,downward-1,downward-2,downward-4,dispatch,unitcost} ./symba/src/search/ 136 | COPY --from=builder ${BASE_DIR}/delfi/symba/src/search/downward ${BASE_DIR}/delfi/symba/src/search/downward-1 ${BASE_DIR}/delfi/symba/src/search/downward-2 ${BASE_DIR}/delfi/symba/src/search/downward-4 ${BASE_DIR}/delfi/symba/src/search/dispatch ${BASE_DIR}/delfi/symba/src/search/unitcost ./symba/src/search/ 137 | COPY --from=builder ${BASE_DIR}/delfi/symba/src/translate ./symba/src/translate 138 | # COPY --from=builder ${BASE_DIR}/delfi/symba/src/{plan,plan-ipc} ./symba/src/ 139 | COPY --from=builder ${BASE_DIR}/delfi/symba/src/plan ${BASE_DIR}/delfi/symba/src/plan-ipc ./symba/src/ 140 | ## Modifying ${BASE_DIR}/delfi/plan-ipc.py to point to a correct location of abstract_structure_module 141 | RUN sed -i 's#src#builds/release64/bin#g' ${BASE_DIR}/delfi/plan-ipc.py 142 | ## Copying a run script for Delfi 143 | COPY utils/plan-delfi.sh ${BASE_DIR}/delfi/ 144 | # RUN echo 'alias delfi="python ${BASE_DIR}/delfi/plan-ipc.py --image-from-lifted-task"' >> ~/.bashrc 145 | 146 | 147 | ## Copying Cerberus planner essential files 148 | WORKDIR ${BASE_DIR}/cerberus/ 149 | COPY --from=builder ${BASE_DIR}/cerberus/fast-downward.py ${BASE_DIR}/cerberus/plan-sat.py ${BASE_DIR}/cerberus/plan-agl.py ./ 150 | COPY --from=builder ${BASE_DIR}/cerberus/builds/release/bin/ ./builds/release/bin/ 151 | COPY --from=builder ${BASE_DIR}/cerberus/driver ./driver 152 | ## Copying run scripts for Cerberus 153 | COPY utils/plan-cerberus-sat.sh $BASE_DIR/cerberus/plan-sat.sh 154 | COPY utils/plan-cerberus-agl.sh $BASE_DIR/cerberus/plan-agl.sh 155 | 156 | # RUN echo 'alias cerberus-sat="python 
${BASE_DIR}/cerberus/plan-sat.py"' >> ~/.bashrc 157 | # RUN echo 'alias cerberus-agl="python ${BASE_DIR}/cerberus/plan-agl.py"' >> ~/.bashrc 158 | 159 | 160 | ## Copying ForbidIterative planner essential files 161 | WORKDIR ${BASE_DIR}/diversescore/ 162 | COPY --from=builder ${BASE_DIR}/diversescore/fast-downward.py . 163 | COPY --from=builder ${BASE_DIR}/diversescore/builds/release/bin/ ./builds/release/bin/ 164 | COPY --from=builder ${BASE_DIR}/diversescore/driver ./driver 165 | WORKDIR ${BASE_DIR}/forbiditerative/ 166 | COPY --from=builder ${BASE_DIR}/forbiditerative/fast-downward.py ${BASE_DIR}/forbiditerative/copy_plans.py ${BASE_DIR}/forbiditerative/plan.py ${BASE_DIR}/forbiditerative/planner_call.py ${BASE_DIR}/forbiditerative/timers.py ./ 167 | COPY --from=builder ${BASE_DIR}/forbiditerative/builds/release64/bin/ ./builds/release64/bin/ 168 | COPY --from=builder ${BASE_DIR}/forbiditerative/driver ./driver 169 | COPY --from=builder ${BASE_DIR}/forbiditerative/iterative ./iterative 170 | # COPY --from=builder ${BASE_DIR}/forbiditerative/plan_topk.sh . 171 | # COPY --from=builder ${BASE_DIR}/forbiditerative/plan_unordered_topq.sh . 172 | # COPY --from=builder ${BASE_DIR}/forbiditerative/plan_diverse_agl.sh . 173 | # COPY --from=builder ${BASE_DIR}/forbiditerative/plan_diverse_sat.sh . 174 | COPY utils/plan_diverse_sat.sh . 175 | 176 | 177 | 178 | ## Copying K* planner essential files 179 | WORKDIR ${BASE_DIR}/kstar/ 180 | COPY --from=builder ${BASE_DIR}/kstar/fast-downward.py ${BASE_DIR}/kstar/plan_topk.sh ${BASE_DIR}/kstar/plan_topq.sh ${BASE_DIR}/kstar/plan_unordered_topq.sh ./ 181 | COPY --from=builder ${BASE_DIR}/kstar/builds/release64/bin/ ./builds/release64/bin/ 182 | COPY --from=builder ${BASE_DIR}/kstar/driver ./driver 183 | # RUN echo 'alias kstar="python ${BASE_DIR}/kstar/fast-downward.py --build release64"' >> ~/.bashrc 184 | 185 | ## For all planners 186 | # RUN echo "file has changed" 187 | COPY utils/plans_to_json.py $BASE_DIR/ 188 | 189 | ################################# 190 | # Setup NodeJS and application dependencies 191 | ################################# 192 | RUN curl -sL https://deb.nodesource.com/setup_12.x | bash && apt-get install -y nodejs build-essential 193 | 194 | COPY package.json $HOME/ 195 | 196 | WORKDIR $HOME 197 | RUN npm install 198 | ENV BLUEBIRD_DEBUG=1 199 | ENV DEBUG=* 200 | EXPOSE 4501 201 | 202 | ################################# 203 | # Copy NodeJS service 204 | ################################# 205 | RUN mkdir $HOME/samples 206 | COPY samples/domain1.pddl samples/problem1.pddl $HOME/samples/ 207 | COPY config $HOME/config 208 | COPY storage $HOME/storage 209 | COPY apibase.masterppp.json conf.js app.masterppp.common.js app.masterppp.joint.js $HOME/ 210 | 211 | CMD ["npm", "start"] 212 | -------------------------------------------------------------------------------- /samples/sample_swagger_doc.json: -------------------------------------------------------------------------------- 1 | { 2 | "swagger": "2.0", 3 | "info": { 4 | "version": "1.0.0", 5 | "title": "Masterplan worker", 6 | "description": "API to execute a variety of planners on a given planning task", 7 | "contact": { 8 | "name": "Michael Katz", 9 | "email": "Michael.Katz1@ibm.com" 10 | }, 11 | "license": { 12 | "name": "MIT", 13 | "url": "https://opensource.org/licenses/MIT" 14 | } 15 | }, 16 | "host": "localhost", 17 | "basePath": "/", 18 | "schemes": [ 19 | "https" 20 | ], 21 | "consumes": [ 22 | "application/json" 23 | ], 24 | "produces": [ 25 | "application/json" 26 | ], 27 | 
"definitions": { 28 | "PlanningTask": { 29 | "type": "object", 30 | "required": [ 31 | "domain", 32 | "problem" 33 | ], 34 | "properties": { 35 | "domain": { 36 | "type": "string", 37 | "description": "the planning domain in PDDL" 38 | }, 39 | "problem": { 40 | "type": "string", 41 | "description": "the planning problem in PDDL" 42 | }, 43 | "numplans": { 44 | "type": "integer", 45 | "format": "int32", 46 | "description": "for planners returning multiple plans, the requested number of plans" 47 | } 48 | }, 49 | "example": { 50 | "domain": "(define (domain gripper-strips)\n (:predicates (room ?r)\n\t\t(ball ?b)\n\t\t(gripper ?g)\n\t\t(at-robby ?r)\n\t\t(at ?b ?r)\n\t\t(free ?g)\n\t\t(carry ?o ?g))\n\n (:action move\n :parameters (?from ?to)\n :precondition (and (room ?from) (room ?to) (at-robby ?from))\n :effect (and (at-robby ?to)\n\t\t (not (at-robby ?from))))\n\n\n\n (:action pick\n :parameters (?obj ?room ?gripper)\n :precondition (and (ball ?obj) (room ?room) (gripper ?gripper)\n\t\t\t (at ?obj ?room) (at-robby ?room) (free ?gripper))\n :effect (and (carry ?obj ?gripper)\n\t\t (not (at ?obj ?room)) \n\t\t (not (free ?gripper))))\n\n\n (:action drop\n :parameters (?obj ?room ?gripper)\n :precondition (and (ball ?obj) (room ?room) (gripper ?gripper)\n\t\t\t (carry ?obj ?gripper) (at-robby ?room))\n :effect (and (at ?obj ?room)\n\t\t (free ?gripper)\n\t\t (not (carry ?obj ?gripper)))))\n\n", 51 | "problem": "(define (problem strips-gripper-x-2)\n (:domain gripper-strips)\n (:objects rooma roomb ball6 ball5 ball4 ball3 ball2 ball1 left right)\n (:init (room rooma)\n (room roomb)\n (ball ball6)\n (ball ball5)\n (ball ball4)\n (ball ball3)\n (ball ball2)\n (ball ball1)\n (at-robby rooma)\n (free left)\n (free right)\n (at ball6 rooma)\n (at ball5 rooma)\n (at ball4 rooma)\n (at ball3 rooma)\n (at ball2 rooma)\n (at ball1 rooma)\n (gripper left)\n (gripper right))\n (:goal (and (at ball6 roomb)\n (at ball5 roomb)\n (at ball4 roomb)\n (at ball3 roomb)\n (at ball2 roomb)\n (at ball1 roomb))))", 52 | "numplans": 5, 53 | "qualitybound": 1 54 | } 55 | }, 56 | "PlanningResult": { 57 | "type": "object", 58 | "properties": { 59 | "planner": { 60 | "type": "string", 61 | "description": "planner used to achieve this result" 62 | }, 63 | "length": { 64 | "type": "integer", 65 | "format": "int64", 66 | "description": "length of resulting plan" 67 | }, 68 | "output": { 69 | "type": "string", 70 | "description": "planner raw output" 71 | }, 72 | "parse_status": { 73 | "type": "string", 74 | "description": "indicates whether plan parsing was ok (for compatibility only)", 75 | "enum": [ 76 | "parse_ok" 77 | ] 78 | }, 79 | "plan": { 80 | "type": "array", 81 | "description": "list of actions in the plan", 82 | "items": { 83 | "type": "string" 84 | } 85 | }, 86 | "type": { 87 | "type": "string", 88 | "description": "type of parsing - for compatibility only", 89 | "enum": [ 90 | "simple" 91 | ] 92 | }, 93 | "logPath": { 94 | "type": "string", 95 | "description": "path to a log file, optional" 96 | }, 97 | "cost": { 98 | "type": "integer", 99 | "format": "int64", 100 | "decription": "plan cost" 101 | } 102 | } 103 | }, 104 | "PlanningResultMultiple": { 105 | "allOf": [ 106 | { 107 | "$ref": "#/definitions/PlanningResult" 108 | }, 109 | { 110 | "type": "object", 111 | "properties": { 112 | "additional_results": { 113 | "type": "array", 114 | "items": { 115 | "$ref": "#/definitions/PlanningResult" 116 | } 117 | } 118 | } 119 | } 120 | ] 121 | }, 122 | "AllPlannersResult": { 123 | "type": "object", 124 | 
"additionalProperties": { 125 | "$ref": "#/definitions/PlanningResultMultiple" 126 | } 127 | } 128 | }, 129 | "paths": { 130 | "/planners/optimal/delfi1": { 131 | "post": { 132 | "description": "Executes delfi1 on a planning task and returns resulting plan", 133 | "operationId": "delfi1", 134 | "consumes": [ 135 | "application/json" 136 | ], 137 | "parameters": [ 138 | { 139 | "name": "task", 140 | "in": "body", 141 | "description": "the planning task", 142 | "schema": { 143 | "$ref": "#/definitions/PlanningTask" 144 | } 145 | } 146 | ], 147 | "responses": { 148 | "200": { 149 | "description": "resulting plan", 150 | "schema": { 151 | "$ref": "#/definitions/PlanningResultMultiple" 152 | } 153 | }, 154 | "default": { 155 | "description": "unexpected error", 156 | "schema": { 157 | "type": "string" 158 | } 159 | } 160 | } 161 | } 162 | }, 163 | "/planners/optimal": { 164 | "post": { 165 | "description": "Executes optimal on a planning task and returns resulting plan", 166 | "operationId": "optimal", 167 | "consumes": [ 168 | "application/json" 169 | ], 170 | "parameters": [ 171 | { 172 | "name": "task", 173 | "in": "body", 174 | "description": "the planning task", 175 | "schema": { 176 | "$ref": "#/definitions/PlanningTask" 177 | } 178 | } 179 | ], 180 | "responses": { 181 | "200": { 182 | "description": "resulting plan", 183 | "schema": { 184 | "$ref": "#/definitions/PlanningResultMultiple" 185 | } 186 | }, 187 | "default": { 188 | "description": "unexpected error", 189 | "schema": { 190 | "type": "string" 191 | } 192 | } 193 | } 194 | } 195 | }, 196 | "/planners/satisficing/seq-sat-cerberus": { 197 | "post": { 198 | "description": "Executes seq-sat-cerberus on a planning task and returns resulting plan", 199 | "operationId": "seq-sat-cerberus", 200 | "consumes": [ 201 | "application/json" 202 | ], 203 | "parameters": [ 204 | { 205 | "name": "task", 206 | "in": "body", 207 | "description": "the planning task", 208 | "schema": { 209 | "$ref": "#/definitions/PlanningTask" 210 | } 211 | } 212 | ], 213 | "responses": { 214 | "200": { 215 | "description": "resulting plan", 216 | "schema": { 217 | "$ref": "#/definitions/PlanningResultMultiple" 218 | } 219 | }, 220 | "default": { 221 | "description": "unexpected error", 222 | "schema": { 223 | "type": "string" 224 | } 225 | } 226 | } 227 | } 228 | }, 229 | "/planners/satisficing": { 230 | "post": { 231 | "description": "Executes satisficing on a planning task and returns resulting plan", 232 | "operationId": "satisficing", 233 | "consumes": [ 234 | "application/json" 235 | ], 236 | "parameters": [ 237 | { 238 | "name": "task", 239 | "in": "body", 240 | "description": "the planning task", 241 | "schema": { 242 | "$ref": "#/definitions/PlanningTask" 243 | } 244 | } 245 | ], 246 | "responses": { 247 | "200": { 248 | "description": "resulting plan", 249 | "schema": { 250 | "$ref": "#/definitions/PlanningResultMultiple" 251 | } 252 | }, 253 | "default": { 254 | "description": "unexpected error", 255 | "schema": { 256 | "type": "string" 257 | } 258 | } 259 | } 260 | } 261 | }, 262 | "/planners/agile/seq-agl-cerberus": { 263 | "post": { 264 | "description": "Executes seq-agl-cerberus on a planning task and returns resulting plan", 265 | "operationId": "seq-agl-cerberus", 266 | "consumes": [ 267 | "application/json" 268 | ], 269 | "parameters": [ 270 | { 271 | "name": "task", 272 | "in": "body", 273 | "description": "the planning task", 274 | "schema": { 275 | "$ref": "#/definitions/PlanningTask" 276 | } 277 | } 278 | ], 279 | "responses": { 280 | "200": 
{ 281 | "description": "resulting plan", 282 | "schema": { 283 | "$ref": "#/definitions/PlanningResultMultiple" 284 | } 285 | }, 286 | "default": { 287 | "description": "unexpected error", 288 | "schema": { 289 | "type": "string" 290 | } 291 | } 292 | } 293 | } 294 | }, 295 | "/planners/agile": { 296 | "post": { 297 | "description": "Executes agile on a planning task and returns resulting plan", 298 | "operationId": "agile", 299 | "consumes": [ 300 | "application/json" 301 | ], 302 | "parameters": [ 303 | { 304 | "name": "task", 305 | "in": "body", 306 | "description": "the planning task", 307 | "schema": { 308 | "$ref": "#/definitions/PlanningTask" 309 | } 310 | } 311 | ], 312 | "responses": { 313 | "200": { 314 | "description": "resulting plan", 315 | "schema": { 316 | "$ref": "#/definitions/PlanningResultMultiple" 317 | } 318 | }, 319 | "default": { 320 | "description": "unexpected error", 321 | "schema": { 322 | "type": "string" 323 | } 324 | } 325 | } 326 | } 327 | }, 328 | "/planners/topk/iterative-topk": { 329 | "post": { 330 | "description": "Executes iterative-topk on a planning task and returns resulting plan", 331 | "operationId": "iterative-topk", 332 | "consumes": [ 333 | "application/json" 334 | ], 335 | "parameters": [ 336 | { 337 | "name": "task", 338 | "in": "body", 339 | "description": "the planning task", 340 | "schema": { 341 | "$ref": "#/definitions/PlanningTask" 342 | } 343 | } 344 | ], 345 | "responses": { 346 | "200": { 347 | "description": "resulting plan", 348 | "schema": { 349 | "$ref": "#/definitions/PlanningResultMultiple" 350 | } 351 | }, 352 | "default": { 353 | "description": "unexpected error", 354 | "schema": { 355 | "type": "string" 356 | } 357 | } 358 | } 359 | } 360 | }, 361 | "/planners/topk/kstar-topk": { 362 | "post": { 363 | "description": "Executes kstar-topk on a planning task and returns resulting plan", 364 | "operationId": "kstar-topk", 365 | "consumes": [ 366 | "application/json" 367 | ], 368 | "parameters": [ 369 | { 370 | "name": "task", 371 | "in": "body", 372 | "description": "the planning task", 373 | "schema": { 374 | "$ref": "#/definitions/PlanningTask" 375 | } 376 | } 377 | ], 378 | "responses": { 379 | "200": { 380 | "description": "resulting plan", 381 | "schema": { 382 | "$ref": "#/definitions/PlanningResultMultiple" 383 | } 384 | }, 385 | "default": { 386 | "description": "unexpected error", 387 | "schema": { 388 | "type": "string" 389 | } 390 | } 391 | } 392 | } 393 | }, 394 | "/planners/topk": { 395 | "post": { 396 | "description": "Executes topk on a planning task and returns resulting plan", 397 | "operationId": "topk", 398 | "consumes": [ 399 | "application/json" 400 | ], 401 | "parameters": [ 402 | { 403 | "name": "task", 404 | "in": "body", 405 | "description": "the planning task", 406 | "schema": { 407 | "$ref": "#/definitions/PlanningTask" 408 | } 409 | } 410 | ], 411 | "responses": { 412 | "200": { 413 | "description": "resulting plan", 414 | "schema": { 415 | "$ref": "#/definitions/PlanningResultMultiple" 416 | } 417 | }, 418 | "default": { 419 | "description": "unexpected error", 420 | "schema": { 421 | "type": "string" 422 | } 423 | } 424 | } 425 | } 426 | }, 427 | "/planners/diverse/iterative-diverse": { 428 | "post": { 429 | "description": "Executes iterative-diverse on a planning task and returns resulting plan", 430 | "operationId": "iterative-diverse", 431 | "consumes": [ 432 | "application/json" 433 | ], 434 | "parameters": [ 435 | { 436 | "name": "task", 437 | "in": "body", 438 | "description": "the planning 
task", 439 | "schema": { 440 | "$ref": "#/definitions/PlanningTask" 441 | } 442 | } 443 | ], 444 | "responses": { 445 | "200": { 446 | "description": "resulting plan", 447 | "schema": { 448 | "$ref": "#/definitions/PlanningResultMultiple" 449 | } 450 | }, 451 | "default": { 452 | "description": "unexpected error", 453 | "schema": { 454 | "type": "string" 455 | } 456 | } 457 | } 458 | } 459 | }, 460 | "/planners/diverse": { 461 | "post": { 462 | "description": "Executes diverse on a planning task and returns resulting plan", 463 | "operationId": "diverse", 464 | "consumes": [ 465 | "application/json" 466 | ], 467 | "parameters": [ 468 | { 469 | "name": "task", 470 | "in": "body", 471 | "description": "the planning task", 472 | "schema": { 473 | "$ref": "#/definitions/PlanningTask" 474 | } 475 | } 476 | ], 477 | "responses": { 478 | "200": { 479 | "description": "resulting plan", 480 | "schema": { 481 | "$ref": "#/definitions/PlanningResultMultiple" 482 | } 483 | }, 484 | "default": { 485 | "description": "unexpected error", 486 | "schema": { 487 | "type": "string" 488 | } 489 | } 490 | } 491 | } 492 | }, 493 | "/planners/topq/iterative-unordered-topq": { 494 | "post": { 495 | "description": "Executes iterative-unordered-topq on a planning task and returns resulting plan", 496 | "operationId": "iterative-unordered-topq", 497 | "consumes": [ 498 | "application/json" 499 | ], 500 | "parameters": [ 501 | { 502 | "name": "task", 503 | "in": "body", 504 | "description": "the planning task", 505 | "schema": { 506 | "$ref": "#/definitions/PlanningTask" 507 | } 508 | } 509 | ], 510 | "responses": { 511 | "200": { 512 | "description": "resulting plan", 513 | "schema": { 514 | "$ref": "#/definitions/PlanningResultMultiple" 515 | } 516 | }, 517 | "default": { 518 | "description": "unexpected error", 519 | "schema": { 520 | "type": "string" 521 | } 522 | } 523 | } 524 | } 525 | }, 526 | "/planners/topq/kstar-topq": { 527 | "post": { 528 | "description": "Executes kstar-topq on a planning task and returns resulting plan", 529 | "operationId": "kstar-topq", 530 | "consumes": [ 531 | "application/json" 532 | ], 533 | "parameters": [ 534 | { 535 | "name": "task", 536 | "in": "body", 537 | "description": "the planning task", 538 | "schema": { 539 | "$ref": "#/definitions/PlanningTask" 540 | } 541 | } 542 | ], 543 | "responses": { 544 | "200": { 545 | "description": "resulting plan", 546 | "schema": { 547 | "$ref": "#/definitions/PlanningResultMultiple" 548 | } 549 | }, 550 | "default": { 551 | "description": "unexpected error", 552 | "schema": { 553 | "type": "string" 554 | } 555 | } 556 | } 557 | } 558 | }, 559 | "/planners/topq": { 560 | "post": { 561 | "description": "Executes topq on a planning task and returns resulting plan", 562 | "operationId": "topq", 563 | "consumes": [ 564 | "application/json" 565 | ], 566 | "parameters": [ 567 | { 568 | "name": "task", 569 | "in": "body", 570 | "description": "the planning task", 571 | "schema": { 572 | "$ref": "#/definitions/PlanningTask" 573 | } 574 | } 575 | ], 576 | "responses": { 577 | "200": { 578 | "description": "resulting plan", 579 | "schema": { 580 | "$ref": "#/definitions/PlanningResultMultiple" 581 | } 582 | }, 583 | "default": { 584 | "description": "unexpected error", 585 | "schema": { 586 | "type": "string" 587 | } 588 | } 589 | } 590 | } 591 | }, 592 | "/planners/task": { 593 | "post": { 594 | "description": "Executes all available categories of planners on a planning task and returns resulting plan", 595 | "operationId": "allplanners", 596 | 
"consumes": [ 597 | "application/json" 598 | ], 599 | "parameters": [ 600 | { 601 | "name": "task", 602 | "in": "body", 603 | "description": "the planning task", 604 | "schema": { 605 | "$ref": "#/definitions/PlanningTask" 606 | } 607 | } 608 | ], 609 | "responses": { 610 | "200": { 611 | "description": "resulting plans", 612 | "schema": { 613 | "$ref": "#/definitions/AllPlannersResult" 614 | } 615 | }, 616 | "default": { 617 | "description": "unexpected error", 618 | "schema": { 619 | "type": "string" 620 | } 621 | } 622 | } 623 | } 624 | } 625 | } 626 | } -------------------------------------------------------------------------------- /app.masterppp.common.js: -------------------------------------------------------------------------------- 1 | var express = require('express'); 2 | var nconf = require('./conf'); 3 | var Promise = require('bluebird'); 4 | var _ = require('lodash'); 5 | var path = require('path'); 6 | var fs = require('mz/fs'); 7 | var util = require('util'); 8 | var bodyparser = require('body-parser'); 9 | var tmp = require('tmp-promise'); 10 | var childproc = require('child-process-promise').exec; 11 | var apibase = require('./apibase.masterppp.json'); 12 | const swaggerUI = require('swagger-ui-express'); 13 | var archiver = require('archiver-promise'); 14 | var Sugar = require('sugar'); 15 | const log4js = require("log4js"); 16 | var timeout = require('connect-timeout') 17 | var jszip = require('jszip'); 18 | var multer = require('multer'); 19 | 20 | log4js.configure(nconf.get("logging")); 21 | var upload = multer({dest: 'uploads/'}); 22 | 23 | module.exports.coreApiBase = function() { 24 | let dom = fs.readFileSync('samples/domain1.pddl', 'utf-8'); 25 | let prob = fs.readFileSync('samples/problem1.pddl', 'utf-8'); 26 | let sampledata = { 27 | domain: dom, 28 | problem: prob, 29 | numplans: 5, 30 | qualitybound: 1.0 31 | }; 32 | apibase.definitions["PlanningTask"].example = sampledata; 33 | return apibase; 34 | }; 35 | 36 | module.exports.getLogger = function(name) { 37 | return log4js.getLogger(name); 38 | } 39 | 40 | module.exports.getOpenAPIDescription = function(pname) { 41 | return { 42 | "post": { 43 | "description": "Executes " + pname + " on a planning task and returns resulting plan", 44 | "operationId": pname, 45 | "consumes": ["application/json"], 46 | "parameters": [{ 47 | "name": "task", 48 | "in": "body", 49 | "description": "the planning task", 50 | "schema": { 51 | "$ref": "#/definitions/PlanningTask" 52 | } 53 | }], 54 | "responses": { 55 | "200": { 56 | "description": "resulting plan", 57 | "schema": { 58 | "$ref": "#/definitions/PlanningResultMultiple" 59 | } 60 | }, 61 | "default": { 62 | "description": "unexpected error", 63 | "schema": { 64 | "type": "string" 65 | } 66 | } 67 | } 68 | } 69 | }; 70 | }; 71 | 72 | var storage_opt = nconf.get("storage"); 73 | 74 | function readPlannerOutput(procoutput, path, logger) { 75 | logger.info("Reading planner output from path " + path); 76 | return fs.readFile(path, 'utf-8').then(content => { 77 | logger.info("Content is: " + content); 78 | let objcontent = JSON.parse(content); 79 | objcontent.raw_output = JSON.stringify(procoutput, null, 2); 80 | return objcontent; 81 | }); 82 | } 83 | 84 | function runAndCatch(command, timeout) { 85 | return childproc(command, {timeout: timeout}) 86 | .catch(err => { 87 | return Promise.resolve(err); 88 | }); 89 | } 90 | 91 | function runPlanner(pname, pconfig, task, logger) { 92 | let domain = task.domain; 93 | let problem = task.problem; 94 | let numplans = task.numplans; 
95 | let qualitybound = task.qualitybound; 96 | var p_tmpFiles = [ 97 | tmp.tmpName({ template: '/tmp/domain-XXXXXX' }), 98 | tmp.tmpName({ template: '/tmp/problem-XXXXXX' }), 99 | tmp.tmpName({ template: '/tmp/plan-XXXXXX' }), 100 | tmp.tmpName({ template: '/tmp/task-XXXXXX.zip' }), 101 | tmp.tmpName({ template: '/tmp/sas_plan-XXXXXX' }) 102 | ]; 103 | 104 | return Promise.all(p_tmpFiles) 105 | .then(filenames => { 106 | var p_writeDomProb = [ 107 | fs.writeFile(filenames[0], domain), 108 | fs.writeFile(filenames[1], problem) 109 | ]; 110 | return Promise.all(p_writeDomProb).then(() => { 111 | let pcmd_raw = path.join('planners', pconfig.basedir ? path.join(pconfig.basedir, pconfig.cmd) : pconfig.cmd); 112 | let pcmd = pcmd_raw.replace(//g, filenames[0]) 113 | .replace(//g, filenames[1]) 114 | .replace(//g, filenames[2].toLowerCase() /* fix for planners binaries. */ ) 115 | .replace(//g, filenames[4].toLowerCase()) 116 | .replace(//g, numplans) 117 | .replace(//g,qualitybound); 118 | logger.info('About to run: ' + pcmd); 119 | return runAndCatch(pcmd, parseInt(nconf.get("server:timeout")) || 0 ) 120 | .then(procoutput => readPlannerOutput(procoutput, filenames[2].toLowerCase() /* fix for planner binaries */ , logger)) 121 | .then(result => { 122 | let retObj = _.cloneDeep(result); 123 | retObj.planner = pname; 124 | retObj.length = _.isArray(retObj.actions) ? retObj.actions.length : NaN; 125 | retObj.parse_status = 'ok'; 126 | if (task.dontstore || _.isUndefined(storage_opt) || _.isUndefined(storage_opt.type)) { 127 | logger.info("Sending response for " + pname + ":\n" + JSON.stringify(retObj, null, 2)); 128 | logger.info("-----------------------------------------------------------------------------"); 129 | return Promise.resolve({ 130 | code: 200, 131 | obj: retObj 132 | }); 133 | } else { 134 | let enhanced_retObj = _.clone(retObj); 135 | enhanced_retObj.planner = { 136 | "name": pname, 137 | "command": pconfig.cmd 138 | }; 139 | var archive = archiver(filenames[3], { store: true }); 140 | // add the pddl domain and problem 141 | archive.file(filenames[0], { name: 'domain.pddl' }); 142 | archive.file(filenames[1], { name: 'problem.pddl' }); 143 | // add standard output and error 144 | archive.append(result.raw_output, { name: 'raw_output.log' }); 145 | // add result json 146 | archive.append(JSON.stringify(enhanced_retObj, null, 2), { name: 'result.json' }); 147 | return archive.finalize().then(() => { 148 | var storage = require('./storage/' + storage_opt.type); 149 | let destfname = pname.replace(/\W+/g, "_") + '_' + (new Sugar.Date()).format('{yyyy}_{MM}_{dd}_{HH}_{mm}_{ss}_{SSS}') + '.zip'; 150 | return storage.store(filenames[3], destfname, storage_opt.args).then(() => { 151 | logger.info("Sending response for " + pname + ":\n" + JSON.stringify(retObj, null, 2)); 152 | logger.info("-----------------------------------------------------------------------------"); 153 | return Promise.resolve({ 154 | code: 200, 155 | obj: retObj 156 | }); 157 | }); 158 | }); 159 | } 160 | }).catch(err => { 161 | logger.error("Error running " + pcmd + ":" + JSON.stringify(err)); 162 | return Promise.resolve({ 163 | code: 500, 164 | msg: 'Error occurred: ' + JSON.stringify(err) 165 | }); 166 | }); 167 | }); 168 | }).catch(e => { 169 | logger.error("Error running planner " + pname + ":" + e); 170 | return Promise.resolve({ 171 | code: 500, 172 | msg: 'Error: ' + e 173 | }); 174 | }); 175 | } 176 | 177 | function extractBaseTask(req, res) { 178 | if (!req.body.domain) { 179 | res.status(400).send("No 
domain specified"); 180 | return undefined; 181 | } 182 | if (!req.body.problem) { 183 | res.status(400).send("No problem specified"); 184 | return undefined; 185 | } 186 | let numplans = parseInt(req.body.numplans) || 5; 187 | let qualitybound = parseFloat(req.body.qualitybound) || 1.0; 188 | let task = { 189 | domain: req.body.domain, 190 | problem: req.body.problem, 191 | dontstore: req.query.dontstore, 192 | numplans: numplans, 193 | qualitybound: qualitybound 194 | }; 195 | return task; 196 | } 197 | 198 | function extractZippedTask(req, res, logger) { 199 | // there should be req.file containing the zip archive 200 | return fs.promises.readFile(req.file.path).then(buf => { 201 | return jszip.loadAsync(buf).then(zipObj => { 202 | let f = zipObj.file('task.json'); 203 | if(f) { 204 | return tmp.tmpName({ template: '/tmp/task-XXXXXX.json' }).then(fname => { 205 | logger.debug("Will write the file to " + fname); 206 | return new Promise((resolve, reject) => { 207 | f.nodeStream().pipe(fs.createWriteStream(fname)).on('finish', () => { 208 | logger.debug("JSON dumped, about to read " + fname); 209 | fs.promises.readFile(fname, 'utf-8').then(content => { 210 | resolve(JSON.parse(content)); 211 | }).catch(reject); 212 | }); 213 | }); 214 | }); 215 | } else { 216 | res.status(400).send('Cannot find task.json inside the zipped contents.'); 217 | return undefined; 218 | } 219 | }); 220 | }).catch(e => { 221 | logger.error("Error extracting zip task: " + e); 222 | console.log("STACK:" + e.stack); 223 | res.status(400).send('Failed to parse the zip archive'); 224 | return undefined; 225 | }); 226 | } 227 | 228 | function processBaseOutput(res, ret, logger) { 229 | if (ret.obj) { 230 | logger.info("Returning from planner " + JSON.stringify(ret.obj, null, 2)); 231 | res.status(ret.code).json(ret.obj); 232 | } else if (ret.msg) { 233 | logger.error("Planner failed with code " + ret.code + " and message " + ret.msg); 234 | res.status(ret.code).send(ret.msg); 235 | } else { 236 | logger.error("Planner failed with unknown error"); 237 | res.status(500).send('Error'); 238 | } 239 | } 240 | 241 | function processZippedOutput(res, ret, logger) { 242 | if (ret.obj) { 243 | logger.info("Returning from planner " + JSON.stringify(ret.obj, null, 2)); 244 | tmp.tmpName({ template: '/tmp/result-XXXXXX.zip' }).then(fname => { 245 | var archive = archiver(fname); 246 | archive.append(JSON.stringify(ret.obj, null, 2), {name: 'result.json'}); 247 | archive.finalize().then( () => { 248 | res.status(ret.code).sendFile(fname); 249 | }); 250 | }); 251 | } else if (ret.msg) { 252 | logger.error("Planner failed with code " + ret.code + " and message " + ret.msg); 253 | res.status(ret.code).send(ret.msg); 254 | } else { 255 | logger.error("Planner failed with unknown error"); 256 | res.status(500).send('Error'); 257 | } 258 | } 259 | 260 | module.exports.plannerInvocation = function(pname, pconfig, logger) { 261 | return (req, res) => { 262 | logger.info('Received request for ' + pname); 263 | let task = extractBaseTask(req, res); 264 | if(!task) { return } // already returned an error 265 | runPlanner(pname, pconfig, task, logger).then(ret => { 266 | processBaseOutput(res, ret, logger); 267 | }).catch(e => { 268 | logger.error(e); 269 | res.status(500).send('Error'); 270 | }); 271 | }; 272 | }; 273 | 274 | module.exports.plannerInvocationZipped = function(pname, pconfig, logger) { 275 | return (req, res) => { 276 | logger.info('Received request for ' + pname); 277 | extractZippedTask(req, res, logger).then(task => { 278 | 
if(!task) { return; } // already returned an error 279 | runPlanner(pname, pconfig, task, logger).then(ret => { 280 | processZippedOutput(res, ret, logger); 281 | }).catch(e => { 282 | logger.error(e); 283 | res.status(500).send('Error'); 284 | }); 285 | }); 286 | }; 287 | }; 288 | 289 | module.exports.plannerSelectorPromise = function(categ, categObj, logger, plannersConfig, task) { 290 | if (!('selector' in categObj)) { 291 | let plannerKey = ''; 292 | if ('default' in categObj) { 293 | plannerKey = categObj.default; 294 | } else { 295 | // just take the first one if any 296 | let allplanners = _.keys(categObj.planners); 297 | if (allplanners.length == 0) { 298 | logger.error("No planners configured for category " + categ); 299 | } else { 300 | plannerKey = allplanners[0]; 301 | } 302 | } 303 | return runPlanner(plannerKey, categObj.planners[plannerKey], task, logger); 304 | } 305 | 306 | var p_tmpFiles = [ 307 | tmp.tmpName({ template: '/tmp/domain-XXXXXX' }), 308 | tmp.tmpName({ template: '/tmp/problem-XXXXXX' }) 309 | ]; 310 | 311 | return Promise.all(p_tmpFiles).then(filenames => { 312 | var p_writeDomProb = [ 313 | fs.writeFile(filenames[0], task.domain), 314 | fs.writeFile(filenames[1], task.problem) 315 | ]; 316 | return Promise.all(p_writeDomProb).then(() => { 317 | let pcmd_raw = path.join('planners', categObj.selector.cmd); // the category-level selector command picks which planner(s) to run 318 | let pcmd = pcmd_raw.replace(//g, filenames[0]) .replace(//g, filenames[1]); 319 | return childproc(pcmd).then(procoutput => { 320 | let selectedContent = procoutput.stdout; 321 | let lines = selectedContent.trim().split('\n'); 322 | let plannerPromises = []; 323 | for (let l of lines) { 324 | // find the planner 325 | let pname = l.trim(); 326 | if (pname in plannersConfig) { 327 | plannerPromises.push( 328 | runPlanner(pname, 329 | categObj.planners[pname], 330 | task, 331 | logger 332 | ) 333 | ); 334 | } 335 | } 336 | return Promise.race(plannerPromises); 337 | }); 338 | }); 339 | }); 340 | }; 341 | 342 | module.exports.plannerSelector = function(categ, categObj, logger, plannersConfig) { 343 | const promiseFunc = this.plannerSelectorPromise; 344 | return function(req, res) { 345 | let task = extractBaseTask(req, res); 346 | if(!task) { return; } // already returned an error 347 | promiseFunc(categ, categObj, logger, plannersConfig, task).then(ret => { 348 | processBaseOutput(res, ret, logger); 349 | }).catch(e => { 350 | logger.error("Error " + e); 351 | res.status(500).send(e); 352 | }); 353 | }; 354 | }; 355 | 356 | module.exports.plannerSelectorZipped = function(categ, categObj, logger, plannersConfig) { 357 | const promiseFunc = this.plannerSelectorPromise; 358 | return function(req, res) { 359 | extractZippedTask(req, res, logger).then(task => { 360 | if(!task) { return; } // already returned an error 361 | promiseFunc(categ, categObj, logger, plannersConfig, task).then(ret => { 362 | processZippedOutput(res, ret, logger); 363 | }).catch(e => { 364 | logger.error("Error " + e); 365 | res.status(500).send(e); 366 | }); 367 | }); 368 | }; 369 | }; 370 | 371 | module.exports.setUpExpress = function(logger) { 372 | var app = express(); 373 | app.use(bodyparser.json({ limit: '50mb' })); 374 | app.get("/", (req, res) => { res.redirect("/api-docs/"); }); 375 | app.use( 376 | log4js.connectLogger(logger, { 377 | level: "info", 378 | format: (req, res, format) => 379 | format(`:remote-addr :method :url ${JSON.stringify(req.body, null, 2)}`) 380 | }) 381 | ); 382 | app.use(timeout(1800000)); 383 | return app; 384 | }; 385 | 386 | 
module.exports.getEnabledCategories = function(logger) { 387 | var plannersConfig = nconf.get("planners"); 388 | var runtimeCategories = nconf.get("server:run-categories") || '*'; 389 | var filter; 390 | 391 | if ((runtimeCategories.trim() === '*') || _.isEmpty(runtimeCategories.trim())) {} else { 392 | filter = runtimeCategories.split(/,\s*/); 393 | } 394 | 395 | var retCateg = {}; 396 | for(categ in plannersConfig) { 397 | if (filter && !_.includes(filter, categ)) { continue; } 398 | retCateg[categ] = _.cloneDeep(plannersConfig[categ]); 399 | retCateg[categ].planners = _.pickBy(plannersConfig[categ].planners, (val, key) => val.cmd); 400 | logger.info("In category " + categ + ", selected planners " + _.keys(retCateg[categ].planners)); 401 | } 402 | logger.info("Finished picking selected categories, result is: " + JSON.stringify(retCateg, null, 2)); 403 | return retCateg; 404 | }; 405 | 406 | module.exports.startServer = function(apibase, app, logger) { 407 | var cfEnv = require('cfenv'); 408 | var PORT = process.env.PORT || nconf.get("server:port") || 4501; 409 | apibase.host = cfEnv.getAppEnv().isLocal 410 | ? (nconf.get("server:hostname") || ((require("os").hostname() + ":" + PORT))) 411 | : cfEnv.getAppEnv().url; 412 | app.use('/api-docs', swaggerUI.serve, swaggerUI.setup(apibase)); 413 | var myserver = app.listen(PORT, () => logger.info('Listening on port ' + PORT)); 414 | myserver.timeout=1800000; 415 | }; 416 | 417 | module.exports.setUpWorkerPlanners = function(plannersConfig, apibase, app, logger) { 418 | for (categ in plannersConfig) { 419 | logger.info("Setting up category " + categ); 420 | for (pc in plannersConfig[categ].planners) { 421 | logger.info("Setting up planner " + pc); 422 | let thisPlannerFunc = this.plannerInvocation(pc, plannersConfig[categ].planners[pc], logger); 423 | apibase.paths["/planners/" + categ + "/" + pc] = this.getOpenAPIDescription(pc); 424 | app.post('/planners/' + categ + "/" + pc, thisPlannerFunc); 425 | app.post('/planners/' + categ + '/' + pc + '/zip', upload.single('task'), this.plannerInvocationZipped(pc, plannersConfig[categ].planners[pc], logger)); 426 | } 427 | apibase.paths['/planners/' + categ] = this.getOpenAPIDescription(categ); 428 | app.post('/planners/' + categ, this.plannerSelector(categ, plannersConfig[categ], logger)); 429 | app.post('/planners/' + categ + '/zip', upload.single('task'), this.plannerSelectorZipped(categ, plannersConfig[categ], logger)); 430 | } 431 | app.get('/alive', (req, res) => { res.status(200).send('I\'m alive!'); }); 432 | }; 433 | 434 | module.exports.taskDescription = { 435 | "post": { 436 | "description": "Executes all available categories of planners on a planning task and returns resulting plan", 437 | "operationId": "allplanners", 438 | "consumes": ["application/json"], 439 | "parameters": [{ 440 | "name": "task", 441 | "in": "body", 442 | "description": "the planning task", 443 | "schema": { 444 | "$ref": "#/definitions/PlanningTask" 445 | } 446 | }], 447 | "responses": { 448 | "200": { 449 | "description": "resulting plans", 450 | "schema": { 451 | "$ref": "#/definitions/AllPlannersResult" 452 | } 453 | }, 454 | "default": { 455 | "description": "unexpected error", 456 | "schema": { 457 | "type": "string" 458 | } 459 | } 460 | } 461 | } 462 | }; 463 | 464 | module.exports.addMainTask = function(plannersConfig, apibase, app, logger, plannerPostsFunction) { 465 | apibase.paths['/planners/task'] = this.taskDescription; 466 | app.post('/planners/task', (req, res) => { 467 | if (!req.body.domain) { 468 | 
res.status(400).send("No domain specified"); 469 | return; 470 | } 471 | if (!req.body.problem) { 472 | res.status(400).send("No problem specified"); 473 | return; 474 | } 475 | let numplans = parseInt(req.body.numplans) || 5; 476 | let qualitybound = parseFloat(req.body.qualitybound) || 1.0 477 | 478 | let task = { 479 | domain: req.body.domain, 480 | problem: req.body.problem, 481 | dontstore: req.query.dontstore || false, 482 | numplans: numplans, 483 | qualitybound: qualitybound 484 | }; 485 | var plannerPosts = plannerPostsFunction(plannersConfig, task); 486 | logger.info("There are " + plannerPosts.length + " planner promises."); 487 | Promise.all(plannerPosts).then(results => { 488 | logger.info('RESULTS:::' + JSON.stringify(results)); 489 | res.json(_.zipObject(_.keys(plannersConfig), results)); 490 | }).catch(err => { 491 | logger.info("Error occurred while retrieving planner results:" + JSON.stringify(err, null, 2)); 492 | res.status(500).send({ "error": JSON.stringify(err, null, 2) }); 493 | }); 494 | }); 495 | }; 496 | 497 | --------------------------------------------------------------------------------