├── .gitignore ├── README.md ├── __init__.py ├── archive ├── README.md └── py3-offline-script.py ├── planning.domains.py ├── planning_domains_api.py ├── scripts ├── formalism-initialization │   ├── classical │   │   ├── create-meta.py │   │   ├── data.py │   │   └── gen-db.py │   └── rddl.py └── tag-fix │   ├── processed_result5.json │   └── tag_updater.py └── web ├── collection.html ├── domain.html ├── example.html ├── planning-domains.js ├── problem.html └── web.js /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | .vscode 3 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | API Tools 2 | ========== 3 | 4 | This repository houses a collection of tools and scripts for interacting with the interface found at [api.planning.domains](http://api.planning.domains). More information can be found on that website. Briefly, the following tools are included: 5 | 6 | * **archive/**: A collection of scripts that are not currently supported, but may prove useful in the future. 7 | * **scripts/**: A collection of files used for extracting and generating information for the database. 8 | * **web/**: Javascript library for interacting with api.planning.domains in a plug-and-play fashion. Also html views to the problems/domains/collections found on the api website. 9 | * **planning.domains.py**: A command-line utility for interacting with api.planning.domains (fetching the problems, querying the database, etc). 10 | * **planning_domains_api.py**: A python library providing query functionality to api.planning.domains. 
11 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AI-Planning/api-tools/c0feed87980f1f905e6753076691b1311ea5beff/__init__.py -------------------------------------------------------------------------------- /archive/README.md: -------------------------------------------------------------------------------- 1 | This directory contains useful files that may be incorporated into the core set off api tools. A brief description of each file can be found below. 2 | 3 | * **py3-offline-script.py**: Source for the planning.domains.py file. It contains the necessary machinery to store meta-data as xml files, and will be used to provide offline access to the meta-data for the planning domains. 4 | -------------------------------------------------------------------------------- /archive/py3-offline-script.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import sys 4 | import os 5 | import datetime 6 | import gzip 7 | import zipfile 8 | import copy 9 | 10 | from os.path import isfile, isdir, join 11 | 12 | import xml.etree.ElementTree as etree 13 | 14 | import urllib.request 15 | import urllib.parse 16 | 17 | langAttribute = "{http://www.w3.org/XML/1998/namespace}lang" 18 | 19 | domainPath = None 20 | installationSettings = None 21 | installationTree = None 22 | 23 | defaultNamespace = "http://settings.planning.domains" 24 | 25 | def checkExists(pd_dir): 26 | """Check ~/.planning.domains exists, and is not a file""" 27 | 28 | if isfile("pd_dir"): 29 | print("Fatal error: need to store settings in {0}, but there is a file with that name".format(pd_dir)) 30 | exit(1) 31 | 32 | 33 | if not isdir(pd_dir): 34 | 35 | 36 | print("""== Pre-release client for planning.domains == 37 | 38 | This is pre-release software, for accessing the content on 
planning.domains. The backend of the site is undergoing heavy revision, 39 | so do not distribute this software: it may stop working in the future. Note it is released without warranty (including the implied 40 | warranties of merchantability or fitness for a particular purpose). Send bug reports to Andrew Coles. 41 | 42 | """) 43 | 44 | 45 | 46 | 47 | print("Making directory {0}".format(pd_dir)) 48 | try: 49 | os.mkdir(pd_dir) 50 | 51 | except OSError: 52 | print("Cannot make directory") 53 | exit(1) 54 | 55 | def saveSettings(): 56 | 57 | global installationTree 58 | 59 | settingsXML = join(pd_dir,"settings.xml") 60 | 61 | with open(settingsXML,"wb") as settingsFile: 62 | installationTree.write(settingsFile) 63 | 64 | 65 | 66 | 67 | 68 | def loadSettings(home_dir,pd_dir): 69 | """Get the domain path from the settings.xml file in pd_dir. If no domain path exists, ask for one.""" 70 | 71 | settingsXML = join(pd_dir,"settings.xml") 72 | 73 | #print("Loading settings from {0}".format(settingsXML)) 74 | 75 | global installationTree 76 | global installationSettings 77 | global domainPath 78 | 79 | if isfile(settingsXML): 80 | installationTree = etree.parse(settingsXML) 81 | installationSettings = installationTree.getroot() 82 | 83 | for child in installationSettings: 84 | if child.tag == "domain_path": 85 | domainPath = child.text 86 | 87 | if isdir(domainPath): 88 | return 89 | else: 90 | try: 91 | os.mkdir(domainPath) 92 | except OSError: 93 | print("Error in settings.xml: domains directory {0} does not exist, and cannot be made".format(domainPath)) 94 | exit(1) 95 | 96 | print("Warning when reading settings.xml: domains directory {0} did not exist, but it was created".format(domainPath)) 97 | 98 | return 99 | 100 | if installationSettings is None: 101 | installationSettings = etree.Element("{http://settings.planning.domains}settings") 102 | installationTree = etree.ElementTree(installationSettings) 103 | 104 | domainPath = input("Enter path for installing files (or 
hit enter to use {0}): ".format(join(home_dir,"planning.domains"))) 105 | 106 | domainPath = domainPath.lstrip() 107 | domainpath = domainPath.rstrip() 108 | 109 | if domainPath == "": 110 | domainPath = join(home_dir,"planning.domains") 111 | 112 | if isfile(domainPath): 113 | print("Fatal error: there is already a file called {0}".format(domainPath)) 114 | exit(1) 115 | 116 | if not isdir(domainPath): 117 | try: 118 | os.mkdir(domainPath) 119 | except OSError: 120 | print("Cannot make directory {0}".format(domainPath)) 121 | exit(1) 122 | 123 | etree.SubElement(installationSettings,"domain_path").text = domainPath 124 | 125 | saveSettings() 126 | 127 | 128 | 129 | 130 | 131 | def update(packagesPath): 132 | """Download the latest package list""" 133 | urllib.request.urlretrieve("http://raw.planning.domains/packages.xml.gz",packagesPath) 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | def find(root,argument): 142 | """Search for packages whose title or ID contains the given argument""" 143 | matchingIDs = [] 144 | 145 | for child in root: 146 | if child.tag == "domain": 147 | if child.attrib is None: 148 | print("Fatal error: found a domain without an ID"); 149 | exit(1) 150 | 151 | if 'id' not in child.attrib: 152 | print("Fatal error: found a domain without an ID"); 153 | exit(1) 154 | 155 | thisID = child.attrib['id'] 156 | 157 | matches = False 158 | thisTitle = None 159 | titleLanguage = None 160 | 161 | if argument in thisID: 162 | matches = True 163 | 164 | for innerchild in child: 165 | if innerchild.tag == "title": 166 | if argument in innerchild.text: 167 | matches = True 168 | 169 | if thisTitle is None: 170 | #note the first title found 171 | thisTitle = innerchild.text 172 | titleLanguage = innerchild.attrib[langAttribute] 173 | else: 174 | #if the first title isn't in English... 
175 | if titleLanguage != "en" and innerchild.attrib[langAttribute] == en: 176 | thisTitle = innerchild.text 177 | titleLanguage = "en" 178 | 179 | if matches: 180 | matchingIDs.append((thisID,thisTitle)) 181 | 182 | if len(matchingIDs) == 0: 183 | print("Cannot find a title or ID containing {0}".format(argument)) 184 | 185 | else: 186 | print("Domains with title or ID containing {0}:".format(argument)) 187 | for (x,y) in matchingIDs: 188 | print("ID: {0} , Title: {1}".format(x,y)) 189 | 190 | def getDateFromDomain(domainNode,dateTag): 191 | for child in domainNode: 192 | if child.tag == dateTag: 193 | return child.text 194 | 195 | raise LookupError 196 | 197 | def downloadIfNew(child,dateTag,fileTag): 198 | 199 | thisID = child.attrib['id'] 200 | latestDate = getDateFromDomain(child,dateTag) 201 | 202 | global installationSettings 203 | 204 | filesToRemove = [] 205 | dirsToRemove = [] 206 | 207 | installedTag = "installed_{0}".format(fileTag) 208 | 209 | for installed in installationSettings: 210 | if installed.tag == installedTag: 211 | if installed.attrib['ref'] == thisID: 212 | innerchild = installed[0] 213 | 214 | if innerchild.tag != "date": 215 | print("Malformed settings.xml file, cannot continue: expect to find 'date' as the first child of {0}".format(installedTag)) 216 | exit(1) 217 | 218 | if innerchild.text >= latestDate: 219 | print("{1} for {0} is already the latest version".format(thisID,fileTag)) 220 | return False, [], [] 221 | 222 | else: 223 | 224 | for innerchild in installed[1:]: 225 | if innerchild.tag == "file": 226 | filesToRemove.append(innerchild.text) 227 | elif innerchild.tag == "dir": 228 | dirsToRemove.append(innerchild.text) 229 | 230 | installationSettings.remove(installed) 231 | 232 | break 233 | 234 | data = {} 235 | data['id'] = thisID 236 | data['type'] = fileTag 237 | parameters = urllib.parse.urlencode(data) 238 | 239 | print("Downloading {1} for {0}...".format(thisID,fileTag)) 240 | 
urllib.request.urlretrieve("http://raw.planning.domains/fetch.php?{0}".format(parameters),\ 241 | join(pd_dir,"{0}.zip".format(fileTag)) ) 242 | 243 | return True, filesToRemove, dirsToRemove 244 | 245 | 246 | def downloadDomainAndProblemsIfNew(child): 247 | return downloadIfNew(child,"files_last_modified","domain_and_problems") 248 | 249 | 250 | def downloadMetadataIfNew(child): 251 | return downloadIfNew(child,"metadata_last_modified","metadata") 252 | 253 | 254 | def install(root,argument,pd_dir): 255 | global domainPath 256 | global installationSettings 257 | 258 | """Install the package with the given ID, to the given domain path""" 259 | for child in root: 260 | if child.tag == "domain": 261 | if child.attrib is None: 262 | print("Fatal error: found a domain without an ID"); 263 | exit(1) 264 | 265 | if 'id' not in child.attrib: 266 | print("Fatal error: found a domain without an ID"); 267 | exit(1) 268 | 269 | thisID = child.attrib['id'] 270 | 271 | if thisID == argument: 272 | 273 | needMetadata, metadataFiles, metadataDirs = downloadMetadataIfNew(child) 274 | needDomain, domainFiles, domainDirs = downloadDomainAndProblemsIfNew(child) 275 | 276 | if not needMetadata and not needDomain: 277 | print("{0} is already the latest version".format(thisID)) 278 | continue 279 | 280 | 281 | if needMetadata: 282 | 283 | if len(metadataFiles) > 0: 284 | print("Removing old metadata for {0}".format(thisID)) 285 | 286 | for f in metadataFiles: 287 | """Removing a metadata file""" 288 | os.remove(join(domainPath,f)) 289 | 290 | print("Installing new metadata for {0}".format(thisID)) 291 | else: 292 | print("Installing metadata for {0}".format(thisID)) 293 | 294 | 295 | with zipfile.ZipFile(join(pd_dir,"metadata.zip"),'r') as metadataZip: 296 | metadataZip.extractall(domainPath) 297 | 298 | newDetailsRoot = etree.SubElement(installationSettings,"installed_metadata", attrib={'ref':thisID}) 299 | etree.SubElement(newDetailsRoot, "date").text = 
getDateFromDomain(child,"metadata_last_modified") 300 | 301 | for info in metadataZip.infolist(): 302 | etree.SubElement(newDetailsRoot,"file").text = info.filename 303 | #print(info.filename) 304 | 305 | os.remove(join(pd_dir,"metadata.zip")) 306 | 307 | if needDomain: 308 | 309 | if len(domainFiles) > 0: 310 | print("Removing old domain and problems for {0}".format(thisID)) 311 | 312 | for f in domainFiles: 313 | """Removing a domain/problem file""" 314 | os.remove(join(domainPath,f)) 315 | 316 | print("Installing new domain and probelms for {0}".format(thisID)) 317 | else: 318 | print("Installing domain and problems for {0}".format(thisID)) 319 | 320 | 321 | with zipfile.ZipFile(join(pd_dir,"domain_and_problems.zip"),'r') as problemsZip: 322 | problemsZip.extractall(domainPath) 323 | 324 | newDetailsRoot = etree.SubElement(installationSettings,"installed_domain_and_problems", attrib={'ref':thisID}) 325 | etree.SubElement(newDetailsRoot, "date").text = getDateFromDomain(child,"files_last_modified") 326 | 327 | for info in problemsZip.infolist(): 328 | etree.SubElement(newDetailsRoot,"file").text = info.filename 329 | #print(info.filename) 330 | 331 | os.remove(join(pd_dir,"domain_and_problems.zip")) 332 | 333 | print("Updating settings.xml") 334 | saveSettings() 335 | 336 | 337 | 338 | 339 | def upgrade(packageList,pd_dir): 340 | """Update any installed packages to newer versions, according to date-stamps""" 341 | 342 | global domainPath 343 | global installationSettings 344 | 345 | with gzip.open(packageList,'rb') as packagesFile: 346 | tree = etree.parse(packagesFile) 347 | root = tree.getroot() 348 | 349 | latestMetadataDates = {} 350 | latestFilesDates = {} 351 | 352 | for child in root: 353 | if child.tag == "domain": 354 | id = child.attrib['id'] 355 | for timechild in child: 356 | if timechild.tag == "files_last_modified": 357 | latestFilesDates[id] = timechild.text 358 | 359 | elif timechild.tag == "metadata_last_modified": 360 | latestMetadataDates[id] = 
timechild.text 361 | 362 | toUpdate = {} 363 | 364 | for installed in installationSettings: 365 | if installed.tag == "installed_metadata": 366 | id = installed.attrib['ref'] 367 | 368 | if id not in latestMetadataDates: 369 | print("Warning: metadata is installed for {0}, but it is no longer on the package list".format(id)) 370 | continue 371 | 372 | innerchild = installed[0] 373 | 374 | if innerchild.tag != "date": 375 | print("Malformed settings.xml file, cannot continue: expect to find 'date' as the first child of 'installed_metadata'") 376 | exit(1) 377 | 378 | if latestMetadataDates[id] > innerchild.text: 379 | toUpdate[id] = (True,False) 380 | 381 | for installed in installationSettings: 382 | if installed.tag == "installed_domain_and_problems": 383 | id = installed.attrib['ref'] 384 | 385 | if id not in latestFilesDates: 386 | print("Warning: domain and problem files are installed for {0}, but it is no longer on the package list".format(id)) 387 | continue 388 | 389 | innerchild = installed[0] 390 | 391 | if innerchild.tag != "date": 392 | print("Malformed settings.xml file, cannot continue: expect to find 'date' as the first child of 'installed_domain_and_problems'") 393 | exit(1) 394 | 395 | if latestFilesDates[id] > innerchild.text: 396 | toUpdate[id] = (True,True) 397 | 398 | 399 | for id in toUpdate: 400 | if toUpdate[id] == (True,True): 401 | print("Updating metadata and domain/problems for {0}".format(id)) 402 | else: 403 | print("Updating metadata for {0}".format(id)) 404 | 405 | install(root,id,pd_dir) 406 | 407 | print("All up to date") 408 | 409 | if __name__ == "__main__": 410 | 411 | home_dir = os.path.expanduser("~") 412 | 413 | pd_dir = join(home_dir,".planning.domains") 414 | 415 | checkExists(pd_dir) 416 | 417 | loadSettings(home_dir, pd_dir) 418 | 419 | if installationSettings is None: 420 | print("Fatal error: could not establish installation settings") 421 | exit(1) 422 | 423 | #don't download the package list twice, if the script is ran 
with the update option, but packages.xml was missing 424 | downloadedPackageList = False 425 | packageList = join(pd_dir,"packages.xml.gz") 426 | 427 | if not isfile(packageList): 428 | print("No package list found, downloading it") 429 | update(packageList) 430 | downloadedPackageList = True 431 | 432 | 433 | if len(sys.argv) == 1: 434 | print("""No command-line options given. Usage: 435 | 436 | planning.domains.py update Update the packages.xml list to the latest version 437 | planning.domains.py upgrade Upgrade installed packages (and/or their metadata) to the latest version 438 | planning.domains.py find [string] Find packages whose title/ID contains 'string' 439 | planning.domains.py install [id] [id] ... Install the packages with the IDs given""") 440 | 441 | exit(0) 442 | 443 | 444 | root = None 445 | 446 | i = 1 447 | 448 | while i < len(sys.argv): 449 | if sys.argv[i] == "update": 450 | if downloadedPackageList: 451 | print("Already downloaded package list") 452 | else: 453 | print("Downloading package list") 454 | update(packageList) 455 | downloadedPackageList = True 456 | 457 | i += 1 458 | 459 | elif sys.argv[i] == "upgrade": 460 | 461 | upgrade(packageList,pd_dir) 462 | i += 1 463 | 464 | else: 465 | 466 | command = sys.argv[i] 467 | i += 1 468 | 469 | if i == len(sys.argv): 470 | print("Error: expected an argument after {0}".format(command)) 471 | exit(1) 472 | 473 | while i < len(sys.argv): 474 | argument = sys.argv[i] 475 | i += 1 476 | 477 | argument = argument.rstrip() 478 | argument = argument.lstrip() 479 | 480 | if len(argument) == 0: 481 | print("Warning: expected non-empty argument after {0}".format(command)) 482 | continue 483 | 484 | 485 | if root is None: 486 | with gzip.open(packageList,'rb') as packagesFile: 487 | tree = etree.parse(packagesFile) 488 | root = tree.getroot() 489 | 490 | if command == "find": 491 | find(root,argument) 492 | 493 | elif command == "install": 494 | install(root,argument,pd_dir) 495 | 496 | 497 | 
-------------------------------------------------------------------------------- /planning.domains.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import argparse, os, pprint, sys 4 | 5 | import xml.etree.ElementTree as etree 6 | 7 | import planning_domains_api as api 8 | 9 | langAttribute = "{http://www.w3.org/XML/1998/namespace}lang" 10 | 11 | domainPath = None 12 | installationSettings = None 13 | installationTree = None 14 | userEmail = None 15 | userToken = None 16 | 17 | defaultNamespace = "http://settings.planning.domains" 18 | 19 | 20 | 21 | def checkExists(pd_dir): 22 | """Check ~/.planning.domains exists, and is not a file""" 23 | 24 | if os.path.isfile(pd_dir): 25 | print("Fatal error: need to store settings in {0}, but there is a file with that name".format(pd_dir)) 26 | exit(1) 27 | 28 | 29 | if not os.path.isdir(pd_dir): 30 | 31 | 32 | print(""" 33 | == Pre-release client for planning.domains == 34 | 35 | This is pre-release software, for accessing the content on 36 | api.planning.domains. It is released without warranty 37 | (including the implied warranties of merchantability 38 | or fitness for a particular purpose). 39 | 40 | Send bug reports to Andrew Coles (andrew.coles@kcl.ac.uk) 41 | or Christian Muise (christian.muise@gmail.com) 42 | 43 | """) 44 | 45 | 46 | print("Making directory {0}...\n".format(pd_dir)) 47 | try: 48 | os.mkdir(pd_dir) 49 | 50 | except OSError: 51 | print("Cannot make directory") 52 | exit(1) 53 | 54 | 55 | 56 | def saveSettings(): 57 | 58 | global installationTree 59 | 60 | settingsXML = os.path.join(pd_dir,"settings.xml") 61 | 62 | with open(settingsXML,"wb") as settingsFile: 63 | installationTree.write(settingsFile) 64 | 65 | 66 | 67 | def fetchPlanningDomains(domainPath): 68 | try: 69 | resp = input("Clone the domain repository (~50Mb download / ~1Gb uncompressed) to directory {0}? 
(y/n) ".format(domainPath)) 70 | if 'y' == resp: 71 | os.system("git clone https://github.com/AI-Planning/classical-domains.git {0}".format(domainPath)) 72 | else: 73 | print("Aborting fetching domains for the directory {0}".format(domainPath)) 74 | except OSError: 75 | print("Cannot make directory {0}".format(domainPath)) 76 | exit(1) 77 | 78 | 79 | 80 | def loadSettings(home_dir,pd_dir): 81 | """Get the domain path from the settings.xml file in pd_dir. If no domain path exists, ask for one.""" 82 | 83 | settingsXML = os.path.join(pd_dir,"settings.xml") 84 | 85 | #print("Loading settings from {0}".format(settingsXML)) 86 | 87 | global installationTree 88 | global installationSettings 89 | global domainPath 90 | global userEmail 91 | global userToken 92 | 93 | if os.path.isfile(settingsXML): 94 | installationTree = etree.parse(settingsXML) 95 | installationSettings = installationTree.getroot() 96 | 97 | for child in installationSettings: 98 | if child.tag == "domain_path": 99 | domainPath = child.text 100 | 101 | if not os.path.isdir(domainPath): 102 | fetchPlanningDomains(domainPath) 103 | 104 | if child.tag == "email": 105 | userEmail = child.text 106 | 107 | if child.tag == "token": 108 | userToken = child.text 109 | 110 | return 111 | 112 | if installationSettings is None: 113 | installationSettings = etree.Element("{http://settings.planning.domains}settings") 114 | installationTree = etree.ElementTree(installationSettings) 115 | 116 | domainPath = input("Enter path for installing files (or hit enter to use {0}): ".format(os.path.join(home_dir,"planning.domains"))) 117 | 118 | domainPath = domainPath.lstrip() 119 | domainpath = domainPath.rstrip() 120 | 121 | if domainPath == "": 122 | domainPath = os.path.join(home_dir,"planning.domains") 123 | 124 | if os.path.isfile(domainPath): 125 | print("Fatal error: there is already a file called {0}".format(domainPath)) 126 | exit(1) 127 | 128 | if not os.path.isdir(domainPath): 129 | fetchPlanningDomains(domainPath) 
130 | 131 | etree.SubElement(installationSettings,"domain_path").text = domainPath 132 | 133 | userEmail = input("Enter email for API updates: ") 134 | userToken = input("Enter token for API updates (leave blank if none provided): ") 135 | 136 | etree.SubElement(installationSettings,"email").text = userEmail 137 | etree.SubElement(installationSettings,"token").text = userToken 138 | 139 | saveSettings() 140 | 141 | 142 | def register(): 143 | global userEmail 144 | global userToken 145 | 146 | userEmail = input("Enter email for API updates (leave blank for %s): " % userEmail) or userEmail 147 | userToken = input("Enter token for API updates (leave blank for %s): " % userToken) or userToken 148 | 149 | list(filter(lambda x: x.tag == 'email', installationSettings))[0].text = userEmail 150 | list(filter(lambda x: x.tag == 'token', installationSettings))[0].text = userToken 151 | 152 | saveSettings() 153 | 154 | print("Email and token settings saved!\n") 155 | 156 | 157 | def find(sub, arg, form): 158 | """Find an object of type sub that matches argument arg.""" 159 | 160 | if sub == 'collections': 161 | res = api.find_collections(arg, form) 162 | elif sub == 'domains': 163 | res = api.find_domains(arg, form) 164 | elif sub == 'problems': 165 | res = api.find_problems(arg, form) 166 | else: 167 | print("Error: Unrecognized sub-command, {0}".format(sub)) 168 | exit(1) 169 | 170 | pprint.pprint(res, sort_dicts=False) 171 | 172 | def show(sub, arg, form): 173 | """Show an object of type sub that matches the id arg.""" 174 | 175 | arg = int(arg) 176 | 177 | if sub == 'plan': 178 | print(api.get_plan(arg, form)) 179 | return 180 | 181 | if sub == 'collection': 182 | res = api.get_collection(arg, form) 183 | elif sub == 'domain': 184 | res = api.get_domain(arg, form) 185 | elif sub == 'problem': 186 | res = api.get_problem(arg, form) 187 | else: 188 | print("Error: Unrecognized sub-command, {0}".format(sub)) 189 | exit(1) 190 | 191 | pprint.pprint(res, sort_dicts=False) 192 
| 193 | def submit_plan(pid, pfile, formalism): 194 | with open(pfile) as f: 195 | plan = f.read() 196 | api.submit_plan(pid, plan, formalism) 197 | 198 | def cache(cid, outdir, formalism, include_data = False): 199 | 200 | print("Caching collection %d to [%s] (data included = %s)..." % (cid, outdir, str(include_data))) 201 | 202 | if os.path.exists(outdir): 203 | print("Error: Output directory already exists.") 204 | exit(1) 205 | else: 206 | os.mkdir(outdir) 207 | 208 | domains = {} 209 | problem_data = {} 210 | domain_data = api.get_domains(cid, formalism) 211 | domain_names = [dom['domain_name'] for dom in domain_data] 212 | assert len(set(domain_names)) == len(domain_names), "Error: It appears as though the collection has repeated domain names." 213 | 214 | for dom in domain_data: 215 | 216 | dname = dom['domain_name'] 217 | 218 | # Map the domain name to the list of domain-problem pairs and problem data 219 | domains[dname] = [] 220 | problem_data[dname] = [] 221 | 222 | # Make the directory for the domain 223 | os.mkdir(os.path.join(outdir, dname)) 224 | 225 | # Turn the links into relative paths for this machine 226 | probs = [api.localize(p) for p in api.get_problems(dom['domain_id'], formalism)] 227 | 228 | # Copy the domain and problem files to their appropriate directory 229 | for i in range(len(probs)): 230 | dpath = os.path.join(dname, "domain_%.2d.pddl" % (i+1)) 231 | ppath = os.path.join(dname, "prob_%.2d.pddl" % (i+1)) 232 | 233 | os.system("cp %s %s" % (probs[i]['domain_path'], os.path.join(outdir,dpath))) 234 | os.system("cp %s %s" % (probs[i]['problem_path'], os.path.join(outdir,ppath))) 235 | 236 | domains[dname].append((dpath,ppath)) 237 | 238 | if include_data: 239 | problem_data[dname].append(probs[i]) 240 | problem_data[dname][-1]['domain_path'] = os.path.abspath(os.path.join(outdir,dpath)) 241 | problem_data[dname][-1]['problem_path'] = os.path.abspath(os.path.join(outdir,ppath)) 242 | 243 | with open(os.path.join(outdir, "domains.py"), 
'w') as f: 244 | f.write('\n# Use "from domains import DOMAINS" to get the benchmark set\n') 245 | if include_data: 246 | f.write('\n# Use "from domains import DATA" to get the problem data (aligns with the DOMAINS list)\n') 247 | f.write('\nDOMAINS = ') 248 | f.write(pprint.pformat(domains)) 249 | if include_data: 250 | f.write('\n\nDATA = ') 251 | f.write(pprint.pformat(problem_data)) 252 | f.write('\n') 253 | 254 | print("Done!\n") 255 | 256 | if __name__ == "__main__": 257 | 258 | home_dir = os.path.expanduser("~") 259 | 260 | pd_dir = os.path.join(home_dir,".planning.domains") 261 | 262 | checkExists(pd_dir) 263 | 264 | loadSettings(home_dir, pd_dir) 265 | 266 | if installationSettings is None: 267 | print("Fatal error: could not establish installation settings") 268 | exit(1) 269 | 270 | 271 | parser = argparse.ArgumentParser(description="Planning Domains CLI") 272 | subparsers = parser.add_subparsers(dest="command") 273 | 274 | def add_formalism_argument(subparser): 275 | subparser.add_argument("--formalism", choices=["classical", "rddl"], default="classical", help="Specify the formalism for the command.") 276 | 277 | # Update 278 | update_parser = subparsers.add_parser("update", help="Update the local domain repository.") 279 | 280 | # Register 281 | register_parser = subparsers.add_parser("register", help="Register your email and token for making API edits.") 282 | 283 | # Find 284 | find_parser = subparsers.add_parser("find", help="Find collections, domains, or problems whose title/ID contains a string.") 285 | find_parser.add_argument("--type", choices=["collections", "domains", "problems"], help="Type of item to find.") 286 | find_parser.add_argument("--query", help="String to search for in the titles/IDs.") 287 | add_formalism_argument(find_parser) 288 | 289 | # Show 290 | show_parser = subparsers.add_parser("show", help="Find and show collections, domains, problems, or plans with a specific ID.") 291 | show_parser.add_argument("--type", 
choices=["collection", "domain", "problem", "plan"], help="Type of item to show.") 292 | show_parser.add_argument("--id", type=int, help="ID of the item to show.") 293 | add_formalism_argument(show_parser) 294 | 295 | # List 296 | list_parser = subparsers.add_parser("list", help="Lists collections, tags, or problems with a null attribute setting.") 297 | list_parser.add_argument("--type", choices=["collections", "tags", "null-attribute"], help="Type of items to list.") 298 | list_parser.add_argument("--attribute", nargs="?", default=None, help="Attribute setting to search for (only for null-attribute).") 299 | add_formalism_argument(list_parser) 300 | 301 | # Tag 302 | tag_parser = subparsers.add_parser("tag", help="Tag a collection, domain, or problem with a specific tag.") 303 | tag_parser.add_argument("--type", choices=["collection", "domain", "problem"], help="Type of item to tag.") 304 | tag_parser.add_argument("--id", type=int, help="ID of the item to tag.") 305 | tag_parser.add_argument("--tag", help="Tag to add to the item.") 306 | add_formalism_argument(tag_parser) 307 | 308 | # Untag 309 | untag_parser = subparsers.add_parser("untag", help="Untag a collection, domain, or problem with a specific tag.") 310 | untag_parser.add_argument("--type", choices=["collection", "domain", "problem"], help="Type of item to untag.") 311 | untag_parser.add_argument("--id", type=int, help="ID of the item to untag.") 312 | untag_parser.add_argument("--tag", help="Tag to remove from the item.") 313 | add_formalism_argument(untag_parser) 314 | 315 | # Submit plan 316 | submit_plan_parser = subparsers.add_parser("submit-plan", help="Submit the provided plan for validation and possible storage.") 317 | submit_plan_parser.add_argument("--id", type=int, help="Problem ID for which the plan is provided.") 318 | submit_plan_parser.add_argument("--plan", type=argparse.FileType("r"), help="File containing the plan to submit.") 319 | add_formalism_argument(submit_plan_parser) 320 | 321 
# ---------------------------------------------------------------------------
# Remaining sub-command definitions, argument parsing, and command dispatch.
# ---------------------------------------------------------------------------

# Cache
cache_parser = subparsers.add_parser("cache", help="Collect all of the domains in a collection into a specified folder.")
cache_parser.add_argument("--id", type=int, help="Collection ID to cache.")
cache_parser.add_argument("--folder", help="Folder to store the cached collection.")
add_formalism_argument(cache_parser)

# Cache-all
cache_all_parser = subparsers.add_parser("cache-all", help="Collect all domains in a collection into a specified folder, including problem data and statistics.")
cache_all_parser.add_argument("--id", type=int, help="Collection ID to cache.")
cache_all_parser.add_argument("--folder", help="Folder to store the cached collection and problem data/statistics.")
add_formalism_argument(cache_all_parser)

args = parser.parse_args()

if args.command is None:
    parser.print_help()
    exit(0)


def _usage_error(message):
    """Print a usage error and abort with a non-zero exit status."""
    print(message)
    exit(1)


if args.command == "update":
    # Pull the latest copy of the local benchmark repository in place.
    if api.checkForDomainPath():
        print("Updating...")
        os.system("cd {0}; git pull".format(api.DOMAIN_PATH))
    else:
        print("Error: Domain path is not set.")

elif args.command == "cache":
    if args.id is None or args.folder is None:
        _usage_error("Error: Must provide a collection ID and folder name.")
    cache(args.id, args.folder, args.formalism)

elif args.command == "cache-all":
    if args.id is None or args.folder is None:
        _usage_error("Error: Must provide a collection ID and folder name.")
    cache(args.id, args.folder, args.formalism, True)

elif args.command == "register":
    register()

elif args.command == "submit-plan":
    if args.id is None or args.plan is None:
        _usage_error("Error: Must provide a problem ID and plan file.")
    submit_plan(args.id, args.plan, args.formalism)

elif args.command == "list":
    if args.type is None:
        _usage_error("Error: Must provide a list type.")
    if args.type == "tags":
        print("{0}\t{1}\n".format('Tag Name'.rjust(26), 'Description'))
        tags = api.get_tags(args.formalism)
        for tag_name in sorted(tags):
            print("{0}\t{1}".format(tag_name.rjust(26), tags[tag_name]))
        print()
    elif args.type == "collections":
        cols = {c['collection_id']: c for c in api.get_collections(args.formalism)}
        for cid in sorted(cols):
            col = cols[cid]
            print()
            print(" ID: {0}".format(col['collection_id']))
            print(" Name: {0}".format(col['collection_name']))
            print(" #Doms: {0}".format(len(col['domain_set'])))
            print("Description: {0}".format(col['description']))
            print()
    elif args.type == "null-attribute":
        if args.attribute is None:
            _usage_error("Error: Must provide an attribute name.")
        nullprobs = api.get_null_attribute_problems(args.attribute, args.formalism)
        if len(nullprobs) < 25:
            pprint.pprint(nullprobs)
        else:
            # Too many entries to dump in full -- show a small sample instead.
            print("{0} problems have {1} set to null. 10 examples:\n".format(len(nullprobs), args.attribute))
            print('\n'.join([" - {0}: {1}".format(pid, nullprobs[pid]) for pid in list(nullprobs.keys())[:10]]))
            print(' - ...')
    else:
        _usage_error("Error: Unknown list type.")

elif args.command == "find":
    if args.type is None or args.query is None:
        _usage_error("Error: Must provide a search type and query.")
    find(args.type, args.query, args.formalism)

elif args.command == "show":
    if args.type is None or args.id is None:
        _usage_error("Error: Must provide a show type and ID.")
    show(args.type, args.id, args.formalism)

elif args.command == "tag":
    if args.type is None or args.id is None or args.tag is None:
        _usage_error("Error: Must provide a tag type, ID, and tag name.")
    taggers = {"collection": api.tag_collection,
               "domain": api.tag_domain,
               "problem": api.tag_problem}
    if args.type in taggers:
        taggers[args.type](args.id, args.tag, args.formalism)
    else:
        _usage_error("Error: Can only tag a collection, domain, or problem.")

elif args.command == "untag":
    if args.type is None or args.id is None or args.tag is None:
        _usage_error("Error: Must provide an untag type, ID, and tag name.")
    untaggers = {"collection": api.untag_collection,
                 "domain": api.untag_domain,
                 "problem": api.untag_problem}
    if args.type in untaggers:
        untaggers[args.type](args.id, args.tag, args.formalism)
    else:
        _usage_error("Error: Can only untag a collection, domain, or problem.")

else:
    parser.print_help()
    exit(0)

print()
-------------------------------------------------------------------------------- /planning_domains_api.py: -------------------------------------------------------------------------------- 1 | 2 | import http.client, urllib.parse, json, os 3 | import xml.etree.ElementTree as etree 4 | 5 | URL = 'api.planning.domains' 6 | VERSION = '0.5' 7 | 8 | DOMAIN_PATH = False 9 | USER_EMAIL = False 10 | USER_TOKEN = False 11 | 12 | def checkForDomainPath(): 13 | """Returns the domain path if one exists and is saved in the settings.xml""" 14 | 15 | home_dir = os.path.expanduser("~") 16 | pd_dir = os.path.join(home_dir,".planning.domains") 17 | settingsXML = os.path.join(pd_dir,"settings.xml") 18 | 19 | if not os.path.isdir(pd_dir) or not os.path.isfile(settingsXML): 20 | return False 21 | 22 | installationTree = etree.parse(settingsXML) 23 | if installationTree is None: 24 | return False 25 | 26 | installationSettings = installationTree.getroot() 27 | if installationSettings is None: 28 | return False 29 | 30 | domainPath = str(list(filter(lambda x: x.tag == 'domain_path', installationSettings))[0].text) 31 | if not os.path.isdir(domainPath): 32 | return False 33 | 34 | global DOMAIN_PATH 35 | global USER_EMAIL 36 | global USER_TOKEN 37 | DOMAIN_PATH = domainPath 38 | if 'email' in [x.tag for x in installationSettings]: 39 | USER_EMAIL = list(filter(lambda x: x.tag == 'email', installationSettings))[0].text 40 | if 'token' in [x.tag for x in installationSettings]: 41 | USER_TOKEN = list(filter(lambda x: x.tag == 'token', installationSettings))[0].text 42 | return True 43 | 44 | def query(qs, formalism, qtype="GET", params={}, offline=False, format='/json'): 45 | 46 | assert not offline, "Error: Offline mode is not supported currently." 
47 | 48 | headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"} 49 | 50 | params = urllib.parse.urlencode(params) 51 | conn = http.client.HTTPSConnection(URL) 52 | if formalism == "": 53 | conn.request(qtype, f"{format}/{qs}", params, headers) 54 | else: 55 | conn.request(qtype, f"{format}/{formalism}/{qs}", params, headers) 56 | response = conn.getresponse() 57 | tmp = response.read().decode('utf-8') 58 | if "
Payload Too Large
" in tmp: 59 | data = { "error": True, "message": "Payload too large."} 60 | else: 61 | try: 62 | data = json.loads(tmp) 63 | except: 64 | data = { "error": True, "message": f"Invalid JSON response:\n{tmp}"} 65 | conn.close() 66 | 67 | return data 68 | 69 | def simple_query(qs, formalism): 70 | res = query(qs, formalism) 71 | if res['error']: 72 | print ("Error: %s" % res['message']) 73 | return [] 74 | else: 75 | return res['result'] 76 | 77 | def update_stat(stat_type, iid, attribute, value, description, formalism): 78 | 79 | params = {'user': USER_EMAIL, 80 | 'password': USER_TOKEN, 81 | 'key': attribute, 82 | 'value': value, 83 | 'desc': description} 84 | 85 | res = query("update%s/%d" % (stat_type, iid), 86 | formalism, 87 | qtype='POST', 88 | params=params, 89 | offline=False, 90 | format='') 91 | 92 | if res['error']: 93 | print ("Error: %s" % res['message']) 94 | else: 95 | print ("Result: %s" % str(res)) 96 | 97 | def change_tag(tag_type, iid, tid, formalism): 98 | 99 | params = {'user': USER_EMAIL, 100 | 'password': USER_TOKEN, 101 | 'tag_id': tid} 102 | 103 | res = query("%s/%d" % (tag_type, iid), 104 | formalism, 105 | qtype='POST', 106 | params=params, 107 | offline=False, 108 | format='') 109 | 110 | if res['error']: 111 | print ("Error: %s" % res['message']) 112 | else: 113 | print ("Result: %s" % str(res)) 114 | 115 | def create_collection(name, description, tags, ipc, formalism): 116 | 117 | params = {'user': USER_EMAIL, 118 | 'password': USER_TOKEN, 119 | 'formalism': formalism, 120 | 'name': name, 121 | 'ipc': ipc, 122 | 'desc': description, 123 | 'tags': tags, 124 | } 125 | path = f"{formalism}/collection" 126 | res = query(path, 127 | formalism, 128 | qtype='POST', 129 | params = params, 130 | offline=False 131 | ) 132 | 133 | if res['error']: 134 | print ("Error: %s" % res['message']) 135 | return [] 136 | else: 137 | print ("Result: %s" % str(res)) 138 | return res['result'] 139 | 140 | 141 | def get_version(): 142 | """Return the current API 
version""" 143 | return str(query('version', "")['version']) 144 | 145 | 146 | def get_tags(formalism): 147 | """Get the list of available tags""" 148 | return {t['name']: t['description'] for t in simple_query("tags", formalism)} 149 | 150 | 151 | def get_collections(formalism, ipc = None): 152 | """Return the collections, optionally which are IPC or non-IPC""" 153 | res = query('collections/', formalism) 154 | if res['error']: 155 | print ("Error: %s" % res['message']) 156 | return [] 157 | else: 158 | if ipc is not None: 159 | return list(filter(lambda x: x['ipc'] == ipc, res['result'])) 160 | else: 161 | return res['result'] 162 | 163 | def get_collection(cid, formalism): 164 | """Return the collection of a given id""" 165 | return simple_query("collection/%d" % cid, formalism) 166 | 167 | def find_collections(name, formalism): 168 | """Find the collections matching the string name""" 169 | return simple_query("collections/search?collection_name=%s" % name, formalism) 170 | 171 | def update_collection_stat(cid, attribute, value, description, formalism): 172 | """Update the attribute stat with a given value and description""" 173 | update_stat('collection', cid, attribute, value, description, formalism) 174 | 175 | def tag_collection(cid, tagname, formalism): 176 | """Tag the collection with a given tag""" 177 | tag2id = {t['name']: t['id'] for t in simple_query("tags", formalism)} 178 | if tagname not in tag2id: 179 | print ("Error: Tag %s does not exist" % tagname) 180 | else: 181 | change_tag("tagcollection", cid, tag2id[tagname], formalism) 182 | 183 | def untag_collection(cid, tagname, formalism): 184 | """Remove a given tag from a collection""" 185 | tag2id = {t['name']: t['id'] for t in simple_query("tags", formalism)} 186 | if tagname not in tag2id: 187 | print ("Error: Tag %s does not exist" % tagname) 188 | else: 189 | change_tag("untagcollection", cid, tag2id[tagname], formalism) 190 | 191 | 192 | 193 | def get_domains(cid, formalism): 194 | """Return 
the set of domains for a given collection id""" 195 | return simple_query("domains/%d" % cid, formalism) 196 | 197 | def get_domain(did, formalism): 198 | """Return the domain for a given domain id""" 199 | return simple_query("domain/%d" % did, formalism) 200 | 201 | def find_domains(name, formalism): 202 | """Return the domains matching the string name""" 203 | return simple_query("domains/search?domain_name=%s" % name, formalism) 204 | 205 | def update_domain_stat(did, attribute, value, description, formalism): 206 | """Update the attribute stat with a given value and description""" 207 | update_stat('domain', did, attribute, value, description, formalism) 208 | 209 | def tag_domain(did, tagname, formalism): 210 | """Tag the domain with a given tag""" 211 | tag2id = {t['name']: t['id'] for t in simple_query("tags", formalism)} 212 | if tagname not in tag2id: 213 | print ("Error: Tag %s does not exist" % tagname) 214 | else: 215 | change_tag("tagdomain", did, tag2id[tagname], formalism) 216 | 217 | def untag_domain(did, tagname, formalism): 218 | """Remove a given tag from a domain""" 219 | tag2id = {t['name']: t['id'] for t in simple_query("tags", formalism)} 220 | if tagname not in tag2id: 221 | print ("Error: Tag %s does not exist" % tagname) 222 | else: 223 | change_tag("untagdomain", did, tag2id[tagname], formalism) 224 | 225 | 226 | def get_problems(did, formalism): 227 | """Return the set of problems for a given domain id""" 228 | return map(localize, simple_query("problems/%d" % did, formalism)) 229 | 230 | def get_problem(pid, formalism): 231 | """Return the problem for a given problem id""" 232 | return localize(simple_query("problem/%d" % pid, formalism)) 233 | 234 | def find_problems(name, formalism): 235 | """Return the problems matching the string name""" 236 | return list(map(localize, simple_query("problems/search?problem_name=%s" % name, formalism))) 237 | 238 | def update_problem_stat(pid, attribute, value, description, formalism): 239 | 
"""Update the attribute stat with a given value and description""" 240 | update_stat('problem', pid, attribute, value, description, formalism) 241 | 242 | def get_null_attribute_problems(attribute, formalism): 243 | """Fetches all of the problems that do not have the attribute set yet""" 244 | return {i['id']: (i['domain_path'], i['problem_path']) 245 | for i in map(localize, simple_query("nullattribute/%s" % attribute, formalism))} 246 | 247 | def tag_problem(pid, tagname, formalism): 248 | """Tag the problem with a given tag""" 249 | tag2id = {t['name']: t['id'] for t in simple_query("tags", formalism)} 250 | if tagname not in tag2id: 251 | print ("Error: Tag %s does not exist" % tagname) 252 | else: 253 | change_tag("tagproblem", pid, tag2id[tagname], formalism) 254 | 255 | def untag_problem(pid, tagname, formalism): 256 | """Remove a given tag from a problem""" 257 | tag2id = {t['name']: t['id'] for t in simple_query("tags", formalism)} 258 | if tagname not in tag2id: 259 | print ("Error: Tag %s does not exist" % tagname) 260 | else: 261 | change_tag("untagproblem", pid, tag2id[tagname], formalism) 262 | 263 | def get_plan(pid, formalism): 264 | """Return the existing plan for a problem if it exists""" 265 | res = simple_query("plan/%d" % pid, formalism) 266 | if res: 267 | return res['plan'].strip() 268 | return res 269 | 270 | 271 | def submit_plan(pid, plan, formalism): 272 | """Submit the provided plan for validation and possible storage""" 273 | 274 | params = {'plan': plan, 'email': USER_EMAIL} 275 | 276 | res = query("submitplan/%d" % pid, 277 | formalism, 278 | qtype='POST', 279 | params=params, 280 | offline=False, 281 | format='') 282 | if res['error']: 283 | print ("Error: %s" % res['message']) 284 | else: 285 | print ("Result: %s" % str(res)) 286 | 287 | 288 | def localize(prob): 289 | """Convert the relative paths to local ones""" 290 | if not DOMAIN_PATH: 291 | return prob 292 | 293 | toRet = {k:prob[k] for k in prob} 294 | 295 | pathKeys = 
['domain_path', 'problem_path'] 296 | for key in pathKeys: 297 | if key in toRet: 298 | toRet[key] = os.path.join(DOMAIN_PATH, prob[key]) 299 | 300 | return toRet 301 | 302 | 303 | def generate_lab_suite(cid, formalism): 304 | """Uses the lab API to generate a suite of problems in a collection""" 305 | try: 306 | from downward.suites import Problem 307 | except: 308 | print ("\n Error: Lab does not seem to be installed ( https://lab.readthedocs.io/ )\n") 309 | return 310 | 311 | SUITE = [] 312 | for d in get_domains(cid, formalism): 313 | for p in get_problems(d['domain_id'], formalism): 314 | SUITE.append(Problem(p['domain'], p['problem'], 315 | domain_file = p['domain_path'], 316 | problem_file = p['problem_path'], 317 | properties = {'api_problem_id': p['problem_id']})) 318 | return SUITE 319 | 320 | 321 | if not checkForDomainPath(): 322 | print ("\n Warning: No domain path is set\n") 323 | 324 | try: 325 | if VERSION != get_version(): 326 | print (f"\n Warning: Script version ({VERSION}) doesn't match API ({get_version()}). 
Do you have the latest version of this file?\n") 327 | except: 328 | pass 329 | -------------------------------------------------------------------------------- /scripts/formalism-initialization/classical/create-meta.py: -------------------------------------------------------------------------------- 1 | 2 | import os, pprint 3 | from krrt.utils import get_file_list, write_file 4 | 5 | from data import * 6 | 7 | forbidden_files = ['__init__', 'api.py'] 8 | 9 | def get_name(dom): 10 | suffixes = ['-sat', '-opt', '-strips', '-fulladl', '-06', '-08', '-00', '-02', '98', '00', '-simpleadl', '-adl'] 11 | name = dom.split('/')[-1] 12 | for s in suffixes: 13 | name = name.split(s)[0] 14 | #if '-' in name: 15 | # print "Check name: %s" % name 16 | if '' == name: 17 | print ("Error: empty name from %s" % dom) 18 | return name 19 | 20 | 21 | def handle_single(dom): 22 | towrite = 'domains = [\n' 23 | 24 | extra_domain = False 25 | domdata = {} 26 | 27 | domdata['name'] = get_name(dom) 28 | domdata['description'] = domain_description[get_name(dom)] 29 | 30 | # Special Cases: 31 | # IPC-2000: freecell (non-pfiles) 32 | # IPC-2002: satellite (p#-pfile#.pddl) 33 | # IPC-2002: freecell (pfile#) 34 | 35 | if './freecell' == dom: 36 | extra_domain = True 37 | 38 | domdata['problems'] = [((dom+'/domain.pddl')[2:], prob[2:]) for prob in sorted(get_file_list(dom, forbidden_list=forbidden_files+['pfile','/domain.pddl']))] 39 | domdata['ipc'] = '2000' 40 | 41 | domdata2 = {} 42 | domdata2['name'] = domdata['name'] 43 | domdata2['description'] = domain_description[get_name(dom)] 44 | domdata2['problems'] = [((dom+'/domain.pddl')[2:], prob[2:]) for prob in sorted(get_file_list(dom, forbidden_list=forbidden_files+['/domain.pddl'], match_list=['pfile']))] 45 | domdata2['ipc'] = '2002' 46 | 47 | elif './satellite' == dom: 48 | extra_domain = True 49 | 50 | domdata['problems'] = [((dom+'/domain.pddl')[2:], prob[2:]) for prob in sorted(get_file_list(dom, 
forbidden_list=forbidden_files+['/domain.pddl']))] 51 | domdata['ipc'] = ipc_map.get(dom[2:]) 52 | 53 | domdata2 = {} 54 | domdata2['name'] = domdata['name'] 55 | domdata2['description'] = domain_description[get_name(dom)] 56 | domdata2['problems'] = [((dom+'/domain.pddl')[2:], prob[2:]) for prob in sorted(get_file_list(dom, forbidden_list=forbidden_files+['/domain.pddl','-HC-']))] 57 | domdata2['ipc'] = '2002' 58 | 59 | else: 60 | domdata['problems'] = [((dom+'/domain.pddl')[2:], prob[2:]) for prob in sorted(get_file_list(dom, forbidden_list=forbidden_files+['/domain.pddl','/domain-nosplit.pddl','/orig-domain.pddl']))] 61 | domdata['ipc'] = ipc_map.get(dom[2:]) 62 | 63 | towrite += pprint.pformat(domdata) 64 | if extra_domain: 65 | towrite += ',\n' 66 | towrite += pprint.pformat(domdata2) 67 | towrite += '\n]' 68 | 69 | #print "To be printed:\n-------" 70 | #print towrite 71 | #print "-------\n" 72 | 73 | print ("Handling single domain: %s" % dom) 74 | write_file(dom+'/api.py', towrite) 75 | 76 | 77 | def handle_double(dom): 78 | towrite = 'domains = [\n' 79 | 80 | domdata = {} 81 | 82 | domdata['name'] = get_name(dom) 83 | domdata['description'] = domain_description[get_name(dom)] 84 | 85 | domfiles = get_file_list(dom, match_list=['domain'], forbidden_list=forbidden_files) 86 | prbfiles = get_file_list(dom, forbidden_list=forbidden_files+['domain']) 87 | 88 | if len(domfiles) == len(prbfiles): 89 | def remdom(dom): 90 | toret = dom 91 | for s in ['-domain', 'domain_']: 92 | toret = ''.join(toret.split(s)) 93 | return toret 94 | dmap = {remdom(d): d for d in domfiles} 95 | if all([k in prbfiles for k in dmap]): 96 | print ("Handling multi-domain: %s" % dom) 97 | assert len(set(dmap.keys())) == len(set(prbfiles)) 98 | domdata['problems'] = [(dmap[prob][2:], prob[2:]) for prob in sorted(prbfiles)] 99 | domdata['ipc'] = ipc_map.get(dom[2:]) 100 | elif dom in ['./psr-small', './airport']: 101 | print ("Handling custom 50-problem domain: %s" % dom) 102 | assert 100 == 
len(get_file_list(dom, match_list=['pddl'], forbidden_list=forbidden_files)) 103 | probs = [] 104 | for i in range(1,51): 105 | d = get_file_list(dom, match_list=["p%02d-domain" % i], forbidden_list=forbidden_files) 106 | p = get_file_list(dom, match_list=["p%02d-" % i], forbidden_list=forbidden_files+['domain']) 107 | assert 1 == len(d), str(d) 108 | assert 1 == len(p), str(p) 109 | probs.append((d[0][2:], p[0][2:])) 110 | domdata['problems'] = sorted(probs) 111 | domdata['ipc'] = ipc_map.get(dom[2:]) 112 | else: 113 | print ("Unhandled balanced multi-domain: %s" % dom) 114 | return 115 | else: 116 | print ("Unhandled lopsided multi-domain: %s" % dom) 117 | 118 | 119 | towrite += pprint.pformat(domdata) 120 | towrite += '\n]' 121 | 122 | #print "To be printed:\n-------" 123 | #print towrite 124 | #print "-------\n" 125 | 126 | write_file(dom+'/api.py', towrite) 127 | 128 | 129 | domains = get_file_list('.', forbidden_list=['.py']) 130 | 131 | single_dom = [] 132 | multi_dom = [] 133 | done_dom = [] 134 | 135 | print 136 | 137 | for dom in domains: 138 | 139 | if os.path.isfile(dom+'/api.py'): 140 | done_dom.append(dom) 141 | else: 142 | if os.path.isfile(dom+'/domain.pddl'): 143 | single_dom.append(dom) 144 | for i in get_file_list(dom, forbidden_list=forbidden_files+['/domain.pddl']): 145 | if 'dom' in i.split('/')[-1]: 146 | print ("Warning: Double domain in %s must be handled." 
% dom) 147 | else: 148 | multi_dom.append(dom) 149 | os.system("touch %s/__init__.py" % dom) 150 | 151 | print ("\nSingle doms: %d" % len(single_dom)) 152 | print (map(get_name, single_dom)) 153 | print ("\nMulti doms: %d" % len(multi_dom)) 154 | print (map(get_name, multi_dom)) 155 | print ("\nDone doms: %d" % len(done_dom)) 156 | print (map(get_name, done_dom)) 157 | print () 158 | 159 | for ipc in ipcs: 160 | for dom in ipc: 161 | if not os.path.isdir('./'+dom): 162 | print ("Bad dom: %s" % dom) 163 | 164 | for dom in single_dom: 165 | handle_single(dom) 166 | 167 | for dom in multi_dom: 168 | handle_double(dom) 169 | 170 | print 171 | -------------------------------------------------------------------------------- /scripts/formalism-initialization/classical/data.py: -------------------------------------------------------------------------------- 1 | 2 | # Mapping of folders to the IPC they come from 3 | 4 | # Special cases not included: 5 | # IPC-2000: freecell (non-pfiles) 6 | # IPC-2002: satellite (p#-pfile#.pddl) 7 | # IPC-2002: freecell (pfile#) 8 | 9 | ipc1998 = 'assembly / gripper / logistics98 / movie / mprime / mystery'.split(' / ') 10 | ipc2000 = 'blocks / elevators-00-full / elevators-00-adl / elevators-00-strips / logistics00 / schedule'.split(' / ') 11 | ipc2002 = 'depot / driverlog / zenotravel / rovers-02'.split(' / ') 12 | ipc2004 = 'airport / airport-adl / pipesworld-tankage / pipesworld-notankage / optical-telegraphs / philosophers / psr-small / psr-middle / psr-large / satellite / settlers'.split(' / ') 13 | ipc2006 = 'openstacks / pathways / pipesworld-06 / rovers / storage / tpp / trucks'.split(' / ') 14 | ipc2008 = 'elevators-opt08-strips / openstacks-opt08-adl / openstacks-opt08-strips / parcprinter-08-strips / pegsol-08-strips / scanalyzer-08-strips / sokoban-opt08-strips / transport-opt08-strips / woodworking-opt08-strips / elevators-sat08-strips / openstacks-sat08-adl / openstacks-sat08-strips / sokoban-sat08-strips / 
transport-sat08-strips / woodworking-sat08-strips / cybersec'.split(' / ') 15 | ipc2011 = 'barman-opt11-strips / elevators-opt11-strips / floortile-opt11-strips / nomystery-opt11-strips / openstacks-opt11-strips / parcprinter-opt11-strips / parking-opt11-strips / pegsol-opt11-strips / scanalyzer-opt11-strips / sokoban-opt11-strips / tidybot-opt11-strips / transport-opt11-strips / visitall-opt11-strips / woodworking-opt11-strips / barman-sat11-strips / elevators-sat11-strips / floortile-sat11-strips / nomystery-sat11-strips / openstacks-sat11-strips / parcprinter-sat11-strips / parking-sat11-strips / pegsol-sat11-strips / scanalyzer-sat11-strips / sokoban-sat11-strips / tidybot-sat11-strips / transport-sat11-strips / visitall-sat11-strips / woodworking-sat11-strips'.split(' / ') 16 | ipc2014 = 'barman-opt14-strips / barman-sat14-strips / cavediving / childsnack-opt14-strips / childsnack-sat14-strips / citycar-opt14-adl / citycar-sat14-adl / floortile-opt14-strips / floortile-sat14-strips / ged-opt14-strips / ged-sat14-strips / hiking-opt14-strips / hiking-sat14-strips / maintenance-opt14-adl / maintenance-sat14-adl / openstacks-opt14-strips / openstacks-sat14-strips / parking-opt14-strips / parking-sat14-strips / tetris-opt14-strips / tetris-sat14-strips / thoughtful-sat14-strips / tidybot-opt14-strips / transport-opt14-strips / transport-sat14-strips / visitall-opt14-strips / visitall-sat14-strips'.split(' / ') 17 | 18 | ipcs = [ipc1998, ipc2000, ipc2002, ipc2004, ipc2006, ipc2008, ipc2011, ipc2014] 19 | 20 | ipc_map = {} 21 | 22 | for (ipc,year) in zip(ipcs, '1998 2000 2002 2004 2006 2008 2011 2014'.split()): 23 | for dom in ipc: 24 | ipc_map[dom] = year 25 | 26 | # General domain descriptions to populate the database 27 | domain_description = { 28 | 'airport': 'Developed by Jorg Hoffmann and Sebastian Trug. Planners control the ground traffic on airports. 
The competition test suites were generated by exporting traffic situations arising during simulation runs in the airport simulation tool Astras (by Wolfgang Hatzack). The largest instances in the test suites are realistic encodings of Munich airport.', 29 | 'assembly': 'The goal is to assemble a complex object made out of subassemblies. The sequence of steps must obey a given partial order. In addition, through poor engineering design, many subassemblies must be installed temporarily in one assembly, then removed and given a permanent home in another. This domain was created by Drew McDermott.', 30 | 'barman': 'In this domain there is a robot barman that manipulates drink dispensers, glasses and a shaker. The goal is to find a plan of the robots actions that serves a desired set of drinks. In this domain deletes of actions encode relevant knowledge given that robot hands can only grasp one object at a time and given that glasses need to be empty and clean to be filled.', 31 | 'blocks': 'The blocks world is one of the most famous planning domains in artificial intelligence. Imagine a set of cubes (blocks) sitting on a table. The goal is to build one or more vertical stacks of blocks. The catch is that only one block may be moved at a time: it may either be placed on the table or placed atop another block. Because of this, any blocks that are, at a given time, under another block cannot be moved.', 32 | 'cavediving': 'There are a set of divers, each of who can carry 4 tanks of air. These divers must be hired to go into an underwater cave and either take photos or prepare the way for other divers by dropping full tanks of air. The cave is too narrow for more than one diver to enter at a time. The cave system is represented by an undirected acyclic graph. Divers have a single point of entry. Certain leaf nodes of the cave branches are objectives that the divers must photograph. Swimming and photographing both consume air tanks. 
Divers must exit the cave and decompress at the end. They can therefore only make a single trip into the cave. Certain divers have no confidence in other divers and will refuse to work if someone they have no confidence in has already worked. Divers have hiring costs inversely proportional to how hard they are to work with.', 33 | 'childsnack': 'This domain is to plan how to make and serve sandwiches for a group of children in which some are allergic to gluten. There are two actions for making sandwiches from their ingredients. The first one makes a sandwich and the second one makes a sandwich taking into account that all ingredients are gluten-free. There are also actions to put a sandwich on a tray and to serve sandwiches. Problems in this domain define the ingredients to make sandwiches at the initial state. Goals consist of having all kids served with a sandwich to which they are not allergic.', 34 | 'citycar': 'This model aims to simulate the impact of road building / demolition on traffic flows. A city is represented as an acyclic graph, in which each node is a junction and edges are "potential" roads. Some cars start from different positions and have to reach their final destination as soon as possible. The agent has a finite number of roads available, which can be built for connecting two junctions and allowing a car to move between them. Roads can also be removed, and placed somewhere else, if needed. In order to place roads or to move cars, the destination junction must be clear, i.e., no cars should be in there.', 35 | 'cybersec': 'A domain that models the cyber security model of vulnerability analysis for cyber defense.', 36 | 'depot': 'This domain was devised in order to see what would happen if two previously well-researched domains were joined together. These were the logistics and blocks domains. They are combined to form a domain in which trucks can transport crates around and then the crates must be stacked onto pallets at their destinations. 
The stacking is achieved using hoists, so the stacking problem is like a blocks-world problem with hands. Trucks can behave like "tables", since the pallets on which crates are stacked are limited.', 37 | 'driverlog': 'This domain involves driving trucks around delivering packages between locations. The complication is that the trucks require drivers who must walk between trucks in order to drive them. The paths for walking and the roads for driving form different maps on the locations.', 38 | 'elevators': 'The scenario is the following: There is a building with N+1 floors, numbered from 0 to N. The building can be separated in blocks of size M+1, where M divides N. Adjacent blocks have a common floor. For example, suppose N=12 and M=4, then we have 13 floors in total (ranging from 0 to 12), which form 3 blocks of 5 floors each, being 0 to 4, 4 to 8 and 8 to 12. The building has K fast (accelarating) elevators that stop only in floors that are multiple of M/2 (so M has to be an even number). Each fast elevator has a capacity of X persons. Furthermore, within each block, there are L slow elevators, that stop at every floor of the block. Each slow elevator has a capacity of Y persons (usually Y=1 is a (float) input parameter of the generator. The parameter C denotes the ratio between the available fuel vs. the minimum amount required. The problem becomes more constrained when C approaches 1.', 53 | 'nomystery': 'Nomystery is a transportation domain designed to study resource constrained planning. In this domain, a truck moves in a weighted graph; a set of packages must be transported between nodes; actions move along edges, and load/unload packages; each move consumes the edge weight in fuel. In brief, Nomystery is a straightforward problem similar to the ones contained in many IPC benchmarks. Its key feature is that it comes with a domain-specific optimal solver allowing to control the constrainedness of the resources. 
The generator first creates a random connected undirected graph with n nodes, and it adds k packages with random origins and destinations. The edge weights are uniformly drawn between 1 and an integer W. The optimal solver computes the minimum required amount of fuel M, and the initial fuel supply is set to [C xM], where C >=1 is a (float) input parameter of the generator. The parameter C denotes the ratio between the available fuel vs. the minimum amount required. The problem becomes more constrained when C approaches 1.', 54 | 'openstacks': 'The openstacks domain is based on the "minimum maximum simultaneous open stacks" combinatorial optimization problem, which can be stated as follows: A manufacturer has a number of orders, each for a combination of different products, and can only make one product at a time. The total required quantity of each product is made at the same time (because changing from making one product to making another requires a production stop). From the time that the first product included in an order is made to the time that all products included in the order have been made, the order is said to be "open" and during this time it requires a "stack" (a temporary storage space). The problem is to order the making of the different products so that the maximum number of stacks that are in use simultaneously, or equivalently the number of orders that are in simultaneous production, is minimized (because each stack takes up space in the production area).', 55 | 'optical-telegraphs': 'Developed by Stefan Edelkamp. Planners are asked to find deadlocks in communication protocols, translated into PDDL from the Promela specification language. Deadlocks were specified via blocked transitions and processes. The representation chosen for the processes are finite state transition systems, while communication channels are modelled by queues with moving head and tail pointers. 
The communication protocols used in IPC-4 were the dining philosophers problem, as well as an optical telegraph routing problem.', 56 | 'parcprinter': 'This domain models the operation of the multi-engine printer, for which one prototype is developed at the Palo Alto Research Center (PARC). This type of printer can handle multiple print jobs simultaneously. Multiple sheets, belonging to the same job or different jobs, can be printed simultaneously using multiple Image Marking Engines (IME). Each IME can either be color, which can print both color and black&white images, or mono, which can only print black&white image. Each sheet needs to go through multiple printer components such as feeder, transporter, IME, inverter, finisher and need to arrive at the finisher in order. Thus, sheet (n+1) needs to be stacked in the same finisher with sheet n of the same job, but needs to arrive at the finisher right after sheet n (no other sheet stacked in between those two consecutive sheets). Given that the IMEs are heterogeneous (mixture of color and mono) and can run at different speeds, optimizing the operation of this printer for a mixture of print jobs, each of them is an arbitrary mixture of color/b&w pages that are either simplex (one-sided print) or duplex (two-sided print) is a hard problem.', 57 | 'parking': 'This domain is original from the learning part of IPC2008. The domain involves parking cars on a street with N curb locations, and where cars can be double-parked but not triple-parked. The goal is to find a plan to move from one configuration of parked cars to another configuration, by driving cars from one curb location to another. 
The problems in the competition contain 2*(N-1) cars, which allows one free curb space and guarantees solvability.', 58 | 'pathways': 'Finding a sequence of biochemical (pathways) reactions in an organism producing certain substances.', 59 | 'pathways-noneg': 'Finding a sequence of biochemical (pathways) reactions in an organism producing certain substances.', 60 | 'pegsol': 'This domain models the Peg Solitaire game. (http://en.wikipedia.org/wiki/Peg_solitaire)', 61 | 'philosophers': 'Developed by Stefan Edelkamp. Planners are asked to find deadlocks in communication protocols, translated into PDDL from the Promela specification language. Deadlocks were specified via blocked transitions and processes. The representation chosen for the processes are finite state transition systems, while communication channels are modelled by queues with moving head and tail pointers. The communication protocols used in IPC-4 were the dining philosophers problem, as well as an optical telegraph routing problem.', 62 | 'pipesworld': 'Developed by Frederico Liporace and Jorg Hoffmann. Planners control the flow of oil derivatives through a pipeline network, obeying various constraints such as product compatibility, tankage restrictions, and (in the most complex domain version) goal deadlines. One interesting aspect of the domain is that, if one inserts something into the one end of a pipeline segment, something potentially completely different comes out at the other end. This gives rise to several subtle phenomena that can arise in the creation of a plan.', 63 | 'pipesworld-notankage': 'Developed by Frederico Liporace and Jorg Hoffmann. Planners control the flow of oil derivatives through a pipeline network, obeying various constraints such as product compatibility, tankage restrictions, and (in the most complex domain version) goal deadlines. 
One interesting aspect of the domain is that, if one inserts something into the one end of a pipeline segment, something potentially completely different comes out at the other end. This gives rise to several subtle phenomena that can arise in the creation of a plan.', 64 | 'pipesworld-tankage': 'Developed by Frederico Liporace and Jorg Hoffmann. Planners control the flow of oil derivatives through a pipeline network, obeying various constraints such as product compatibility, tankage restrictions, and (in the most complex domain version) goal deadlines. One interesting aspect of the domain is that, if one inserts something into the one end of a pipeline segment, something potentially completely different comes out at the other end. This gives rise to several subtle phenomena that can arise in the creation of a plan.', 65 | 'psr-large': 'Developed by Sylvie Thiebaux and Jorg Hoffmann. Planners must resupply a number of lines in a faulty electricity network. The flow of electricity through the network, at any point in time, is given by a transitive closure over the network connections, subject to the states of the switches and electricity supply devices. The domain is therefore a good example of the usefulness of derived predicates in real-world applications.', 66 | 'psr-middle': 'Developed by Sylvie Thiebaux and Jorg Hoffmann. Planners must resupply a number of lines in a faulty electricity network. The flow of electricity through the network, at any point in time, is given by a transitive closure over the network connections, subject to the states of the switches and electricity supply devices. The domain is therefore a good example of the usefulness of derived predicates in real-world applications.', 67 | 'psr-small': 'Developed by Sylvie Thiebaux and Jorg Hoffmann. Planners must resupply a number of lines in a faulty electricity network. 
The flow of electricity through the network, at any point in time, is given by a transitive closure over the network connections, subject to the states of the switches and electricity supply devices. The domain is therefore a good example of the usefulness of derived predicates in real-world applications.', 68 | 'rovers': 'Inspired by planetary rovers problems, this domain requires that a collection of rovers navigate a planet surface, finding samples and communicating them back to a lander.', 69 | 'satellite': 'The first of the domains inspired by space-applications is a first step towards the "Ambitious Spacecraft" described by David Smith at AIPS-2000. It involves planning and scheduling a collection of observation tasks between multiple satellites, each equipped in slightly different ways.', 70 | 'scanalyzer': 'This domain models the problem of automatic greenhouse logistic management.', 71 | 'schedule': 'Encodes a simple Scheduling kind of problem where a number of objects need to be processed using a collection of machines. Possible actions are polishing, punching holes, painting etc. All actions need uniform time, which is modelled by a do-time-step operator. If that operator is applied, then all busy machines are no longer busy, and all scheduled objects are no longer scheduled---this is also an example of the kind of conditional effects that are used in the representation.', 72 | 'settlers': 'This one was for the numeric track and proved to be a very tough resource management domain. Several interesting issues in encoding arise as well as the subsequent problem of planning with the domain. In particular, resources can be combined to construct vehicles of various kinds. Since these vehicles are not available initially, this is an example of a problem in which new objects are created. PDDL does not conveniently support this concept at present, so it is necessary to name "potential" vehicles at the outset, which can be realised through construction. 
A very high degree of redundant symmetry exists between these "potential" vehicles, since it does not matter which vehicle names are actually used for the vehicles that are realised in a plan. Planners that begin by grounding all actions can be swamped by the large numbers of potential actions involving these potential vehicles, which could be realised as one of several different types of actual vehicles. Plan quality is judged by a linear combination of labour use, pollution creation and resource consumption. There is scope for constructing very hard metrics that involve maximising housing construction subject to an increasing pollution penalty (say), to ensure that optimal plan quality is bounded.', 73 | 'sokoban': 'This domain models the Sokoban game. (http://en.wikipedia.org/wiki/Sokoban)', 74 | 'storage': 'Moving and storing crates of goods by hoists from containers to depots with spatial maps.', 75 | 'tetris': 'This is a simplified version of the well-known Tetris. All the pieces (1x1, 2x1, L) are randomly distributed on an NxN grid. The goal is to move them in order to free the upper half of the grid. The pieces can be rotated or translated. Each movement action has a different cost, according to the size of the piece.', 76 | 'thoughtful': 'This domain is original from the learning part of IPC2008. The domain represents a well-known solitaire card game.', 77 | 'tidybot': 'The Tidybot domain models a household cleaning task, in which one or more robots must pick up a set of objects and put them into goal locations. The world is structured as a 2d grid, divided into navigable locations and surfaces on which objects may lie. Robots have a gripper, which moves relative to the robot, up to some maximum radius. Existing objects block the gripper, so that it may be necessary to move one object out of the way to put another one down. Robots can carry one object at a time in the gripper, but may also make use of a cart, that can hold multiple objects. 
The instance generator creates worlds that contain rectangular surfaces ("tables"), as well as U-shaped enclosures ("cupboards"), which are the goal locations of objects.', 78 | 'tpp': 'Traveling and buying goods at selected markets minimizing costs (from OR with variants, NP-hard).', 79 | 'transport': 'Each vehicle can transport some packages depending on its capacity and moving has a cost depending on the length of the road. Picking up or dropping a package costs 1.', 80 | 'trucks': 'Moving packages between locations by trucks under certain spatial constraints and delivering deadlines.', 81 | 'visitall': 'An agent in the middle of a square grid nxn must visit all the cells in the grid. Solving optimally the delete relaxation h+ gives the exact goal distance as long as there exists a hamiltonian path visiting every cell. Recall that in a 1xn grid, no hamiltonian path exists.', 82 | 'woodworking': 'Simulates the works in a woodworking workshop where there is some quantity of wood that has to be polished, coloured, etc. using different tools with different costs. Parameters of each problem are the parts to be done and the quantity (in % of necessary) of available wood (boards). The higher the number of parts and the boards the more difficult the problem is.', 83 | 'zenotravel': 'The zenotravel domain involves transporting people around in planes, using different modes of movement: fast and slow. The key to this domain is that, where the expressive power of the numeric tracks is used, the fast movement consumes fuel faster than slow movement, making the search for a good quality plan (one using less fuel) much harder.', 84 | # 85 | # FF Domains 86 | 'tyreworld': 'Typed STRIPS domain. Replace a flat tyre with a spare one. This involves fetching the tools (wrench, jack, pump) from the boot, undoing the nuts on the flat tyre, jacking up the (appropriate) hub(s), removing the tyre, doing up the spare one, etc. 
Adapted for several tyres by simply increasing the number of flat tyres to be replaced.', 87 | 'tsp': 'Untyped STRIPS domain. Extremely simple version of TSP. The locations are connected by a complete graph, i.e. each location is accessible from each other location. The edges all have equal cost---one moving operation---and the goal is simply to have all locations visited. An optimal solution simply visits all locations once in an arbitrary ordering.', 88 | 'briefcaseworld': 'Typed classical ADL domain, using conditional effects. Transport a number of objects from their start- to their goal-locations, using a briefcase. Each location is accessible from each other location, objects can be put into the briefcase or taken out of the briefcase. When a move is made between locations, then all objects inside the briefcase are also moved, which is encoded by a conditional effect.', 89 | 'hanoi': 'Untyped STRIPS encoding of the well-known Towers of Hanoi problem.', 90 | 'fridge': 'Typed ADL domain using complex ADL preconditions (that simplify to STRIPS constructs after instantiation). Original was STRIPS domain, adaptation uses quantification over all screws in preconditions, to allow for a flexible number of those. For a number of fridges, unfasten the screws holding the backplane, then remove the backplanes and exchange the broken compressor with a new one, then re-assemble the fridge and turn it on.', 91 | 'ferry': 'Untyped STRIPS domain. Transport a number of cars from their start- to their goal-locations, using a ferry. Each location is accessible from each other location, cars can be debarked or boarded, the ferry can always carry only one car at a time.', 92 | 'blocks-3op': 'Classical untyped STRIPS domain, where stackable blocks need to be re-assembled on a table with unlimited space. Representation uses 3 operators, moving a block from the table to another block, a block from another block to the table, or a block from a block to another block. 
Semantically, the representation does not use a robot arm, in difference to the 4 operator representation. The initial state specifies a complete state, the goal state specifies only the on relations required between any two blocks.' 93 | } 94 | 95 | -------------------------------------------------------------------------------- /scripts/formalism-initialization/classical/gen-db.py: -------------------------------------------------------------------------------- 1 | 2 | import importlib, os, pprint 3 | from krrt.utils import get_file_list, write_file 4 | 5 | ################################ 6 | ## 7 | ## This should be set from 8 | ## the output of this script 9 | ## 10 | ############################# 11 | 12 | done_domains = set(['./pathways-noneg', './openstacks-opt08-strips', './tetris-sat14-strips', './movie', './openstacks-sat14-strips', './elevators-00-strips', './miconic', './childsnack-sat14-strips', './cybersec', './transport-opt11-strips', './woodworking-sat08-strips', './visitall-opt11-strips', './pipesworld-06', './scanalyzer-sat11-strips', './transport-sat14-strips', './openstacks-sat11-strips', './parking-sat11-strips', './pipesworld-tankage', './openstacks-sat08-adl', './elevators-opt11-strips', './maintenance-sat14-adl', './citycar-sat14-adl', './mystery', './tidybot-opt11-strips', './driverlog', './trucks', './logistics00', './thoughtful-sat14-strips', './pipesworld-notankage', './transport-sat08-strips', './parcprinter-opt11-strips', './airport-adl', './pegsol-08-strips', './elevators-00-adl', './logistics98', './tidybot-sat11-strips', './woodworking-sat11-strips', './elevators-00-full', './freecell', './openstacks-strips', './cavediving', './depot', './sokoban-opt08-strips', './visitall-sat11-strips', './visitall-opt14-strips', './miconic-simpleadl', './elevators-sat08-strips', './zenotravel', './no-mprime', './maintenance-opt14-adl', './scanalyzer-08-strips', './floortile-opt14-strips', './parking-opt14-strips', './sokoban-sat08-strips', 
'./citycar-opt14-adl', './woodworking-opt08-strips', './childsnack-opt14-strips', './blocks', './openstacks-opt14-strips', './tidybot-opt14-strips', './pegsol-sat11-strips', './openstacks-opt11-strips', './tetris-opt14-strips', './elevators-opt08-strips', './barman-opt11-strips', './parcprinter-sat11-strips', './psr-small', './floortile-sat14-strips', './barman-sat14-strips', './visitall-sat14-strips', './sokoban-opt11-strips', './airport', './sokoban-sat11-strips', './transport-opt08-strips', './psr-large', './optical-telegraphs', './parking-opt11-strips', './transport-sat11-strips', './no-mystery', './miconic-fulladl', './gripper', './philosophers', './openstacks-sat08-strips', './scanalyzer-opt11-strips', './rovers', './psr-middle', './parcprinter-08-strips', './mprime', './schedule', './barman-sat11-strips', './settlers', './pegsol-opt11-strips', './barman-opt14-strips', './ged-sat14-strips', './rovers-02', './floortile-opt11-strips', './parking-sat14-strips', './elevators-sat11-strips', './openstacks', './floortile-sat11-strips', './hiking-opt14-strips', './pathways', './assembly', './nomystery-sat11-strips', './hiking-sat14-strips', './openstacks-opt08-adl', './storage', './tpp', './nomystery-opt11-strips', './ged-opt14-strips', './trucks-strips', './grid', './transport-opt14-strips', './satellite', './woodworking-opt11-strips']) 13 | 14 | domains = get_file_list('.', forbidden_list=['.py']) 15 | 16 | def gen_js(domain): 17 | toret = "\t\tDomain.forge({dom_name:'%s', description:'%s'}).save().then(function(model) {\n" % (domain['name'], domain['description']) 18 | for (d,p) in domain['problems']: 19 | pname = p.split('/')[-1] 20 | domurl = "classical/%s" % d 21 | proburl = "classical/%s" % p 22 | toret += "\t\t\tProblem.forge({prob_name:'%s', domain:model.id, dom_url:'%s', prob_url:'%s'}).save();\n" % (pname, domurl, proburl) 23 | toret += "\t\t});\n" 24 | return toret 25 | 26 | dbcode = '' 27 | for dom in domains: 28 | if dom not in done_domains: 29 | 
done_domains.add(dom) 30 | mod = importlib.import_module(dom[2:] + '.api') 31 | for d in mod.domains: 32 | dbcode += '\n' 33 | dbcode += gen_js(d) 34 | print ("\ndone_domains = %s\n" % str(done_domains)) 35 | write_file('out.js.py', dbcode) 36 | -------------------------------------------------------------------------------- /scripts/formalism-initialization/rddl.py: -------------------------------------------------------------------------------- 1 | 2 | # Documents new collections *Formatted for incoming RDDL collections on 2023-03-29 3 | # https://github.com/ataitler/rddlrepository 4 | 5 | 6 | import os, sys, glob 7 | 8 | import importlib.util 9 | 10 | def add_IPPC_instances(rootdir): 11 | # Use glob to get IPPC directories, ignoring __init__.py 12 | ippc_folders = glob.glob(f"{rootdir}/*/") 13 | 14 | assert len(ippc_folders) == 3, "There should be 3 IPPC folders" 15 | 16 | # Handle IPPC2011 and IPPC2014 first 17 | for ippc_name in ['IPPC2011', 'IPPC2014']: 18 | # Get the domain folders 19 | domain_folders = glob.glob(f"{rootdir}/{ippc_name}/*/") 20 | 21 | # Assert each domain folder has MDP and POMDP folders 22 | for domain_folder in domain_folders: 23 | assert os.path.isdir(f"{domain_folder}MDP/"), f"{domain_folder} does not have MDP folder" 24 | assert os.path.isdir(f"{domain_folder}POMDP/"), f"{domain_folder} does not have POMDP folder" 25 | 26 | for style in ['MDP', 'POMDP']: 27 | # folder with the instances 28 | instance_folder = f"{domain_folder}{style}/" 29 | 30 | # import the __init__.py file 31 | spec = importlib.util.spec_from_file_location("module_name", f"{instance_folder}/__init__.py") 32 | module = importlib.util.module_from_spec(spec) 33 | spec.loader.exec_module(module) 34 | domain_info = module.info 35 | 36 | # Get the instances and domain file 37 | instance_list = glob.glob(f"{instance_folder}/instance*") 38 | domain_file = glob.glob(f"{instance_folder}/domain*.rddl") 39 | 40 | # Make sure there is only one domain file 41 | assert 
len(domain_file) == 1, f"{instance_folder} has more than one domain file" 42 | 43 | # Make sure there is more than one instance 44 | assert len(instance_list) > 1, f"{instance_folder} has less than two instances" 45 | 46 | # TODO: Work with the info, domain, and instances to create the collection, domain, and problems 47 | 48 | 49 | print(f'{ippc_name}: {len(domain_folders)} domains') 50 | 51 | # Handle IPPC2018 52 | ippc_name = 'IPPC2018' 53 | # Get the domain folders 54 | domain_folders = glob.glob(f"{rootdir}/{ippc_name}/*/") 55 | print(f'{ippc_name}: {len(domain_folders)} domains') 56 | 57 | def add_standalone_instances(rootdir): 58 | pass 59 | 60 | topdir = input("Path to top-level directory: ") 61 | 62 | if not os.path.isdir(topdir): 63 | print("Provided path is not a valid directory.") 64 | sys.exit(1) 65 | 66 | add_IPPC_instances(topdir) 67 | # add_standalone_instances(topdir) 68 | 69 | exit(0) 70 | 71 | name = info['name'] 72 | description = info['description'] 73 | ipc = info['context'] 74 | tags = info['tags'] 75 | viz = info['viz'] 76 | 77 | # Call planning_domains_api.py function 78 | current = os.path.dirname(os.path.realpath(__file__)) 79 | parent_directory = os.path.dirname(current) 80 | sys.path.append(parent_directory) 81 | 82 | import planning_domains_api as planning_api 83 | planning_api.create_collection(name, description, tags, ipc, formalism) 84 | -------------------------------------------------------------------------------- /scripts/tag-fix/processed_result5.json: -------------------------------------------------------------------------------- 1 | { 2 | "63": { 3 | "stated": [ 4 | ":typing", 5 | ":action-costs" 6 | ], 7 | "val": [ 8 | ":typing", 9 | ":action-costs" 10 | ] 11 | }, 12 | "83": { 13 | "stated": [ 14 | ":strips" 15 | ], 16 | "val": [ 17 | ":strips" 18 | ] 19 | }, 20 | "99": { 21 | "stated": [ 22 | ":adl", 23 | ":typing" 24 | ], 25 | "val": [ 26 | ":typing", 27 | ":conditional-effects", 28 | ":negative-preconditions" 29 | ] 30 | 
}, 31 | "17": { 32 | "stated": [], 33 | "val": [ 34 | ":strips" 35 | ] 36 | }, 37 | "12": { 38 | "stated": [ 39 | ":strips" 40 | ], 41 | "val": [ 42 | ":strips" 43 | ] 44 | }, 45 | "22": { 46 | "stated": [ 47 | ":equality", 48 | ":action-costs" 49 | ], 50 | "val": [ 51 | ":equality", 52 | ":action-costs" 53 | ] 54 | }, 55 | "16": { 56 | "stated": [ 57 | ":strips" 58 | ], 59 | "val": [ 60 | ":strips" 61 | ] 62 | }, 63 | "15": { 64 | "stated": [ 65 | ":adl", 66 | ":typing" 67 | ], 68 | "val": [ 69 | ":typing", 70 | ":equality", 71 | ":conditional-effects", 72 | ":negative-preconditions" 73 | ] 74 | }, 75 | "87": { 76 | "stated": [ 77 | ":strips", 78 | ":typing", 79 | ":action-costs" 80 | ], 81 | "val": [ 82 | ":typing", 83 | ":action-costs" 84 | ] 85 | }, 86 | "19": { 87 | "stated": [ 88 | ":strips", 89 | ":typing" 90 | ], 91 | "val": [ 92 | ":typing", 93 | ":strips" 94 | ] 95 | }, 96 | "8": { 97 | "stated": [ 98 | ":typing", 99 | ":action-costs" 100 | ], 101 | "val": [ 102 | ":typing", 103 | ":action-costs" 104 | ] 105 | }, 106 | "5": { 107 | "stated": [ 108 | ":typing", 109 | ":action-costs" 110 | ], 111 | "val": [ 112 | ":typing", 113 | ":action-costs" 114 | ] 115 | }, 116 | "4": { 117 | "stated": [ 118 | ":typing", 119 | ":action-costs" 120 | ], 121 | "val": [ 122 | ":typing", 123 | ":action-costs" 124 | ] 125 | }, 126 | "13": { 127 | "stated": [ 128 | ":typing", 129 | ":action-costs" 130 | ], 131 | "val": [ 132 | ":typing", 133 | ":action-costs" 134 | ] 135 | }, 136 | "11": { 137 | "stated": [ 138 | ":typing", 139 | ":action-costs" 140 | ], 141 | "val": [ 142 | ":typing", 143 | ":action-costs" 144 | ] 145 | }, 146 | "25": { 147 | "stated": [ 148 | ":strips", 149 | ":typing" 150 | ], 151 | "val": [ 152 | ":typing", 153 | ":strips" 154 | ] 155 | }, 156 | "9": { 157 | "stated": [ 158 | ":typing", 159 | ":action-costs" 160 | ], 161 | "val": [ 162 | ":typing", 163 | ":action-costs" 164 | ] 165 | }, 166 | "51": { 167 | "stated": [ 168 | ":typing", 169 | 
":action-costs" 170 | ], 171 | "val": [ 172 | ":typing", 173 | ":action-costs" 174 | ] 175 | }, 176 | "96": { 177 | "stated": [ 178 | ":typing", 179 | ":action-costs" 180 | ], 181 | "val": [ 182 | ":typing", 183 | ":negative-preconditions", 184 | ":action-costs" 185 | ] 186 | }, 187 | "40": { 188 | "stated": [ 189 | ":strips" 190 | ], 191 | "val": [ 192 | ":strips" 193 | ] 194 | }, 195 | "78": { 196 | "stated": [ 197 | ":typing", 198 | ":action-costs" 199 | ], 200 | "val": [ 201 | ":typing", 202 | ":action-costs" 203 | ] 204 | }, 205 | "102": { 206 | "stated": [], 207 | "val": [ 208 | ":strips" 209 | ] 210 | }, 211 | "27": { 212 | "stated": [ 213 | ":typing" 214 | ], 215 | "val": [ 216 | ":typing", 217 | ":strips" 218 | ] 219 | }, 220 | "56": { 221 | "stated": [ 222 | ":typing", 223 | ":equality", 224 | ":negative-preconditions", 225 | ":action-costs" 226 | ], 227 | "val": [ 228 | ":typing", 229 | ":equality", 230 | ":negative-preconditions", 231 | ":action-costs" 232 | ] 233 | }, 234 | "39": { 235 | "stated": [ 236 | ":typing", 237 | ":equality", 238 | ":negative-preconditions", 239 | ":action-costs" 240 | ], 241 | "val": [ 242 | ":typing", 243 | ":equality", 244 | ":negative-preconditions", 245 | ":action-costs" 246 | ] 247 | }, 248 | "62": { 249 | "stated": [ 250 | ":strips" 251 | ], 252 | "val": [ 253 | ":strips" 254 | ] 255 | }, 256 | "103": { 257 | "stated": [ 258 | ":adl" 259 | ], 260 | "val": [ 261 | ":typing", 262 | ":conditional-effects", 263 | ":disjunctive-preconditions", 264 | ":negative-preconditions" 265 | ] 266 | }, 267 | "72": { 268 | "stated": [ 269 | ":typing", 270 | ":action-costs" 271 | ], 272 | "val": [ 273 | ":typing", 274 | ":action-costs" 275 | ] 276 | }, 277 | "115": { 278 | "stated": [ 279 | ":typing", 280 | ":equality", 281 | ":negative-preconditions", 282 | ":action-costs", 283 | ":conditional-effects" 284 | ], 285 | "val": [ 286 | ":typing", 287 | ":conditional-effects", 288 | ":negative-preconditions", 289 | ":action-costs" 290 | ] 
291 | }, 292 | "84": { 293 | "stated": [ 294 | ":strips", 295 | ":typing" 296 | ], 297 | "val": [ 298 | ":typing", 299 | ":strips" 300 | ] 301 | }, 302 | "67": { 303 | "stated": [], 304 | "val": [ 305 | ":strips" 306 | ] 307 | }, 308 | "49": { 309 | "stated": [ 310 | ":typing" 311 | ], 312 | "val": [ 313 | ":typing", 314 | ":strips" 315 | ] 316 | }, 317 | "54": { 318 | "stated": [ 319 | ":strips", 320 | ":equality", 321 | ":typing" 322 | ], 323 | "val": [ 324 | ":typing", 325 | ":equality" 326 | ] 327 | }, 328 | "94": { 329 | "stated": [ 330 | ":typing", 331 | ":adl" 332 | ], 333 | "val": [ 334 | ":typing", 335 | ":disjunctive-preconditions" 336 | ] 337 | }, 338 | "118": { 339 | "stated": [ 340 | ":typing", 341 | ":action-costs" 342 | ], 343 | "val": [ 344 | ":typing", 345 | ":action-costs" 346 | ] 347 | }, 348 | "57": { 349 | "stated": [ 350 | ":typing", 351 | ":action-costs" 352 | ], 353 | "val": [ 354 | ":typing", 355 | ":action-costs" 356 | ] 357 | }, 358 | "90": { 359 | "stated": [ 360 | ":strips", 361 | ":typing" 362 | ], 363 | "val": [ 364 | ":typing", 365 | ":strips" 366 | ] 367 | }, 368 | "110": { 369 | "stated": [ 370 | ":adl" 371 | ], 372 | "val": [ 373 | ":adl" 374 | ] 375 | }, 376 | "100": { 377 | "stated": [ 378 | ":typing" 379 | ], 380 | "val": [ 381 | ":typing", 382 | ":strips" 383 | ] 384 | }, 385 | "112": { 386 | "stated": [ 387 | ":strips" 388 | ], 389 | "val": [ 390 | ":strips" 391 | ] 392 | }, 393 | "68": { 394 | "stated": [], 395 | "val": [ 396 | ":strips" 397 | ] 398 | }, 399 | "113": { 400 | "stated": [], 401 | "val": [ 402 | ":strips" 403 | ] 404 | }, 405 | "119": { 406 | "stated": [], 407 | "val": [ 408 | ":strips" 409 | ] 410 | }, 411 | "44": { 412 | "stated": [], 413 | "val": [ 414 | ":strips" 415 | ] 416 | }, 417 | "61": { 418 | "stated": [ 419 | ":adl" 420 | ], 421 | "val": [ 422 | ":adl" 423 | ] 424 | }, 425 | "91": { 426 | "stated": [ 427 | ":typing", 428 | ":action-costs" 429 | ], 430 | "val": [ 431 | ":typing", 432 | 
":action-costs" 433 | ] 434 | }, 435 | "82": { 436 | "stated": [ 437 | ":typing", 438 | ":action-costs" 439 | ], 440 | "val": [ 441 | ":typing", 442 | ":action-costs" 443 | ] 444 | }, 445 | "32": { 446 | "stated": [ 447 | ":equality", 448 | ":strips" 449 | ], 450 | "val": [ 451 | ":strips" 452 | ] 453 | }, 454 | "45": { 455 | "stated": [ 456 | ":typing", 457 | ":adl" 458 | ], 459 | "val": [ 460 | ":typing", 461 | ":disjunctive-preconditions" 462 | ] 463 | }, 464 | "77": { 465 | "stated": [ 466 | ":typing" 467 | ], 468 | "val": [ 469 | ":typing", 470 | ":strips" 471 | ] 472 | }, 473 | "98": { 474 | "stated": [ 475 | ":typing", 476 | ":adl" 477 | ], 478 | "val": [ 479 | ":typing", 480 | ":disjunctive-preconditions", 481 | ":negative-preconditions" 482 | ] 483 | }, 484 | "86": { 485 | "stated": [ 486 | ":typing", 487 | ":action-costs" 488 | ], 489 | "val": [ 490 | ":typing", 491 | ":action-costs" 492 | ] 493 | }, 494 | "35": { 495 | "stated": [ 496 | ":typing" 497 | ], 498 | "val": [ 499 | ":typing", 500 | ":strips" 501 | ] 502 | }, 503 | "81": { 504 | "stated": [ 505 | ":strips", 506 | ":typing", 507 | ":action-costs" 508 | ], 509 | "val": [ 510 | ":typing", 511 | ":action-costs" 512 | ] 513 | }, 514 | "52": { 515 | "stated": [ 516 | ":equality", 517 | ":action-costs" 518 | ], 519 | "val": [ 520 | ":equality", 521 | ":action-costs" 522 | ] 523 | }, 524 | "60": { 525 | "stated": [ 526 | ":strips" 527 | ], 528 | "val": [ 529 | ":strips" 530 | ] 531 | }, 532 | "121": { 533 | "stated": [], 534 | "val": [ 535 | ":typing", 536 | ":strips" 537 | ] 538 | }, 539 | "122": { 540 | "stated": [], 541 | "val": [ 542 | ":strips" 543 | ] 544 | }, 545 | "126": { 546 | "stated": [], 547 | "val": [ 548 | ":strips" 549 | ] 550 | }, 551 | "127": { 552 | "stated": [], 553 | "val": [ 554 | ":strips" 555 | ] 556 | }, 557 | "107": { 558 | "stated": [ 559 | ":strips", 560 | ":typing", 561 | ":action-costs" 562 | ], 563 | "val": [ 564 | ":typing", 565 | ":action-costs" 566 | ] 567 | }, 568 | 
"101": { 569 | "stated": [ 570 | ":negative-preconditions", 571 | ":equality" 572 | ], 573 | "val": [ 574 | ":equality" 575 | ] 576 | }, 577 | "117": { 578 | "stated": [ 579 | ":typing", 580 | ":equality", 581 | ":negative-preconditions", 582 | ":action-costs", 583 | ":conditional-effects" 584 | ], 585 | "val": [ 586 | ":equality", 587 | ":conditional-effects", 588 | ":typing", 589 | ":negative-preconditions", 590 | ":action-costs" 591 | ] 592 | }, 593 | "88": { 594 | "stated": [ 595 | ":typing", 596 | ":action-costs" 597 | ], 598 | "val": [ 599 | ":typing", 600 | ":action-costs" 601 | ] 602 | }, 603 | "120": { 604 | "stated": [ 605 | ":adl", 606 | ":typing", 607 | ":conditional-effects" 608 | ], 609 | "val": [ 610 | ":typing", 611 | ":conditional-effects" 612 | ] 613 | }, 614 | "114": { 615 | "stated": [ 616 | ":adl" 617 | ], 618 | "val": [ 619 | ":typing", 620 | ":conditional-effects", 621 | ":disjunctive-preconditions", 622 | ":negative-preconditions" 623 | ] 624 | }, 625 | "97": { 626 | "stated": [ 627 | ":strips", 628 | ":typing" 629 | ], 630 | "val": [ 631 | ":typing", 632 | ":strips" 633 | ] 634 | }, 635 | "6": { 636 | "stated": [ 637 | ":typing", 638 | ":action-costs" 639 | ], 640 | "val": [ 641 | ":typing", 642 | ":action-costs" 643 | ] 644 | }, 645 | "123": { 646 | "stated": [ 647 | ":adl" 648 | ], 649 | "val": [ 650 | ":typing", 651 | ":negative-preconditions", 652 | ":conditional-effects" 653 | ] 654 | }, 655 | "124": { 656 | "stated": [ 657 | ":strips" 658 | ], 659 | "val": [ 660 | ":strips" 661 | ] 662 | }, 663 | "129": { 664 | "stated": [ 665 | ":strips" 666 | ], 667 | "val": [ 668 | ":strips" 669 | ] 670 | }, 671 | "47": { 672 | "stated": [ 673 | ":adl", 674 | ":typing", 675 | ":conditional-effects" 676 | ], 677 | "val": [ 678 | ":typing", 679 | ":conditional-effects" 680 | ] 681 | }, 682 | "28": { 683 | "stated": [ 684 | ":strips" 685 | ], 686 | "val": [ 687 | ":typing", 688 | ":strips" 689 | ] 690 | }, 691 | "26": { 692 | "stated": [ 693 | 
":typing", 694 | ":action-costs", 695 | ":adl" 696 | ], 697 | "val": [ 698 | ":typing", 699 | ":conditional-effects", 700 | ":negative-preconditions", 701 | ":action-costs" 702 | ] 703 | }, 704 | "30": { 705 | "stated": [ 706 | ":strips" 707 | ], 708 | "val": [ 709 | ":strips" 710 | ] 711 | }, 712 | "33": { 713 | "stated": [ 714 | ":equality", 715 | ":strips" 716 | ], 717 | "val": [ 718 | ":strips" 719 | ] 720 | }, 721 | "128": { 722 | "stated": [ 723 | ":strips" 724 | ], 725 | "val": [ 726 | ":strips" 727 | ] 728 | }, 729 | "14": { 730 | "stated": [ 731 | ":strips", 732 | ":typing" 733 | ], 734 | "val": [ 735 | ":typing", 736 | ":strips" 737 | ] 738 | }, 739 | "79": { 740 | "stated": [ 741 | ":typing", 742 | ":action-costs" 743 | ], 744 | "val": [ 745 | ":typing", 746 | ":action-costs" 747 | ] 748 | }, 749 | "80": { 750 | "val": [ 751 | ":action-costs" 752 | ] 753 | }, 754 | "58": { 755 | "stated": [ 756 | ":typing", 757 | ":action-costs" 758 | ], 759 | "val": [ 760 | ":typing", 761 | ":action-costs" 762 | ] 763 | }, 764 | "23": { 765 | "stated": [ 766 | ":typing" 767 | ], 768 | "val": [ 769 | ":typing", 770 | ":strips" 771 | ] 772 | }, 773 | "59": { 774 | "stated": [ 775 | ":typing", 776 | ":adl", 777 | ":action-costs" 778 | ], 779 | "val": [ 780 | ":typing", 781 | ":disjunctive-preconditions", 782 | ":negative-preconditions", 783 | ":action-costs" 784 | ] 785 | }, 786 | "111": { 787 | "stated": [ 788 | ":typing", 789 | ":action-costs" 790 | ], 791 | "val": [ 792 | ":typing", 793 | ":action-costs" 794 | ] 795 | }, 796 | "31": { 797 | "stated": [ 798 | ":typing", 799 | ":action-costs" 800 | ], 801 | "val": [ 802 | ":typing", 803 | ":action-costs" 804 | ] 805 | }, 806 | "29": { 807 | "stated": [ 808 | ":strips" 809 | ], 810 | "val": [ 811 | ":strips" 812 | ] 813 | }, 814 | "55": { 815 | "stated": [ 816 | ":typing", 817 | ":action-costs" 818 | ], 819 | "val": [ 820 | ":typing", 821 | ":action-costs" 822 | ] 823 | }, 824 | "46": { 825 | "stated": [ 826 | ":strips", 
827 | ":typing", 828 | ":action-costs" 829 | ], 830 | "val": [ 831 | ":typing", 832 | ":action-costs" 833 | ] 834 | }, 835 | "65": { 836 | "stated": [ 837 | ":typing", 838 | ":adl", 839 | ":action-costs" 840 | ], 841 | "val": [ 842 | ":typing", 843 | ":disjunctive-preconditions", 844 | ":negative-preconditions", 845 | ":action-costs" 846 | ] 847 | }, 848 | "20": { 849 | "stated": [ 850 | ":typing" 851 | ], 852 | "val": [ 853 | ":typing", 854 | ":action-costs" 855 | ] 856 | }, 857 | "69": { 858 | "stated": [ 859 | ":strips", 860 | ":typing", 861 | ":equality" 862 | ], 863 | "val": [ 864 | ":typing", 865 | ":negative-preconditions" 866 | ] 867 | }, 868 | "53": { 869 | "stated": [ 870 | ":typing", 871 | ":action-costs" 872 | ], 873 | "val": [ 874 | ":typing", 875 | ":action-costs" 876 | ] 877 | }, 878 | "85": { 879 | "stated": [ 880 | ":typing", 881 | ":action-costs" 882 | ], 883 | "val": [ 884 | ":typing", 885 | ":action-costs" 886 | ] 887 | }, 888 | "7": { 889 | "stated": [ 890 | ":strips", 891 | ":typing", 892 | ":action-costs" 893 | ], 894 | "val": [ 895 | ":typing", 896 | ":action-costs" 897 | ] 898 | }, 899 | "38": { 900 | "stated": [ 901 | ":strips" 902 | ], 903 | "val": [ 904 | ":strips" 905 | ] 906 | }, 907 | "18": { 908 | "stated": [ 909 | ":strips", 910 | ":typing", 911 | ":action-costs" 912 | ], 913 | "val": [ 914 | ":typing", 915 | ":action-costs" 916 | ] 917 | }, 918 | "106": { 919 | "stated": [ 920 | ":equality", 921 | ";;", 922 | "equality", 923 | "needed", 924 | "for", 925 | "blocked", 926 | "transition", 927 | "in", 928 | "case", 929 | "of", 930 | "a", 931 | "mismatch", 932 | ":typing" 933 | ], 934 | "val": [ 935 | ":typing", 936 | ":derived-predicates", 937 | ":equality", 938 | ":disjunctive-preconditions", 939 | ":negative-preconditions" 940 | ] 941 | }, 942 | "89": { 943 | "stated": [ 944 | ":strips", 945 | ":equality", 946 | ":typing" 947 | ], 948 | "val": [ 949 | ":typing", 950 | ":equality" 951 | ] 952 | }, 953 | "71": { 954 | "stated": [ 955 | 
":typing" 956 | ], 957 | "val": [ 958 | ":typing", 959 | ":strips" 960 | ] 961 | }, 962 | "109": { 963 | "stated": [ 964 | ":strips", 965 | ":typing", 966 | ":equality" 967 | ], 968 | "val": [ 969 | ":typing", 970 | ":negative-preconditions" 971 | ] 972 | }, 973 | "116": { 974 | "stated": [ 975 | ":typing", 976 | ":equality" 977 | ], 978 | "val": [ 979 | ":typing", 980 | ":strips" 981 | ] 982 | }, 983 | "48": { 984 | "stated": [ 985 | ":adl", 986 | ":derived-predicates" 987 | ], 988 | "val": [ 989 | ":equality", 990 | ":typing", 991 | ":derived-predicates", 992 | ":disjunctive-preconditions", 993 | ":negative-preconditions" 994 | ] 995 | }, 996 | "75": { 997 | "stated": [ 998 | ":typing", 999 | ":adl" 1000 | ], 1001 | "val": [ 1002 | ":typing", 1003 | ":disjunctive-preconditions", 1004 | ":negative-preconditions" 1005 | ] 1006 | }, 1007 | "76": { 1008 | "stated": [ 1009 | ":typing", 1010 | ":action-costs" 1011 | ], 1012 | "val": [ 1013 | ":typing", 1014 | ":action-costs" 1015 | ] 1016 | }, 1017 | "70": { 1018 | "stated": [ 1019 | ":typing", 1020 | ":equality" 1021 | ], 1022 | "val": [ 1023 | ":typing", 1024 | ":strips" 1025 | ] 1026 | }, 1027 | "64": { 1028 | "stated": [ 1029 | ":strips" 1030 | ], 1031 | "val": [ 1032 | ":strips" 1033 | ] 1034 | }, 1035 | "73": { 1036 | "stated": [ 1037 | ":typing", 1038 | ":action-costs" 1039 | ], 1040 | "val": [ 1041 | ":typing", 1042 | ":action-costs" 1043 | ] 1044 | }, 1045 | "74": { 1046 | "stated": [ 1047 | ":typing" 1048 | ], 1049 | "val": [ 1050 | ":typing", 1051 | ":action-costs" 1052 | ] 1053 | }, 1054 | "41": { 1055 | "stated": [ 1056 | ":adl", 1057 | ":derived-predicates" 1058 | ], 1059 | "val": [ 1060 | ":equality", 1061 | ":typing", 1062 | ":derived-predicates", 1063 | ":disjunctive-preconditions", 1064 | ":negative-preconditions" 1065 | ] 1066 | }, 1067 | "104": { 1068 | "stated": [ 1069 | ":typing", 1070 | ":action-costs" 1071 | ], 1072 | "val": [ 1073 | ":typing", 1074 | ":action-costs" 1075 | ] 1076 | }, 1077 | 
"138": { 1078 | "stated": [ 1079 | ":strips", 1080 | ":negative-preconditions" 1081 | ], 1082 | "val": [ 1083 | ":equality", 1084 | ":negative-preconditions" 1085 | ] 1086 | }, 1087 | "92": { 1088 | "stated": [ 1089 | ":equality", 1090 | ";;", 1091 | "equality", 1092 | "needed", 1093 | "for", 1094 | "blocked", 1095 | "transition", 1096 | "in", 1097 | "case", 1098 | "of", 1099 | "a", 1100 | "mismatch", 1101 | ":typing" 1102 | ], 1103 | "val": [ 1104 | ":negative-preconditions", 1105 | ":disjunctive-preconditions", 1106 | ":derived-predicates", 1107 | ":typing", 1108 | ":equality" 1109 | ] 1110 | }, 1111 | "125": { 1112 | "stated": [ 1113 | ":strips", 1114 | ":typing", 1115 | ":quanitfied-precondition" 1116 | ], 1117 | "val": [ 1118 | ":typing", 1119 | ":disjunctive-preconditions", 1120 | ":negative-preconditions" 1121 | ] 1122 | }, 1123 | "50": { 1124 | "stated": [ 1125 | ":typing" 1126 | ], 1127 | "val": [ 1128 | ":typing", 1129 | ":strips" 1130 | ] 1131 | }, 1132 | "37": { 1133 | "stated": [ 1134 | ":adl", 1135 | ":typing" 1136 | ], 1137 | "val": [ 1138 | ":typing", 1139 | ":conditional-effects", 1140 | ":negative-preconditions" 1141 | ] 1142 | }, 1143 | "43": { 1144 | "stated": [ 1145 | ":strips" 1146 | ], 1147 | "val": [ 1148 | ":strips" 1149 | ] 1150 | }, 1151 | "66": { 1152 | "stated": [ 1153 | ":typing", 1154 | ":action-costs" 1155 | ], 1156 | "val": [ 1157 | ":typing", 1158 | ":negative-preconditions", 1159 | ":action-costs" 1160 | ] 1161 | }, 1162 | "42": { 1163 | "stated": [ 1164 | ":typing", 1165 | ":action-costs" 1166 | ], 1167 | "val": [ 1168 | ":typing", 1169 | ":action-costs" 1170 | ] 1171 | }, 1172 | "95": { 1173 | "stated": [ 1174 | ":typing", 1175 | ":action-costs" 1176 | ], 1177 | "val": [ 1178 | ":typing", 1179 | ":action-costs" 1180 | ] 1181 | }, 1182 | "93": { 1183 | "stated": [ 1184 | ":typing", 1185 | ":action-costs" 1186 | ], 1187 | "val": [ 1188 | ":typing", 1189 | ":action-costs" 1190 | ] 1191 | }, 1192 | "24": { 1193 | "stated": [ 1194 | 
":typing", 1195 | ":action-costs" 1196 | ], 1197 | "val": [ 1198 | ":typing", 1199 | ":action-costs" 1200 | ] 1201 | }, 1202 | "34": { 1203 | "stated": [ 1204 | ":typing", 1205 | ":action-costs" 1206 | ], 1207 | "val": [ 1208 | ":typing", 1209 | ":action-costs" 1210 | ] 1211 | }, 1212 | "10": { 1213 | "stated": [ 1214 | ":typing", 1215 | ":action-costs" 1216 | ], 1217 | "val": [ 1218 | ":typing", 1219 | ":action-costs" 1220 | ] 1221 | }, 1222 | "150": { 1223 | "stated": [ 1224 | ":strips", 1225 | ":negative-preconditions" 1226 | ], 1227 | "val": [ 1228 | ":equality", 1229 | ":negative-preconditions" 1230 | ] 1231 | }, 1232 | "142": { 1233 | "stated": [ 1234 | ":typing", 1235 | ":action-costs", 1236 | ":negative-preconditions" 1237 | ], 1238 | "val": [ 1239 | ":typing", 1240 | ":negative-preconditions", 1241 | ":action-costs" 1242 | ] 1243 | }, 1244 | "36": { 1245 | "stated": [ 1246 | ":typing", 1247 | ":action-costs" 1248 | ], 1249 | "val": [ 1250 | ":typing", 1251 | ":action-costs" 1252 | ] 1253 | }, 1254 | "108": { 1255 | "stated": [ 1256 | ":typing", 1257 | ":action-costs" 1258 | ], 1259 | "val": [ 1260 | ":typing", 1261 | ":action-costs" 1262 | ] 1263 | }, 1264 | "105": { 1265 | "stated": [ 1266 | ":strips", 1267 | ":typing", 1268 | ":equality" 1269 | ], 1270 | "val": [ 1271 | ":typing", 1272 | ":negative-preconditions" 1273 | ] 1274 | }, 1275 | "130": { 1276 | "stated": [ 1277 | ":typing", 1278 | ":action-costs", 1279 | ":negative-preconditions" 1280 | ], 1281 | "val": [ 1282 | ":typing", 1283 | ":negative-preconditions", 1284 | ":action-costs" 1285 | ] 1286 | }, 1287 | "146": { 1288 | "stated": [ 1289 | ":typing", 1290 | ":adl" 1291 | ], 1292 | "val": [ 1293 | ":typing", 1294 | ":conditional-effects", 1295 | ":negative-preconditions" 1296 | ] 1297 | }, 1298 | "140": { 1299 | "stated": [ 1300 | ":typing", 1301 | ":conditional-effects", 1302 | ":action-costs", 1303 | ":negative-preconditions" 1304 | ], 1305 | "val": [ 1306 | ":typing", 1307 | 
":conditional-effects", 1308 | ":negative-preconditions", 1309 | ":action-costs" 1310 | ] 1311 | }, 1312 | "136": { 1313 | "stated": [ 1314 | ":adl", 1315 | ":action-costs", 1316 | ":negative-preconditions" 1317 | ], 1318 | "val": [ 1319 | ":typing", 1320 | ":equality", 1321 | ":action-costs" 1322 | ] 1323 | }, 1324 | "133": { 1325 | "stated": [ 1326 | ":adl", 1327 | ":typing", 1328 | ":negative-preconditions", 1329 | ":equality", 1330 | ":action-costs" 1331 | ], 1332 | "val": [ 1333 | ":typing", 1334 | ":negative-preconditions", 1335 | ":action-costs" 1336 | ] 1337 | }, 1338 | "141": { 1339 | "stated": [ 1340 | ":typing", 1341 | ":equality" 1342 | ], 1343 | "val": [ 1344 | ":typing", 1345 | ":action-costs" 1346 | ] 1347 | }, 1348 | "139": { 1349 | "stated": [ 1350 | ":typing", 1351 | ":negative-preconditions" 1352 | ], 1353 | "val": [ 1354 | ":typing", 1355 | ":negative-preconditions" 1356 | ] 1357 | }, 1358 | "135": { 1359 | "stated": [ 1360 | ":typing", 1361 | ":adl" 1362 | ], 1363 | "val": [ 1364 | ":typing", 1365 | ":conditional-effects", 1366 | ":negative-preconditions" 1367 | ] 1368 | }, 1369 | "134": { 1370 | "stated": [ 1371 | ":strips", 1372 | ":typing", 1373 | ":equality", 1374 | ":negative-preconditions" 1375 | ], 1376 | "val": [ 1377 | ":typing", 1378 | ":equality" 1379 | ] 1380 | }, 1381 | "145": { 1382 | "stated": [ 1383 | ":adl", 1384 | ":typing", 1385 | ":negative-preconditions", 1386 | ":equality", 1387 | ":action-costs" 1388 | ], 1389 | "val": [ 1390 | ":typing", 1391 | ":negative-preconditions", 1392 | ":action-costs" 1393 | ] 1394 | }, 1395 | "153": { 1396 | "stated": [ 1397 | ":action-costs", 1398 | ":conditional-effects", 1399 | ":typing", 1400 | ":negative-preconditions", 1401 | ":adl" 1402 | ], 1403 | "val": [ 1404 | ":typing", 1405 | ":negative-preconditions", 1406 | ":conditional-effects", 1407 | ":action-costs" 1408 | ] 1409 | }, 1410 | "144": { 1411 | "stated": [ 1412 | ":adl", 1413 | ":action-costs" 1414 | ], 1415 | "val": [ 1416 | 
":equality", 1417 | ":conditional-effects", 1418 | ":typing", 1419 | ":negative-preconditions", 1420 | ":action-costs" 1421 | ] 1422 | }, 1423 | "147": { 1424 | "stated": [ 1425 | ":strips", 1426 | ":typing", 1427 | ":equality", 1428 | ":negative-preconditions" 1429 | ], 1430 | "val": [ 1431 | ":typing", 1432 | ":equality" 1433 | ] 1434 | }, 1435 | "148": { 1436 | "stated": [ 1437 | ":adl", 1438 | ":action-costs", 1439 | ":negative-preconditions" 1440 | ], 1441 | "val": [ 1442 | ":typing", 1443 | ":equality", 1444 | ":action-costs" 1445 | ] 1446 | }, 1447 | "149": { 1448 | "stated": [ 1449 | ":typing", 1450 | ":negative-preconditions", 1451 | ":conditional-effects" 1452 | ], 1453 | "val": [ 1454 | ":equality", 1455 | ":conditional-effects", 1456 | ":typing", 1457 | ":negative-preconditions", 1458 | ":action-costs" 1459 | ] 1460 | }, 1461 | "151": { 1462 | "stated": [ 1463 | ":typing", 1464 | ":conditional-effects", 1465 | ":action-costs", 1466 | ":negative-preconditions" 1467 | ], 1468 | "val": [ 1469 | ":typing", 1470 | ":conditional-effects", 1471 | ":negative-preconditions", 1472 | ":action-costs" 1473 | ] 1474 | }, 1475 | "152": { 1476 | "stated": [ 1477 | ":typing", 1478 | ":negative-preconditions" 1479 | ], 1480 | "val": [ 1481 | ":typing", 1482 | ":negative-preconditions" 1483 | ] 1484 | }, 1485 | "132": { 1486 | "stated": [ 1487 | ":adl", 1488 | ":action-costs" 1489 | ], 1490 | "val": [ 1491 | ":equality", 1492 | ":conditional-effects", 1493 | ":typing", 1494 | ":negative-preconditions", 1495 | ":action-costs" 1496 | ] 1497 | }, 1498 | "131": { 1499 | "stated": [ 1500 | ":equality", 1501 | ":typing", 1502 | ":conditional-effects", 1503 | ":negative-preconditions" 1504 | ], 1505 | "val": [ 1506 | ":typing", 1507 | ":equality", 1508 | ":conditional-effects", 1509 | ":negative-preconditions" 1510 | ] 1511 | }, 1512 | "137": { 1513 | "stated": [ 1514 | ":typing", 1515 | ":negative-preconditions", 1516 | ":conditional-effects" 1517 | ], 1518 | "val": [ 1519 | 
":equality", 1520 | ":conditional-effects", 1521 | ":typing", 1522 | ":negative-preconditions", 1523 | ":action-costs" 1524 | ] 1525 | }, 1526 | "143": { 1527 | "stated": [ 1528 | ":equality", 1529 | ":typing", 1530 | ":conditional-effects", 1531 | ":negative-preconditions" 1532 | ], 1533 | "val": [ 1534 | ":typing", 1535 | ":equality", 1536 | ":conditional-effects", 1537 | ":negative-preconditions" 1538 | ] 1539 | } 1540 | } -------------------------------------------------------------------------------- /scripts/tag-fix/tag_updater.py: -------------------------------------------------------------------------------- 1 | 2 | import os, sys 3 | 4 | # Add parent directory to path 5 | sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) 6 | 7 | import json 8 | import planning_domains_api as api 9 | import ast 10 | 11 | # Pull requirments from precomputed json file 12 | f = open("processed_result5.json") 13 | processed_requirements = json.load(f) 14 | 15 | DEBUG = False 16 | 17 | 18 | def update_tags(resource, id, *, current, required): 19 | assert resource in ["collection", "domain", "problem"] 20 | 21 | # Remove incorrect tags 22 | for tag in current: 23 | if tag not in required: 24 | print(f"Untagging {tag} from {resource}: {id}") 25 | if DEBUG: 26 | print(f"Untagging {tag} from {resource}: {id}") 27 | elif resource == "collection": 28 | api.untag_collection(id, tag) 29 | elif resource == "domain": 30 | api.untag_domain(id, tag) 31 | elif resource == "problem": 32 | api.untag_problem(id, tag) 33 | 34 | # Add required tags 35 | for tag in required: 36 | if tag not in current: 37 | print(f"Tagging {tag} from {resource}: {id}") 38 | if DEBUG: 39 | print(f"Tagging {tag} from {resource}: {id}") 40 | elif resource == "collection": 41 | api.tag_collection(id, tag) 42 | elif resource == "domain": 43 | api.tag_domain(id, tag) 44 | elif resource == "problem": 45 | api.tag_problem(id, tag) 46 | 47 | 48 | collections = api.get_collections() 49 | for 
# Walk every collection -> domain -> problem and synchronise their tags
# with the pre-computed requirements.
collections = api.get_collections()
for collection in collections:
    collection_id = int(collection["collection_id"])

    # Keep track of all requirements inside this collection (union over
    # its domains).
    collection_required_tags = set()

    domains = api.get_domains(collection_id)
    # BUG FIX: list.sort() sorts in place and returns None, so the original
    # check `a.sort() == b.sort()` compared None == None and always passed.
    # Compare sorted copies instead so the assertion actually validates.
    assert sorted(ast.literal_eval(collection['domain_set'])) == \
        sorted(x['domain_id'] for x in domains), \
        "domain_set property should contain all domains"
    for domain in domains:
        domain_id = domain['domain_id']

        # Pull pre-computed domain requirements (the JSON file is keyed by
        # the domain id as a string).
        required_tags = processed_requirements[str(domain_id)]['val']

        domain_current_tags = ast.literal_eval(domain['tags'])
        update_tags("domain", domain_id, current=domain_current_tags, required=required_tags)

        # Update collection_required_tags with the union of itself and this
        # domain's requirements.
        collection_required_tags.update(required_tags)

        # Every problem of the domain gets the same required tags.
        problems = api.get_problems(domain_id)
        for problem in problems:
            problem_current_tags = ast.literal_eval(problem['tags'])
            update_tags("problem", problem['problem_id'], current=problem_current_tags, required=required_tags)

    collection_current_tags = ast.literal_eval(collection['tags'])
    update_tags("collection", collection_id, current=collection_current_tags, required=list(collection_required_tags))


# NOTE(review): kept for reference -- one-off pass that asked the server to
# rebuild its cached tags after a bulk update.
# import requests
# for cid in range(1, 5002):
#     # post to api.planning.domains/rebuildtags/domain/
#     print(f"Rebuilding tags for collection {cid}")
#     requests.post(f"https://api.planning.domains/rebuildtags/problem/{cid}")
11 | 14 |

??

15 |

??

16 |
17 |
18 |

Domains

19 |
20 | ?? 21 |
22 |
23 | 34 | 35 | -------------------------------------------------------------------------------- /web/domain.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 |
11 | 14 |

??

15 |

Description

16 |

??

17 |

??

18 |
19 |
20 |

Problems

21 |
22 | ?? 23 |
24 |
25 | 36 | 37 | -------------------------------------------------------------------------------- /web/example.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Example Web Interface for api.planning.domains 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 |
27 |

28 | 29 |

Collections:

30 |
Loading...
31 | 32 |

Showing IPC-2008 Domains:

33 |
Loading...
34 | 35 |

Showing Sokoban Problems:

36 |
Loading...
37 | 38 |

Problem Navigator

39 |
Loading...
40 | 41 | 55 |
// Render a list of collection records as an HTML table.
// Unlike format_problems/format_domains (alphabetical by name),
// collections are sorted numerically by their id.
function format_collections(data, heads, select_func) {
    // Use strict equality for the typeof check, consistent with the
    // sibling format_problems/format_domains helpers.
    if (typeof heads === 'undefined')
        heads = default_collection_headings;
    return format_table(data.sort(function(a,b){return a.collection_id - b.collection_id}), heads, select_func);
}
' + headings[heads[i]] + '
' + val(data[i][heads[j]]) + '
'; 84 | 85 | return html; 86 | } 87 | 88 | 89 | 90 | function fetch_problems(qs, parent, select_func) { 91 | $.getJSON('http://api.planning.domains/json/classical'+qs, function(data) { 92 | if (data.error) 93 | $(parent).html('Error:' + data.message); 94 | else 95 | $(parent).html(format_problems(data.result, 96 | default_problem_headings, 97 | select_func)); 98 | }); 99 | } 100 | 101 | function fetch_domains(qs, parent, select_func) { 102 | $.getJSON('http://api.planning.domains/json/classical'+qs, function(data) { 103 | if (data.error) 104 | $(parent).html('Error:' + data.message); 105 | else 106 | $(parent).html(format_domains(data.result, 107 | default_domain_headings, 108 | select_func)); 109 | }); 110 | } 111 | 112 | function fetch_collections(qs, parent, select_func) { 113 | $.getJSON('http://api.planning.domains/json/classical'+qs, function(data) { 114 | if (data.error) 115 | $(parent).html('Error:' + data.message); 116 | else 117 | $(parent).html(format_collections(data.result, 118 | default_collection_headings, 119 | select_func)); 120 | }); 121 | } 122 | 123 | function _navigator_dom2prob(did) { 124 | window.nav_prev_step = _navigator_col2dom; 125 | $('#nav-back-button').show(); 126 | $(window.nav_parent).html('
Loading...
'); 127 | fetch_problems('/problems/'+did, window.nav_parent, window.nav_func); 128 | } 129 | 130 | function _navigator_col2dom(cid) { 131 | window.nav_prev_step = _navigator_cols; 132 | window.nav_prev_setting = cid; 133 | $('#nav-back-button').show(); 134 | $(window.nav_parent).html('
Loading...
'); 135 | fetch_domains('/domains/'+cid, window.nav_parent, '_navigator_dom2prob'); 136 | } 137 | 138 | function _navigator_cols() { 139 | $('#nav-back-button').hide(); 140 | $(window.nav_parent).html('
Loading...
'); 141 | fetch_collections('/collections', window.nav_parent, '_navigator_col2dom'); 142 | } 143 | 144 | function _navigator_back() { 145 | window.nav_prev_step(window.nav_prev_setting); 146 | } 147 | 148 | function insert_navigator(parent, select_func) { 149 | window.nav_parent = '#sub-' + parent.slice(1); 150 | window.nav_func = select_func; 151 | $(parent).html('
'); 152 | _navigator_cols(); 153 | } 154 | 155 | -------------------------------------------------------------------------------- /web/problem.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 |
11 | 15 |

??

16 |

??

17 |
18 |
19 |

Domain

20 | 21 |

??

22 |

??

23 |
24 |
25 |

Plan

26 |
27 | Show 28 |

??

29 |
30 |
31 |
32 |

Data

33 |
????
34 |
????
35 |
??
36 |
??
37 |
38 | 49 | 50 | -------------------------------------------------------------------------------- /web/web.js: -------------------------------------------------------------------------------- 1 | 2 | //problem 3 | function formatProblemData_problem(data) { 4 | $("p.p_id").html("Problem ID: " + data.problem_id + "download"); 5 | $("p.d_id").html("Domain ID: " + data.domain_id + "download"); 6 | $("h1.title").html(data.domain + "/" + data.problem); 7 | tags = data.tags.substring(1, (data.tags.length - 1)); 8 | tags = tags.split(",").map(x => "" + x + "") 9 | tagstring = "" 10 | for (let index = 0; index < tags.length; index++) { 11 | tagstring += tags[index]; 12 | } 13 | if (tagstring.length > 13) { 14 | $("p.tags").html("Problem tags: " + tagstring); 15 | } else { 16 | $("p.tags").html("Problem tags: None"); 17 | } 18 | $("h4.d_link").html("" + data.domain + ""); 19 | $("details.lowerbound").html("Lower bound: " + data.lower_bound + "" + data.lower_bound_description); 20 | $("details.upperbound").html("Upper bound: " + data.upper_bound + "" + data.upper_bound_description); 21 | $("details.av_ef_w").html("Average effective width: " + data.average_effective_width + "" + data.average_effective_width_description); 22 | $("details.max_ef_w").html("Maximum effective width: " + data.max_effective_width + "" + data.max_effective_width_description); 23 | } 24 | 25 | function formatDomainData_problem(domain_data) { 26 | $("p.d_desc").html(domain_data.description); 27 | d_tags = domain_data.tags.substring(1, (domain_data.tags.length - 1)); 28 | d_tags = d_tags.split(",").map(x => "" + x.substring(2, x.length - 1) + ", ") 29 | d_tagstring = "" 30 | for (let index = 0; index < d_tags.length; index++) { 31 | d_tagstring += d_tags[index]; 32 | } 33 | $("p.d_tags").html("Domain tags: " + d_tagstring); 34 | } 35 | 36 | function formatPlanData_problem(plan_data) { 37 | plan = plan_data.plan.split("\n"); 38 | console.log(plan); 39 | if (plan[plan.length - 2].includes("cost = ")) { 40 | 
cost = plan[plan.length - 2].split("= ")[1].split(" ")[0]; 41 | $("h3.plan_moves").html("Plan (" + cost + ")"); 42 | } else { 43 | $("h3.plan_moves").html("Plan (" + plan.length + ")") 44 | } 45 | plan = plan.map(x => x + "
") 46 | planstring = "" 47 | for (let index = 0; index < plan.length; index++) { 48 | planstring += plan[index]; 49 | } 50 | $("p.plan").html(planstring); 51 | } 52 | 53 | //domain 54 | function formatDomainData_domain(data) { 55 | $("p.id").html("Domain ID: " + data.domain_id); 56 | $("h1.title").html(data.domain_name); 57 | $(".desc").html(data.description); 58 | d_tags = data.tags.substring(1, (data.tags.length - 1)); 59 | d_tags = d_tags.split(",").map(x => "" + x.substring(2, x.length - 1) + ", ") 60 | d_tagstring = "" 61 | for (let index = 0; index < d_tags.length; index++) { 62 | d_tagstring += d_tags[index]; 63 | } 64 | $("p.tags").html("Domain tags: " + d_tagstring); 65 | } 66 | 67 | function formatProblems_domain(data) { 68 | problemsList = "" 69 | // sort based on the data[index].problem field 70 | data.sort(function (a, b) { 71 | return a.problem.localeCompare(b.problem); 72 | }); 73 | for (let index = 0; index < data.length; index++) { 74 | problemsList += "

" + data[index].problem + "

"; 75 | } 76 | $("div.problemlist").html(problemsList); 77 | } 78 | 79 | //collection 80 | function formatCollectionData_collection(data) { 81 | $("p.id").html("Collection ID: " + data.collection_id); 82 | $("h1.title").html(data.collection_name); 83 | $(".desc").html(data.description); 84 | } 85 | 86 | function formatDomainData_collection(data, index) { 87 | resultString = "

" + data.domain_name + "

" + data.description + "

"; 88 | $("div.domain" + index).html(resultString); 89 | } 90 | 91 | //getters 92 | 93 | //problem 94 | function getDomain_problem(domainno) { 95 | $.getJSON('http://api.planning.domains/json/classical/domain/' + domainno, function (data) { 96 | if (data.error) 97 | $(parent).html('Error:' + data.message); 98 | else 99 | data = data.result; 100 | formatDomainData_problem(data); 101 | }); 102 | } 103 | 104 | function getPlan_problem(problemno) { 105 | $.getJSON('https://api.planning.domains/json/classical/plan/' + problemno, function (data) { 106 | if (data.error) 107 | $(parent).html('Error:' + data.message); 108 | else 109 | data = data.result; 110 | formatPlanData_problem(data); 111 | }); 112 | } 113 | 114 | //domain 115 | function getProblems_domain(domainno) { 116 | $.getJSON('http://api.planning.domains/json/classical/problems/' + domainno, function (data) { 117 | if (data.error) 118 | $(parent).html('Error:' + data.message); 119 | else 120 | data = data.result; 121 | formatProblems_domain(data); 122 | }); 123 | } 124 | 125 | //collection 126 | function getDomain_collection(domainno, index) { 127 | $.getJSON('http://api.planning.domains/json/classical/domain/' + domainno, function (data) { 128 | if (data.error) 129 | $(parent).html('Error:' + data.message); 130 | else 131 | data = data.result; 132 | formatDomainData_collection(data, index); 133 | }); 134 | } 135 | 136 | //main functions 137 | 138 | //problem 139 | function getProblem(problemno) { 140 | $.getJSON('http://api.planning.domains/json/classical/problem/' + problemno, function (data) { 141 | if (data.error) 142 | $(parent).html('Error:' + data.message); 143 | else 144 | data = data.result; 145 | formatProblemData_problem(data); 146 | getDomain_problem(data.domain_id); 147 | getPlan_problem(problemno); 148 | }); 149 | } 150 | 151 | //domain 152 | function getDomain(domainno) { 153 | $.getJSON('http://api.planning.domains/json/classical/domain/' + domainno, function (data) { 154 | if (data.error) 155 | 
$(parent).html('Error:' + data.message); 156 | else 157 | data = data.result; 158 | formatDomainData_domain(data); 159 | getProblems_domain(domainno); 160 | }); 161 | } 162 | 163 | //collection 164 | 165 | function getDomains_collection(collectionno) { 166 | $.getJSON('http://api.planning.domains/json/classical/domains/' + collectionno, function (data) { 167 | if (data.error) 168 | $(parent).html('Error:' + data.message); 169 | else 170 | data = data.result; 171 | 172 | // sort based on the data[index].problem field 173 | data.sort(function (a, b) { 174 | return a.domain_name.localeCompare(b.domain_name); 175 | }); 176 | 177 | domainsListDivs = "" 178 | for (let index = 0; index < data.length; index++) { 179 | domainsListDivs += "
"; 180 | } 181 | $("div.domainlist").html(domainsListDivs); 182 | for (let index = 0; index < data.length; index++) { 183 | formatDomainData_collection(data[index], index); 184 | } 185 | }); 186 | } 187 | function getCollection(collectionno) { 188 | $.getJSON('http://api.planning.domains/json/classical/collection/' + collectionno, function (data) { 189 | console.log(data); 190 | if (data.error) 191 | $(parent).html('Error:' + data.message); 192 | else 193 | data = data.result; 194 | formatCollectionData_collection(data); 195 | getDomains_collection(collectionno); 196 | }); 197 | } 198 | --------------------------------------------------------------------------------