├── .gitignore ├── README.md ├── __init__.py ├── archive ├── README.md └── py3-offline-script.py ├── planning.domains.py ├── planning_domains_api.py ├── scripts ├── formalism-initialization │ ├── classical │ │ ├── create-meta.py │ │ ├── data.py │ │ └── gen-db.py │ └── rddl.py └── tag-fix │ ├── processed_result5.json │ └── tag_updater.py └── web ├── collection.html ├── domain.html ├── example.html ├── planning-domains.js ├── problem.html └── web.js /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | .vscode 3 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | API Tools 2 | ========== 3 | 4 | This repository houses a collection of tools and scripts for interacting with the interface found at [api.planning.domains](http://api.planning.domains). More information can be found on that website. Briefly, the following tools are included: 5 | 6 | * **archive/**: A collection of scripts that are not currently supported, but may prove useful in the future. 7 | * **scripts/**: A collection of files used for extracting and generating information for the database. 8 | * **web/**: JavaScript library for interacting with api.planning.domains in a plug-and-play fashion. Also HTML views of the problems/domains/collections found on the API website. 9 | * **planning.domains.py**: A command-line utility for interacting with api.planning.domains (fetching the problems, querying the database, etc.). 10 | * **planning_domains_api.py**: A Python library providing query functionality for api.planning.domains. 11 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AI-Planning/api-tools/c0feed87980f1f905e6753076691b1311ea5beff/__init__.py -------------------------------------------------------------------------------- /archive/README.md: -------------------------------------------------------------------------------- 1 | This directory contains useful files that may be incorporated into the core set of API tools. A brief description of each file can be found below. 2 | 3 | * **py3-offline-script.py**: Source for the planning.domains.py file. It contains the necessary machinery to store meta-data as XML files, and will be used to provide offline access to the meta-data for the planning domains.
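To make the README's descriptions concrete, the sketch below shows one way the Python library (`planning_domains_api.py`) is typically driven. It uses only functions that appear verbatim later in this dump (`find_domains`, `get_problems`); the `"blocks"` query string is an illustrative assumption, the dictionary keys mirror those used elsewhere in this repository, and a network connection to api.planning.domains is assumed.

```python
# Minimal usage sketch for planning_domains_api.py (hypothetical "blocks" query;
# function names exist in the module below, keys mirror those used in cache()
# and generate_lab_suite()).
import planning_domains_api as api

# Search for classical domains whose name contains "blocks".
for dom in api.find_domains("blocks", "classical"):
    print(dom["domain_id"], dom["domain_name"])

# Fetch the problems of the first match; domain_path/problem_path are rewritten
# to local paths when a domain_path is configured in ~/.planning.domains/settings.xml.
first = api.find_domains("blocks", "classical")[0]
for prob in api.get_problems(first["domain_id"], "classical"):
    print(prob["problem"], prob["domain_path"], prob["problem_path"])
```

The same queries are exposed by the command-line utility, e.g. `python planning.domains.py find --type domains --query blocks` or `python planning.domains.py cache --id <collection-id> --folder benchmarks`. Both entry points share the `~/.planning.domains/settings.xml` file, which stores the `domain_path`, `email`, and `token` elements referenced throughout the scripts below.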
4 | -------------------------------------------------------------------------------- /archive/py3-offline-script.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import sys 4 | import os 5 | import datetime 6 | import gzip 7 | import zipfile 8 | import copy 9 | 10 | from os.path import isfile, isdir, join 11 | 12 | import xml.etree.ElementTree as etree 13 | 14 | import urllib.request 15 | import urllib.parse 16 | 17 | langAttribute = "{http://www.w3.org/XML/1998/namespace}lang" 18 | 19 | domainPath = None 20 | installationSettings = None 21 | installationTree = None 22 | 23 | defaultNamespace = "http://settings.planning.domains" 24 | 25 | def checkExists(pd_dir): 26 | """Check ~/.planning.domains exists, and is not a file""" 27 | 28 | if isfile("pd_dir"): 29 | print("Fatal error: need to store settings in {0}, but there is a file with that name".format(pd_dir)) 30 | exit(1) 31 | 32 | 33 | if not isdir(pd_dir): 34 | 35 | 36 | print("""== Pre-release client for planning.domains == 37 | 38 | This is pre-release software, for accessing the content on planning.domains. The backend of the site is undergoing heavy revision, 39 | so do not distribute this software: it may stop working in the future. Note it is released without warranty (including the implied 40 | warranties of merchantability or fitness for a particular purpose). Send bug reports to Andrew Coles. 41 | 42 | """) 43 | 44 | 45 | 46 | 47 | print("Making directory {0}".format(pd_dir)) 48 | try: 49 | os.mkdir(pd_dir) 50 | 51 | except OSError: 52 | print("Cannot make directory") 53 | exit(1) 54 | 55 | def saveSettings(): 56 | 57 | global installationTree 58 | 59 | settingsXML = join(pd_dir,"settings.xml") 60 | 61 | with open(settingsXML,"wb") as settingsFile: 62 | installationTree.write(settingsFile) 63 | 64 | 65 | 66 | 67 | 68 | def loadSettings(home_dir,pd_dir): 69 | """Get the domain path from the settings.xml file in pd_dir. 
If no domain path exists, ask for one.""" 70 | 71 | settingsXML = join(pd_dir,"settings.xml") 72 | 73 | #print("Loading settings from {0}".format(settingsXML)) 74 | 75 | global installationTree 76 | global installationSettings 77 | global domainPath 78 | 79 | if isfile(settingsXML): 80 | installationTree = etree.parse(settingsXML) 81 | installationSettings = installationTree.getroot() 82 | 83 | for child in installationSettings: 84 | if child.tag == "domain_path": 85 | domainPath = child.text 86 | 87 | if isdir(domainPath): 88 | return 89 | else: 90 | try: 91 | os.mkdir(domainPath) 92 | except OSError: 93 | print("Error in settings.xml: domains directory {0} does not exist, and cannot be made".format(domainPath)) 94 | exit(1) 95 | 96 | print("Warning when reading settings.xml: domains directory {0} did not exist, but it was created".format(domainPath)) 97 | 98 | return 99 | 100 | if installationSettings is None: 101 | installationSettings = etree.Element("{http://settings.planning.domains}settings") 102 | installationTree = etree.ElementTree(installationSettings) 103 | 104 | domainPath = input("Enter path for installing files (or hit enter to use {0}): ".format(join(home_dir,"planning.domains"))) 105 | 106 | domainPath = domainPath.lstrip() 107 | domainpath = domainPath.rstrip() 108 | 109 | if domainPath == "": 110 | domainPath = join(home_dir,"planning.domains") 111 | 112 | if isfile(domainPath): 113 | print("Fatal error: there is already a file called {0}".format(domainPath)) 114 | exit(1) 115 | 116 | if not isdir(domainPath): 117 | try: 118 | os.mkdir(domainPath) 119 | except OSError: 120 | print("Cannot make directory {0}".format(domainPath)) 121 | exit(1) 122 | 123 | etree.SubElement(installationSettings,"domain_path").text = domainPath 124 | 125 | saveSettings() 126 | 127 | 128 | 129 | 130 | 131 | def update(packagesPath): 132 | """Download the latest package list""" 133 | urllib.request.urlretrieve("http://raw.planning.domains/packages.xml.gz",packagesPath) 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | def find(root,argument): 142 | """Search for packages whose title or ID contains the given argument""" 143 | matchingIDs = [] 144 | 145 | for child in root: 146 | if child.tag == "domain": 147 | if child.attrib is None: 148 | print("Fatal error: found a domain without an ID"); 149 | exit(1) 150 | 151 | if 'id' not in child.attrib: 152 | print("Fatal error: found a domain without an ID"); 153 | exit(1) 154 | 155 | thisID = child.attrib['id'] 156 | 157 | matches = False 158 | thisTitle = None 159 | titleLanguage = None 160 | 161 | if argument in thisID: 162 | matches = True 163 | 164 | for innerchild in child: 165 | if innerchild.tag == "title": 166 | if argument in innerchild.text: 167 | matches = True 168 | 169 | if thisTitle is None: 170 | #note the first title found 171 | thisTitle = innerchild.text 172 | titleLanguage = innerchild.attrib[langAttribute] 173 | else: 174 | #if the first title isn't in English... 
175 | if titleLanguage != "en" and innerchild.attrib[langAttribute] == en: 176 | thisTitle = innerchild.text 177 | titleLanguage = "en" 178 | 179 | if matches: 180 | matchingIDs.append((thisID,thisTitle)) 181 | 182 | if len(matchingIDs) == 0: 183 | print("Cannot find a title or ID containing {0}".format(argument)) 184 | 185 | else: 186 | print("Domains with title or ID containing {0}:".format(argument)) 187 | for (x,y) in matchingIDs: 188 | print("ID: {0} , Title: {1}".format(x,y)) 189 | 190 | def getDateFromDomain(domainNode,dateTag): 191 | for child in domainNode: 192 | if child.tag == dateTag: 193 | return child.text 194 | 195 | raise LookupError 196 | 197 | def downloadIfNew(child,dateTag,fileTag): 198 | 199 | thisID = child.attrib['id'] 200 | latestDate = getDateFromDomain(child,dateTag) 201 | 202 | global installationSettings 203 | 204 | filesToRemove = [] 205 | dirsToRemove = [] 206 | 207 | installedTag = "installed_{0}".format(fileTag) 208 | 209 | for installed in installationSettings: 210 | if installed.tag == installedTag: 211 | if installed.attrib['ref'] == thisID: 212 | innerchild = installed[0] 213 | 214 | if innerchild.tag != "date": 215 | print("Malformed settings.xml file, cannot continue: expect to find 'date' as the first child of {0}".format(installedTag)) 216 | exit(1) 217 | 218 | if innerchild.text >= latestDate: 219 | print("{1} for {0} is already the latest version".format(thisID,fileTag)) 220 | return False, [], [] 221 | 222 | else: 223 | 224 | for innerchild in installed[1:]: 225 | if innerchild.tag == "file": 226 | filesToRemove.append(innerchild.text) 227 | elif innerchild.tag == "dir": 228 | dirsToRemove.append(innerchild.text) 229 | 230 | installationSettings.remove(installed) 231 | 232 | break 233 | 234 | data = {} 235 | data['id'] = thisID 236 | data['type'] = fileTag 237 | parameters = urllib.parse.urlencode(data) 238 | 239 | print("Downloading {1} for {0}...".format(thisID,fileTag)) 240 | urllib.request.urlretrieve("http://raw.planning.domains/fetch.php?{0}".format(parameters),\ 241 | join(pd_dir,"{0}.zip".format(fileTag)) ) 242 | 243 | return True, filesToRemove, dirsToRemove 244 | 245 | 246 | def downloadDomainAndProblemsIfNew(child): 247 | return downloadIfNew(child,"files_last_modified","domain_and_problems") 248 | 249 | 250 | def downloadMetadataIfNew(child): 251 | return downloadIfNew(child,"metadata_last_modified","metadata") 252 | 253 | 254 | def install(root,argument,pd_dir): 255 | global domainPath 256 | global installationSettings 257 | 258 | """Install the package with the given ID, to the given domain path""" 259 | for child in root: 260 | if child.tag == "domain": 261 | if child.attrib is None: 262 | print("Fatal error: found a domain without an ID"); 263 | exit(1) 264 | 265 | if 'id' not in child.attrib: 266 | print("Fatal error: found a domain without an ID"); 267 | exit(1) 268 | 269 | thisID = child.attrib['id'] 270 | 271 | if thisID == argument: 272 | 273 | needMetadata, metadataFiles, metadataDirs = downloadMetadataIfNew(child) 274 | needDomain, domainFiles, domainDirs = downloadDomainAndProblemsIfNew(child) 275 | 276 | if not needMetadata and not needDomain: 277 | print("{0} is already the latest version".format(thisID)) 278 | continue 279 | 280 | 281 | if needMetadata: 282 | 283 | if len(metadataFiles) > 0: 284 | print("Removing old metadata for {0}".format(thisID)) 285 | 286 | for f in metadataFiles: 287 | """Removing a metadata file""" 288 | os.remove(join(domainPath,f)) 289 | 290 | print("Installing new metadata for 
{0}".format(thisID)) 291 | else: 292 | print("Installing metadata for {0}".format(thisID)) 293 | 294 | 295 | with zipfile.ZipFile(join(pd_dir,"metadata.zip"),'r') as metadataZip: 296 | metadataZip.extractall(domainPath) 297 | 298 | newDetailsRoot = etree.SubElement(installationSettings,"installed_metadata", attrib={'ref':thisID}) 299 | etree.SubElement(newDetailsRoot, "date").text = getDateFromDomain(child,"metadata_last_modified") 300 | 301 | for info in metadataZip.infolist(): 302 | etree.SubElement(newDetailsRoot,"file").text = info.filename 303 | #print(info.filename) 304 | 305 | os.remove(join(pd_dir,"metadata.zip")) 306 | 307 | if needDomain: 308 | 309 | if len(domainFiles) > 0: 310 | print("Removing old domain and problems for {0}".format(thisID)) 311 | 312 | for f in domainFiles: 313 | """Removing a domain/problem file""" 314 | os.remove(join(domainPath,f)) 315 | 316 | print("Installing new domain and probelms for {0}".format(thisID)) 317 | else: 318 | print("Installing domain and problems for {0}".format(thisID)) 319 | 320 | 321 | with zipfile.ZipFile(join(pd_dir,"domain_and_problems.zip"),'r') as problemsZip: 322 | problemsZip.extractall(domainPath) 323 | 324 | newDetailsRoot = etree.SubElement(installationSettings,"installed_domain_and_problems", attrib={'ref':thisID}) 325 | etree.SubElement(newDetailsRoot, "date").text = getDateFromDomain(child,"files_last_modified") 326 | 327 | for info in problemsZip.infolist(): 328 | etree.SubElement(newDetailsRoot,"file").text = info.filename 329 | #print(info.filename) 330 | 331 | os.remove(join(pd_dir,"domain_and_problems.zip")) 332 | 333 | print("Updating settings.xml") 334 | saveSettings() 335 | 336 | 337 | 338 | 339 | def upgrade(packageList,pd_dir): 340 | """Update any installed packages to newer versions, according to date-stamps""" 341 | 342 | global domainPath 343 | global installationSettings 344 | 345 | with gzip.open(packageList,'rb') as packagesFile: 346 | tree = etree.parse(packagesFile) 347 | root = tree.getroot() 348 | 349 | latestMetadataDates = {} 350 | latestFilesDates = {} 351 | 352 | for child in root: 353 | if child.tag == "domain": 354 | id = child.attrib['id'] 355 | for timechild in child: 356 | if timechild.tag == "files_last_modified": 357 | latestFilesDates[id] = timechild.text 358 | 359 | elif timechild.tag == "metadata_last_modified": 360 | latestMetadataDates[id] = timechild.text 361 | 362 | toUpdate = {} 363 | 364 | for installed in installationSettings: 365 | if installed.tag == "installed_metadata": 366 | id = installed.attrib['ref'] 367 | 368 | if id not in latestMetadataDates: 369 | print("Warning: metadata is installed for {0}, but it is no longer on the package list".format(id)) 370 | continue 371 | 372 | innerchild = installed[0] 373 | 374 | if innerchild.tag != "date": 375 | print("Malformed settings.xml file, cannot continue: expect to find 'date' as the first child of 'installed_metadata'") 376 | exit(1) 377 | 378 | if latestMetadataDates[id] > innerchild.text: 379 | toUpdate[id] = (True,False) 380 | 381 | for installed in installationSettings: 382 | if installed.tag == "installed_domain_and_problems": 383 | id = installed.attrib['ref'] 384 | 385 | if id not in latestFilesDates: 386 | print("Warning: domain and problem files are installed for {0}, but it is no longer on the package list".format(id)) 387 | continue 388 | 389 | innerchild = installed[0] 390 | 391 | if innerchild.tag != "date": 392 | print("Malformed settings.xml file, cannot continue: expect to find 'date' as the first child of 
'installed_domain_and_problems'") 393 | exit(1) 394 | 395 | if latestFilesDates[id] > innerchild.text: 396 | toUpdate[id] = (True,True) 397 | 398 | 399 | for id in toUpdate: 400 | if toUpdate[id] == (True,True): 401 | print("Updating metadata and domain/problems for {0}".format(id)) 402 | else: 403 | print("Updating metadata for {0}".format(id)) 404 | 405 | install(root,id,pd_dir) 406 | 407 | print("All up to date") 408 | 409 | if __name__ == "__main__": 410 | 411 | home_dir = os.path.expanduser("~") 412 | 413 | pd_dir = join(home_dir,".planning.domains") 414 | 415 | checkExists(pd_dir) 416 | 417 | loadSettings(home_dir, pd_dir) 418 | 419 | if installationSettings is None: 420 | print("Fatal error: could not establish installation settings") 421 | exit(1) 422 | 423 | #don't download the package list twice, if the script is ran with the update option, but packages.xml was missing 424 | downloadedPackageList = False 425 | packageList = join(pd_dir,"packages.xml.gz") 426 | 427 | if not isfile(packageList): 428 | print("No package list found, downloading it") 429 | update(packageList) 430 | downloadedPackageList = True 431 | 432 | 433 | if len(sys.argv) == 1: 434 | print("""No command-line options given. Usage: 435 | 436 | planning.domains.py update Update the packages.xml list to the latest version 437 | planning.domains.py upgrade Upgrade installed packages (and/or their metadata) to the latest version 438 | planning.domains.py find [string] Find packages whose title/ID contains 'string' 439 | planning.domains.py install [id] [id] ... Install the packages with the IDs given""") 440 | 441 | exit(0) 442 | 443 | 444 | root = None 445 | 446 | i = 1 447 | 448 | while i < len(sys.argv): 449 | if sys.argv[i] == "update": 450 | if downloadedPackageList: 451 | print("Already downloaded package list") 452 | else: 453 | print("Downloading package list") 454 | update(packageList) 455 | downloadedPackageList = True 456 | 457 | i += 1 458 | 459 | elif sys.argv[i] == "upgrade": 460 | 461 | upgrade(packageList,pd_dir) 462 | i += 1 463 | 464 | else: 465 | 466 | command = sys.argv[i] 467 | i += 1 468 | 469 | if i == len(sys.argv): 470 | print("Error: expected an argument after {0}".format(command)) 471 | exit(1) 472 | 473 | while i < len(sys.argv): 474 | argument = sys.argv[i] 475 | i += 1 476 | 477 | argument = argument.rstrip() 478 | argument = argument.lstrip() 479 | 480 | if len(argument) == 0: 481 | print("Warning: expected non-empty argument after {0}".format(command)) 482 | continue 483 | 484 | 485 | if root is None: 486 | with gzip.open(packageList,'rb') as packagesFile: 487 | tree = etree.parse(packagesFile) 488 | root = tree.getroot() 489 | 490 | if command == "find": 491 | find(root,argument) 492 | 493 | elif command == "install": 494 | install(root,argument,pd_dir) 495 | 496 | 497 | -------------------------------------------------------------------------------- /planning.domains.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import argparse, os, pprint, sys 4 | 5 | import xml.etree.ElementTree as etree 6 | 7 | import planning_domains_api as api 8 | 9 | langAttribute = "{http://www.w3.org/XML/1998/namespace}lang" 10 | 11 | domainPath = None 12 | installationSettings = None 13 | installationTree = None 14 | userEmail = None 15 | userToken = None 16 | 17 | defaultNamespace = "http://settings.planning.domains" 18 | 19 | 20 | 21 | def checkExists(pd_dir): 22 | """Check ~/.planning.domains exists, and is not a file""" 23 | 24 | if 
os.path.isfile(pd_dir): 25 | print("Fatal error: need to store settings in {0}, but there is a file with that name".format(pd_dir)) 26 | exit(1) 27 | 28 | 29 | if not os.path.isdir(pd_dir): 30 | 31 | 32 | print(""" 33 | == Pre-release client for planning.domains == 34 | 35 | This is pre-release software, for accessing the content on 36 | api.planning.domains. It is released without warranty 37 | (including the implied warranties of merchantability 38 | or fitness for a particular purpose). 39 | 40 | Send bug reports to Andrew Coles (andrew.coles@kcl.ac.uk) 41 | or Christian Muise (christian.muise@gmail.com) 42 | 43 | """) 44 | 45 | 46 | print("Making directory {0}...\n".format(pd_dir)) 47 | try: 48 | os.mkdir(pd_dir) 49 | 50 | except OSError: 51 | print("Cannot make directory") 52 | exit(1) 53 | 54 | 55 | 56 | def saveSettings(): 57 | 58 | global installationTree 59 | 60 | settingsXML = os.path.join(pd_dir,"settings.xml") 61 | 62 | with open(settingsXML,"wb") as settingsFile: 63 | installationTree.write(settingsFile) 64 | 65 | 66 | 67 | def fetchPlanningDomains(domainPath): 68 | try: 69 | resp = input("Clone the domain repository (~50Mb download / ~1Gb uncompressed) to directory {0}? (y/n) ".format(domainPath)) 70 | if 'y' == resp: 71 | os.system("git clone https://github.com/AI-Planning/classical-domains.git {0}".format(domainPath)) 72 | else: 73 | print("Aborting fetching domains for the directory {0}".format(domainPath)) 74 | except OSError: 75 | print("Cannot make directory {0}".format(domainPath)) 76 | exit(1) 77 | 78 | 79 | 80 | def loadSettings(home_dir,pd_dir): 81 | """Get the domain path from the settings.xml file in pd_dir. If no domain path exists, ask for one.""" 82 | 83 | settingsXML = os.path.join(pd_dir,"settings.xml") 84 | 85 | #print("Loading settings from {0}".format(settingsXML)) 86 | 87 | global installationTree 88 | global installationSettings 89 | global domainPath 90 | global userEmail 91 | global userToken 92 | 93 | if os.path.isfile(settingsXML): 94 | installationTree = etree.parse(settingsXML) 95 | installationSettings = installationTree.getroot() 96 | 97 | for child in installationSettings: 98 | if child.tag == "domain_path": 99 | domainPath = child.text 100 | 101 | if not os.path.isdir(domainPath): 102 | fetchPlanningDomains(domainPath) 103 | 104 | if child.tag == "email": 105 | userEmail = child.text 106 | 107 | if child.tag == "token": 108 | userToken = child.text 109 | 110 | return 111 | 112 | if installationSettings is None: 113 | installationSettings = etree.Element("{http://settings.planning.domains}settings") 114 | installationTree = etree.ElementTree(installationSettings) 115 | 116 | domainPath = input("Enter path for installing files (or hit enter to use {0}): ".format(os.path.join(home_dir,"planning.domains"))) 117 | 118 | domainPath = domainPath.lstrip() 119 | domainpath = domainPath.rstrip() 120 | 121 | if domainPath == "": 122 | domainPath = os.path.join(home_dir,"planning.domains") 123 | 124 | if os.path.isfile(domainPath): 125 | print("Fatal error: there is already a file called {0}".format(domainPath)) 126 | exit(1) 127 | 128 | if not os.path.isdir(domainPath): 129 | fetchPlanningDomains(domainPath) 130 | 131 | etree.SubElement(installationSettings,"domain_path").text = domainPath 132 | 133 | userEmail = input("Enter email for API updates: ") 134 | userToken = input("Enter token for API updates (leave blank if none provided): ") 135 | 136 | etree.SubElement(installationSettings,"email").text = userEmail 137 | 
etree.SubElement(installationSettings,"token").text = userToken 138 | 139 | saveSettings() 140 | 141 | 142 | def register(): 143 | global userEmail 144 | global userToken 145 | 146 | userEmail = input("Enter email for API updates (leave blank for %s): " % userEmail) or userEmail 147 | userToken = input("Enter token for API updates (leave blank for %s): " % userToken) or userToken 148 | 149 | list(filter(lambda x: x.tag == 'email', installationSettings))[0].text = userEmail 150 | list(filter(lambda x: x.tag == 'token', installationSettings))[0].text = userToken 151 | 152 | saveSettings() 153 | 154 | print("Email and token settings saved!\n") 155 | 156 | 157 | def find(sub, arg, form): 158 | """Find an object of type sub that matches argument arg.""" 159 | 160 | if sub == 'collections': 161 | res = api.find_collections(arg, form) 162 | elif sub == 'domains': 163 | res = api.find_domains(arg, form) 164 | elif sub == 'problems': 165 | res = api.find_problems(arg, form) 166 | else: 167 | print("Error: Unrecognized sub-command, {0}".format(sub)) 168 | exit(1) 169 | 170 | pprint.pprint(res, sort_dicts=False) 171 | 172 | def show(sub, arg, form): 173 | """Show an object of type sub that matches the id arg.""" 174 | 175 | arg = int(arg) 176 | 177 | if sub == 'plan': 178 | print(api.get_plan(arg, form)) 179 | return 180 | 181 | if sub == 'collection': 182 | res = api.get_collection(arg, form) 183 | elif sub == 'domain': 184 | res = api.get_domain(arg, form) 185 | elif sub == 'problem': 186 | res = api.get_problem(arg, form) 187 | else: 188 | print("Error: Unrecognized sub-command, {0}".format(sub)) 189 | exit(1) 190 | 191 | pprint.pprint(res, sort_dicts=False) 192 | 193 | def submit_plan(pid, pfile, formalism): 194 | with open(pfile) as f: 195 | plan = f.read() 196 | api.submit_plan(pid, plan, formalism) 197 | 198 | def cache(cid, outdir, formalism, include_data = False): 199 | 200 | print("Caching collection %d to [%s] (data included = %s)..." % (cid, outdir, str(include_data))) 201 | 202 | if os.path.exists(outdir): 203 | print("Error: Output directory already exists.") 204 | exit(1) 205 | else: 206 | os.mkdir(outdir) 207 | 208 | domains = {} 209 | problem_data = {} 210 | domain_data = api.get_domains(cid, formalism) 211 | domain_names = [dom['domain_name'] for dom in domain_data] 212 | assert len(set(domain_names)) == len(domain_names), "Error: It appears as though the collection has repeated domain names." 
213 | 214 | for dom in domain_data: 215 | 216 | dname = dom['domain_name'] 217 | 218 | # Map the domain name to the list of domain-problem pairs and problem data 219 | domains[dname] = [] 220 | problem_data[dname] = [] 221 | 222 | # Make the directory for the domain 223 | os.mkdir(os.path.join(outdir, dname)) 224 | 225 | # Turn the links into relative paths for this machine 226 | probs = [api.localize(p) for p in api.get_problems(dom['domain_id'], formalism)] 227 | 228 | # Copy the domain and problem files to their appropriate directory 229 | for i in range(len(probs)): 230 | dpath = os.path.join(dname, "domain_%.2d.pddl" % (i+1)) 231 | ppath = os.path.join(dname, "prob_%.2d.pddl" % (i+1)) 232 | 233 | os.system("cp %s %s" % (probs[i]['domain_path'], os.path.join(outdir,dpath))) 234 | os.system("cp %s %s" % (probs[i]['problem_path'], os.path.join(outdir,ppath))) 235 | 236 | domains[dname].append((dpath,ppath)) 237 | 238 | if include_data: 239 | problem_data[dname].append(probs[i]) 240 | problem_data[dname][-1]['domain_path'] = os.path.abspath(os.path.join(outdir,dpath)) 241 | problem_data[dname][-1]['problem_path'] = os.path.abspath(os.path.join(outdir,ppath)) 242 | 243 | with open(os.path.join(outdir, "domains.py"), 'w') as f: 244 | f.write('\n# Use "from domains import DOMAINS" to get the benchmark set\n') 245 | if include_data: 246 | f.write('\n# Use "from domains import DATA" to get the problem data (aligns with the DOMAINS list)\n') 247 | f.write('\nDOMAINS = ') 248 | f.write(pprint.pformat(domains)) 249 | if include_data: 250 | f.write('\n\nDATA = ') 251 | f.write(pprint.pformat(problem_data)) 252 | f.write('\n') 253 | 254 | print("Done!\n") 255 | 256 | if __name__ == "__main__": 257 | 258 | home_dir = os.path.expanduser("~") 259 | 260 | pd_dir = os.path.join(home_dir,".planning.domains") 261 | 262 | checkExists(pd_dir) 263 | 264 | loadSettings(home_dir, pd_dir) 265 | 266 | if installationSettings is None: 267 | print("Fatal error: could not establish installation settings") 268 | exit(1) 269 | 270 | 271 | parser = argparse.ArgumentParser(description="Planning Domains CLI") 272 | subparsers = parser.add_subparsers(dest="command") 273 | 274 | def add_formalism_argument(subparser): 275 | subparser.add_argument("--formalism", choices=["classical", "rddl"], default="classical", help="Specify the formalism for the command.") 276 | 277 | # Update 278 | update_parser = subparsers.add_parser("update", help="Update the local domain repository.") 279 | 280 | # Register 281 | register_parser = subparsers.add_parser("register", help="Register your email and token for making API edits.") 282 | 283 | # Find 284 | find_parser = subparsers.add_parser("find", help="Find collections, domains, or problems whose title/ID contains a string.") 285 | find_parser.add_argument("--type", choices=["collections", "domains", "problems"], help="Type of item to find.") 286 | find_parser.add_argument("--query", help="String to search for in the titles/IDs.") 287 | add_formalism_argument(find_parser) 288 | 289 | # Show 290 | show_parser = subparsers.add_parser("show", help="Find and show collections, domains, problems, or plans with a specific ID.") 291 | show_parser.add_argument("--type", choices=["collection", "domain", "problem", "plan"], help="Type of item to show.") 292 | show_parser.add_argument("--id", type=int, help="ID of the item to show.") 293 | add_formalism_argument(show_parser) 294 | 295 | # List 296 | list_parser = subparsers.add_parser("list", help="Lists collections, tags, or problems with a null 
attribute setting.") 297 | list_parser.add_argument("--type", choices=["collections", "tags", "null-attribute"], help="Type of items to list.") 298 | list_parser.add_argument("--attribute", nargs="?", default=None, help="Attribute setting to search for (only for null-attribute).") 299 | add_formalism_argument(list_parser) 300 | 301 | # Tag 302 | tag_parser = subparsers.add_parser("tag", help="Tag a collection, domain, or problem with a specific tag.") 303 | tag_parser.add_argument("--type", choices=["collection", "domain", "problem"], help="Type of item to tag.") 304 | tag_parser.add_argument("--id", type=int, help="ID of the item to tag.") 305 | tag_parser.add_argument("--tag", help="Tag to add to the item.") 306 | add_formalism_argument(tag_parser) 307 | 308 | # Untag 309 | untag_parser = subparsers.add_parser("untag", help="Untag a collection, domain, or problem with a specific tag.") 310 | untag_parser.add_argument("--type", choices=["collection", "domain", "problem"], help="Type of item to untag.") 311 | untag_parser.add_argument("--id", type=int, help="ID of the item to untag.") 312 | untag_parser.add_argument("--tag", help="Tag to remove from the item.") 313 | add_formalism_argument(untag_parser) 314 | 315 | # Submit plan 316 | submit_plan_parser = subparsers.add_parser("submit-plan", help="Submit the provided plan for validation and possible storage.") 317 | submit_plan_parser.add_argument("--id", type=int, help="Problem ID for which the plan is provided.") 318 | submit_plan_parser.add_argument("--plan", type=argparse.FileType("r"), help="File containing the plan to submit.") 319 | add_formalism_argument(submit_plan_parser) 320 | 321 | # Cache 322 | cache_parser = subparsers.add_parser("cache", help="Collect all of the domains in a collection into a specified folder.") 323 | cache_parser.add_argument("--id", type=int, help="Collection ID to cache.") 324 | cache_parser.add_argument("--folder", help="Folder to store the cached collection.") 325 | add_formalism_argument(cache_parser) 326 | 327 | # Cache-all 328 | cache_all_parser = subparsers.add_parser("cache-all", help="Collect all domains in a collection into a specified folder, including problem data and statistics.") 329 | cache_all_parser.add_argument("--id", type=int, help="Collection ID to cache.") 330 | cache_all_parser.add_argument("--folder", help="Folder to store the cached collection and problem data/statistics.") 331 | add_formalism_argument(cache_all_parser) 332 | 333 | args = parser.parse_args() 334 | 335 | if args.command is None: 336 | parser.print_help() 337 | exit(0) 338 | 339 | # update 340 | if args.command == "update": 341 | if api.checkForDomainPath(): 342 | print("Updating...") 343 | os.system("cd {0}; git pull".format(api.DOMAIN_PATH)) 344 | else: 345 | print("Error: Domain path is not set.") 346 | 347 | # cache 348 | elif args.command == "cache": 349 | if args.id is None or args.folder is None: 350 | print("Error: Must provide a collection ID and folder name.") 351 | exit(1) 352 | cache(args.id, args.folder, args.formalism) 353 | 354 | # cache-all 355 | elif args.command == "cache-all": 356 | if args.id is None or args.folder is None: 357 | print("Error: Must provide a collection ID and folder name.") 358 | exit(1) 359 | cache(args.id, args.folder, args.formalism, True) 360 | 361 | 362 | # register 363 | elif args.command == "register": 364 | register() 365 | 366 | # submit 367 | elif args.command == "submit-plan": 368 | if args.id is None or args.plan is None: 369 | print("Error: Must provide a problem ID 
and plan file.") 370 | exit(1) 371 | submit_plan(args.id, args.plan, args.formalism) 372 | 373 | # list 374 | elif args.command == "list": 375 | if args.type is None: 376 | print("Error: Must provide a list type.") 377 | exit(1) 378 | if args.type == "tags": 379 | print("{0}\t{1}\n".format('Tag Name'.rjust(26), 'Description')) 380 | tags = api.get_tags(args.formalism) 381 | for t in sorted(tags.keys()): 382 | print("{0}\t{1}".format(t.rjust(26), tags[t])) 383 | print() 384 | elif args.type == "collections": 385 | cols = {c['collection_id']: c for c in api.get_collections(args.formalism)} 386 | for cid in sorted(cols.keys()): 387 | c = cols[cid] 388 | print() 389 | print(" ID: {0}".format(c['collection_id'])) 390 | print(" Name: {0}".format(c['collection_name'])) 391 | print(" #Doms: {0}".format(len(c['domain_set']))) 392 | print("Description: {0}".format(c['description'])) 393 | print() 394 | elif args.type == "null-attribute": 395 | if args.attribute is None: 396 | print("Error: Must provide an attribute name.") 397 | exit(1) 398 | nullprobs = api.get_null_attribute_problems(args.attribute, args.formalism) 399 | if len(nullprobs) < 25: 400 | pprint.pprint(nullprobs) 401 | else: 402 | print("{0} problems have {1} set to null. 10 examples:\n".format(len(nullprobs), args.attribute)) 403 | print('\n'.join([" - {0}: {1}".format(i, nullprobs[i]) for i in list(nullprobs.keys())[:10]])) 404 | print(' - ...') 405 | else: 406 | print("Error: Unknown list type.") 407 | exit(1) 408 | 409 | # find 410 | elif args.command == "find": 411 | if args.type is None or args.query is None: 412 | print("Error: Must provide a search type and query.") 413 | exit(1) 414 | find(args.type, args.query, args.formalism) 415 | 416 | # show 417 | elif args.command == "show": 418 | if args.type is None or args.id is None: 419 | print("Error: Must provide a show type and ID.") 420 | exit(1) 421 | show(args.type, args.id, args.formalism) 422 | 423 | # tag 424 | elif args.command == "tag": 425 | if args.type is None or args.id is None or args.tag is None: 426 | print("Error: Must provide a tag type, ID, and tag name.") 427 | exit(1) 428 | 429 | if args.type == "collection": 430 | api.tag_collection(args.id, args.tag, args.formalism) 431 | elif args.type == "domain": 432 | api.tag_domain(args.id, args.tag, args.formalism) 433 | elif args.type == "problem": 434 | api.tag_problem(args.id, args.tag, args.formalism) 435 | else: 436 | print("Error: Can only tag a collection, domain, or problem.") 437 | exit(1) 438 | 439 | # untag 440 | elif args.command == "untag": 441 | if args.type is None or args.id is None or args.tag is None: 442 | print("Error: Must provide an untag type, ID, and tag name.") 443 | exit(1) 444 | if args.type == "collection": 445 | api.untag_collection(args.id, args.tag, args.formalism) 446 | elif args.type == "domain": 447 | api.untag_domain(args.id, args.tag, args.formalism) 448 | elif args.type == "problem": 449 | api.untag_problem(args.id, args.tag, args.formalism) 450 | else: 451 | print("Error: Can only untag a collection, domain, or problem.") 452 | exit(1) 453 | 454 | else: 455 | parser.print_help() 456 | exit(0) 457 | print() 458 | 459 | 460 | -------------------------------------------------------------------------------- /planning_domains_api.py: -------------------------------------------------------------------------------- 1 | 2 | import http.client, urllib.parse, json, os 3 | import xml.etree.ElementTree as etree 4 | 5 | URL = 'api.planning.domains' 6 | VERSION = '0.5' 7 | 8 | DOMAIN_PATH = 
False 9 | USER_EMAIL = False 10 | USER_TOKEN = False 11 | 12 | def checkForDomainPath(): 13 | """Returns the domain path if one exists and is saved in the settings.xml""" 14 | 15 | home_dir = os.path.expanduser("~") 16 | pd_dir = os.path.join(home_dir,".planning.domains") 17 | settingsXML = os.path.join(pd_dir,"settings.xml") 18 | 19 | if not os.path.isdir(pd_dir) or not os.path.isfile(settingsXML): 20 | return False 21 | 22 | installationTree = etree.parse(settingsXML) 23 | if installationTree is None: 24 | return False 25 | 26 | installationSettings = installationTree.getroot() 27 | if installationSettings is None: 28 | return False 29 | 30 | domainPath = str(list(filter(lambda x: x.tag == 'domain_path', installationSettings))[0].text) 31 | if not os.path.isdir(domainPath): 32 | return False 33 | 34 | global DOMAIN_PATH 35 | global USER_EMAIL 36 | global USER_TOKEN 37 | DOMAIN_PATH = domainPath 38 | if 'email' in [x.tag for x in installationSettings]: 39 | USER_EMAIL = list(filter(lambda x: x.tag == 'email', installationSettings))[0].text 40 | if 'token' in [x.tag for x in installationSettings]: 41 | USER_TOKEN = list(filter(lambda x: x.tag == 'token', installationSettings))[0].text 42 | return True 43 | 44 | def query(qs, formalism, qtype="GET", params={}, offline=False, format='/json'): 45 | 46 | assert not offline, "Error: Offline mode is not supported currently." 47 | 48 | headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"} 49 | 50 | params = urllib.parse.urlencode(params) 51 | conn = http.client.HTTPSConnection(URL) 52 | if formalism == "": 53 | conn.request(qtype, f"{format}/{qs}", params, headers) 54 | else: 55 | conn.request(qtype, f"{format}/{formalism}/{qs}", params, headers) 56 | response = conn.getresponse() 57 | tmp = response.read().decode('utf-8') 58 | if "
Payload Too Large" in tmp: 59 | data = { "error": True, "message": "Payload too large."} 60 | else: 61 | try: 62 | data = json.loads(tmp) 63 | except: 64 | data = { "error": True, "message": f"Invalid JSON response:\n{tmp}"} 65 | conn.close() 66 | 67 | return data 68 | 69 | def simple_query(qs, formalism): 70 | res = query(qs, formalism) 71 | if res['error']: 72 | print ("Error: %s" % res['message']) 73 | return [] 74 | else: 75 | return res['result'] 76 | 77 | def update_stat(stat_type, iid, attribute, value, description, formalism): 78 | 79 | params = {'user': USER_EMAIL, 80 | 'password': USER_TOKEN, 81 | 'key': attribute, 82 | 'value': value, 83 | 'desc': description} 84 | 85 | res = query("update%s/%d" % (stat_type, iid), 86 | formalism, 87 | qtype='POST', 88 | params=params, 89 | offline=False, 90 | format='') 91 | 92 | if res['error']: 93 | print ("Error: %s" % res['message']) 94 | else: 95 | print ("Result: %s" % str(res)) 96 | 97 | def change_tag(tag_type, iid, tid, formalism): 98 | 99 | params = {'user': USER_EMAIL, 100 | 'password': USER_TOKEN, 101 | 'tag_id': tid} 102 | 103 | res = query("%s/%d" % (tag_type, iid), 104 | formalism, 105 | qtype='POST', 106 | params=params, 107 | offline=False, 108 | format='') 109 | 110 | if res['error']: 111 | print ("Error: %s" % res['message']) 112 | else: 113 | print ("Result: %s" % str(res)) 114 | 115 | def create_collection(name, description, tags, ipc, formalism): 116 | 117 | params = {'user': USER_EMAIL, 118 | 'password': USER_TOKEN, 119 | 'formalism': formalism, 120 | 'name': name, 121 | 'ipc': ipc, 122 | 'desc': description, 123 | 'tags': tags, 124 | } 125 | path = f"{formalism}/collection" 126 | res = query(path, 127 | formalism, 128 | qtype='POST', 129 | params = params, 130 | offline=False 131 | ) 132 | 133 | if res['error']: 134 | print ("Error: %s" % res['message']) 135 | return [] 136 | else: 137 | print ("Result: %s" % str(res)) 138 | return res['result'] 139 | 140 | 141 | def get_version(): 142 | """Return the current API version""" 143 | return str(query('version', "")['version']) 144 | 145 | 146 | def get_tags(formalism): 147 | """Get the list of available tags""" 148 | return {t['name']: t['description'] for t in simple_query("tags", formalism)} 149 | 150 | 151 | def get_collections(formalism, ipc = None): 152 | """Return the collections, optionally which are IPC or non-IPC""" 153 | res = query('collections/', formalism) 154 | if res['error']: 155 | print ("Error: %s" % res['message']) 156 | return [] 157 | else: 158 | if ipc is not None: 159 | return list(filter(lambda x: x['ipc'] == ipc, res['result'])) 160 | else: 161 | return res['result'] 162 | 163 | def get_collection(cid, formalism): 164 | """Return the collection of a given id""" 165 | return simple_query("collection/%d" % cid, formalism) 166 | 167 | def find_collections(name, formalism): 168 | """Find the collections matching the string name""" 169 | return simple_query("collections/search?collection_name=%s" % name, formalism) 170 | 171 | def update_collection_stat(cid, attribute, value, description, formalism): 172 | """Update the attribute stat with a given value and description""" 173 | update_stat('collection', cid, attribute, value, description, formalism) 174 | 175 | def tag_collection(cid, tagname, formalism): 176 | """Tag the collection with a given tag""" 177 | tag2id = {t['name']: t['id'] for t in simple_query("tags", formalism)} 178 | if tagname not in tag2id: 179 | print ("Error: Tag %s does not exist" % tagname) 180 | else: 181 | 
change_tag("tagcollection", cid, tag2id[tagname], formalism) 182 | 183 | def untag_collection(cid, tagname, formalism): 184 | """Remove a given tag from a collection""" 185 | tag2id = {t['name']: t['id'] for t in simple_query("tags", formalism)} 186 | if tagname not in tag2id: 187 | print ("Error: Tag %s does not exist" % tagname) 188 | else: 189 | change_tag("untagcollection", cid, tag2id[tagname], formalism) 190 | 191 | 192 | 193 | def get_domains(cid, formalism): 194 | """Return the set of domains for a given collection id""" 195 | return simple_query("domains/%d" % cid, formalism) 196 | 197 | def get_domain(did, formalism): 198 | """Return the domain for a given domain id""" 199 | return simple_query("domain/%d" % did, formalism) 200 | 201 | def find_domains(name, formalism): 202 | """Return the domains matching the string name""" 203 | return simple_query("domains/search?domain_name=%s" % name, formalism) 204 | 205 | def update_domain_stat(did, attribute, value, description, formalism): 206 | """Update the attribute stat with a given value and description""" 207 | update_stat('domain', did, attribute, value, description, formalism) 208 | 209 | def tag_domain(did, tagname, formalism): 210 | """Tag the domain with a given tag""" 211 | tag2id = {t['name']: t['id'] for t in simple_query("tags", formalism)} 212 | if tagname not in tag2id: 213 | print ("Error: Tag %s does not exist" % tagname) 214 | else: 215 | change_tag("tagdomain", did, tag2id[tagname], formalism) 216 | 217 | def untag_domain(did, tagname, formalism): 218 | """Remove a given tag from a domain""" 219 | tag2id = {t['name']: t['id'] for t in simple_query("tags", formalism)} 220 | if tagname not in tag2id: 221 | print ("Error: Tag %s does not exist" % tagname) 222 | else: 223 | change_tag("untagdomain", did, tag2id[tagname], formalism) 224 | 225 | 226 | def get_problems(did, formalism): 227 | """Return the set of problems for a given domain id""" 228 | return map(localize, simple_query("problems/%d" % did, formalism)) 229 | 230 | def get_problem(pid, formalism): 231 | """Return the problem for a given problem id""" 232 | return localize(simple_query("problem/%d" % pid, formalism)) 233 | 234 | def find_problems(name, formalism): 235 | """Return the problems matching the string name""" 236 | return list(map(localize, simple_query("problems/search?problem_name=%s" % name, formalism))) 237 | 238 | def update_problem_stat(pid, attribute, value, description, formalism): 239 | """Update the attribute stat with a given value and description""" 240 | update_stat('problem', pid, attribute, value, description, formalism) 241 | 242 | def get_null_attribute_problems(attribute, formalism): 243 | """Fetches all of the problems that do not have the attribute set yet""" 244 | return {i['id']: (i['domain_path'], i['problem_path']) 245 | for i in map(localize, simple_query("nullattribute/%s" % attribute, formalism))} 246 | 247 | def tag_problem(pid, tagname, formalism): 248 | """Tag the problem with a given tag""" 249 | tag2id = {t['name']: t['id'] for t in simple_query("tags", formalism)} 250 | if tagname not in tag2id: 251 | print ("Error: Tag %s does not exist" % tagname) 252 | else: 253 | change_tag("tagproblem", pid, tag2id[tagname], formalism) 254 | 255 | def untag_problem(pid, tagname, formalism): 256 | """Remove a given tag from a problem""" 257 | tag2id = {t['name']: t['id'] for t in simple_query("tags", formalism)} 258 | if tagname not in tag2id: 259 | print ("Error: Tag %s does not exist" % tagname) 260 | else: 261 | 
change_tag("untagproblem", pid, tag2id[tagname], formalism) 262 | 263 | def get_plan(pid, formalism): 264 | """Return the existing plan for a problem if it exists""" 265 | res = simple_query("plan/%d" % pid, formalism) 266 | if res: 267 | return res['plan'].strip() 268 | return res 269 | 270 | 271 | def submit_plan(pid, plan, formalism): 272 | """Submit the provided plan for validation and possible storage""" 273 | 274 | params = {'plan': plan, 'email': USER_EMAIL} 275 | 276 | res = query("submitplan/%d" % pid, 277 | formalism, 278 | qtype='POST', 279 | params=params, 280 | offline=False, 281 | format='') 282 | if res['error']: 283 | print ("Error: %s" % res['message']) 284 | else: 285 | print ("Result: %s" % str(res)) 286 | 287 | 288 | def localize(prob): 289 | """Convert the relative paths to local ones""" 290 | if not DOMAIN_PATH: 291 | return prob 292 | 293 | toRet = {k:prob[k] for k in prob} 294 | 295 | pathKeys = ['domain_path', 'problem_path'] 296 | for key in pathKeys: 297 | if key in toRet: 298 | toRet[key] = os.path.join(DOMAIN_PATH, prob[key]) 299 | 300 | return toRet 301 | 302 | 303 | def generate_lab_suite(cid, formalism): 304 | """Uses the lab API to generate a suite of problems in a collection""" 305 | try: 306 | from downward.suites import Problem 307 | except: 308 | print ("\n Error: Lab does not seem to be installed ( https://lab.readthedocs.io/ )\n") 309 | return 310 | 311 | SUITE = [] 312 | for d in get_domains(cid, formalism): 313 | for p in get_problems(d['domain_id'], formalism): 314 | SUITE.append(Problem(p['domain'], p['problem'], 315 | domain_file = p['domain_path'], 316 | problem_file = p['problem_path'], 317 | properties = {'api_problem_id': p['problem_id']})) 318 | return SUITE 319 | 320 | 321 | if not checkForDomainPath(): 322 | print ("\n Warning: No domain path is set\n") 323 | 324 | try: 325 | if VERSION != get_version(): 326 | print (f"\n Warning: Script version ({VERSION}) doesn't match API ({get_version()}). 
Do you have the latest version of this file?\n") 327 | except: 328 | pass 329 | -------------------------------------------------------------------------------- /scripts/formalism-initialization/classical/create-meta.py: -------------------------------------------------------------------------------- 1 | 2 | import os, pprint 3 | from krrt.utils import get_file_list, write_file 4 | 5 | from data import * 6 | 7 | forbidden_files = ['__init__', 'api.py'] 8 | 9 | def get_name(dom): 10 | suffixes = ['-sat', '-opt', '-strips', '-fulladl', '-06', '-08', '-00', '-02', '98', '00', '-simpleadl', '-adl'] 11 | name = dom.split('/')[-1] 12 | for s in suffixes: 13 | name = name.split(s)[0] 14 | #if '-' in name: 15 | # print "Check name: %s" % name 16 | if '' == name: 17 | print ("Error: empty name from %s" % dom) 18 | return name 19 | 20 | 21 | def handle_single(dom): 22 | towrite = 'domains = [\n' 23 | 24 | extra_domain = False 25 | domdata = {} 26 | 27 | domdata['name'] = get_name(dom) 28 | domdata['description'] = domain_description[get_name(dom)] 29 | 30 | # Special Cases: 31 | # IPC-2000: freecell (non-pfiles) 32 | # IPC-2002: satellite (p#-pfile#.pddl) 33 | # IPC-2002: freecell (pfile#) 34 | 35 | if './freecell' == dom: 36 | extra_domain = True 37 | 38 | domdata['problems'] = [((dom+'/domain.pddl')[2:], prob[2:]) for prob in sorted(get_file_list(dom, forbidden_list=forbidden_files+['pfile','/domain.pddl']))] 39 | domdata['ipc'] = '2000' 40 | 41 | domdata2 = {} 42 | domdata2['name'] = domdata['name'] 43 | domdata2['description'] = domain_description[get_name(dom)] 44 | domdata2['problems'] = [((dom+'/domain.pddl')[2:], prob[2:]) for prob in sorted(get_file_list(dom, forbidden_list=forbidden_files+['/domain.pddl'], match_list=['pfile']))] 45 | domdata2['ipc'] = '2002' 46 | 47 | elif './satellite' == dom: 48 | extra_domain = True 49 | 50 | domdata['problems'] = [((dom+'/domain.pddl')[2:], prob[2:]) for prob in sorted(get_file_list(dom, forbidden_list=forbidden_files+['/domain.pddl']))] 51 | domdata['ipc'] = ipc_map.get(dom[2:]) 52 | 53 | domdata2 = {} 54 | domdata2['name'] = domdata['name'] 55 | domdata2['description'] = domain_description[get_name(dom)] 56 | domdata2['problems'] = [((dom+'/domain.pddl')[2:], prob[2:]) for prob in sorted(get_file_list(dom, forbidden_list=forbidden_files+['/domain.pddl','-HC-']))] 57 | domdata2['ipc'] = '2002' 58 | 59 | else: 60 | domdata['problems'] = [((dom+'/domain.pddl')[2:], prob[2:]) for prob in sorted(get_file_list(dom, forbidden_list=forbidden_files+['/domain.pddl','/domain-nosplit.pddl','/orig-domain.pddl']))] 61 | domdata['ipc'] = ipc_map.get(dom[2:]) 62 | 63 | towrite += pprint.pformat(domdata) 64 | if extra_domain: 65 | towrite += ',\n' 66 | towrite += pprint.pformat(domdata2) 67 | towrite += '\n]' 68 | 69 | #print "To be printed:\n-------" 70 | #print towrite 71 | #print "-------\n" 72 | 73 | print ("Handling single domain: %s" % dom) 74 | write_file(dom+'/api.py', towrite) 75 | 76 | 77 | def handle_double(dom): 78 | towrite = 'domains = [\n' 79 | 80 | domdata = {} 81 | 82 | domdata['name'] = get_name(dom) 83 | domdata['description'] = domain_description[get_name(dom)] 84 | 85 | domfiles = get_file_list(dom, match_list=['domain'], forbidden_list=forbidden_files) 86 | prbfiles = get_file_list(dom, forbidden_list=forbidden_files+['domain']) 87 | 88 | if len(domfiles) == len(prbfiles): 89 | def remdom(dom): 90 | toret = dom 91 | for s in ['-domain', 'domain_']: 92 | toret = ''.join(toret.split(s)) 93 | return toret 94 | dmap = {remdom(d): d for d in 
domfiles} 95 | if all([k in prbfiles for k in dmap]): 96 | print ("Handling multi-domain: %s" % dom) 97 | assert len(set(dmap.keys())) == len(set(prbfiles)) 98 | domdata['problems'] = [(dmap[prob][2:], prob[2:]) for prob in sorted(prbfiles)] 99 | domdata['ipc'] = ipc_map.get(dom[2:]) 100 | elif dom in ['./psr-small', './airport']: 101 | print ("Handling custom 50-problem domain: %s" % dom) 102 | assert 100 == len(get_file_list(dom, match_list=['pddl'], forbidden_list=forbidden_files)) 103 | probs = [] 104 | for i in range(1,51): 105 | d = get_file_list(dom, match_list=["p%02d-domain" % i], forbidden_list=forbidden_files) 106 | p = get_file_list(dom, match_list=["p%02d-" % i], forbidden_list=forbidden_files+['domain']) 107 | assert 1 == len(d), str(d) 108 | assert 1 == len(p), str(p) 109 | probs.append((d[0][2:], p[0][2:])) 110 | domdata['problems'] = sorted(probs) 111 | domdata['ipc'] = ipc_map.get(dom[2:]) 112 | else: 113 | print ("Unhandled balanced multi-domain: %s" % dom) 114 | return 115 | else: 116 | print ("Unhandled lopsided multi-domain: %s" % dom) 117 | 118 | 119 | towrite += pprint.pformat(domdata) 120 | towrite += '\n]' 121 | 122 | #print "To be printed:\n-------" 123 | #print towrite 124 | #print "-------\n" 125 | 126 | write_file(dom+'/api.py', towrite) 127 | 128 | 129 | domains = get_file_list('.', forbidden_list=['.py']) 130 | 131 | single_dom = [] 132 | multi_dom = [] 133 | done_dom = [] 134 | 135 | print 136 | 137 | for dom in domains: 138 | 139 | if os.path.isfile(dom+'/api.py'): 140 | done_dom.append(dom) 141 | else: 142 | if os.path.isfile(dom+'/domain.pddl'): 143 | single_dom.append(dom) 144 | for i in get_file_list(dom, forbidden_list=forbidden_files+['/domain.pddl']): 145 | if 'dom' in i.split('/')[-1]: 146 | print ("Warning: Double domain in %s must be handled." 
% dom) 147 | else: 148 | multi_dom.append(dom) 149 | os.system("touch %s/__init__.py" % dom) 150 | 151 | print ("\nSingle doms: %d" % len(single_dom)) 152 | print (map(get_name, single_dom)) 153 | print ("\nMulti doms: %d" % len(multi_dom)) 154 | print (map(get_name, multi_dom)) 155 | print ("\nDone doms: %d" % len(done_dom)) 156 | print (map(get_name, done_dom)) 157 | print () 158 | 159 | for ipc in ipcs: 160 | for dom in ipc: 161 | if not os.path.isdir('./'+dom): 162 | print ("Bad dom: %s" % dom) 163 | 164 | for dom in single_dom: 165 | handle_single(dom) 166 | 167 | for dom in multi_dom: 168 | handle_double(dom) 169 | 170 | print 171 | -------------------------------------------------------------------------------- /scripts/formalism-initialization/classical/data.py: -------------------------------------------------------------------------------- 1 | 2 | # Mapping of folders to the IPC they come from 3 | 4 | # Special cases not included: 5 | # IPC-2000: freecell (non-pfiles) 6 | # IPC-2002: satellite (p#-pfile#.pddl) 7 | # IPC-2002: freecell (pfile#) 8 | 9 | ipc1998 = 'assembly / gripper / logistics98 / movie / mprime / mystery'.split(' / ') 10 | ipc2000 = 'blocks / elevators-00-full / elevators-00-adl / elevators-00-strips / logistics00 / schedule'.split(' / ') 11 | ipc2002 = 'depot / driverlog / zenotravel / rovers-02'.split(' / ') 12 | ipc2004 = 'airport / airport-adl / pipesworld-tankage / pipesworld-notankage / optical-telegraphs / philosophers / psr-small / psr-middle / psr-large / satellite / settlers'.split(' / ') 13 | ipc2006 = 'openstacks / pathways / pipesworld-06 / rovers / storage / tpp / trucks'.split(' / ') 14 | ipc2008 = 'elevators-opt08-strips / openstacks-opt08-adl / openstacks-opt08-strips / parcprinter-08-strips / pegsol-08-strips / scanalyzer-08-strips / sokoban-opt08-strips / transport-opt08-strips / woodworking-opt08-strips / elevators-sat08-strips / openstacks-sat08-adl / openstacks-sat08-strips / sokoban-sat08-strips / transport-sat08-strips / woodworking-sat08-strips / cybersec'.split(' / ') 15 | ipc2011 = 'barman-opt11-strips / elevators-opt11-strips / floortile-opt11-strips / nomystery-opt11-strips / openstacks-opt11-strips / parcprinter-opt11-strips / parking-opt11-strips / pegsol-opt11-strips / scanalyzer-opt11-strips / sokoban-opt11-strips / tidybot-opt11-strips / transport-opt11-strips / visitall-opt11-strips / woodworking-opt11-strips / barman-sat11-strips / elevators-sat11-strips / floortile-sat11-strips / nomystery-sat11-strips / openstacks-sat11-strips / parcprinter-sat11-strips / parking-sat11-strips / pegsol-sat11-strips / scanalyzer-sat11-strips / sokoban-sat11-strips / tidybot-sat11-strips / transport-sat11-strips / visitall-sat11-strips / woodworking-sat11-strips'.split(' / ') 16 | ipc2014 = 'barman-opt14-strips / barman-sat14-strips / cavediving / childsnack-opt14-strips / childsnack-sat14-strips / citycar-opt14-adl / citycar-sat14-adl / floortile-opt14-strips / floortile-sat14-strips / ged-opt14-strips / ged-sat14-strips / hiking-opt14-strips / hiking-sat14-strips / maintenance-opt14-adl / maintenance-sat14-adl / openstacks-opt14-strips / openstacks-sat14-strips / parking-opt14-strips / parking-sat14-strips / tetris-opt14-strips / tetris-sat14-strips / thoughtful-sat14-strips / tidybot-opt14-strips / transport-opt14-strips / transport-sat14-strips / visitall-opt14-strips / visitall-sat14-strips'.split(' / ') 17 | 18 | ipcs = [ipc1998, ipc2000, ipc2002, ipc2004, ipc2006, ipc2008, ipc2011, ipc2014] 19 | 20 | ipc_map = {} 21 | 22 | for (ipc,year) 
in zip(ipcs, '1998 2000 2002 2004 2006 2008 2011 2014'.split()): 23 | for dom in ipc: 24 | ipc_map[dom] = year 25 | 26 | # General domain descriptions to populate the database 27 | domain_description = { 28 | 'airport': 'Developed by Jorg Hoffmann and Sebastian Trug. Planners control the ground traffic on airports. The competition test suites were generated by exporting traffic situations arising during simulation runs in the airport simulation tool Astras (by Wolfgang Hatzack). The largest instances in the test suites are realistic encodings of Munich airport.', 29 | 'assembly': 'The goal is to assemble a complex object made out of subassemblies. The sequence of steps must obey a given partial order. In addition, through poor engineering design, many subassemblies must be installed temporarily in one assembly, then removed and given a permanent home in another. This domain was created by Drew McDermott.', 30 | 'barman': 'In this domain there is a robot barman that manipulates drink dispensers, glasses and a shaker. The goal is to find a plan of the robots actions that serves a desired set of drinks. In this domain deletes of actions encode relevant knowledge given that robot hands can only grasp one object at a time and given that glasses need to be empty and clean to be filled.', 31 | 'blocks': 'The blocks world is one of the most famous planning domains in artificial intelligence. Imagine a set of cubes (blocks) sitting on a table. The goal is to build one or more vertical stacks of blocks. The catch is that only one block may be moved at a time: it may either be placed on the table or placed atop another block. Because of this, any blocks that are, at a given time, under another block cannot be moved.', 32 | 'cavediving': 'There are a set of divers, each of who can carry 4 tanks of air. These divers must be hired to go into an underwater cave and either take photos or prepare the way for other divers by dropping full tanks of air. The cave is too narrow for more than one diver to enter at a time. The cave system is represented by an undirected acyclic graph. Divers have a single point of entry. Certain leaf nodes of the cave branches are objectives that the divers must photograph. Swimming and photographing both consume air tanks. Divers must exit the cave and decompress at the end. They can therefore only make a single trip into the cave. Certain divers have no confidence in other divers and will refuse to work if someone they have no confidence in has already worked. Divers have hiring costs inversely proportional to how hard they are to work with.', 33 | 'childsnack': 'This domain is to plan how to make and serve sandwiches for a group of children in which some are allergic to gluten. There are two actions for making sandwiches from their ingredients. The first one makes a sandwich and the second one makes a sandwich taking into account that all ingredients are gluten-free. There are also actions to put a sandwich on a tray and to serve sandwiches. Problems in this domain define the ingredients to make sandwiches at the initial state. Goals consist of having all kids served with a sandwich to which they are not allergic.', 34 | 'citycar': 'This model aims to simulate the impact of road building / demolition on traffic flows. A city is represented as an acyclic graph, in which each node is a junction and edges are "potential" roads. Some cars start from different positions and have to reach their final destination as soon as possible. 
34 | 'citycar': 'This model aims to simulate the impact of road building / demolition on traffic flows. A city is represented as an acyclic graph, in which each node is a junction and edges are "potential" roads. Some cars start from different positions and have to reach their final destination as soon as possible. The agent has a finite number of roads available, which can be built for connecting two junctions and allowing a car to move between them. Roads can also be removed, and placed somewhere else, if needed. In order to place roads or to move cars, the destination junction must be clear, i.e., no cars should be there.',
35 | 'cybersec': 'A domain that models vulnerability analysis for cyber defense.',
36 | 'depot': 'This domain was devised in order to see what would happen if two previously well-researched domains were joined together. These were the logistics and blocks domains. They are combined to form a domain in which trucks can transport crates around and then the crates must be stacked onto pallets at their destinations. The stacking is achieved using hoists, so the stacking problem is like a blocks-world problem with hands. Trucks can behave like "tables", since the pallets on which crates are stacked are limited.',
37 | 'driverlog': 'This domain involves driving trucks around delivering packages between locations. The complication is that the trucks require drivers who must walk between trucks in order to drive them. The paths for walking and the roads for driving form different maps on the locations.',
38 | 'elevators': 'The scenario is the following: There is a building with N+1 floors, numbered from 0 to N. The building can be separated into blocks of size M+1, where M divides N. Adjacent blocks have a common floor. For example, suppose N=12 and M=4, then we have 13 floors in total (ranging from 0 to 12), which form 3 blocks of 5 floors each, being 0 to 4, 4 to 8 and 8 to 12. The building has K fast (accelerating) elevators that stop only at floors that are multiples of M/2 (so M has to be an even number). Each fast elevator has a capacity of X persons. Furthermore, within each block, there are L slow elevators that stop at every floor of the block. Each slow elevator has a capacity of Y persons (usually Y
[Extraction damage: the dump is garbled from this point up to the middle of /web/web.js. The remaining domain_description entries in data.py (the 'elevators' text above is cut off mid-string), together with the dumps of gen-db.py, rddl.py, processed_result5.json, tag_updater.py and the web/ views (collection.html, domain.html, example.html, planning-domains.js, problem.html), were lost when their markup and angle-bracketed text were stripped. Only stray fragments survive, e.g. table-rendering code that emits headings[heads[i]] header cells and val(data[i][heads[j]]) value cells.]
")
9 | tagstring = ""
10 | for (let index = 0; index < tags.length; index++) {
11 | tagstring += tags[index];
12 | }
13 | if (tagstring.length > 13) {
14 | $("p.tags").html("Problem tags: " + tagstring);
15 | } else {
16 | $("p.tags").html("Problem tags: None");
17 | }
18 | $("h4.d_link").html("" + data.domain + "");
19 | $("details.lowerbound").html("" + x.substring(2, x.length - 1) + "
, ")
29 | d_tagstring = ""
30 | for (let index = 0; index < d_tags.length; index++) {
31 | d_tagstring += d_tags[index];
32 | }
33 | $("p.d_tags").html("Domain tags: " + d_tagstring);
34 | }
35 |
36 | function formatPlanData_problem(plan_data) {
37 | plan = plan_data.plan.split("\n");
38 | console.log(plan);
39 | if (plan[plan.length - 2].includes("cost = ")) {
40 | cost = plan[plan.length - 2].split("= ")[1].split(" ")[0];
41 | $("h3.plan_moves").html("Plan (" + cost + ")");
42 | } else {
43 | $("h3.plan_moves").html("Plan (" + plan.length + ")")
44 | }
[web.js lines 45-59 garbled in extraction: line 45 mapped each plan step into display markup (plan = plan.map(x => ... x.substring(2, x.length - 1) ...)), and the rest of the span, presumably closing formatPlanData_problem and opening formatDomainData_domain, was lost. The dump resumes at line 60.]
60 | d_tagstring = ""
61 | for (let index = 0; index < d_tags.length; index++) {
62 | d_tagstring += d_tags[index];
63 | }
64 | $("p.tags").html("Domain tags: " + d_tagstring);
65 | }
66 |
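An aside on the plan handling above: formatPlanData_problem derives the cost shown in the heading from the second-to-last line of the plan text when that line contains a "cost = " marker, and otherwise falls back to the number of lines. A hedged Python rendering of that parsing, for reference only (the function name and the sample plan text are illustrative, not taken from the repository):

```python
# Hedged Python rendering of the cost extraction performed by web.js's
# formatPlanData_problem; the sample plan text below is illustrative only.
def parse_plan_cost(plan_text):
    """Read 'cost = N' from the second-to-last line if present; otherwise
    fall back to the number of lines, mirroring the JavaScript above."""
    lines = plan_text.split("\n")
    marker = lines[-2] if len(lines) >= 2 else ""
    if "cost = " in marker:
        return marker.split("= ")[1].split(" ")[0]
    return str(len(lines))

sample = "(pick-up a)\n(stack a b)\n; cost = 2 (unit cost)\n"
print(parse_plan_cost(sample))  # -> "2"
```

In the fallback branch the JavaScript reports plan.length, which counts every split line (including any trailing blank line), so the displayed number can slightly overstate the number of actions.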
67 | function formatProblems_domain(data) {
68 | problemsList = ""
69 | // sort based on the data[index].problem field
70 | data.sort(function (a, b) {
71 | return a.problem.localeCompare(b.problem);
72 | });
73 | for (let index = 0; index < data.length; index++) {
[web.js lines 74-87 garbled in extraction: line 74 appended markup built from data.description to problemsList inside the loop above, and the rest of the span presumably closed formatProblems_domain and built the per-domain resultString used by formatDomainData_collection; the markup inside those string literals was stripped. The dump resumes at line 88.]
88 | $("div.domain" + index).html(resultString);
89 | }
90 |
91 | //getters
92 |
93 | //problem
94 | function getDomain_problem(domainno) {
95 | $.getJSON('http://api.planning.domains/json/classical/domain/' + domainno, function (data) {
96 | if (data.error)
97 | $(parent).html('Error:' + data.message);
98 | else
99 | data = data.result;
100 | formatDomainData_problem(data);
101 | });
102 | }
103 |
104 | function getPlan_problem(problemno) {
105 | $.getJSON('https://api.planning.domains/json/classical/plan/' + problemno, function (data) {
106 | if (data.error)
107 | $(parent).html('Error:' + data.message);
108 | else
109 | data = data.result;
110 | formatPlanData_problem(data);
111 | });
112 | }
113 |
114 | //domain
115 | function getProblems_domain(domainno) {
116 | $.getJSON('http://api.planning.domains/json/classical/problems/' + domainno, function (data) {
117 | if (data.error)
118 | $(parent).html('Error:' + data.message);
119 | else
120 | data = data.result;
121 | formatProblems_domain(data);
122 | });
123 | }
124 |
125 | //collection
126 | function getDomain_collection(domainno, index) {
127 | $.getJSON('http://api.planning.domains/json/classical/domain/' + domainno, function (data) {
128 | if (data.error)
129 | $(parent).html('Error:' + data.message);
130 | else
131 | data = data.result;
132 | formatDomainData_collection(data, index);
133 | });
134 | }
135 |
136 | //main functions
137 |
138 | //problem
139 | function getProblem(problemno) {
140 | $.getJSON('http://api.planning.domains/json/classical/problem/' + problemno, function (data) {
141 | if (data.error)
142 | $(parent).html('Error:' + data.message);
143 | else
144 | data = data.result;
145 | formatProblemData_problem(data);
146 | getDomain_problem(data.domain_id);
147 | getPlan_problem(problemno);
148 | });
149 | }
150 |
151 | //domain
152 | function getDomain(domainno) {
153 | $.getJSON('http://api.planning.domains/json/classical/domain/' + domainno, function (data) {
154 | if (data.error)
155 | $(parent).html('Error:' + data.message);
156 | else
157 | data = data.result;
158 | formatDomainData_domain(data);
159 | getProblems_domain(domainno);
160 | });
161 | }
162 |
163 | //collection
164 |
165 | function getDomains_collection(collectionno) {
166 | $.getJSON('http://api.planning.domains/json/classical/domains/' + collectionno, function (data) {
167 | if (data.error)
168 | $(parent).html('Error:' + data.message);
169 | else
170 | data = data.result;
171 |
172 | // sort based on the data[index].problem field
173 | data.sort(function (a, b) {
174 | return a.domain_name.localeCompare(b.domain_name);
175 | });
176 |
177 | domainsListDivs = ""
178 | for (let index = 0; index < data.length; index++) {
179 | domainsListDivs += ""; // [div markup for each domain entry was stripped in extraction]
180 | }
181 | $("div.domainlist").html(domainsListDivs);
182 | for (let index = 0; index < data.length; index++) {
183 | formatDomainData_collection(data[index], index);
184 | }
185 | });
186 | }
187 | function getCollection(collectionno) {
188 | $.getJSON('http://api.planning.domains/json/classical/collection/' + collectionno, function (data) {
189 | console.log(data);
190 | if (data.error)
191 | $(parent).html('Error:' + data.message);
192 | else
193 | data = data.result;
194 | formatCollectionData_collection(data);
195 | getDomains_collection(collectionno);
196 | });
197 | }
198 |
--------------------------------------------------------------------------------
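The getters above all follow the same pattern: fetch http://api.planning.domains/json/classical/<kind>/<id> with $.getJSON and unwrap an envelope carrying error, message and result fields. For reference, a minimal Python sketch of the same request pattern, written with urllib as in archive/py3-offline-script.py (the repository's planning_domains_api.py is the supported client; the function name and example id here are illustrative):

```python
# Minimal sketch (not part of web.js) of the request pattern the getters use.
# `fetch_classical` and the example id are illustrative assumptions.
import json
import urllib.request

def fetch_classical(kind, item_id):
    """kind is one of 'problem', 'plan', 'domain', 'problems', 'domains' or
    'collection', matching the endpoints called from web.js."""
    url = "http://api.planning.domains/json/classical/%s/%s" % (kind, item_id)
    with urllib.request.urlopen(url) as response:
        payload = json.loads(response.read().decode("utf-8"))
    if payload.get("error"):
        raise RuntimeError("API error: %s" % payload.get("message"))
    return payload["result"]

# Example (requires network access; the id is illustrative):
# problem = fetch_classical("problem", 1)
# print(problem["domain_id"])
```

Unlike the JavaScript above, this raises as soon as the error flag is set rather than falling through to the formatting call, which appears to be what the if/else in each getter intends.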
"; 88 | $("div.domain" + index).html(resultString); 89 | } 90 | 91 | //getters 92 | 93 | //problem 94 | function getDomain_problem(domainno) { 95 | $.getJSON('http://api.planning.domains/json/classical/domain/' + domainno, function (data) { 96 | if (data.error) 97 | $(parent).html('Error:' + data.message); 98 | else 99 | data = data.result; 100 | formatDomainData_problem(data); 101 | }); 102 | } 103 | 104 | function getPlan_problem(problemno) { 105 | $.getJSON('https://api.planning.domains/json/classical/plan/' + problemno, function (data) { 106 | if (data.error) 107 | $(parent).html('Error:' + data.message); 108 | else 109 | data = data.result; 110 | formatPlanData_problem(data); 111 | }); 112 | } 113 | 114 | //domain 115 | function getProblems_domain(domainno) { 116 | $.getJSON('http://api.planning.domains/json/classical/problems/' + domainno, function (data) { 117 | if (data.error) 118 | $(parent).html('Error:' + data.message); 119 | else 120 | data = data.result; 121 | formatProblems_domain(data); 122 | }); 123 | } 124 | 125 | //collection 126 | function getDomain_collection(domainno, index) { 127 | $.getJSON('http://api.planning.domains/json/classical/domain/' + domainno, function (data) { 128 | if (data.error) 129 | $(parent).html('Error:' + data.message); 130 | else 131 | data = data.result; 132 | formatDomainData_collection(data, index); 133 | }); 134 | } 135 | 136 | //main functions 137 | 138 | //problem 139 | function getProblem(problemno) { 140 | $.getJSON('http://api.planning.domains/json/classical/problem/' + problemno, function (data) { 141 | if (data.error) 142 | $(parent).html('Error:' + data.message); 143 | else 144 | data = data.result; 145 | formatProblemData_problem(data); 146 | getDomain_problem(data.domain_id); 147 | getPlan_problem(problemno); 148 | }); 149 | } 150 | 151 | //domain 152 | function getDomain(domainno) { 153 | $.getJSON('http://api.planning.domains/json/classical/domain/' + domainno, function (data) { 154 | if (data.error) 155 | $(parent).html('Error:' + data.message); 156 | else 157 | data = data.result; 158 | formatDomainData_domain(data); 159 | getProblems_domain(domainno); 160 | }); 161 | } 162 | 163 | //collection 164 | 165 | function getDomains_collection(collectionno) { 166 | $.getJSON('http://api.planning.domains/json/classical/domains/' + collectionno, function (data) { 167 | if (data.error) 168 | $(parent).html('Error:' + data.message); 169 | else 170 | data = data.result; 171 | 172 | // sort based on the data[index].problem field 173 | data.sort(function (a, b) { 174 | return a.domain_name.localeCompare(b.domain_name); 175 | }); 176 | 177 | domainsListDivs = "" 178 | for (let index = 0; index < data.length; index++) { 179 | domainsListDivs += ""; 180 | } 181 | $("div.domainlist").html(domainsListDivs); 182 | for (let index = 0; index < data.length; index++) { 183 | formatDomainData_collection(data[index], index); 184 | } 185 | }); 186 | } 187 | function getCollection(collectionno) { 188 | $.getJSON('http://api.planning.domains/json/classical/collection/' + collectionno, function (data) { 189 | console.log(data); 190 | if (data.error) 191 | $(parent).html('Error:' + data.message); 192 | else 193 | data = data.result; 194 | formatCollectionData_collection(data); 195 | getDomains_collection(collectionno); 196 | }); 197 | } 198 | --------------------------------------------------------------------------------