├── .gitignore ├── LICENSE ├── MANIFEST.in ├── README.md ├── apps ├── deepnat │ ├── README.md │ ├── alt_natjax.nat │ ├── alt_natjax.py │ ├── mlp.py │ ├── natjax.nat │ ├── natjax.py │ ├── nattorch.nat │ ├── nattorch.py │ └── tmlp.py ├── nat3d │ ├── README.md │ ├── bot.stl │ ├── humpty.py │ ├── requirements.txt │ ├── vp.nat │ └── vp.py └── natgpt │ ├── deepchat.nat │ ├── deepchat.py │ ├── pics │ ├── female_chagall.png │ ├── female_robot.png │ ├── male_chagall.png │ └── male_robot.png │ ├── textual.nat │ └── textual.py ├── clean.sh ├── docs ├── natlog.pdf ├── natlog_deep.pdf ├── natlog_gpt.pdf ├── prolog50.pdf ├── prolog50.txt ├── quotes.txt ├── slides_ICLP21.pdf ├── slides_LPOP22.pdf ├── slides_iclp23.pdf └── small.txt ├── natlog ├── .gitignore ├── __init__.py ├── __main__.py ├── app │ ├── __init__.py │ ├── natapp.py │ └── requirements.txt ├── db.py ├── natlog.py ├── natprogs │ ├── arith.nat │ ├── dall_e.nat │ ├── db.json │ ├── db.nat │ ├── db.tsv │ ├── dbtc.nat │ ├── elements.nat │ ├── elements.tsv │ ├── emu.nat │ ├── facts.nat │ ├── family.nat │ ├── gcol.nat │ ├── gram.nat │ ├── interclausal.nat │ ├── lib.nat │ ├── lib_tests.nat │ ├── loop.nat │ ├── meta.nat │ ├── nrev.nat │ ├── perm.nat │ ├── pets.nat │ ├── pro.nat │ ├── pro2nat.pro │ ├── prolog_progs │ │ ├── pro.pro │ │ └── sudoku4.pro │ ├── py_call.nat │ ├── py_call1.nat │ ├── queens.nat │ ├── queens.pro │ ├── story.nat │ ├── story.txt │ ├── sudoku4.nat │ └── tc.nat ├── ndb.py ├── neural_natlog.py ├── parser.py ├── requirements.txt ├── scanner.py ├── tdb.py ├── test │ ├── __init__.py │ ├── benchmark.py │ └── tests.py ├── textual_natlog.py ├── tools.py └── unify.py ├── run ├── __init__.py ├── app.sh ├── bm.py ├── bm.sh ├── t0.txt ├── t1.txt └── tests.py ├── setup.py ├── softlog ├── requirements.txt ├── softdb.py ├── softlog.py ├── softprog.nat └── softprog.py └── upload.sh /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | 106 | 107 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm 108 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 109 | 110 | # User-specific stuff 111 | .idea/**/workspace.xml 112 | .idea/**/tasks.xml 113 | .idea/**/usage.statistics.xml 114 | .idea/**/dictionaries 115 | .idea/**/shelf 116 | 117 | # Generated files 118 | .idea/**/contentModel.xml 119 | 120 | # Sensitive or high-churn files 121 | .idea/**/dataSources/ 122 | .idea/**/dataSources.ids 123 | .idea/**/dataSources.local.xml 124 | .idea/**/sqlDataSources.xml 125 | .idea/**/dynamic.xml 126 | .idea/**/uiDesigner.xml 127 | .idea/**/dbnavigator.xml 128 | 129 | # Gradle 130 | .idea/**/gradle.xml 131 | .idea/**/libraries 132 | 133 | # Gradle and Maven with auto-import 134 | # When using Gradle or Maven with auto-import, you should exclude module files, 135 | # since they will be recreated, and may cause churn. Uncomment if using 136 | # auto-import. 137 | # .idea/modules.xml 138 | # .idea/*.iml 139 | # .idea/modules 140 | # *.iml 141 | # *.ipr 142 | 143 | # CMake 144 | cmake-build-*/ 145 | 146 | # Mongo Explorer plugin 147 | .idea/**/mongoSettings.xml 148 | 149 | # File-based project format 150 | *.iws 151 | 152 | # IntelliJ 153 | out/ 154 | 155 | # mpeltonen/sbt-idea plugin 156 | .idea_modules/ 157 | 158 | # JIRA plugin 159 | atlassian-ide-plugin.xml 160 | 161 | # Cursive Clojure plugin 162 | .idea/replstate.xml 163 | 164 | # Crashlytics plugin (for Android Studio and IntelliJ) 165 | com_crashlytics_export_strings.xml 166 | crashlytics.properties 167 | crashlytics-build.properties 168 | fabric.properties 169 | 170 | # Editor-based Rest Client 171 | .idea/httpRequests 172 | 173 | # Android studio 3.1+ serialized cache file 174 | .idea/caches/build_file_checksums.ser 175 | 176 | .idea/ 177 | 178 | __pycache__/ 179 | 180 | minlog/temp.py 181 | bad/ 182 | 183 | 184 | natlog/natprogs/*.pickle 185 | apps/natgpt/states 186 | 187 | run/UPLOADS/ 188 | 189 | SENT_STORE_CACHE 190 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 
8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include ./natlog/natprogs/*.nat 2 | include ./natlog/natprogs/*.json 3 | include ./natlog/natprogs/*.pro 4 | include ./natlog/natprogs/*.tsv 5 | include ./natlog/test/*.py 6 | include ./bak/*.py 7 | include ./bin/*.py 8 | include ./natlog/doc/*.pdf 9 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## A lightweight Prolog-like system designed to smoothly interoperate with deep learning tools 2 | 3 | We closely follow Einstein's *"Everything should be made as simple as possible, but no simpler."* 4 | 5 | At this point, we rely on Python's natural error checking, without doing much to warn about syntactic or semantic errors. This can be added, but this is meant as an *executable specification* of an otherwise simple and natural logic language that we hereby name **Natlog**. 6 | 7 | 8 | #### NEW: 9 | at https://github.com/ptarau/natlog/tree/main/docs there are a few papers explaining Natlog and its applications (including Natlog chatting with GPT4) 10 | 11 | 12 | #### DEMOS: 13 | 14 | - demo apps in folder ```apps/deepnat``` interfacing Natlog with JAX and TORCH based neural nets 15 | - a demo app in folder ```apps/nat3d``` combining Natlog and vpython to build (quite easily!) 3D objects and animations 16 | 17 | 18 | 19 | ### **Natlog** : a succinct overview 20 | 21 | * Terms are represented as nested tuples. 22 | 23 | * A parser and scanner for a simplified Prolog term syntax is used 24 | to turn terms into nested Python tuples. 25 | 26 | Surface syntax of facts, as read from strings, is just whitespace separated words 27 | (with tuples parenthesized) and 28 | sentences ended with ```.``` or ```?```. 29 | Like in Prolog, variables are capitalized, unless quoted. Example programs are in folder ```natprogs```, for instance ```tc.nat```: 30 | 31 | ``` 32 | cat is feline. 33 | tiger is feline. 34 | mouse is rodent. 35 | feline is mammal. 36 | rodent is mammal. 37 | snake is reptile. 38 | mammal is animal. 39 | reptile is animal. 40 | 41 | tc A Rel B : A Rel B. 42 | tc A Rel C : A Rel B, tc B Rel C. 
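% tc computes the transitive closure of an arbitrary binary relation Rel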
43 | ```
44 |
45 | Install it with
46 |
47 | ```pip3 install -U natlog```
48 |
49 |
50 |
51 | To query it, try:
52 |
53 | ``` python3 -i
54 |
55 | >>> from natlog import Natlog, natprogs
56 | >>> n=Natlog(file_name=natprogs()+"tc.nat")
57 | >>> n.query("tc Who is animal ?")
58 | ```
59 |
60 | It will return answers based on the transitive closure of the ```is``` relation.
61 |
62 | ```
63 | QUERY: tc Who is animal ?
64 | ANSWER: {'Who': 'cat'}
65 | ANSWER: {'Who': 'tiger'}
66 | ANSWER: {'Who': 'mouse'}
67 | ANSWER: {'Who': 'feline'}
68 | ANSWER: {'Who': 'rodent'}
69 | ANSWER: {'Who': 'snake'}
70 | ANSWER: {'Who': 'mammal'}
71 | ANSWER: {'Who': 'reptile'}
72 | ```
73 |
74 | If you are in the folder where the file `tc.nat` is located, you could also say
75 |
76 | ```
77 | python3 -m natlog tc.nat
78 | ```
79 | and then, once the interactive REPL starts, type:
80 |
81 | ```
82 | ?- tc Who is animal ?
83 | ```
84 | resulting in the same output, with a chance to enter more queries.
85 |
86 | List processing is also supported, as in:
87 |
88 | ```
89 | app () Ys Ys.
90 | app (X Xs) Ys (X Zs) : app Xs Ys Zs.
91 | ```
92 |
93 | The interpreter supports a ```yield``` mechanism, similar to Python's own. Something like
94 | ``` ^ my_answer X ```
95 | results in ```my_answer X``` being yielded as an answer.
96 |
97 | The interpreter has also been extended to handle simple function and generator calls to Python, using the same prefix operator syntax:
98 |
99 | - ``` `f A B .. Z R```, resulting in Python function ```f(A,B,..,Z)``` being called and ```R``` unified with its result
100 | - ``` ``f A B .. Z R```, resulting in Python generator ```f(A,B,..,Z)``` being called and ```R``` unified with its multiple yields, one at a time
101 | - ``` ~R A B .. Z ``` for unifying ``` ~ R A B .. Z ``` with matching facts in the term store
102 | - ``` # f A B .. Z```, resulting in ```f(A,B,..,Z)``` being called with no result returned
103 | - ``` $ V X```, resulting in the value of the variable named ```V``` being unified with ```X```
104 | - ``` eng X G E```, resulting in a first-class Natlog engine with answer pattern ```X``` and goal ```G``` being bound to ```E```
105 | - ``` ask E A```, resulting in the next answer of engine ```E``` being unified with ```A```
106 |
107 | Take a look at ```natprogs/lib.nat``` for examples of built-ins obtained by extending this interface, mostly at source level.
108 |
109 | Take a look at ```natprogs/emu.nat``` for the emulation of built-ins in terms of First Class Logic Engines.
110 |
111 | ### A nested tuple store for unification-based tuple mining
112 |
113 | An indexer, in combination with the unification algorithm, is used to retrieve ground terms matching terms containing logic variables.
114 |
115 | Indexing is on all constants occurring in
116 | ground facts placed in a database.
117 |
118 | As facts are ground,
119 | unification has the occurs check and trailing turned off when searching
120 | for a match.
121 |
122 | To try it out, do:
123 |
124 | ```python3 -i ```
125 |
126 | ```
127 | >>> from natlog.test.tests import *
128 | >>> dtest()
129 |
130 | ```
131 |
132 | It gives, after digesting a text and then querying it:
133 |
134 | ```
135 | QUERY: Who has (a What)?
136 | --> ('John', 'has', ('a', 'car'))
137 | --> ('Mary', 'has', ('a', 'bike'))
138 |
139 | QUERY: Who is (a pilot)?
140 | --> ('John', 'is', ('a', 'pilot'))
141 |
142 | QUERY: 'Mary' is What?
143 | --> ('Mary', 'is', ('a', 'student'))
144 |
145 | QUERY: 'John' is (a What)?
146 | --> ('John', 'is', ('a', 'pilot'))
147 |
148 | QUERY: Who is What?
149 | --> ('Mary', 'is', ('a', 'student'))
150 | --> ('John', 'is', ('a', 'pilot'))
151 | ```
152 |
153 | ### Neuro-symbolic tuple database
154 |
155 | As an extension of the nested tuple store, the neuro-symbolic tuple database uses a machine learning algorithm instead of the store's indexer. Thus it offers the same interface as the tuple store that it extends. The learner is trained upon loading the database file (from a .nat, .csv or .tsv file) and its inference mechanism is triggered when facts from the database are queried. The stream of tuples returned from the query is then filtered via unification (and possibly more general integrity constraints, expressed via logic programming constructs).
156 |
157 | #### Example of usage (see more at https://github.com/ptarau/natlog/blob/main/natlog/test/tests.py )
158 | ```
159 | def ndb_test() :
160 |     nd = neural_natlog(file_name=natprogs()+"dbtc.nat",db_name=natprogs()+"db.nat")
161 |     print('RULES')
162 |     print(nd)
163 |     print('DB FACTS')
164 |     print(nd.db)
165 |     nd.query("tc Who is_a animal ?")
166 | ```
167 | The output will show the ```X``` and ```y``` numpy arrays used to fit the sklearn learner, followed by the logic program's rules and the facts from which the arrays were extracted when the facts were loaded.
168 |
169 | ```
170 | X:
171 | [[1 0 0 0 0 0 0 0 0 0 0 0]
172 |  [0 1 0 0 0 0 0 0 0 0 0 0]
173 |  [0 0 1 0 0 0 0 0 0 0 0 0]
174 |  [0 0 0 1 0 0 0 0 0 0 0 0]
175 |  [0 0 0 0 1 0 0 0 0 0 0 0]
176 |  [0 0 0 0 0 1 0 0 0 0 0 0]
177 |  [0 0 0 0 0 0 1 0 0 0 0 0]
178 |  [0 0 0 0 0 0 0 1 0 0 0 0]
179 |  [0 0 0 0 0 0 0 0 1 0 0 0]
180 |  [0 0 0 0 0 0 0 0 0 1 0 0]
181 |  [0 0 0 0 0 0 0 0 0 0 1 0]
182 |  [0 0 0 0 0 0 0 0 0 0 0 1]]
183 |
184 | y:
185 | [[1 0 1 0 0 0 0 0 0 0]
186 |  [1 1 1 1 1 1 1 1 1 1]
187 |  [1 0 0 0 0 0 0 0 0 0]
188 |  [0 1 0 1 0 0 0 0 0 0]
189 |  [0 1 0 0 0 0 0 0 0 0]
190 |  [0 0 1 1 0 1 0 0 0 0]
191 |  [0 0 0 0 1 0 0 0 0 0]
192 |  [0 0 0 0 1 0 1 0 0 0]
193 |  [0 0 0 0 0 1 1 0 0 1]
194 |  [0 0 0 0 0 0 0 1 1 1]
195 |  [0 0 0 0 0 0 0 1 0 0]
196 |  [0 0 0 0 0 0 0 0 1 0]]
197 |
198 | RULES
199 | (('cat', 'is_a', 'feline'), ())
200 | ((_0, 'is_a', _1), (('~', _0, 'is', _1),))
201 | (('tc', _0, _1, _2), ((_0, _1, _3), ('tc1', _3, _1, _2)))
202 | (('tc1', _0, _1, _0), ())
203 | (('tc1', _0, _1, _2), (('tc', _0, _1, _2),))
204 |
205 | DB FACTS
206 | (0, ('tiger', 'is', 'feline'))
207 | (1, ('mouse', 'is', 'rodent'))
208 | (2, ('feline', 'is', 'mammal'))
209 | (3, ('rodent', 'is', 'mammal'))
210 | (4, ('snake', 'is', 'reptile'))
211 | (5, ('mammal', 'is', 'animal'))
212 | (6, ('reptile', 'is', 'animal'))
213 | (7, ('bee', 'is', 'insect'))
214 | (8, ('ant', 'is', 'insect'))
215 | (9, ('insect', 'is', 'animal'))
216 |
217 | QUERY: tc Who is_a animal ?
218 | ANSWER: {'Who': 'cat'}
219 | ANSWER: {'Who': 'tiger'}
220 | ANSWER: {'Who': 'mouse'}
221 | ANSWER: {'Who': 'feline'}
222 | ANSWER: {'Who': 'rodent'}
223 | ANSWER: {'Who': 'snake'}
224 | ANSWER: {'Who': 'mammal'}
225 | ANSWER: {'Who': 'reptile'}
226 | ANSWER: {'Who': 'bee'}
227 | ANSWER: {'Who': 'ant'}
228 | ANSWER: {'Who': 'insect'}
229 |
230 |
231 | ```
232 |
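### Embedding Natlog in Python with shared callables

The demo apps under ```apps/``` all follow the same embedding pattern: plain Python functions are collected in a dictionary and passed to the ```Natlog``` constructor via its ```callables``` argument, after which they can be called from Natlog code with the ``` ` ```, ``` `` ``` and ```#``` operators described above. The sketch below is a minimal illustration of that pattern; the function ```dist``` is made up for the example and is not part of the library.

```python
from math import hypot
from natlog import Natlog, natprogs

def dist(x, y):
    # an ordinary Python function we want to call from Natlog
    return hypot(x, y)

n = Natlog(file_name=natprogs() + "tc.nat",   # any Natlog program, here the bundled tc.nat
           with_lib=natprogs() + "lib.nat",   # the standard library, as in the demo apps
           callables=dict(dist=dist))         # names resolvable by `, `` and # calls
n.query("`dist 3 4 R ?")                      # calls dist(3, 4) and prints ANSWER: {'R': 5.0}
n.query("tc Who is mammal ?")                 # queries over tc.nat still work as usual
n.repl()                                      # optionally, drop into the interactive REPL
```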
--------------------------------------------------------------------------------
/apps/deepnat/README.md:
--------------------------------------------------------------------------------
1 | # Sketch of a Natlog API for Machine Learning with JAX
2 |
3 | To run this app, make sure you do:
4 |
5 | pip3 install natlog
6 | pip3 install jax
7 |
8 | To run the Natlog script, type:
9 |
10 | ```
11 | python3 natjax.py
12 |
13 | ```
14 |
15 | To run its Python equivalent, type:
16 |
17 | ```
18 | python3 mlp.py
19 | ```
20 |
21 |
22 |
--------------------------------------------------------------------------------
/apps/deepnat/alt_natjax.nat:
--------------------------------------------------------------------------------
1 | def init_weights Features Layer_sizes Weights:
2 |   `init_weights Features Layer_sizes Weights.
3 |
4 | linear_layer (W B) Input Output :
5 |   `transpose W T,
6 |   `matmul Input T M,
7 |   `matsum M B Output.
8 |
9 | forward_pass Weights Input Result :
10 |   `len Weights L,
11 |   with L - 1 as L1,
12 |   `crop Weights 0 L1 HiddenWeights,
13 |   arg L1 Weights LastWeight,
14 |   hidden_loop 0 L1 HiddenWeights Input Penultimate,
15 |   linear_layer LastWeight Penultimate Last,
16 |   `sigmoid Last Result.
17 |
18 | hidden_loop L L _HiddenWeights Last Last .
19 | hidden_loop I L HiddenWeights In Out:
20 |   when I < L,
21 |   with I + 1 as J,
22 |   arg I HiddenWeights WB,
23 |   linear_layer WB In LinOut,
24 |   `relu LinOut Temp,
25 |   hidden_loop J L HiddenWeights Temp Out.
26 |
27 |
28 | impl 0 0 1.
29 | impl 0 1 1.
30 | impl 1 0 0.
31 | impl 1 1 1.
32 |
33 | in_dataset (X Y Z U) R:
34 |   impl X Y A,
35 |   impl A Z B,
36 |   impl B U R.
37 |
38 | dataset Xss Ys :
39 |   findall X (in_dataset X _) XssList,
40 |   findall Y (in_dataset _ Y) YsList,
41 |   tuple XssList Xss,
42 |   tuple YsList Ys.
43 |
44 |
45 | go:
46 |   dataset Xss Ys,
47 |   `array Xss X,
48 |   `array Ys Y,
49 |   `type X S,
50 |   #print S,
51 |   `init_weights 4 (4 8 8 4 1) Weights,
52 |   forward_pass Weights X Result,
53 |   #print result Result.
54 |
55 |
56 | x1 : `eye 4 M, `dot M M X, #print X, fail.
57 | x2 :
58 |   `array [10 20] X,
59 |   `array [[1.1 2] [3 4]] W,
60 |   `array [5 6] B,
61 |   linear_layer (W B) X R,
62 |   #print W,
63 |   #print B,
64 |   #print R,
65 |   fail.
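% Example goals for the REPL started by alt_natjax.py (assumed usage):
%   go?        -- builds the truth-table dataset and runs a forward pass
%   x1?  x2?   -- small smoke tests of the wrapped JAX calls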
66 | 67 | -------------------------------------------------------------------------------- /apps/deepnat/alt_natjax.py: -------------------------------------------------------------------------------- 1 | import jax 2 | from jax import * 3 | import jax.numpy as jnp 4 | from jax.numpy import * 5 | from natlog import Natlog, natprogs 6 | 7 | jax.config.update("jax_enable_x64", True) 8 | DTYPE = jnp.float32 9 | SEED = jax.random.PRNGKey(42) 10 | 11 | shared = dict() 12 | 13 | 14 | def share(f): 15 | shared[f.__name__] = f 16 | return f 17 | 18 | 19 | def share_syms(): 20 | for n, f in globals().items(): 21 | if n not in {'add'} : 22 | shared[n] = f 23 | # shared['shared'] = shared 24 | return shared 25 | 26 | 27 | @jit 28 | def relu(x): 29 | return jnp.maximum(x, 0) 30 | 31 | 32 | @jit 33 | def sigmoid(x): 34 | return 1 / (1 + jnp.exp(-1 * x)) 35 | 36 | @jit 37 | def matsum(x,y): 38 | return jnp.add(x,y) 39 | 40 | def init_weights(features, layer_sizes): 41 | weights = [] 42 | for i, units in enumerate(layer_sizes): 43 | if i == 0: 44 | w = jax.random.uniform(key=SEED, shape=(units, features), minval=-1.0, maxval=1.0, dtype=DTYPE) 45 | else: 46 | w = jax.random.uniform(key=SEED, shape=(units, layer_sizes[i - 1]), minval=-1.0, maxval=1.0, 47 | dtype=DTYPE) 48 | 49 | b = jax.random.uniform(key=SEED, minval=-1.0, maxval=1.0, shape=(units,), dtype=DTYPE) 50 | 51 | weights.append((w, b)) 52 | 53 | return weights 54 | 55 | 56 | def crop(a, l1, l2): 57 | return a[l1:l2] 58 | 59 | 60 | def run_natlog(): 61 | share_syms() 62 | n = Natlog(file_name="alt_natjax.nat", 63 | with_lib=natprogs() + "lib.nat", callables=shared) 64 | #n.query("eq Started 'Natlog'.") 65 | #n.query("`eye 4 M, `matmul M M X, #print X, fail?") 66 | #n.query('go?'), 67 | n.repl() 68 | 69 | 70 | def test_natjax(): 71 | m = eye(3) 72 | mm = matmul(m, m) 73 | print(mm) 74 | s = sum(array((m, m)), 0) 75 | print(s) 76 | 77 | 78 | if __name__ == "__main__": 79 | test_natjax() 80 | run_natlog() 81 | -------------------------------------------------------------------------------- /apps/deepnat/mlp.py: -------------------------------------------------------------------------------- 1 | from sklearn.model_selection import train_test_split 2 | import jax 3 | import jax.numpy as jnp 4 | from jax import grad, jit 5 | import optax 6 | from sklearn.metrics import accuracy_score 7 | 8 | jax.config.update("jax_enable_x64", True) 9 | 10 | DTYPE = jnp.float32 11 | SEED = 0 12 | 13 | 14 | def to_jnp(a): 15 | return jnp.array(a, dtype=DTYPE) 16 | 17 | 18 | def split(X, y, test_size=0.1): 19 | X_train, X_test, y_train, y_test = \ 20 | train_test_split(X, y, test_size=test_size, random_state=SEED) 21 | return X_train, X_test, y_train, y_test 22 | 23 | 24 | def xor(x, y): 25 | return x ^ y 26 | 27 | 28 | def impl(x, y): 29 | return max(abs(1 - x), y) 30 | 31 | 32 | def load_dataset(features, op): 33 | from itertools import product 34 | 35 | n = 2 * features 36 | layer_sizes = [features, n, n + n, n, features, 1] 37 | 38 | def data_x(): 39 | return jnp.array(list(product([-1.0, 1.0], repeat=features))) 40 | 41 | def data_y(): 42 | m = 2 ** features 43 | rs = [] 44 | for xs in data_x(): 45 | r = 0 46 | for x in xs: 47 | x = int((x + 1) / 2) 48 | r = op(r, x) 49 | # rs.append(2*r-1) 50 | rs.append(r) 51 | ys = to_jnp(rs).reshape(m, 1) 52 | print(ys) 53 | return ys 54 | 55 | data = split(data_x(), data_y()) 56 | 57 | epochs = features ** 2 58 | if op == xor: epochs *= 4 59 | return data, layer_sizes, epochs 60 | 61 | 62 | def InitializeWeights(features, 
layer_sizes, seed): 63 | weights = [] 64 | keys = jax.random.split(seed, len(layer_sizes) + 1) 65 | for i, units in enumerate(layer_sizes): 66 | if i == 0: 67 | w = jax.random.uniform(key=keys[i], shape=(units, features), minval=-1.0, maxval=1.0, dtype=DTYPE) 68 | else: 69 | w = jax.random.uniform(key=keys[i], shape=(units, layer_sizes[i - 1]), minval=-1.0, maxval=1.0, 70 | dtype=DTYPE) 71 | 72 | b = jax.random.uniform(key=keys[-1], minval=-1.0, maxval=1.0, shape=(units,), dtype=DTYPE) 73 | 74 | weights.append([w, b]) 75 | 76 | return weights 77 | 78 | 79 | @jit 80 | def Relu(x): 81 | return jnp.maximum(x, 0) 82 | 83 | 84 | @jit 85 | def Sigmoid(x): 86 | return 1 / (1 + jnp.exp(-1 * x)) 87 | 88 | 89 | @jit 90 | def LinearLayer(weights, input_data): 91 | w, b = weights 92 | return jnp.dot(input_data, w.T) + b 93 | 94 | 95 | @jit 96 | def ForwardPass(weights, input_data): 97 | layer_out = input_data 98 | for i in range(len(weights) - 1): 99 | layer_out = Relu(LinearLayer(weights[i], layer_out)) 100 | last = LinearLayer(weights[-1], layer_out) 101 | # print("LAST:",last) 102 | return Sigmoid(last) 103 | 104 | 105 | @jit 106 | def mse(weights, input_data, actual): 107 | preds = ForwardPass(weights, input_data) 108 | return ((preds - actual) ** 2).mean() 109 | 110 | 111 | Loss = mse 112 | 113 | 114 | @jit 115 | def CalculateGradients(weights, input_data, actual): 116 | return grad(Loss)(weights, input_data, actual) 117 | 118 | 119 | def optimize_grads(weights, input_data, actual, optimizer, opt_state): 120 | gradients = CalculateGradients(weights, input_data, actual) 121 | updates, opt_state = optimizer.update(gradients, opt_state) 122 | weights = optax.apply_updates(weights, updates) 123 | return weights, opt_state 124 | 125 | 126 | def TrainModel(weights, X, Y, learning_rate, epochs): 127 | optimizer = optax.adam(learning_rate) 128 | opt_state = jax.jit(optimizer.init)(weights) 129 | 130 | for i in range(epochs): 131 | loss = Loss(weights, X, Y) 132 | weights, opt_state = optimize_grads(weights, X, Y, optimizer, opt_state) 133 | 134 | if i % 50 == 0: 135 | print("Loss : {:.2f}".format(loss)) 136 | 137 | return weights 138 | 139 | 140 | def run(features, op): 141 | seed = jax.random.PRNGKey(SEED) 142 | learning_rate = jnp.array(0.01) 143 | 144 | (X_train, X_test, Y_train, Y_test), layer_sizes, epochs = load_dataset(features, op) 145 | _, features = X_train.shape 146 | 147 | weights = InitializeWeights(features, layer_sizes, seed) 148 | 149 | weights = TrainModel(weights, X_train, Y_train, learning_rate, epochs) 150 | 151 | train_preds = ForwardPass(weights, X_train) 152 | train_preds = (train_preds > 0.5).astype(DTYPE) 153 | 154 | test_preds = ForwardPass(weights, X_test) 155 | test_preds = (test_preds > 0.5).astype(DTYPE) 156 | 157 | print("Train Loss Score : {:.2f}".format(Loss(weights, X_train, Y_train))) 158 | print("Test Loss Score : {:.2f}".format(Loss(weights, X_test, Y_test))) 159 | 160 | print("Train Accuracy : {:.2f}".format(accuracy_score(Y_train, train_preds))) 161 | print("Test Accuracy : {:.2f}".format(accuracy_score(Y_test, test_preds))) 162 | 163 | 164 | if __name__ == "__main__": 165 | run(features=12, op=impl) 166 | run(features=12, op=xor) 167 | -------------------------------------------------------------------------------- /apps/deepnat/natjax.nat: -------------------------------------------------------------------------------- 1 | impl 0 0 1. 2 | impl 0 1 1. 3 | impl 1 0 0. 4 | impl 1 1 1. 5 | 6 | nand 0 0 1. 7 | nand 0 1 1. 8 | nand 1 0 1. 9 | nand 1 1 0. 
10 | 11 | xor 0 0 0. 12 | xor 0 1 1. 13 | xor 1 0 1. 14 | xor 1 1 0. 15 | 16 | iter N Op X Y: 17 | iter_op N Op () E 0 Y, 18 | to_tuple E X. 19 | 20 | iter_op 0 _Op E E R R. 21 | iter_op I Op E1 E2 R1 R3 : 22 | when I > 0, 23 | with I - 1 as J, 24 | Op X R1 R2, 25 | with X + X as XX, 26 | with XX - 1 as X1, 27 | iter_op J Op (X1 E1) E2 R2 R3. 28 | 29 | dataset N Op Xss (Ys): 30 | findall (X Y) (iter N Op X Y) XssYsList, 31 | to_pairs XssYsList XssList YsList, 32 | to_tuple XssList Xss, 33 | to_tuple YsList Ys. 34 | 35 | to_pairs () () (). 36 | to_pairs ((Xs Y) Zss) (Xs Xss) (Y Ys) : 37 | to_pairs Zss Xss Ys. 38 | 39 | to_jax_dataset Xss Ys Xtr Xt Ytr Yt: 40 | `array Xss X, 41 | `array Ys Y0, 42 | `transpose Y0 Y, 43 | `split X Y 0 0.1 (Xtr Xt Ytr Yt), 44 | to_cons_list (X Y Xtr Xt Ytr Yt) Vals, 45 | maplist (` shape) Vals SizesList, 46 | to_tuple SizesList Sizes, 47 | #print sizes are Sizes. 48 | 49 | 50 | hidden_sizes N (M MM M N 1): 51 | with 2 * N as M, 52 | with 4 * N as MM. 53 | 54 | run N Op Seed Epochs Loss Acc LossT AccT: 55 | dataset N Op Xss Ys, 56 | to_jax_dataset Xss Ys X Xt Y Yt, 57 | hidden_sizes N Sizes, 58 | `init_weights N Sizes Seed Weights, 59 | eq Learning_rate 0.01, 60 | `train_model Weights X Y Learning_rate Epochs Model, 61 | `test_model Model X Y (Loss Acc), 62 | `test_model Model Xt Yt (LossT AccT) . 63 | 64 | 65 | run with Op: 66 | run 10 Op 0 4000 Loss Acc LossT AccT, 67 | #print train loss = Loss, 68 | #print train accuracy = Acc, 69 | #print test loss = LossT, 70 | #print test accuracy = AccT. 71 | 72 | 73 | go: run with xor. 74 | go1 : run with nand. 75 | go2 : run with impl. 76 | 77 | 78 | tt0: iter_op 2 xor () B C D, #print result B C D, fail. 79 | 80 | tt1: iter 2 xor E R, #print result E R, fail. 81 | 82 | tt2 : dataset 1 nand Xss Ys, #print Xss Ys, fail. 83 | 84 | tt: $xor F, #test_natjax 10 F 0,fail. 85 | 86 | test_alt_run N OpName _Epochs Loss Acc LossT AccT: 87 | $ OpName Op, 88 | eq Seed 0, 89 | `load_dataset N Op Seed (Data Layer_sizes Epochs), 90 | eq Data (X Xt Y Yt), 91 | #print 'X', 92 | #print X, 93 | #print 'Y', 94 | #print Y, 95 | `init_weights N Layer_sizes Seed Weights, 96 | eq Learning_rate 0.01, 97 | `train_model Weights X Y Learning_rate Epochs Model, 98 | `test_model Model X Y (Loss Acc), 99 | `test_model Model Xt Yt (LossT AccT) . 100 | 101 | alt: 102 | test_alt_run 10 xor 1000 Loss Acc LossT AccT, 103 | #print train loss = Loss, 104 | #print train accuracy = Acc, 105 | #print test loss = LossT, 106 | #print test accuracy = AccT. 
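% Example goals for the REPL started by natjax.py (assumed usage):
%   go?  go1?  go2?  -- train and test on the xor, nand and impl datasets
%   alt?             -- the same pipeline, with the dataset built on the Python side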
107 | -------------------------------------------------------------------------------- /apps/deepnat/natjax.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import jax 3 | from jax import * 4 | import jax.numpy as jnp 5 | from jax.numpy import * 6 | import optax 7 | from sklearn.metrics import accuracy_score 8 | from sklearn.model_selection import train_test_split 9 | 10 | from natlog import Natlog, natprogs 11 | 12 | sys.setrecursionlimit(1 << 14) 13 | jax.config.update("jax_enable_x64", True) 14 | 15 | DTYPE = jnp.float32 16 | 17 | shared = dict() 18 | 19 | 20 | def share(f): 21 | shared[f.__name__] = f 22 | return f 23 | 24 | 25 | def share_syms(): 26 | for n, f in globals().items(): 27 | if n not in {'add'}: 28 | shared[n] = f 29 | return shared 30 | 31 | 32 | # NN design 33 | @jit 34 | def relu(x): 35 | return jnp.maximum(x, 0) 36 | 37 | 38 | @jit 39 | def sigmoid(x): 40 | return 1 / (1 + jnp.exp(-1 * x)) 41 | 42 | 43 | @jit 44 | def matsum(x, y): 45 | return jnp.add(x, y) 46 | 47 | 48 | def init_weights(features, layer_sizes, seed): 49 | KEY = jax.random.PRNGKey(seed) 50 | weights = [] 51 | keys = jax.random.split(KEY, len(layer_sizes) + 1) 52 | for i, units in enumerate(layer_sizes): 53 | if i == 0: 54 | w = jax.random.uniform(key=keys[i], shape=(units, features), minval=-1.0, maxval=1.0, dtype=DTYPE) 55 | else: 56 | w = jax.random.uniform(key=keys[i], shape=(units, layer_sizes[i - 1]), minval=-1.0, maxval=1.0, 57 | dtype=DTYPE) 58 | 59 | b = jax.random.uniform(key=keys[-1], minval=-1.0, maxval=1.0, shape=(units,), dtype=DTYPE) 60 | 61 | weights.append((w, b)) 62 | 63 | return weights 64 | 65 | 66 | @jit 67 | def linear_layer(weights, input_data): 68 | w, b = weights 69 | return dot(input_data, w.T) + b 70 | 71 | 72 | @jit 73 | def mlp_forward_pass(weights, input_data): 74 | layer_out = input_data 75 | for i in range(len(weights) - 1): 76 | layer_out = relu(linear_layer(weights[i], layer_out)) 77 | return sigmoid(linear_layer(weights[-1], layer_out)) 78 | 79 | 80 | @jit 81 | def mse_loss(weights, input_data, actual): 82 | preds = mlp_forward_pass(weights, input_data) 83 | return ((preds - actual) ** 2).mean() 84 | 85 | 86 | @jit 87 | def apply_grad(weights, input_data, actual): 88 | return grad(mse_loss)(weights, input_data, actual) 89 | 90 | 91 | def init_optimizer(weights, learning_rate): 92 | optimizer = optax.adam(learning_rate) 93 | opt_state = jit(optimizer.init)(weights) 94 | return optimizer, opt_state 95 | 96 | 97 | def apply_optimizer(weights, input_data, actual, optimizer, opt_state): 98 | gradients = apply_grad(weights, input_data, actual) 99 | updates, opt_state = optimizer.update(gradients, opt_state) 100 | weights = optax.apply_updates(weights, updates) 101 | return weights, opt_state 102 | 103 | 104 | def train_model(weights, X, Y, learning_rate, epochs): 105 | optimizer, opt_state = init_optimizer(weights, learning_rate) 106 | 107 | for i in range(epochs): 108 | loss = mse_loss(weights, X, Y) 109 | weights, opt_state = apply_optimizer(weights, X, Y, optimizer, opt_state) 110 | 111 | if i % 50 == 0: 112 | print(f"Loss at epoch {i} : \t{loss}") 113 | 114 | return weights 115 | 116 | 117 | def test_model(weights, X, Y): 118 | preds = mlp_forward_pass(weights, X) 119 | preds = (preds > 0.5).astype(DTYPE) 120 | loss = mse_loss(weights, X, Y) 121 | score = accuracy_score(Y, preds) 122 | return loss, score 123 | 124 | 125 | # tools exported to natlog 126 | 127 | # Natlog activation 128 | def run_natlog(): 129 | 
share_syms() 130 | n = Natlog(file_name="natjax.nat", 131 | with_lib=natprogs() + "lib.nat", callables=shared) 132 | # n.query("eq Started 'Natlog'.") 133 | # n.query("`eye 4 M, `matmul M M X, #print X, fail?") 134 | # n.query('go?'), 135 | n.repl() 136 | 137 | 138 | # testers 139 | 140 | def xor(x, y): 141 | return x ^ y 142 | 143 | 144 | def impl(x, y): 145 | return max(abs(1 - x), y) 146 | 147 | 148 | def to_jnp(a): 149 | return jnp.array(a, dtype=DTYPE) 150 | 151 | 152 | def split(X, y, seed, test_size=0.1): 153 | print('SHAPES:', X.shape, y.shape) 154 | X_train, X_test, y_train, y_test = \ 155 | train_test_split(X, y, test_size=test_size, random_state=seed) 156 | return X_train, X_test, y_train, y_test 157 | 158 | 159 | def load_dataset(features, op, seed): 160 | from itertools import product 161 | 162 | n = 2 * features 163 | layer_sizes = [features, n, n + n, n, features, 1] 164 | 165 | def data_x(): 166 | return jnp.array(list(product([-1.0, 1.0], repeat=features))) 167 | 168 | def data_y(): 169 | m = 2 ** features 170 | rs = [] 171 | for xs in data_x(): 172 | r = 0 173 | for x in xs: 174 | x = int((x + 1) / 2) 175 | r = op(r, x) 176 | # rs.append(2*r-1) 177 | rs.append(r) 178 | ys = to_jnp(rs).reshape(m, 1) 179 | print(ys) 180 | return ys 181 | 182 | data = split(data_x(), data_y(), seed) 183 | 184 | epochs = features ** 2 185 | if op == xor: epochs *= 8 186 | return data, layer_sizes, epochs 187 | 188 | 189 | def test_natjax(features, op, seed): 190 | learning_rate = jnp.array(0.01) 191 | 192 | (X_train, X_test, Y_train, Y_test), layer_sizes, epochs = load_dataset(features, op, seed) 193 | _, features = X_train.shape 194 | 195 | weights = init_weights(features, layer_sizes, seed) 196 | 197 | weights = train_model(weights, X_train, Y_train, learning_rate, epochs) 198 | 199 | train_score, train_acc = test_model(weights, X_train, Y_train) 200 | test_score, test_acc = test_model(weights, X_test, Y_test) 201 | 202 | print("Train Loss Score : {:.2f}".format(train_score)) 203 | print("Test Loss Score : {:.2f}".format(test_score)) 204 | 205 | print("Train Accuracy : {:.2f}".format(train_acc)) 206 | print("Test Accuracy : {:.2f}".format(test_acc)) 207 | 208 | 209 | if __name__ == "__main__": 210 | # test_natjax(features=12, op=xor) 211 | run_natlog() 212 | -------------------------------------------------------------------------------- /apps/deepnat/nattorch.nat: -------------------------------------------------------------------------------- 1 | impl 0 0 1. 2 | impl 0 1 1. 3 | impl 1 0 0. 4 | impl 1 1 1. 5 | 6 | nand 0 0 1. 7 | nand 0 1 1. 8 | nand 1 0 1. 9 | nand 1 1 0. 10 | 11 | xor 0 0 0. 12 | xor 0 1 1. 13 | xor 1 0 1. 14 | xor 1 1 0. 15 | 16 | iter N Op X Y: 17 | iter_op N Op () E 0 Y, 18 | to_tuple E X. 19 | 20 | iter_op 0 _Op E E R R. 21 | iter_op I Op E1 E2 R1 R3 : 22 | when I > 0, 23 | with I - 1 as J, 24 | Op X R1 R2, 25 | with X + X as XX, 26 | with XX - 1 as X1, 27 | iter_op J Op (X1 E1) E2 R2 R3. 28 | 29 | dataset N Op Xss (Ys): 30 | findall (X Y) (iter N Op X Y) XssYsList, 31 | to_pairs XssYsList XssList YsList, 32 | to_tuple XssList Xss, 33 | to_tuple YsList Ys. 34 | 35 | to_pairs () () (). 36 | to_pairs ((Xs Y) Zss) (Xs Xss) (Y Ys) : 37 | to_pairs Zss Xss Ys. 38 | 39 | split_dataset Xss Ys Xtr Xt Ytr Yt: 40 | `split Xss Ys 0 0.1 (Xtr Xt Ytr Yt), 41 | show_sizes (X Y Xtr Xt Ytr Yt). 42 | 43 | show_sizes Arrays: 44 | to_cons_list Arrays Vals, 45 | maplist (` shape) Vals SizesList, 46 | to_tuple SizesList Sizes, 47 | #print sizes are Sizes. 
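% split_dataset and show_sizes delegate to the Python helpers split and shape
% (shared from nattorch.py) via the backquote call operator.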
48 | 49 | 50 | hidden_sizes N (M MM M N 1): 51 | with 2 * N as M, 52 | with 4 * N as MM. 53 | 54 | % run 4 xor 0 5 L A LL AA? 55 | 56 | run N Op Seed Epochs Loss Acc LossT AccT: 57 | dataset N Op Xss Ys, 58 | 59 | split_dataset Xss Ys X Xt Y Yt, 60 | hidden_sizes N Sizes, 61 | #print hidden sizes are Sizes, 62 | `train_model X Y Sizes Epochs (Model LossFun), 63 | `test_model Model LossFun X Y (Loss Acc), 64 | `test_model Model LossFun Xt Yt (LossT AccT) . 65 | 66 | 67 | run with Op: 68 | run 12 Op 0 3000 Loss Acc LossT AccT, 69 | nl, 70 | #print train loss = Loss, 71 | #print train accuracy = Acc, 72 | nl, 73 | #print test loss = LossT, 74 | #print test accuracy = AccT. 75 | 76 | 77 | go: run with xor. 78 | go1 : run with nand. 79 | go2 : run with impl. 80 | 81 | 82 | tt0: iter_op 2 xor () B C D, #print result B C D, fail. 83 | 84 | tt1: iter 2 xor E R, #print result E R, fail. 85 | 86 | tt2 : dataset 1 nand Xss Ys, #print Xss Ys, fail. 87 | 88 | test_alt_run N Epochs Loss Acc LossT AccT: 89 | eq Seed 0, 90 | `load_dataset N Seed ((X Xt Y Yt) Layer_sizes), 91 | `train_model X Y Layer_sizes Epochs (Model LossFun), 92 | `test_model Model LossFun X Y (Loss Acc), 93 | `test_model Model LossFun Xt Yt (LossT AccT) . 94 | 95 | alt: 96 | test_alt_run 12 3000 Loss Acc LossT AccT, 97 | nl, 98 | #print train loss = Loss, 99 | #print train accuracy = Acc, 100 | nl, 101 | #print test loss = LossT, 102 | #print test accuracy = AccT. 103 | -------------------------------------------------------------------------------- /apps/deepnat/nattorch.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import random 3 | import torch.nn as nn 4 | import torch.cuda 5 | from torch import tensor 6 | from sklearn.model_selection import train_test_split 7 | import numpy as np 8 | from numpy import array, transpose, shape 9 | import torch.optim as optim 10 | from natlog import Natlog, natprogs 11 | 12 | sys.setrecursionlimit(1 << 14) 13 | 14 | DTYPE = np.float32 15 | TDTYPE = torch.float 16 | 17 | shared = dict() 18 | 19 | 20 | def share(f): 21 | shared[f.__name__] = f 22 | return f 23 | 24 | 25 | def share_syms(): 26 | for n, f in globals().items(): 27 | if n not in {'add'}: 28 | shared[n] = f 29 | return shared 30 | 31 | 32 | def load_dataset(features, seed): 33 | """ 34 | a ^ b ... ^ ... 
synthetic boolean 35 | truth table dataset - known to be hard to learn 36 | """ 37 | from itertools import product 38 | 39 | n = 2 * features 40 | layer_sizes = [n + n, n, n + n, n] 41 | 42 | def to_np(a): 43 | return np.array(a, dtype=DTYPE) 44 | 45 | def data_x(): 46 | x=list(product([-1.0, 1.0], repeat=features)) 47 | return x 48 | 49 | def data_y(): 50 | m = 2 ** features 51 | rs = [] 52 | for xs in data_x(): 53 | r = 0 54 | for x in xs: 55 | x = int((x + 1) / 2) 56 | r = r ^ x 57 | rs.append(r) 58 | rs=[rs] 59 | return rs 60 | 61 | return split(data_x(), data_y(), seed), layer_sizes 62 | 63 | 64 | def split(Xss, ys, seed, test_size=0.1): 65 | X=np.array(Xss) 66 | y=np.array(ys) 67 | y=y.T 68 | X_train, X_test, y_train, y_test = \ 69 | train_test_split(X, y, test_size=test_size, random_state=seed) 70 | res = tuple(tensor(a, dtype=torch.float) for a in [X_train, X_test, y_train, y_test]) 71 | # print('SPLIT:', res) 72 | return res 73 | 74 | 75 | class LinearNet(nn.Module): 76 | def __init__(self, input_size, sizes, output_size): 77 | super(LinearNet, self).__init__() 78 | 79 | self.num_layers = len(sizes) 80 | 81 | self.linears = nn.ModuleList([nn.Linear(input_size, sizes[0])]) 82 | print(input_size, sizes[0]) 83 | for i in range(1, self.num_layers): 84 | s1 = sizes[i - 1] 85 | s2 = sizes[i] 86 | print('HIDDEN SIZES:', s1, s2) 87 | self.linears.append(nn.Linear(s1, s2)) 88 | 89 | self.linears.append(nn.Linear(sizes[-1], output_size)) 90 | print(sizes[-1], output_size) 91 | 92 | def forward(self, x): 93 | f = nn.ReLU() 94 | # f = nn.Tanh() 95 | for n in self.linears: 96 | x = n(x) 97 | x = f(x) 98 | # g = nn.Sigmoid() 99 | return x # g(x) 100 | 101 | 102 | def accuracy(Y, Y_hat): 103 | return (Y == Y_hat).sum() / len(Y) 104 | 105 | 106 | def train_model(X_train, y_train, sizes, epochs): 107 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu") 108 | 109 | net = LinearNet(X_train.shape[1], sizes, 1) 110 | lossfun = nn.MSELoss() 111 | optimizer = optim.Adam(net.parameters(), weight_decay=0.001) 112 | 113 | inputs = X_train 114 | 115 | for epoch in range(epochs): 116 | optimizer.zero_grad() 117 | outputs = net(inputs) 118 | loss = lossfun(outputs, y_train) 119 | loss.backward() 120 | optimizer.step() 121 | 122 | if epoch % 50 == 0: print('LOSS AT ', epoch, '\t-->', round(loss.item(), 8)) 123 | 124 | return net, lossfun 125 | 126 | 127 | def test_model(net, lossfun, X, y): 128 | print('STATE DICT:', list(net.state_dict().keys())) 129 | with torch.inference_mode(): 130 | y_hat = net(X) 131 | loss = np.sqrt(lossfun(y_hat, y).detach().numpy()) 132 | preds = y_hat > 0.5 133 | y = y > 0.5 134 | acc = accuracy(y, preds) 135 | res = loss.tolist(), acc.tolist() 136 | # print('RES:', res) 137 | return res 138 | 139 | 140 | def test_nattorch(): 141 | epochs = 600 142 | features = 10 143 | seed = 42 144 | random.seed(seed) 145 | np.random.seed(seed) 146 | torch.random.manual_seed(seed) 147 | dataset, sizes = load_dataset(features, seed) 148 | X_train, X_test, y_train, y_test = [torch.from_numpy(d) for d in dataset] 149 | net, lossfun = train_model(X_train, y_train, sizes, epochs) 150 | train_loss, train_acc = test_model(net, lossfun, X_train, y_train) 151 | test_loss, test_acc = test_model(net, lossfun, X_test, y_test) 152 | 153 | print('\nRESULTS:\n') 154 | 155 | print('Train loss:', train_loss) 156 | print('Test loss:', test_loss) 157 | 158 | print() 159 | 160 | print('Train accuracy:', train_acc) 161 | print('Train accuracy:', test_acc) 162 | 163 | 164 | def lin_test(): 165 | n = 
LinearNet(4, [5, 10, 15, 6], 1) 166 | print(type(n)) 167 | 168 | for d in load_dataset(4,42): 169 | print(d) 170 | print() 171 | 172 | 173 | # Natlog activation 174 | def run_natlog(): 175 | share_syms() 176 | n = Natlog(file_name="nattorch.nat", 177 | with_lib=natprogs() + "lib.nat", callables=shared) 178 | # n.query("eq Started 'Natlog'.") 179 | # n.query("`eye 4 M, `matmul M M X, #print X, fail?") 180 | # n.query('go?'), 181 | # n.query('alt?'), 182 | n.repl() 183 | 184 | 185 | if __name__ == "__main__": 186 | # test_nattorch() 187 | run_natlog() 188 | -------------------------------------------------------------------------------- /apps/deepnat/tmlp.py: -------------------------------------------------------------------------------- 1 | import torch.nn as nn 2 | import torch.cuda 3 | from sklearn.model_selection import train_test_split 4 | import numpy as np 5 | import torch.optim as optim 6 | 7 | DTYPE = np.float32 8 | 9 | 10 | def load_dataset(features, seed): 11 | """ 12 | a ^ b ... ^ ... synthetic boolean 13 | truth table dataset - known to be hard to learn 14 | """ 15 | from itertools import product 16 | 17 | n = 2 * features 18 | layer_sizes = [n + n, n, n + n, n] 19 | 20 | def to_np(a): 21 | return np.array(a, dtype=DTYPE) 22 | 23 | def data_x(): 24 | return np.array(list(product([-1.0, 1.0], repeat=features)), dtype=DTYPE) 25 | 26 | def data_y(): 27 | m = 2 ** features 28 | rs = [] 29 | for xs in data_x(): 30 | r = 0 31 | for x in xs: 32 | x = int((x + 1) / 2) 33 | r = r ^ x 34 | rs.append(r) 35 | ys = to_np(rs).reshape(m, 1) 36 | return ys 37 | 38 | return split(data_x(), data_y(), seed), layer_sizes 39 | 40 | 41 | def split(X, y, seed, test_size=0.1): 42 | X_train, X_test, y_train, y_test = \ 43 | train_test_split(X, y, test_size=test_size, random_state=seed) 44 | return X_train, X_test, y_train, y_test 45 | 46 | 47 | class LinearNet(nn.Module): 48 | def __init__(self, input_size, sizes, output_size): 49 | super(LinearNet, self).__init__() 50 | self.num_layers = len(sizes) 51 | 52 | self.linears = nn.ModuleList([nn.Linear(input_size, sizes[0])]) 53 | print(input_size, sizes[0]) 54 | for i in range(1, self.num_layers): 55 | s1 = sizes[i - 1] 56 | s2 = sizes[i] 57 | print('SIZES:', s1, s2) 58 | self.linears.append(nn.Linear(s1, s2)) 59 | 60 | self.linears.append(nn.Linear(sizes[-1], output_size)) 61 | print(sizes[-1], output_size) 62 | 63 | def forward(self, x): 64 | f = nn.ReLU() 65 | for n in self.linears: 66 | x = n(x) 67 | x = f(x) 68 | # g=nn.Sigmoid() 69 | return x # g(x) 70 | 71 | 72 | def accuracy(Y, Y_hat): 73 | return (Y == Y_hat).sum() / len(Y) 74 | 75 | 76 | def train(X_train, y_train, sizes, epochs): 77 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu") 78 | net = LinearNet(X_train.shape[1], sizes, 1) 79 | lossfun = nn.MSELoss() 80 | optimizer = optim.Adam(net.parameters(), weight_decay=0.001) 81 | 82 | inputs = X_train 83 | 84 | outputs = inputs 85 | 86 | for epoch in range(epochs): 87 | optimizer.zero_grad() 88 | outputs = net(inputs) 89 | loss = lossfun(outputs, y_train) 90 | loss.backward() 91 | optimizer.step() 92 | 93 | if epoch % 50 == 0: print('LOSS AT ', epoch, '\t-->', round(loss.item(), 4)) 94 | 95 | return net, lossfun 96 | 97 | 98 | def test(net, lossfun, X, y): 99 | print('STATE DICT:',list(net.state_dict().keys())) 100 | with torch.inference_mode(): 101 | y_hat = net(X) 102 | loss = np.sqrt(lossfun(y_hat, y).detach().numpy()) 103 | preds = y_hat > 0.5 104 | y = y > 0.5 105 | acc = accuracy(y, preds) 106 | return loss.tolist(), 
acc.tolist()
107 |
108 |
109 | def run():
110 |     epochs = 600
111 |     features = 12
112 |     seed = 0
113 |     np.random.seed(seed)
114 |     torch.random.manual_seed(seed)
115 |     dataset, sizes = load_dataset(features, seed)
116 |     X_train, X_test, y_train, y_test = [torch.from_numpy(d) for d in dataset]
117 |     net, lossfun = train(X_train, y_train, sizes, epochs)
118 |     train_loss, train_acc = test(net, lossfun, X_train, y_train)
119 |     test_loss, test_acc = test(net, lossfun, X_test, y_test)
120 |
121 |     print('\nRESULTS:\n')
122 |
123 |     print('Train loss:', train_loss)
124 |     print('Test loss:', test_loss)
125 |
126 |     print()
127 |
128 |     print('Train accuracy:', train_acc)
129 |     print('Test accuracy:', test_acc)
130 |
131 |
132 | def t1():
133 |     n = LinearNet(4, [5, 10, 15, 6], 1)
134 |     print(type(n))
135 |
136 |     for d in load_dataset(4, 0):
137 |         print(d)
138 |         print()
139 |
140 |
141 | if __name__ == "__main__":
142 |     run()
143 |
--------------------------------------------------------------------------------
/apps/nat3d/README.md:
--------------------------------------------------------------------------------
1 | # Simple 3D animations with Natlog and vpython
2 |
3 | To run this app, make sure you do:
4 |
5 | pip3 install natlog
6 | pip3 install vpython
7 |
8 | To run the Natlog script, type:
9 |
10 | ```
11 | python3 vp.py
12 | ```
13 |
14 | To run its Python equivalent, type:
15 |
16 | python3 humpty.py
17 |
18 |
--------------------------------------------------------------------------------
/apps/nat3d/humpty.py:
--------------------------------------------------------------------------------
1 | from vp import *
2 |
3 |
4 | def leg():
5 |     s = sphere()
6 |     size(s, 1, 2, 1)
7 |     col(s, 'blue')
8 |     down(s)
9 |     return s
10 |
11 |
12 | def head():
13 |     h = sphere()
14 |     size(h, 2, 3, 2)
15 |     col(h, 'white')
16 |     up(h)
17 |     up(h)
18 |     up(h)
19 |     return h
20 |
21 |
22 | def eye():
23 |     s = sphere()
24 |     size(s, 0.5, 0.4, 0.4)
25 |     col(s, 'red')
26 |     closer(s)
27 |     closer(s)
28 |     up(s)
29 |     up(s)
30 |     up(s)
31 |     up(s)
32 |     rotz(s)
33 |     return s
34 |
35 |
36 | def humpty_dumpty():
37 |     x = eye()
38 |     left(x)
39 |     y = eye()
40 |     right(y)
41 |     h = head()
42 |     ll = leg()
43 |     left(ll)
44 |     rl = leg()
45 |     right(rl)
46 |     return compound((x, y, h, ll, rl))
47 |
48 |
49 | def tumble(c):
50 |     for _ in range(6):
51 |         left(c)
52 |         sleep(0.5)
53 |         rotx(c)
54 |         sleep(0.5)
55 |         roty(c)
56 |         sleep(0.5)
57 |         rotz(c)
58 |         right(c)
59 |
60 |
61 | def go():
62 |     start()
63 |     c=humpty_dumpty()
64 |     tumble(c)
65 |
66 |
67 | if __name__ == "__main__":
68 |     go()
69 |
--------------------------------------------------------------------------------
/apps/nat3d/requirements.txt:
--------------------------------------------------------------------------------
1 | natlog
2 | vpython
3 |
--------------------------------------------------------------------------------
/apps/nat3d/vp.nat:
--------------------------------------------------------------------------------
1 | size X A B C : `vector A B C V, setprop X size V.
2 |
3 | go :
4 |   #print humpty dumpty sat on a wall,
5 |   humpty_dumpty C,
6 |   tumble C,
7 |   #print humpty dumpty had a great fall.
8 |
9 | humpty_dumpty C:
10 |   eye X,
11 |   #left X,
12 |   eye Y,
13 |   #right Y,
14 |   head H,
15 |   leg L,
16 |   #left L,
17 |   leg R,
18 |   #right R,
19 |   `compound (X Y H L R) C.
20 |
21 | leg S:
22 |   `sphere S,
23 |   #size S 1 3 1,
24 |   #col S blue,
25 |   #down S.
26 |
27 | head H:
28 |   `sphere H,
29 |   size H 2 3 2,
30 |   #col H white,
31 |   #up H,
32 |   #up H,
33 |   #up H.
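% Goals prefixed with ` or # (e.g. `sphere, `compound, #col, #up) call the
% vpython helpers shared from vp.py; the size wrapper at the top of this file
% builds a vector first and sets it with setprop.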
34 | 35 | eye S: 36 | `sphere S, 37 | size S 0.5 0.3 0.3, 38 | #col S red, 39 | #closer S, 40 | #closer S, 41 | #up S, 42 | #up S, 43 | #up S, 44 | #up S, 45 | #rotz S. 46 | 47 | tumble C: 48 | between 1 12 _, 49 | #left C, 50 | #sleep 0.5, 51 | #rotx C, 52 | #sleep 0.5, 53 | #roty C, 54 | #sleep 0.5, 55 | #rotz C, 56 | #right C, 57 | fail. 58 | tumble _ . 59 | 60 | 61 | bot dance: 62 | `sphere S, #resize S 0.1, #col S red, 63 | `bot B, #col B blue, times 2 (# left B), 64 | `bot C, #col C green, times 3 (# right C), 65 | tumble B, 66 | #print done b, 67 | tumble C, 68 | #print done c. 69 | 70 | 71 | times K Action: 72 | between 1 K _, 73 | call Action, 74 | fail. 75 | times K _. 76 | 77 | test1: `sphere S, #sleep 1, #hide S, #sleep 1, #show S. 78 | 79 | test2 : `sphere S, #sleep 1, #left S, sleep 1, #right S, 80 | between 1 5 _, #farther S, fail. 81 | 82 | test3 : 83 | `ellipsoid X, 84 | size X 1 2 3, 85 | #col X red. 86 | 87 | 88 | test4 : 89 | `pyramid P, 90 | #col P blue, 91 | between 1 10 _, 92 | #left P, 93 | #rotx P, 94 | #right P, 95 | #sleep 1, 96 | fail. 97 | -------------------------------------------------------------------------------- /apps/nat3d/vp.py: -------------------------------------------------------------------------------- 1 | from vpython import * 2 | from natlog import Natlog, natprogs 3 | 4 | scene=canvas(width=1200, height=800, title='Natlog 3D DEMO, zoom in!') 5 | 6 | shared = dict() 7 | 8 | 9 | def share(f): 10 | # print('SHARING:', f.__name__) 11 | shared[f.__name__] = f 12 | return f 13 | 14 | 15 | def share_primitives(): 16 | for f in ( 17 | sleep, box, cylinder, cone, sphere, ellipsoid, pyramid, compound, vector, color, helix, ring, 18 | compound 19 | ): 20 | share(f) 21 | shared['scene'] = scene 22 | return shared 23 | 24 | 25 | @share 26 | def start(): 27 | scene.caption = """To rotate "camera", drag with right button or Ctrl-drag. 28 | To zoom, drag with middle button or Alt/Option depressed, or use scroll wheel. 29 | On a two-button mouse, middle is left + right. 30 | To pan left/right and up/down, Shift-drag. 
31 | Touch screen: pinch/extend to zoom, swipe or two-finger rotate.""" 32 | return scene 33 | 34 | 35 | @share 36 | def objects(): 37 | return tuple(shared.keys()) 38 | 39 | 40 | delta = 0.5 41 | 42 | 43 | @share 44 | def desc(o): 45 | try: 46 | d = o.__dict__ 47 | except Exception: 48 | d = dict() 49 | print(d) 50 | 51 | 52 | @share 53 | def step(val): 54 | global delta 55 | delta = val 56 | 57 | 58 | @share 59 | def show(x): 60 | x.visible = True 61 | 62 | 63 | @share 64 | def hide(x): 65 | x.visible = False 66 | 67 | 68 | @share 69 | def left(x): 70 | x.pos = x.pos + vector(-delta, 0, 0) 71 | 72 | 73 | @share 74 | def right(x): 75 | x.pos = x.pos + vector(delta, 0, 0) 76 | 77 | 78 | @share 79 | def up(x): 80 | x.pos = x.pos + vector(0, delta, 0) 81 | 82 | 83 | @share 84 | def down(x): 85 | x.pos = x.pos + vector(0, -delta, 0) 86 | 87 | 88 | @share 89 | def closer(x): 90 | x.pos = x.pos + vector(0, 0, delta) 91 | 92 | 93 | @share 94 | def farther(x): 95 | x.pos = x.pos + vector(0, 0, -delta) 96 | 97 | 98 | @share 99 | def rotx(o, angle=90): 100 | o.rotate(angle=angle, axis=vector(1, 0, 0)) 101 | 102 | 103 | @share 104 | def roty(o, angle=90): 105 | o.rotate(angle=angle, axis=vector(0, 1, 0)) 106 | 107 | 108 | @share 109 | def rotz(o, angle=90): 110 | o.rotate(angle=angle, axis=vector(0, 0, 1)) 111 | 112 | 113 | @share 114 | def col(x, c): 115 | if isinstance(c, str): 116 | c = getattr(color, c) 117 | x.color = c 118 | 119 | 120 | @share 121 | def size(o, x, y, z): 122 | v = vector(x, y, z) 123 | o.size = v 124 | 125 | 126 | @share 127 | def resize(o, k): 128 | o.size = o.size * float(k) 129 | 130 | 131 | def normalize(o): 132 | v=o.size 133 | k = 3/(v.x + v.y + v.z) 134 | o.pos = vector(0, 0, 0) 135 | resize(o, k) 136 | 137 | 138 | # tests 139 | 140 | def rtest(i=20): 141 | b = box(size=vector(5, 2, 1)) 142 | b.color = color.blue 143 | 144 | def w(t=2): 145 | sleep(t) 146 | print(b.pos, b.axis) 147 | 148 | for _ in range(i): 149 | # rotx(b) 150 | w() 151 | roty(b) 152 | w() 153 | rotz(b) 154 | 155 | 156 | def ttest(i=10): 157 | s = cone(radius=3) 158 | 159 | def w(t=0.3): 160 | sleep(t) 161 | print(s.pos) 162 | 163 | for _ in range(i): 164 | 165 | w() 166 | s.color = color.blue 167 | w() 168 | for i in range(10): farther(s) 169 | w() 170 | left(s) 171 | w() 172 | up(s) 173 | w() 174 | s.color = color.red 175 | for _ in range(10): closer(s) 176 | w() 177 | right(s) 178 | w() 179 | down(s) 180 | w() 181 | 182 | 183 | def from_stl(fname): # specify file 184 | with open(fname, mode='rb') as fd: 185 | fd.seek(0) 186 | vs = [] 187 | ts = [] # list of triangles to compound 188 | for line in fd.readlines(): 189 | ps = line.split() 190 | if ps[0] == b'facet': 191 | N = vec(float(ps[2]), float(ps[3]), float(ps[4])) 192 | elif ps[0] == b'vertex': 193 | vs.append(vertex(pos=vec(float(ps[1]), float(ps[2]), 194 | float(ps[3])), normal=N, 195 | color=color.white)) 196 | if len(vs) == 3: 197 | ts.append(triangle(vs=vs)) 198 | vs = [] 199 | 200 | return compound(ts) 201 | 202 | @share 203 | def follow(o): 204 | scene.camera.follow(o) 205 | 206 | @share 207 | def camera(x,y,z): 208 | scene.camera.pos=vector(x,y,z) 209 | 210 | @share 211 | def bot(): 212 | b = from_stl('bot.stl') 213 | normalize(b) 214 | return b 215 | 216 | 217 | def nats(): 218 | share_primitives() 219 | start() 220 | n = Natlog(file_name="vp.nat", 221 | with_lib=natprogs() + "lib.nat", callables=shared) 222 | n.query("go.") 223 | #n.query("bot dance.") 224 | n.repl() 225 | 226 | 227 | if __name__ == "__main__": 228 | # ttest() 229 | # 
rtest() 230 | #bot() 231 | nats() 232 | -------------------------------------------------------------------------------- /apps/natgpt/deepchat.nat: -------------------------------------------------------------------------------- 1 | initialize: 2 | #print 'Welcome' to 'Natlog' based 'ChatGPT' chat app !, 3 | #print 'Please' obtain and set the environment variable 'OPENAI_API_KEY' !, 4 | #print 'Please' type quit to end the chat !, 5 | #print. 6 | 7 | chat_mind M: `'ChatMind' M. 8 | 9 | chat: 10 | `'ChatMind' 400 natchat M, 11 | chat_repl M. 12 | 13 | chat_repl M: 14 | `input 'Question ' Q, 15 | distinct Q quit, 16 | step M Q. 17 | chat_repl M: 18 | #meth_call M persist (). 19 | 20 | step M Q: 21 | `meth_call M ask (Q) A, 22 | #print A. 23 | step M Q: 24 | ^trust chat_repl M. 25 | 26 | -------------------------------------------------------------------------------- /apps/natgpt/deepchat.py: -------------------------------------------------------------------------------- 1 | import os 2 | import webbrowser 3 | import openai 4 | import tiktoken 5 | from natlog import Natlog, natprogs, lconsult 6 | import json 7 | 8 | MAX_TOKENS = 1 << 14 # make shorter if needed e.g. 300 9 | 10 | API_KEY = [os.getenv("OPENAI_API_KEY")] 11 | API_BASE = ["https://api.openai.com/v1"] 12 | 13 | 14 | def ask_llm(model=None, mes=None, temperature=None, n=None): 15 | assert None not in (model, mes, temperature, n), (model, mes, temperature, n) 16 | 17 | def llm_res(r, i): 18 | return r.choices[i].message.content.strip() 19 | 20 | client = openai.OpenAI( 21 | api_key=API_KEY[0], 22 | base_url=API_BASE[0] 23 | ) 24 | 25 | r = client.chat.completions.create( 26 | messages=mes, 27 | model=model, 28 | temperature=temperature, 29 | n=n 30 | ) 31 | 32 | pt = r.usage.prompt_tokens 33 | ct = r.usage.completion_tokens 34 | 35 | answers = [llm_res(r, i) for i in range(n)] 36 | 37 | return answers, pt, ct 38 | 39 | 40 | class ChatMind: 41 | def __init__(self, max_toks=MAX_TOKENS, avatar=None): 42 | self.model = "gpt-4-turbo-preview" 43 | 44 | self.max_toks = max_toks 45 | self.avatar = avatar 46 | 47 | if not self.resume(): 48 | self.short_mem = dict() 49 | self.long_mem = dict() 50 | self.toks = [] 51 | 52 | def to_message(self, quest): 53 | mes = [] 54 | for (q, a) in self.short_mem.items(): 55 | qd = dict(role='user', content=q) 56 | ad = dict(role='assistant', content=a) 57 | mes.extend([qd, ad]) 58 | mes.append(dict(role='user', content=quest)) 59 | return mes 60 | 61 | def get_state(self): 62 | return dict(long_mem=self.long_mem, 63 | short_mem=self.short_mem, 64 | toks=self.toks) 65 | 66 | def set_state(self, state): 67 | self.long_mem = state['long_mem'] 68 | self.short_mem = state['short_mem'] 69 | self.toks = state['toks'] 70 | 71 | def store_name(self): 72 | if self.avatar is None: return None 73 | return 'states/' + self.avatar + '.json' 74 | 75 | def resume(self): 76 | fname = self.store_name() 77 | if fname is not None and exists_file(fname): 78 | state = from_json(fname) 79 | # print('!!!!',json.dumps(state,indent=4)) 80 | self.set_state(state) 81 | return True 82 | return False 83 | 84 | def persist(self): 85 | fname = self.store_name() 86 | if fname is None: return 87 | state = self.get_state() 88 | to_json(state, fname) 89 | 90 | def already_answered(self, quest): 91 | answer = self.short_mem.get(quest, None) 92 | if answer is not None: return answer 93 | answer = self.long_mem.get(quest, None) 94 | return answer 95 | 96 | def __repr__(self): 97 | return json.dumps(self.get_state(), indent=2) 98 | 99 | def 
ask(self, quest): 100 | if quest == 'quit': 101 | self.persist() 102 | return 'bye' 103 | 104 | answered = self.already_answered(quest) 105 | if answered is not None: 106 | return answered 107 | 108 | self.trim_context(quest) 109 | 110 | assert len(self.short_mem) == len(self.toks) 111 | 112 | mes = self.to_message(quest) 113 | 114 | answers, pt, ct = ask_llm(model=self.model, mes=mes, temperature=0.2, n=1) 115 | answer = answers[0] 116 | 117 | self.toks.append(pt + ct) 118 | 119 | self.short_mem[quest] = answer 120 | 121 | print('LEN SHORT TERM:',len(self.short_mem)) 122 | 123 | return answer 124 | 125 | def trim_context(self, quest): 126 | if len(self.toks) == 0: return 127 | 128 | total_toks = sum(self.toks) 129 | avg_toks = total_toks / len(self.toks) 130 | 131 | quest_toks = count_toks(quest) 132 | tok_estimate = total_toks + quest_toks + 2 * avg_toks # conservative ... 133 | 134 | if tok_estimate > self.max_toks: 135 | k, v = dict_trim(self.short_mem) 136 | self.long_mem[k] = v 137 | self.toks = self.toks[1:] 138 | 139 | 140 | # tools 141 | 142 | def count_toks(text): 143 | enc = tiktoken.get_encoding("gpt2") 144 | toks = enc.encode(text) 145 | return len(toks) 146 | 147 | 148 | def exists_file(fname): 149 | return os.path.exists(fname) 150 | 151 | 152 | def ensure_path(fname): 153 | """ 154 | makes sure path to directory and directory exist 155 | """ 156 | d, _ = os.path.split(fname) 157 | os.makedirs(d, exist_ok=True) 158 | 159 | 160 | def to_json(obj, fname, indent=2): 161 | """ 162 | serializes an object to a json file 163 | assumes object made of array and dicts 164 | """ 165 | ensure_path(fname) 166 | with open(fname, "w") as outf: 167 | json.dump(obj, outf, indent=indent) 168 | 169 | 170 | def from_json(fname): 171 | """ 172 | deserializes an object from a json file 173 | """ 174 | with open(fname, "rt") as inf: 175 | obj = json.load(inf) 176 | return obj 177 | 178 | 179 | def dict_trim(d): 180 | k = next(iter(d)) 181 | v = d.pop(k) 182 | return k, v 183 | 184 | 185 | 186 | def run_natlog(natprog="deepchat.nat"): 187 | n = Natlog(file_name=natprog, 188 | with_lib=natprogs() + "lib.nat", callables=globals()) 189 | next(n.solve('initialize.')) 190 | # n.repl() 191 | for x in n.solve('chat.'): 192 | print() 193 | 194 | 195 | def test_deepchat(): 196 | cm = ChatMind(avatar='you') 197 | 198 | answer = cm.ask('What was the warmest temperature measured on Mars?') 199 | answer = cm.ask('The same question for planet Venus?') 200 | answer = cm.ask('And what about dwarf planet Ceres?') 201 | answer = cm.ask('Is it much colder on Pluto?') 202 | print(f'ANSWER: {answer}\n') 203 | answer = cm.ask('quit') 204 | print(answer, '\n') 205 | 206 | print('STATE:\n', cm) 207 | 208 | 209 | if __name__ == "__main__": 210 | # test_deepchat() 211 | run_natlog() 212 | -------------------------------------------------------------------------------- /apps/natgpt/pics/female_chagall.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ptarau/natlog/156f6df660459b03b79d3deec6dc8a5437d78f41/apps/natgpt/pics/female_chagall.png -------------------------------------------------------------------------------- /apps/natgpt/pics/female_robot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ptarau/natlog/156f6df660459b03b79d3deec6dc8a5437d78f41/apps/natgpt/pics/female_robot.png -------------------------------------------------------------------------------- 
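The `ChatMind` class in `apps/natgpt/deepchat.py` above can also be driven directly from Python, without going through the Natlog REPL. Below is a minimal sketch, assuming the `OPENAI_API_KEY` environment variable is set and the script runs from the `apps/natgpt` directory so that `persist()` can write to the relative `states/` folder; the avatar name `demo` is an illustrative choice, not something defined in the repository.

```python
# Minimal sketch: drives ChatMind (apps/natgpt/deepchat.py) directly from Python.
# Assumes OPENAI_API_KEY is set and the current directory is apps/natgpt,
# so persist() can write states/demo.json; 'demo' is a made-up avatar name.
from deepchat import ChatMind


def demo_session():
    cm = ChatMind(avatar='demo')  # resumes from states/demo.json if it exists
    print(cm.ask('What is the tallest mountain on Mars?'))
    print(cm.ask('How does it compare to Everest?'))  # earlier turns are sent as context
    print(cm.ask('quit'))  # 'quit' persists the session and returns 'bye'


if __name__ == "__main__":
    demo_session()
```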
/apps/natgpt/pics/male_chagall.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ptarau/natlog/156f6df660459b03b79d3deec6dc8a5437d78f41/apps/natgpt/pics/male_chagall.png -------------------------------------------------------------------------------- /apps/natgpt/pics/male_robot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ptarau/natlog/156f6df660459b03b79d3deec6dc8a5437d78f41/apps/natgpt/pics/male_robot.png -------------------------------------------------------------------------------- /apps/natgpt/textual.nat: -------------------------------------------------------------------------------- 1 | init. 2 | -------------------------------------------------------------------------------- /apps/natgpt/textual.py: -------------------------------------------------------------------------------- 1 | from natlog.natlog import Natlog,natprogs 2 | from natlog.textual_natlog import TextualNatlog 3 | import stanza 4 | 5 | def to_sents(text, lang='en'): 6 | nlp = stanza.Pipeline(lang=lang, processors='tokenize') 7 | doc = nlp(text) 8 | sents = [] 9 | for sent in doc.sentences: 10 | toks = [] 11 | for token in sent.tokens: 12 | toks.append(token.text) 13 | sent_text = " ".join(toks) 14 | sents.append(sent_text) 15 | return sents # "\n".join(sents) 16 | 17 | def standardize_txt(fname,lang='en'): 18 | 19 | with open(fname,'r') as f: 20 | text=f.read() 21 | sents=to_sents(text, lang=lang) 22 | text="\n".join(sents) 23 | with open(fname,'w') as g: 24 | g.write(text) 25 | 26 | 27 | def start(): 28 | nname ="textual.nat" 29 | dname = natprogs()+'../docs/prolog50.txt' 30 | standardize_txt(dname) 31 | 32 | n = TextualNatlog(file_name=nname, db_name=dname) 33 | n.repl() 34 | 35 | if __name__=="__main__": 36 | start() 37 | -------------------------------------------------------------------------------- /clean.sh: -------------------------------------------------------------------------------- 1 | rm -r -f dist build 2 | rm -r -f *.egg-info 3 | pushd . 4 | cd natlog 5 | rm -r -f __pycache__ 6 | cd test 7 | rm -r -f __pycache__ 8 | popd 9 | pushd . 
10 | cd natlog 11 | cd app 12 | rm -r -f __pycache__ 13 | popd 14 | -------------------------------------------------------------------------------- /docs/natlog.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ptarau/natlog/156f6df660459b03b79d3deec6dc8a5437d78f41/docs/natlog.pdf -------------------------------------------------------------------------------- /docs/natlog_deep.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ptarau/natlog/156f6df660459b03b79d3deec6dc8a5437d78f41/docs/natlog_deep.pdf -------------------------------------------------------------------------------- /docs/natlog_gpt.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ptarau/natlog/156f6df660459b03b79d3deec6dc8a5437d78f41/docs/natlog_gpt.pdf -------------------------------------------------------------------------------- /docs/prolog50.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ptarau/natlog/156f6df660459b03b79d3deec6dc8a5437d78f41/docs/prolog50.pdf -------------------------------------------------------------------------------- /docs/prolog50.txt: -------------------------------------------------------------------------------- 1 | Reflections on Automation , Learnability and Expressiveness in Logic - based Programming Languages . 2 | This position paper sketches an analysis of the essential features that logic - based programming languages will need to embrace to compete in a quickly evolving field where learnability and expressiveness of language constructs , seen as aspects of a learner ’s user experience , have become dominant decision factors for choosing a programming language or paradigm . 3 | Our analysis centers on the main driving force in the evolution of programming languages : automation of coding tasks , a recurring promise of declarative lan - guages , instrumental for developing software artifacts competitively . 4 | In this context we will focus on taking advantage of the close correspondence between logic - based language constructs and their natural language equivalents , the adoption of language constructs enhancing the expressiveness and learnability of logic - based programming languages and their synergistic uses in interacting declaratively with deep learning frameworks . 5 | Keywords : logic - based programming language constructs , automation , expres- siveness and learnability , coroutining with logic engines , definite clause gram - mars as prompt generators , embedding of logic programming and in deep learn - ing ecosystems . 6 | 1 Introduction Driven by the importance of automation and simplification of coding tasks in a logic programming context , the question we plan to answer is : 7 | What features need to be improved or invented to ensure a lasting ability of logic - based programming languages to compete with languages that have adopted the latest innovations in usability , robustness and easy adoption by newcomers ? 8 | Our short answer is that we need to focus on closeness to natural language , learnability , flexible execution mechanisms and highly expressive language constructs . 9 | We will elaborate in the next sections on why these features matter , with hints on what language constructs are needed for implementing them . 
10 | As an application , we will show the effectiveness of some of our proposed language constructs via definite clause - grammar based prompt generators for today ’s text - to - text and text - to - image deep learning systems . 11 | 2 The Challenges 2.1 12 | It is just Automation ( again ) Automation , seen as a replacement of repetitive tasks , has been a persistent theme from which essential computational processes including compilation , partial - evaluation and meta- interpretation have emerged . 13 | Besides competition from today ’s functional programming languages and proof as- sistants , all with strong declarative claims , logic - based languages face even stiffer com- petition from the more radical approach to automation coming from deep learning . 14 | To state it simply , this manifests as replacement of rule - based , symbolic encoding of intelligent behaviors via machine learning , including unsupervised learning among which transformers [ 9 ] trained on large language models have achieved outstanding performance in fields ranging from natural language processing to computational chem - istry and image processing . 15 | For instance , results in natural language processing with prompt - driven generative models like GPT3 [ 1 ] or text - to - image generators like DALL.E [ 5 ] or Stable Diffusion [ 10 ] have outclassed similarly directed symbolic efforts . 16 | In fact , it is quite hard to claim that a conventional programming language ( including a logic - based one ) is as declarative as entering a short prompt sentence describing a picture and getting it back in a few seconds . 17 | We will argue in the next sections that it makes sense for logic - based programming languages to embrace rather than fight these emerging trends . 18 | 2.2 The Shifting of the Declarative Umbrella Logic - based programming languages have shared with their functional counterparts and a few data - base management tools the claim of being “ declarative ” in the very general sense that the code seen as a specification of what needs to be done has clear enough information for the implementation to be able to “ automatically ” figure out how it can be done . 19 | However it is becoming clearer every day that ownership of the declarative umbrella is slowly transitioning to deep neural networks - based machine learning tools that , to state it simply , replace human coding with models directly extracted from labeled and more and more often from raw , unlabeled data . 20 | This suggests the usefulness of closely embedding a logic - based language in this fast evolving ecosystem . 21 | 2.3 The Importance of Learnability Learnability is not a crisply definable concept , but it becomes intuitively clear especially as someone gets fluent in several programming languages and paradigms . 22 | Learnability is experienced positively or negatively when teaching or learning a new programming language and also when adopting it as an ideal software development stage for a given project . 23 | Good barometers for learnability are the learning curves of newcomers ( including children in their early teens ) , the hurdles they experience and the projects they can achieve in a relatively short period of time . 24 | Another one is how well one can pick up the language inductively , simply by looking at coding examples . 25 | 2 26 | When thinking about what background can be assumed in the case of newcomers , irrespectively of age , natural language pops up as a common denominator . 
27 | As logic notation originates in natural language there are conspicuous mappings between verbs and predicates and nominal groups as their arguments . 28 | Spatial and tem- poral event streams , in particular visual programming , animations and games relate to logic in more intricate ways and at a more advanced level of mastering a logic - based language . 29 | That hints toward learning methods and language constructs easily mapped syntac - tically and semantically to natural language equivalents . 30 | 2.4 31 | The importance of Expressiveness 32 | As part of their evolution , programming languages learn from each other . 33 | Expressive - ness enhancements are contagious . 34 | More precisely , language constructs that encapsu- late formally clear data objects and their typical usage patterns propagate , often crossing heavily defended programming paradigm border walls . 35 | As Python has been an early adopter of such expressiveness enhancers , it makes sense to consider for adoption some of its language features that one is likely to be impressed even at a first contact , as some of the following : – ease of defining finite functions ( dictionaries , mutable and immutable sequences and sets ) , all exposed as first class citizens – aggregationoperations ( list , set , dictionarycomprehensions ) exposedwithalightweight and flexible syntax – coroutining ( via the yield statement or async annotations ) exposed with a simple and natural syntax – nested parenthesizing and heavy punctuation avoided or reduced via indentation Prolog shares some of those but it is usually via separate libraries or semantically more intricate definitions ( e.g. , setof with care about how variables are quantified as an implementation of set comprehensions ) . 36 | We will explore in the next sections some language constructs covering features where logic - based languages are left behind . 37 | 3 A random Walk in the Space of Solutions 38 | We will next have a glimpse at a “ gradient descent ” in the space of possible solutions to these challenges with hints about suggested language design and language construct improvements that apply specifically to Prolog and Prolog - like languages and to lesser extent , also to their ASP or Datalog cousins . 39 | 3.1 The Testbed : Natlog , a Lightweight Prolog - dialect Embedded in Python Our Python - based Natlog system has been originally introduced in [ 8 ] , to which we re- fer to for syntax , semantics and low level implementation details . 40 | It is currently evolv- ing as a fresh implementation1 , and it will be used as a testbed for the key ideas of this paper . 41 | 1 at https://github.com/ptarau/natlog, ready to install with “ pip3 install natlog ” 3 Prolog ’s semantics , but with a lighter syntax 42 | While fixing semantics as the usual SLD - resolution , we can keep the syntax and the pragmatics of a logic - based language as close as possible to natural language2 . 43 | We have sketched an attempt to that in the Natlog system ’s syntax , that we will adopt here . 44 | As a hint of its syntactic simplifications , here is a short extract from the usual family program in Natlog syntax : sibling of X S : parent of X P , parent of S P , distinct S X. grand parent of X GP : parent of X P , parent of P GP . 45 | ancestor of X A : parent of X P , parent or ancestor P A. parent or ancestor P P. parent or ancestor P A : ancestor of P A. 
3.2 A Quick Tour of a few Low - Hanging Expressiveness Lifters Expressiveness is the relevant distinguishing factor between Turing - complete languages . 46 | It can be seen as a pillar of code development automation as clear and compact notation entails that more is delegated to the machine . 47 | A finite function API Finite functions ( tuples , lists , dictionaries , sets ) are instrumental in getting things done with focus on the problem to solve rather than its representation in the language . 48 | In Natlog they are directly borrowed from Python and in systems like SWI - Prolog dictionaries are a built - in datatype . 49 | They can be easily emulated in Prolog but often with a different complexity than if natively implemented . 50 | In an immutable form as well as enabled with backtrackable and non-backtrackable updates , finite functions implemented as dynamic arrays and hash - maps can offer a less procedural and more expressive alternative to reliance on Prolog ’s assert and retract family of built - ins . 51 | Built-ins as functions or generators Reversible code like in Prolog ’s classic append / 3 examples or the use of DCGs in both parsing and generation are nice and unique lan - guage features derived from the underlying SLD - resolution semantics , but trying to lend reversibility and more generally multi-mode uses to built - ins is often a source of perplexity . 52 | Keeping built - ins uniform and predictable , while not giving up on flexibility , can be achieved by restricting them to a few clearly specified uses : – functions with no meaningful return like print , denoted in Natlog by prefixing their Python calls with “ # ” . 2 but not closer , as unnecessary verbosity can hinder expressiveness 4 – functions of N inputs returning a single output as the last argument of the cor - responding predicate with N + 1 arguments , denoted in Natlog by prefixing their calls with a backquote symbol “ ‘ ” . 53 | Note that this syntax , more generally , also cov- ers Python ’s callables and in particular class objects acting as instance constructors . 54 | – generatorswithNinputsyieldingaseriesofoutputvaluesonbacktrackingbybind - ing the N + 1 - th argument of the corresponding predicate , denoted in Natlog by prefixing their call with two backquotes “ ‘‘ ” . 55 | This simplification ( as implemented in Natlog ) would also make type checking eas - ier and enable type inference to propagate from the built-ins to predicates sharing their arguments as a convenient mechanism to implement gradual typing . 56 | 4 A Step on “ The Road Not Taken ” : First Class Logic Engines 57 | While constraint solvers and related coroutining primitives are present in most widely used logic - based languages , first class logic engines , seen as on - demand reflection of the full execution mechanism , as implemented in BinProlog [ 7 ] , have been adopted only in SWI Prolog relative recently3 . 58 | Interestingly , similar constructs have been present as far as in [ 2 ] , where they were efficiently implemented at abstract machine level . 59 | One can think about First Class Logic Engines as a way to ensure the full meta - level reflection of the execution algorithm . 60 | As a result , they enable on - demand computations in an engine rather than the usual eager execution mechanism of Prolog . 
61 | We will spend more time on them as we see them as “ the path not taken ” that can bring significant expressiveness benefits to logic - based languages , similarly to the way Python ’s yield primitive supports creation of user - defined generators and other compositional asynchronous programming constructs . 62 | To obtain the full reflection of Natlog ’s multiple - answer generation mechanism , we will make fresh instances of the interpreter first - class objects . 63 | 4.1 A First - class Logic Engines API 64 | A logic engine is a Natlog language processor reflected through an API that allows its computations to be controlled interactively from another logic engine . 65 | This is very much the same thing as a programmer controlling Prolog ’s interactive toplevel loop : launch a new goal , ask for a new answer , interpret it , react to it . 66 | The exception is that it is not the programmer , but it is the program that does it ! 67 | We will next summarize the execution mechanism of Natlog ’s first class logic engines . 68 | The predicate “ eng AnswerPattern Goal Engine ” creates a new instance of the Natlog interpreter , uniquely identified by Engine that shares its code with the currently running program . 69 | It is initialized with Goal as a starting point . 70 | AnswerPattern ensures that answers returned by the engine will be instances of the pattern . 71 | The predicate “ ask Engine AnswerInstance ” tries to harvest the answer com- puted from Goal , as an instance of AnswerPattern . 72 | If an answer is found , it is returned as ( the AnswerInstance ) , otherwise the atom no is returned . 73 | It is used to retrieve 3 https://www.swi-prolog.org/pldoc/man?section=engines 74 | 5 successive answers generated by an engine , on demand . 75 | It is also responsible for actu- ally triggering computations in the engine . 76 | One can see this as transforming Natlog ’s backtracking over all answers into a deterministic stream of lazily generated answers . 77 | Finally , the predicate “ stop Engine ” stops the Engine , reclaiming the resources it has used and ensures that no is returned for all future queries to the engine . 78 | Natlog ’s yield operation : a key coroutining primitive Besides these predicates exposing a logic engine as a first class object , the annotation “ ^ Term ” extends our coroutining mechanism by allowing answers to be yielded from arbitrary places in the computa- tion . 79 | It is implemented simply by using Python ’s yield operation . 80 | As implemented in Python , engines can be seen as a special case of generators that yield one answer at a time , on demand . 81 | 4.2 82 | Things that we can do with First Class Logic Engines 83 | We will sketch here a few expressiveness improvements First Class Logic Engines can bring to a logic - based programming language , Source - level emulation of some key built - ins with engines 84 | We can emulate at source level some key Prolog built - ins in terms of engine operations , shown here with Natlog ’s simplified syntax . 85 | if_ C Y N : eng C C E , ask E R , stop E , pick_ R C Y N. pick _ ( theC ) CY_N : callY . 86 | pick_ no _C _Y N : call N. not_ G : if_ G ( fail ) ( true ) . 87 | once_ G : if_ G ( true ) ( fail ) . 88 | findall_ X G Xs : eng X G E , ask E Y , collect_all_ E Y Xs. collect_all_ _ no ( ) . 89 | collect_all_ E ( the X ) ( X Xs ) : ask E Y , collect_all_ E Y Xs . 
90 | An infinite Fibonacci stream with yield Like in a non-strict functional language , one can create an infinite recursive loop from which values are yielded as the computation advances : fiboNXs:engX ( slide_fibo11 ) E , takeNEXs . 91 | slide_fibo X Y : with X + Y as Z , ^X , slide_fibo Y Z . 92 | Note that the infinite loop ’s results , when seen from the outside , show up as a stream of answers as if produced on backtracking . 93 | With help of the library predicate take , we extract the first 5 ( seen as a Python dictionary with name “ X ” of the variable as a key and the nested tuple representation of Natlog ’s list as a value ) , as follows : 6 ?- fibo 5 Xs ? 94 | ANSWER : {' 95 | Xs ' : ( 1 , ( 1 , ( 2 , ( 3 , ( 5 , ( ) ) ) ) ) ) } 5 Borrowing some Magic : Logic Grammars as Prompt Generators 96 | With magic wands on a lease from text - to - text generators like GPT3 [ 1 ] and text - to- image generators like DALL - E [ 5 ] or Stable Diffusion [ 10 ] we can introduce Definite Clause Grammars ( DCGs ) as prompt generators for such systems . 97 | As examples of the natural synergy between declarative constructs of a logic - based language and the declarative features of today ’s deep learning systems , we will next overview Natlog applications for text - to - text and text - to - image generation . 98 | We refer to the Natlog code4 and its Python companion5 for full implementation details . 99 | 5.1 Prompt engineering by extending GPT3 ’s text completion GPT3 is basically a text completion engine , which , when given an initial segment of a sentence or paragraph as a prompt , it will complete it , often with highly coherent and informative results . 100 | Thus , to get from GPT3 the intended output ( e.g. , answer to a question , elations extracted from a sentence , building analogies , etc. ) one needs to rewrite the original input into a prompt that fits GPT3 ’s text completion model . 101 | We will use here Natlog ’s syntactically lighter Definite Clause Grammars , with one or more terminal symbols prefixed by “ @ ” and “ = > ” replacing Prolog ’s “ --> ” . 102 | A prompt generator with ability to be specialized for several “ kinds ” of prompts is described by the DCG rule : prompt Kind QuestText => prefix Kind , sent QuestText , suffix Kind . 103 | The predicate sent takes a question sentence originating from a user ’s input and maps it into a DCG non-terminal transforming cons-list Ws1 into cons-list Ws2 : sent QuestText Ws1 Ws2 : `split QuestText List , to_cons_list List Ws , append Ws Ws2 Ws1 . 104 | The predicate query takes the DCG - generated prompt derived from user question Q and converts it back to a string passed to GPT ’3 completion API by a call to the function complete , implemented in Python , with its answer returned in variable A. query Kind Q A : prompt Kind Q Ps ( ) , to_list Ps List , `join List P , `complete P A . 105 | Next we will describe specializations to question / answering , relation extraction and analogy invention . 106 | An easy way to transform a question answering task into a comple - tion task is to emulate a hypothetical conversation : 4 see https://github.com/ptarau/natlog/blob/main/apps/natgpt/chat.nat 5 see https://github.com/ptarau/natlog/blob/main/apps/natgpt/chat.py 7 prefix question => @ ' 107 | If ' you would ask me . 108 | suffix question => @ ' I ' would say that . 
109 | Extraction of subject - verb - object phrases can be mapped to completion tasks as in : prefix relation => @ ' 110 | If ' you would ask me what are the subject and the verb and the object in . 111 | suffix relation => @ ' I ' would say subject is . 112 | For analogy invention we will need to create a custom trigger as follows : trigger X Y Z => @ given that X relates to Y by analogy ' I ' would briefly say that Z relates to . 113 | analogy X Y Z A : trigger X Y Z Ps ( ) , to_list Ps List , `join List P , `complete P A . 114 | We will next show interaction examples for all these use cases . 115 | First , question answering : ?- query question ' why is logic programming declarative ' R? ANSWER : {' 116 | R ' : ' logic programming is declarative because it expresses the logic of a problem without describing its control flow . 117 | This means that the programmer does not need to specify the order in which the operations should be performed , as the logic programming language will determine the most efficient way to solve the problem .'} 118 | Next , relation extraction . 119 | Note that after some preprocessing , the extracted triplets can be used as building blocks for knowledge graphs . ?- query relation ' the quick brown fox jumps over the lazy dog ' R? ANSWER : {' R ' : '" quick brown fox " , verb is " jumps " and object is " lazy dog " '} Finally , some examples of analogical reasoning that show GPT3 finding the missing component and explaining its reasoning . 120 | ?- analogy car wheel bird A? ANSWER : {' 121 | A ' : ' wing by analogy . 122 | This is because both car and wheel are used for transportation , while bird and wing are used for flight .'} ?- analogy car driver airplane A? ANSWER : {' 123 | A ' : ' pilot by analogy . 124 | The pilot is responsible for the safe operation of the airplane , just as the driver is responsible for the safe operation of the car .'} 5.2 Text - to - image with DALL.E 125 | To declaratively specify the content of an image to DALL.E [ 5 ] or Stable Diffusion [ 10 ] , Natlog ’s Definite Clause Grammars work as easy to customize prompt generators for such systems . 126 | 8 127 | As the same OpenAI API ( with a slightly different Python call ) can be used for text - to - image generation ( followed by displaying the generate picture in the user ’s default browser ) , the interaction with Python is expressed succinctly by the predicate paint that receives as Prompt the description of the intended picture from the user . 128 | paint Prompt : `paint Prompt URL , # print URL , # browse URL . 129 | The query to visualize in the user ’s browser such a DCG - generated prompts is : ?- paint '< text description of intended image >'. with an example of output shown in Fig. 1 Fig.1 : paint ’ photo of a cat playing on the shiny moon with a trumpet ’ . 130 | The Natlog DCG , in generation mode , will iterate over possible styles and content elements of a desired painting as in the following example : image => style , subject , verb , object . 131 | style => @photorealistic rendering . 132 | style => @a dreamy ' Marc ' ' Chagall ' style picture . 133 | style => @an action video game graphics style image . 134 | subject => @of , adjective , noun. noun => @robot . 135 | adjective => @shiny . 136 | verb => @walking . 137 | object => location , @with , instrument . 138 | location => @on planet ' Mars ' . instrument => @high hills and a blue purse . 139 | instrument => @a sombrero hat . 
140 | 9 141 | This generates text ready to be passed via the OpenAI Python APIs to DALL.E : ?- image Words ( ) , `to_tuple Words Ws , # writeln Ws , nl , fail . 142 | photorealistic rendering of shiny robot walking on planet Mars with high hills and a blue purse photorealistic rendering of shiny robot walking on planet Mars with a sombrero hat ..... 143 | Besides the expected dependence on the style component ( photorealistic vs. Chagall - style ) , as an illustration of GPT3 ’s stereotyping bias , female and respectively male fea- tures would be derived from the generated robot pictures depending on the purse vs. sombrero hat picked by the DCG , as can be seen in the generated images6 . 144 | 6 Related Work 145 | An introduction to Natlog , its initial proof - of - concept implementation and its content - driven indexing mechanism are covered in [ 8 ] , but the language constructs and ap- plication discussed in this paper are all part of a fresh , “ from scratch ” implementation . 146 | Interoperation with Python has been also used in Janus [ 6 ] connecting Python and XSB - Prolog via their foreign language interfaces and systems like DeepProblog [ 3 ] , in the latter as a facilitator for neuro - symbolic computations . 147 | OpenAI ’s own GPT 3.5 - based ChatGPT7 automates the mapping of more queries ( e.g. , questions , code generation , dialog sessions , etc. ) using an extensive Reinforce - ment Learning With Human Advice process [ 4 ] . 148 | By contrast , our DCG - supported ap- proach relies exclusively on the pure GPT3 text - completion API on top of which we engineer task - specific prompts . 149 | 7 Conclusion 150 | We have informally overviewed automation , learnability and expressiveness challenges faced by logic - based programming languages in the context of today ’s competitive land - scape of alternatives from other programming paradigms as well as from neural net - based machine learning frameworks . 151 | We have also sketched solutions to the challenges , with some emphasis on coroutining methods and neuro - symbolic interoperation mecha- nisms . 152 | We have illustrated the surprising synergies that emerge when joining declarative logic programming constructs and declarative prompt - driven interactions with Large Language Models based deep learning systems . 153 | Acknowledgments 154 | These reflections have been inspired by the live and deep - probing Prolog ’ 50 discussions lead by Bob Kowalski and Veronica Dahl with focus on logical thinking and logic - based 6 at https://github.com/ptarau/natlog/tree/main/apps/natgpt/pics 155 | 7 https://chat.openai.com/chat 10 programming as well as on approaches to make logic - based programming accessible to newcomers , including use cases for a first - contact introduction to computing . 156 | I am thankful to the participants of these meetings for sharing their thoughts on both the last 50 years and the next 50 years of logic programming . 157 | Finally , many thanks go to the reviewers of the paper for their careful reading and constructive suggestions that helped clarify and substantiate key concepts covered in the paper . 158 | References 1 . 159 | Brown , T. , Mann , B. , Ryder , N. , Subbiah , M. , Kaplan , J.D. , Dhariwal , P. , Neelakantan , A. , Shyam , P. , Sastry , G. , Askell , A. , Agarwal , S. , Herbert - Voss , A. , Krueger , G. , Henighan , T. , Child , R. , Ramesh , A. , Ziegler , D. , Wu , J. , Winter , C. , Hesse , C. , Chen , M. , Sigler , E. , Litwin , M. , Gray , S. , Chess , B. , Clark , J. 
, Berner , C. , McCandlish , S. , Radford , A. , Sutskever , I. , Amodei , D. : Language models are few - shot learners . 160 | In : Larochelle , H. , Ranzato , M. , Hadsell , R. , Balcan , M. , Lin , H. ( eds . ) 161 | Advances in Neural Information Processing Systems . 162 | vol. 33 , pp. 1877 – 1901 . 163 | Curran Associates , Inc. ( 2020 ) , https://proceedings.neurips. 164 | cc/paper/2020/file/1457c0d6bfcb4967418bfb8ac142f64a-Paper.pdf 165 | 2 . Hermenegildo , M.V. : 166 | An abstract machine for restricted AND - parallel execution of logic programs . 167 | In : Proceedings on Third international conference on logic programming . 168 | pp. 25 – 39 . 169 | Springer - Verlag New York , Inc. , New York , NY , USA ( 1986 ) 3 . 170 | Manhaeve , R. , Dumancic , S. , Kimmig , A. , Demeester , T. , DeRaedt , L. : Deepproblog : Neural probabilistic logic programming . 171 | In : Bengio , S. , Wallach , H. , Larochelle , H. , Grauman , K. , Cesa - Bianchi , N. , Garnett , R. ( eds . ) 172 | Advances in Neural Information Processing Systems 31 , pp. 3749–3759 . 173 | Curran Associates , Inc. ( 2018 ) , http://papers.nips.cc/paper/ 7632-deepproblog-neural-probabilistic-logic-programming.pdf 174 | 4 . Ouyang , L. , Wu , J. , Jiang , X. , Almeida , D. , Wainwright , C.L. , Mishkin , P. , Zhang , C. , Agar - wal , S. , Slama , K. , Ray , A. , Schulman , J. , Hilton , J. , Kelton , F. , Miller , L. , Simens , M. , Askell , A. , Welinder , P. , Christiano , P. , Leike , J. , Lowe , R. : Training language models to fol - low instructions with human feedback ( 2022 ) . 175 | https://doi.org/10.48550/ARXIV.2203.02155, https://arxiv.org/abs/2203.02155 176 | 5 . Ramesh , A. , Pavlov , M. , Goh , G. , Gray , S. , Voss , C. , Radford , A. , Chen , M. , Sutskever , I. : Zero - shot text - to - image generation ( 2021 ) . 177 | https://doi.org/10.48550/ARXIV.2102.12092, https://arxiv.org/abs/2102.12092 178 | 6 . Swift , T.:TheJanusSystem:Multi-paradigmProgramminginPrologandPython.incurrent volume ( 2023 ) 7 . 179 | Tarau , P.:TheBinPrologExperience : ArchitectureandImplementationChoicesforContinu- ation Passing Prolog and First - Class Logic Engines . 180 | Theory and Practice of Logic Program - ming 12 ( 1 - 2 ) , 97–126 ( 2012 ) . 181 | https://doi.org/10.1007/978-3-642-60085-2“ ̇ 2 8 . Tarau , P. : Natlog : a Lightweight Logic Programming Language with a Neuro - symbolic Touch . 182 | In : Formisano , A. , Liu , Y.A. , Bogaerts , B. , Brik , A. , Dahl , V. , Dodaro , C. , Fodor , P. , Pozzato , G.L. , Vennekens , J. , Zhou , N.F. ( eds . ) Proceedings 37th International Confer - ence on Logic Programming ( Technical Communications ) , 20 - 27th September 2021 ( 2021 ) 9 . 183 | Vaswani , A. , Shazeer , N. , Parmar , N. , Uszkoreit , J. , Jones , L. , Gomez , A.N. , Kaiser , L.u. , Polosukhin , I. : Attention is all you need . 184 | In : Guyon , I. , Luxburg , U.V. , Bengio , S. , Wallach , H. , Fergus , R. , Vishwanathan , S. , Garnett , R. ( eds . ) 185 | Advances in Neural Information Process - ing Systems . vol. 30 . Curran Associates , Inc. ( 2017 ) , https://proceedings.neurips. 186 | cc/ paper/2017/file/3f5ee243547dee91fbd053c1c4a845aa-Paper.pdf 10 . Vision , C.M. , at LMU Munich , L.R.G. : Stable Diffusion ( 2018 - 2022 ) , https://github. 
187 | com/CompVis/stable-diffusion 188 | 11 -------------------------------------------------------------------------------- /docs/quotes.txt: -------------------------------------------------------------------------------- 1 | The only thing we have to fear is fear itself said Franklin Roosevelt. 2 | If you don't know where you are going, any road will get you there said Lewis Carroll. 3 | If you don't know where you are going you will end up somewhere else said Yogi Berra. 4 | Imagination is more important than knowledge said Albert Einstein. 5 | I give myself very good advice, but I very seldom follow it said Lewis Carroll. 6 | I can resist anything except temptation said Oscar Wilde. 7 | -------------------------------------------------------------------------------- /docs/slides_ICLP21.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ptarau/natlog/156f6df660459b03b79d3deec6dc8a5437d78f41/docs/slides_ICLP21.pdf -------------------------------------------------------------------------------- /docs/slides_LPOP22.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ptarau/natlog/156f6df660459b03b79d3deec6dc8a5437d78f41/docs/slides_LPOP22.pdf -------------------------------------------------------------------------------- /docs/slides_iclp23.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ptarau/natlog/156f6df660459b03b79d3deec6dc8a5437d78f41/docs/slides_iclp23.pdf -------------------------------------------------------------------------------- /docs/small.txt: -------------------------------------------------------------------------------- 1 | The cat sits on the mat. 2 | The dog barks to the moon. 3 | The pirate travels the oceans. 4 | The phone rings with a musical tone. 5 | The man watches the bright moon. 6 | The dog barks at a cat. 
7 | -------------------------------------------------------------------------------- /natlog/.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/db.cpython-39.pyc 2 | __pycache__/mparser.cpython-39.pyc 3 | __pycache__/mscanner.cpython-39.pyc 4 | __pycache__/unify.cpython-39.pyc 5 | -------------------------------------------------------------------------------- /natlog/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = ('Natlog', 'interp') 2 | 3 | from .natlog import * 4 | from .db import Db 5 | __version__='1.8.5' 6 | -------------------------------------------------------------------------------- /natlog/__main__.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from natlog.natlog import * 3 | print('usage: python3 -m natlog .nat\n') 4 | if len(sys.argv)>1: 5 | file_name=sys.argv[1] 6 | n = Natlog(file_name=file_name, with_lib=natprogs() + 'lib.nat', callables=globals()) 7 | else: 8 | n= Natlog(text='',with_lib=natprogs() + 'lib.nat', callables=globals()) 9 | n.repl() 10 | -------------------------------------------------------------------------------- /natlog/app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ptarau/natlog/156f6df660459b03b79d3deec6dc8a5437d78f41/natlog/app/__init__.py -------------------------------------------------------------------------------- /natlog/app/natapp.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import streamlit as st 4 | 5 | from natlog.natlog import * 6 | 7 | print('Running Natlog as a streamlit app!') 8 | 9 | st.set_page_config(layout="wide") 10 | 11 | st.sidebar.title('Streamlit-based [NatLog](https://github.com/ptarau/natlog) Client') 12 | 13 | 14 | def ppp(*args): 15 | args=[str(x) for x in args] 16 | st.write(*args) 17 | 18 | 19 | upload_dir = "UPLOADS/" 20 | 21 | suf = '.nat' 22 | 23 | 24 | def handle_uploaded(uploaded_file): 25 | if uploaded_file is not None: 26 | fname, prog = save_uploaded_file(uploaded_file) 27 | suf0 = '.' 
+ fname.split('.')[-1] 28 | if suf0 == suf: 29 | return fname, prog 30 | else: 31 | ppp(f'Please chose a {suf} file!') 32 | else: 33 | ppp(f'You can also edit your code here!') 34 | return None, "" 35 | 36 | 37 | def save_uploaded_file(uploaded_file): 38 | name = uploaded_file.name 39 | fname = os.path.join(upload_dir, name) 40 | # if exists_file(fname): return fname,file2string(fname) 41 | ensure_path(upload_dir) 42 | bs = uploaded_file.getbuffer() 43 | prog = str(bs, 'utf-8') 44 | 45 | with open(fname, "wb") as f: 46 | f.write(bs) 47 | return fname, prog 48 | 49 | 50 | def ensure_path(fname): 51 | folder, _ = os.path.split(fname) 52 | os.makedirs(folder, exist_ok=True) 53 | 54 | 55 | def exists_file(fname): 56 | return os.path.exists(fname) 57 | 58 | 59 | def file2string(fname): 60 | with open(fname, 'r') as f: 61 | return f.read() 62 | 63 | 64 | fname, prog = handle_uploaded(st.sidebar.file_uploader('Select a File', type=[suf])) 65 | print(f'fname={fname} chars:',len(prog)) 66 | 67 | editor = st.text_area('Program', prog, height=320) # pixels 68 | 69 | print('editor chars:',len(editor)) 70 | 71 | with st.sidebar: 72 | question = st.text_area('Query?') 73 | query_it = st.button('Submit your question!') 74 | 75 | 76 | def do_query(): 77 | if fname is not None: 78 | lib = natprogs() + "lib.nat" 79 | if fname != lib: 80 | with_lib = lib 81 | else: 82 | with_lib = None 83 | nat = Natlog(text=editor, with_lib=with_lib) 84 | else: 85 | print('running with code in editor, chars:',len(editor)) 86 | nat = Natlog(text=editor, with_lib=natprogs() + "lib.nat") 87 | 88 | ppp('?- ' + question) 89 | 90 | success = False 91 | ppp('ANSWERS:') 92 | for answer in nat.solve(question): 93 | success = True 94 | ppp(answer) 95 | if not success: 96 | ppp('No ANSWER!') 97 | ppp('') 98 | 99 | if query_it: 100 | do_query() 101 | -------------------------------------------------------------------------------- /natlog/app/requirements.txt: -------------------------------------------------------------------------------- 1 | streamlit 2 | natlog 3 | -------------------------------------------------------------------------------- /natlog/db.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | import json 3 | import csv 4 | 5 | from .unify import unify, activate 6 | from .parser import mparse 7 | from .scanner import Var 8 | 9 | 10 | def path_of(t): 11 | def path_of0(t): 12 | if isinstance(t, Var): 13 | pass 14 | elif isinstance(t, tuple): 15 | for i, x in enumerate(t): 16 | for c, ps in path_of0(x): 17 | yield c, (i, ps) 18 | else: 19 | yield t, () 20 | 21 | ps = set(path_of0(t)) 22 | qs = set((c, list2tuple(x)) for (c, x) in ps) 23 | return qs 24 | 25 | 26 | def list2tuple(ls): 27 | # print('!!! 
LS=',ls) 28 | def scan(xs): 29 | while xs != () and isinstance(xs, tuple): 30 | x, xs = xs 31 | yield x 32 | 33 | if not isinstance(ls, tuple): 34 | return ls 35 | return tuple(scan(ls)) 36 | 37 | 38 | def make_index(): 39 | return defaultdict(set) 40 | 41 | 42 | def tuplify(t): 43 | if isinstance(t, list): 44 | return tuple(map(tuplify, t)) 45 | if isinstance(t, tuple): 46 | return tuple(map(tuplify, t)) 47 | else: 48 | return t 49 | 50 | 51 | class Db: 52 | def __init__(self): 53 | self.index = make_index() # content --> int index 54 | self.css = [] # content as ground tuples 55 | self.index_source = path_of 56 | 57 | # parses text to list of ground tuples 58 | def digest(self, text): 59 | for cs in mparse(text, ground=True): 60 | # print('DIGEST:', cs) 61 | assert len(cs) == 1 62 | self.add_clause(cs[0]) 63 | 64 | # loads from json list of lists 65 | def load_json(self, fname): 66 | with open(fname, 'r') as f: 67 | ts = json.load(f) 68 | for t in ts: 69 | self.add_db_clause(t) 70 | 71 | def load_csv(self, fname, delimiter=','): 72 | with open(fname) as f: 73 | wss = csv.reader(f, delimiter=delimiter) 74 | for ws in wss: 75 | self.add_db_clause(ws) 76 | 77 | def load_tsv(self, fname): 78 | self.load_csv(fname, delimiter='\t') 79 | 80 | def load_txt(self, fname): 81 | """ assuming text tokenized, one sentence per line, 82 | single white space separated, ending with '.' or '?' 83 | """ 84 | with open(fname) as f: 85 | lines = f.read().split('\n') 86 | for line in lines: 87 | if len(line) < 2: continue 88 | line = line.strip() 89 | assert line[-1] in ".?" 90 | line = line[0:-1] 91 | line = line.strip() 92 | ws = line.split(' ') 93 | self.add_clause(('txt', tuple(ws),)) 94 | 95 | def add_db_clause(self, t): 96 | # print('####', t) 97 | if t: self.add_clause(tuplify(t)) 98 | 99 | # loads ground facts .nat or .json files 100 | def load(self, fname): 101 | if len(fname) > 4 and fname[-4:] == '.nat': 102 | with open(fname, 'r') as f: 103 | self.digest(f.read()) 104 | elif len(fname) > 4 and fname[-4:] == '.tsv': 105 | self.load_tsv(fname) 106 | elif len(fname) > 4 and fname[-4:] == '.csv': 107 | self.load_csv(fname) 108 | elif len(fname) > 4 and fname[-4:] == '.txt': 109 | self.load_txt(fname) 110 | else: 111 | self.load_json(fname) 112 | 113 | def save(self, fname): 114 | with open(fname, "w") as g: 115 | json.dump(self.css, g) 116 | 117 | def size(self): 118 | return len(self.css) 119 | 120 | # adds a clause and indexes it for all constants 121 | # recursively occurring in it, in any subtuple 122 | 123 | def add_clause(self, cs): 124 | # add_clause_by_content(self.index, self.css, cs) 125 | 126 | i = len(self.css) 127 | self.css.append(cs) 128 | for c in self.index_source(cs): 129 | self.index[c].add(i) 130 | 131 | def ground_match_of(self, query): 132 | """ 133 | computes all ground matches of a query term in the Db; 134 | if a constant occurs in the query, it must also occur in 135 | a ground term that unifies with it, as the ground term 136 | has no variables that would match the constant 137 | """ 138 | # find all paths in query 139 | paths = self.index_source(query) 140 | if not paths: 141 | # match against all clauses css, no help from indexing 142 | return set(range(len(self.css))) 143 | # pick a copy of the first set where c occurs 144 | first_path = next(iter(paths)) 145 | matches = self.index[first_path].copy() 146 | # shrink it by intersecting with sets where other paths occur 147 | for x in paths: 148 | matches &= self.index[x] 149 | # these are all possible ground matches - return 
them 150 | return matches 151 | 152 | # uses unification to match ground fact 153 | # with bindining applied to vs and collected on trail 154 | def unify_with_fact(self, h, trail): 155 | ms = self.ground_match_of(h) 156 | for i in ms: 157 | h0 = self.css[i] 158 | u = unify(h, h0, trail) 159 | yield u 160 | 161 | # uses unification to match and return ground fact 162 | def match_of_(self, h): 163 | h = activate(h, dict()) 164 | for ok in self.unify_with_fact(h, []): 165 | if ok: yield h 166 | 167 | def match_of(self, hx): 168 | h = activate(hx, dict()) 169 | ms = self.ground_match_of(h) 170 | for i in ms: 171 | h0 = self.css[i] 172 | trail = [] 173 | if unify(h, h0, trail): 174 | yield h0 175 | for v in trail: v.unbind() 176 | 177 | def search(self, query): 178 | """ 179 | searches for a matching tuple 180 | """ 181 | qss = mparse(query, ground=False) 182 | for qs in qss: 183 | qs = qs[0] 184 | # print('SEARCHING:', qs) 185 | for rs in self.match_of(qs): 186 | yield rs 187 | 188 | # simple search based on content 189 | def about(self, c): 190 | for k, v in self.index.items(): 191 | if k[0] == c: 192 | for i in v: 193 | yield self.css[i] 194 | 195 | def ask_about(self, query): 196 | print('QUERY:', query) 197 | for r in self.about(query): 198 | print('-->', r) 199 | print('') 200 | 201 | # queries_text the Db directly with a text query 202 | def ask(self, query): 203 | print('QUERY:', query) 204 | for r in self.search(query): 205 | print('-->', r) 206 | print('') 207 | 208 | # builds possibly very large string representation 209 | # of the facts contained in the Db 210 | def __repr__(self): 211 | xs = [str(cs) + '\n' for cs in enumerate(self.css)] 212 | return "".join(xs) 213 | 214 | 215 | def about_facts(): 216 | prog = """ 217 | quest X Y : ~ (text_term (give X Y)) ? 218 | """ 219 | db = Db() 220 | db_name = 'natprogs/facts.nat' 221 | db.load(db_name) 222 | 223 | print('SIZE:', db.size(), 'LEN:', len(db.css[0])) 224 | print(42, ':', db.css[42]) 225 | db.ask_about("subgraph") 226 | 227 | 228 | def test_db(): 229 | pass 230 | about_facts() 231 | 232 | 233 | if __name__ == "__main__": 234 | test_db() 235 | -------------------------------------------------------------------------------- /natlog/natlog.py: -------------------------------------------------------------------------------- 1 | from math import * 2 | from pathlib import Path 3 | import readline 4 | 5 | from .parser import * 6 | from .unify import * # unify, lazy_unify, activate, extractTerm, Var 7 | from .tools import * 8 | from .db import Db 9 | 10 | 11 | def my_path(): 12 | return str(Path(__file__).parent) + "/" 13 | 14 | 15 | def natprogs(): 16 | return my_path() + 'natprogs/' 17 | 18 | 19 | def to_python(x): 20 | return x 21 | 22 | 23 | def from_python(x): 24 | return x 25 | 26 | 27 | # eng X (between 1 5 X) E,`next E R, #print R, fail? 28 | 29 | class Eng: 30 | def __init__(self, interp, css, g, db, callables): 31 | self.interp = interp 32 | self.css = css 33 | self.db = db 34 | self.g = g 35 | self.callables = callables 36 | self.runner = None 37 | self.stopped = False 38 | 39 | def start(self): 40 | if self.runner is None and not self.stopped: 41 | self.runner = interp(self.css, self.g, db=self.db, callables=self.callables) 42 | 43 | # eng X (between 1 5 X) E, stop E. 
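    # stop() below closes the underlying interp() generator; once an engine is
    # stopped, __next__ returns None, which the 'ask' builtin in interp() turns
    # into the atom 'no' on the Natlog side.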
44 | def stop(self): 45 | 46 | if self.runner is not None and not self.stopped: 47 | self.runner.close() 48 | self.stopped = True 49 | 50 | def __next__(self): 51 | if self.stopped: return None 52 | self.start() 53 | return next(self.runner) 54 | 55 | def __call__(self): 56 | self.start() 57 | if not self.stopped: yield from self.runner 58 | 59 | def __repr__(self): 60 | mes = "" 61 | if self.stopped: mes = "stopped_" 62 | return mes + 'eng_' + str(id(self)) 63 | 64 | 65 | def undo(trail): 66 | while trail: 67 | trail.pop().unbind() 68 | 69 | 70 | def unfold1(g, gs, h, bs, trail): 71 | d = dict() 72 | if not lazy_unify(h, g, trail, d): 73 | undo(trail) 74 | return None # FAILURE 75 | 76 | for b in reversed(bs): 77 | b = activate(b, d) 78 | gs = (b, gs) 79 | return gs # SUCCESS 80 | 81 | 82 | nat_builtins = { 83 | "call", "~", "`", "``", "^", "#", "$", "@", 84 | "if", "eng", "ask", "unify_with_occurs_check" 85 | } 86 | 87 | 88 | def interp(css, goals0, db=None, callables=dict()): 89 | """ 90 | main interpreter 91 | """ 92 | 93 | def to_callable(name): 94 | """ 95 | associates string names to callables 96 | """ 97 | if callable(name): return name 98 | f = callables.get(name, None) 99 | if f is not None: return f 100 | return eval(name) 101 | 102 | def dispatch_call(op, g, goals, trail): 103 | """ 104 | dispatches several types of calls to Python 105 | """ 106 | 107 | # yields facts matching g in Db 108 | def db_call(g): 109 | for ok in db.unify_with_fact(g, trail): 110 | if not ok: # FAILURE 111 | undo(trail) 112 | continue 113 | yield from step(goals) # SUCCESS 114 | undo(trail) 115 | 116 | def python_call(g): 117 | """ 118 | simple call to Python (e.g., print, no return expected) 119 | """ 120 | f = to_callable(g[0]) 121 | args = to_python(g[1:]) 122 | f(*args) 123 | 124 | def python_fun(g): 125 | """ 126 | function call to Python, last arg unified with result 127 | """ 128 | f = to_callable(g[0]) 129 | g = g[1:] 130 | v = g[-1] 131 | args = to_python(g[:-1]) 132 | r = f(*args) 133 | r = from_python(r) 134 | if not unify(v, r, trail): 135 | undo(trail) 136 | else: 137 | yield from step(goals) 138 | 139 | def python_var(g): 140 | """ 141 | query value of Python var last arg unified with result 142 | """ 143 | r = to_callable(g[0]) 144 | v = g[-1] 145 | r = from_python(r) 146 | if not unify(v, r, trail): 147 | undo(trail) 148 | else: 149 | yield from step(goals) 150 | 151 | def dcg_terminals(g): 152 | assert len(g) >= 3 153 | args = g[0:-2] 154 | s1 = g[-2] 155 | s2 = g[-1] 156 | xs = to_dif_list(args, s2) 157 | if not unify(xs, s1, trail): 158 | undo(trail) 159 | else: 160 | yield from step(goals) 161 | 162 | def eng(xge): 163 | x, g, e = xge 164 | (x, g) = copy_term((x, g)) 165 | g = (('the', x, g), ()) 166 | assert isinstance(e, Var) 167 | r = Eng(interp, css, g, db, callables) 168 | e.bind(r, trail) 169 | yield from step(goals) 170 | 171 | # eng X (between 1 5 X) E, ask E R, #print R, fail? 
172 | def ask(ex): 173 | e, x = ex 174 | a = next(e, None) 175 | # print('RAW ask next:',a) 176 | if a is None: 177 | r = 'no' 178 | e.stop() 179 | elif len(a) == 1: # a ^ operation 180 | r = ('the', copy_term(a[0])) 181 | else: 182 | ((the, r, g), ()) = a 183 | r = (the, copy_term(r)) 184 | if not unify(x, r, trail): 185 | undo(trail) 186 | else: 187 | yield from step(goals) 188 | 189 | def gen_call(g): 190 | """ 191 | unifies with last arg yield from a generator 192 | and first args, assumed ground, passed to it 193 | """ 194 | gen = to_callable(g[0]) 195 | g = g[1:] 196 | v = g[-1] 197 | args = to_python(g[:-1]) 198 | for r in gen(*args): 199 | r = from_python(r) 200 | 201 | if unify(v, r, trail): 202 | yield from step(goals) 203 | undo(trail) 204 | 205 | def if_op(g): 206 | cond, yes, no = g 207 | cond = extractTerm(cond) 208 | 209 | if next(step((cond, ())), None) is not None: 210 | yield from step((yes, goals)) 211 | else: 212 | yield from step((no, goals)) 213 | 214 | def unify_with_occurs_check_op(g): 215 | t1, t2 = g 216 | if not unify(t1, t2, trail, occ=True): 217 | undo(trail) 218 | else: 219 | yield from step(goals) 220 | 221 | if op == 'eng': 222 | yield from eng(g) 223 | 224 | elif op == 'ask': 225 | yield from ask(g) 226 | 227 | elif op == 'call': 228 | yield from step((g[0] + g[1:], goals)) 229 | 230 | elif op == 'if': 231 | yield from if_op(g) 232 | 233 | elif op == 'unify_with_occurs_check': 234 | yield from unify_with_occurs_check_op(g) 235 | 236 | elif op == '~': # matches against database of facts 237 | yield from db_call(g) 238 | 239 | elif op == '^': # yield g as an answer directly 240 | yield extractTerm(g) 241 | yield from step(goals) 242 | 243 | elif op == '`': # function call, last arg unified 244 | yield from python_fun(g) 245 | 246 | elif op == "``": # generator call, last arg unified 247 | yield from gen_call(g) 248 | 249 | elif op == '#': # simple call, no return 250 | python_call(g) 251 | yield from step(goals) 252 | 253 | elif op == '@': # DCG terminal(s) 254 | yield from dcg_terminals(g) 255 | 256 | else: # op == '$' find value of variable 257 | yield from python_var(g) 258 | 259 | undo(trail) 260 | 261 | def step(goals): 262 | """ 263 | recursive inner function 264 | """ 265 | trail = [] 266 | if goals == (): 267 | yield extractTerm(goals0) 268 | undo(trail) 269 | else: 270 | g, goals = goals 271 | op = g[0] if g else None 272 | if op in nat_builtins: 273 | g = extractTerm(g[1:]) 274 | yield from dispatch_call(op, g, goals, trail) 275 | else: 276 | for (h, bs) in css: 277 | bsgs = unfold1(g, goals, h, bs, trail) 278 | if bsgs is not None: 279 | yield from step(bsgs) 280 | undo(trail) 281 | 282 | done = False 283 | while not done: 284 | done = True 285 | for a in step(goals0): 286 | if a is not None and len(a) >= 2 and a[0] == 'trust': 287 | newg = a[1:], () 288 | goals0 = newg 289 | done = False 290 | break 291 | yield a 292 | 293 | 294 | LIB = '../natprogs/lib.nat' 295 | 296 | 297 | class Natlog: 298 | def __init__(self, text=None, file_name=None, db_name=None, with_lib=None, callables=dict()): 299 | if file_name: 300 | with open(file_name, 'r') as f: 301 | self.text = f.read() 302 | else: 303 | self.text = text 304 | 305 | if with_lib: 306 | with open(with_lib, 'r') as f: 307 | lib = f.read() 308 | self.text = self.text + '\n' + lib 309 | 310 | self.callables = callables 311 | self.gsyms = dict() 312 | self.gixs = dict() 313 | 314 | css, ixss = zip(*parse(self.text, gsyms=self.gsyms, gixs=self.gixs, ground=False, rule=True)) 315 | 316 | self.css = 
tuple(css) 317 | self.ixss = tuple(ixss) 318 | 319 | # print('GIXSS in natlog:', self.gixs) 320 | 321 | if db_name is not None: 322 | self.db_init() 323 | self.db.load(db_name) 324 | else: 325 | self.db = None 326 | 327 | def db_init(self): 328 | """ 329 | overridable database initializer 330 | sets the type of the database (default or neuro-symbolic) 331 | """ 332 | self.db = Db() 333 | 334 | def solve(self, quest): 335 | """ 336 | answer generator for given question 337 | """ 338 | goals0, ixs = next(parse(quest, gsyms=self.gsyms, gixs=self.gixs, ground=False, rule=False)) 339 | 340 | vs = dict() 341 | goals0 = activate(goals0, vs) 342 | ns = dict(zip(vs, ixs)) 343 | 344 | for k, v in self.gixs.items(): 345 | ns[k] = v 346 | 347 | for answer in interp(self.css, goals0, self.db, self.callables): 348 | 349 | if answer and len(answer) == 1: 350 | sols = {'_': answer[0]} 351 | else: 352 | sols = dict((ns[v], deref(r)) for (v, r) in vs.items()) 353 | yield sols 354 | 355 | def count(self, quest): 356 | """ 357 | answer counter 358 | """ 359 | c = 0 360 | for _ in self.solve(quest): 361 | c += 1 362 | return floor(c) 363 | 364 | def query(self, quest, in_repl=False): 365 | """ 366 | show answers for given query 367 | """ 368 | if not in_repl: print('QUERY:', quest) 369 | success = False 370 | for answer in self.solve(quest): 371 | success = True 372 | print('ANSWER:', answer) 373 | if not success: 374 | print('No ANSWER!') 375 | print('') 376 | 377 | def repl(self): 378 | """ 379 | read-eval-print-loop 380 | """ 381 | print("Type ENTER to quit.") 382 | while True: 383 | q = input('?- ') 384 | if not q: return 385 | try: 386 | self.query(q, in_repl=True) 387 | except Exception as e: 388 | print('EXCEPTION:', type(e).__name__, e.args) 389 | #raise e 390 | 391 | # shows tuples of Natlog rule base 392 | def __repr__(self): 393 | xs = [str(cs) + '\n' for cs in self.css] 394 | return " ".join(xs) 395 | 396 | 397 | # built-ins, callable with ` notation 398 | 399 | def numlist(n, m): 400 | return to_cons_list(range(n, m + 1)) 401 | 402 | 403 | def consult(natfile=natprogs() + 'family.nat'): 404 | n = Natlog(file_name=natfile, with_lib=natprogs() + 'lib.nat') 405 | n.repl() 406 | 407 | 408 | def load(natfile): 409 | Natlog(file_name=natprogs() + natfile + ".nat").repl() 410 | 411 | 412 | # tests 413 | 414 | def test_natlog(): 415 | n = Natlog(file_name="natprogs/tc.nat") 416 | print(n) 417 | n.query("tc Who is animal ?") 418 | 419 | # n = Natlog(file_name="../natprogs/queens.nat") 420 | # n.query("goal8 Queens?") 421 | 422 | n = Natlog(file_name="natprogs/perm.nat") 423 | # print(n) 424 | n.query("perm (1 (2 (3 ()))) X ?") 425 | 426 | n = Natlog(file_name="natprogs/py_call.nat") 427 | # print(n) 428 | n.query("goal X?") 429 | # n.repl() 430 | 431 | n = Natlog(file_name="natprogs/family.nat") 432 | # print(n) 433 | n.query("cousin of X C, male C?") 434 | # n.repl() 435 | 436 | # n = Natlog(file_name="../natprogs/queens.nat") 437 | 438 | # print(n.count("goal8 X ?")) 439 | 440 | n = Natlog(file_name="natprogs/lib.nat") 441 | print(n) 442 | n.repl() 443 | 444 | 445 | def lconsult(fname): 446 | fname = natprogs() + fname + ".nat" 447 | n = Natlog(file_name=fname, with_lib=natprogs() + 'lib.nat') 448 | n.repl() 449 | 450 | 451 | def dconsult(nname, dname): 452 | nname = natprogs() + nname + ".nat" 453 | dname = natprogs() + dname + ".nat" 454 | n = Natlog(file_name=nname, db_name=dname) 455 | n.repl() 456 | 457 | 458 | def tconsult(fname): 459 | nname = natprogs() + fname + ".nat" 460 | dname = natprogs() + 
fname + ".tsv" 461 | n = Natlog(file_name=nname, db_name=dname) 462 | n.repl() 463 | 464 | 465 | def natrun(fname, natgoal, callables=globals()): 466 | fname = fname + ".nat" 467 | n = Natlog(file_name=fname, with_lib=natprogs() + 'lib.nat', callables=callables) 468 | # n.repl() 469 | return list(n.solve(natgoal)) 470 | 471 | 472 | def natlog(text, goal=None): 473 | n = Natlog(text=text, with_lib=natprogs() + 'lib.nat', callables=globals()) 474 | if goal is not None: 475 | n.query(goal) 476 | n.repl() 477 | -------------------------------------------------------------------------------- /natlog/natprogs/arith.nat: -------------------------------------------------------------------------------- 1 | succ X (s X). 2 | 3 | add 0 X X . 4 | add (s X) Y (s Z) : add X Y Z. 5 | 6 | mul 0 _Y 0. 7 | mul (s X) Y R : mul X Y M, add Y M R. 8 | 9 | two (s (s 0)). 10 | 11 | three SX : two X, succ X SX. 12 | 13 | goal Six : three A, two B, mul A B Six. 14 | -------------------------------------------------------------------------------- /natlog/natprogs/dall_e.nat: -------------------------------------------------------------------------------- 1 | dall_e => @photo, @of, subject, verb, object. 2 | 3 | subject => @a, @cat. 4 | subject => @a, @dog. 5 | 6 | verb => @playing. 7 | 8 | adjective => @golden. 9 | adjective => @shiny. 10 | 11 | object => @on, @the, adjective, location, @with, @a, instrument. 12 | 13 | location => @moon. 14 | 15 | instrument => @violin. 16 | instrument => @trumpet. 17 | 18 | go: dall_e Words (), to_tuple Words Ws, #writeln Ws, fail. 19 | go. 20 | -------------------------------------------------------------------------------- /natlog/natprogs/db.json: -------------------------------------------------------------------------------- 1 | [["tiger", "is", "feline"], ["mouse", "is", "rodent"], ["feline", "is", "mammal"], ["rodent", "is", "mammal"], ["snake", "is", "reptile"], ["mammal", "is", "animal"], ["reptile", "is", "animal"], ["bee", "is", "insect"], ["ant", "is", "insect"], ["insect", "is", "animal"]] -------------------------------------------------------------------------------- /natlog/natprogs/db.nat: -------------------------------------------------------------------------------- 1 | tiger is feline. 2 | mouse is rodent. 3 | feline is mammal. 4 | rodent is mammal. 5 | snake is reptile. 6 | mammal is animal. 7 | reptile is animal. 8 | bee is insect. 9 | ant is insect. 10 | insect is animal. 11 | -------------------------------------------------------------------------------- /natlog/natprogs/db.tsv: -------------------------------------------------------------------------------- 1 | mouse is rodent 2 | cat is feline 3 | feline is mammal 4 | rodent is mammal 5 | snake is reptile 6 | mammal is animal 7 | reptile is animal 8 | bee is insect 9 | ant is insect 10 | insect is animal 11 | snake can bite 12 | ant can bite 13 | bee can sting 14 | snake can eat mouse 15 | cat can eat mouse 16 | -------------------------------------------------------------------------------- /natlog/natprogs/dbtc.nat: -------------------------------------------------------------------------------- 1 | cat is_a feline. 2 | A is_a B : ~ A is B. 3 | 4 | 5 | tc A Rel C : A Rel B, tc1 B Rel C. 6 | 7 | tc1 B _Rel B. 8 | tc1 B Rel C : tc B Rel C. 
9 | 10 | -------------------------------------------------------------------------------- /natlog/natprogs/elements.nat: -------------------------------------------------------------------------------- 1 | data Num Sym Neut Prot Elec Period Group Phase Type Isos Shells : ~ Num Sym Neut Prot Elec Period Group Phase Type Isos Shells. 2 | 3 | an_el Num El : data Num El '45' '35' '35' '4' '17' liq 'Halogen' '19' '4'. 4 | gases Num El : data Num El _1 _2 _3 _4 _5 gas _6 _7 _8. 5 | -------------------------------------------------------------------------------- /natlog/natprogs/elements.tsv: -------------------------------------------------------------------------------- 1 | 1 H 0 1 1 1 1 gas Nonmetal 3 1 2 | 2 He 2 2 2 1 18 gas Noble Gas 5 1 3 | 3 Li 4 3 3 2 1 solid Alkali Metal 5 2 4 | 4 Be 5 4 4 2 2 solid Alkaline Earth Metal 6 2 5 | 5 B 6 5 5 2 13 solid Metalloid 6 2 6 | 6 C 6 6 6 2 14 solid Nonmetal 7 2 7 | 7 N 7 7 7 2 15 gas Nonmetal 8 2 8 | 8 O 8 8 8 2 16 gas Nonmetal 8 2 9 | 9 F 10 9 9 2 17 gas Halogen 6 2 10 | 10 Ne 10 10 10 2 18 gas Noble Gas 8 2 11 | 11 Na 12 11 11 3 1 solid Alkali Metal 7 3 12 | 12 Mg 12 12 12 3 2 solid Alkaline Earth Metal 8 3 13 | 13 Al 14 13 13 3 13 solid Metal 8 3 14 | 14 Si 14 14 14 3 14 solid Metalloid 8 3 15 | 15 P 16 15 15 3 15 solid Nonmetal 7 3 16 | 16 S 16 16 16 3 16 solid Nonmetal 10 3 17 | 17 Cl 18 17 17 3 17 gas Halogen 11 3 18 | 18 Ar 22 18 18 3 18 gas Noble Gas 8 3 19 | 19 K 20 19 19 4 1 solid Alkali Metal 10 4 20 | 20 Ca 20 20 20 4 2 solid Alkaline Earth Metal 14 4 21 | 21 Sc 24 21 21 4 3 solid Transition Metal 15 4 22 | 22 Ti 26 22 22 4 4 solid Transition Metal 9 4 23 | 23 V 28 23 23 4 5 solid Transition Metal 9 4 24 | 24 Cr 28 24 24 4 6 solid Transition Metal 9 4 25 | 25 Mn 30 25 25 4 7 solid Transition Metal 11 4 26 | 26 Fe 30 26 26 4 8 solid Transition Metal 10 4 27 | 27 Co 32 27 27 4 9 solid Transition Metal 14 4 28 | 28 Ni 31 28 28 4 10 solid Transition Metal 11 4 29 | 29 Cu 35 29 29 4 11 solid Transition Metal 11 4 30 | 30 Zn 35 30 30 4 12 solid Transition Metal 15 4 31 | 31 Ga 39 31 31 4 13 solid Metal 14 4 32 | 32 Ge 41 32 32 4 14 solid Metalloid 17 4 33 | 33 As 42 33 33 4 15 solid Metalloid 14 4 34 | 34 Se 45 34 34 4 16 solid Nonmetal 20 4 35 | 35 Br 45 35 35 4 17 liq Halogen 19 4 36 | 36 Kr 48 36 36 4 18 gas Noble Gas 23 4 37 | 37 Rb 48 37 37 5 1 solid Alkali Metal 20 5 38 | 38 Sr 50 38 38 5 2 solid Alkaline Earth Metal 18 5 39 | 39 Y 50 39 39 5 3 solid Transition Metal 21 5 40 | 40 Zr 51 40 40 5 4 solid Transition Metal 20 5 41 | 41 Nb 52 41 41 5 5 solid Transition Metal 24 5 42 | 42 Mo 54 42 42 5 6 solid Transition Metal 20 5 43 | 43 Tc 55 43 43 5 7 artificial Transition Metal 23 5 44 | 44 Ru 57 44 44 5 8 solid Transition Metal 16 5 45 | 45 Rh 58 45 45 5 9 solid Transition Metal 20 5 46 | 46 Pd 60 46 46 5 10 solid Transition Metal 21 5 47 | 47 Ag 61 47 47 5 11 solid Transition Metal 27 5 48 | 48 Cd 64 48 48 5 12 solid Transition Metal 22 5 49 | 49 In 66 49 49 5 13 solid Metal 34 5 50 | 50 Sn 69 50 50 5 14 solid Metal 28 5 51 | 51 Sb 71 51 51 5 15 solid Metalloid 29 5 52 | 52 Te 76 52 52 5 16 solid Metalloid 29 5 53 | 53 I 74 53 53 5 17 solid Halogen 24 5 54 | 54 Xe 77 54 54 5 18 gas Noble Gas 31 5 55 | 55 Cs 78 55 55 6 1 solid Alkali Metal 22 6 56 | 56 Ba 81 56 56 6 2 solid Alkaline Earth Metal 25 6 57 | 57 La 82 57 57 6 3 solid Lanthanide 19 6 58 | 58 Ce 82 58 58 6 solid Lanthanide 19 6 59 | 59 Pr 82 59 59 6 solid Lanthanide 15 6 60 | 60 Nd 84 60 60 6 solid Lanthanide 16 6 61 | 61 Pm 84 61 61 6 artificial Lanthanide 14 6 62 | 62 Sm 88 
62 62 6 solid Lanthanide 17 6 63 | 63 Eu 89 63 63 6 solid Lanthanide 21 6 64 | 64 Gd 93 64 64 6 solid Lanthanide 17 6 65 | 65 Tb 94 65 65 6 solid Lanthanide 24 6 66 | 66 Dy 97 66 66 6 solid Lanthanide 21 6 67 | 67 Ho 98 67 67 6 solid Lanthanide 29 6 68 | 68 Er 99 68 68 6 solid Lanthanide 16 6 69 | 69 Tm 100 69 69 6 solid Lanthanide 18 6 70 | 70 Yb 103 70 70 6 solid Lanthanide 16 6 71 | 71 Lu 104 71 71 6 solid Lanthanide 22 6 72 | 72 Hf 106 72 72 6 4 solid Transition Metal 17 6 73 | 73 Ta 108 73 73 6 5 solid Transition Metal 19 6 74 | 74 W 110 74 74 6 6 solid Transition Metal 22 6 75 | 75 Re 111 75 75 6 7 solid Transition Metal 21 6 76 | 76 Os 114 76 76 6 8 solid Transition Metal 19 6 77 | 77 Ir 115 77 77 6 9 solid Transition Metal 25 6 78 | 78 Pt 117 78 78 6 10 solid Transition Metal 32 6 79 | 79 Au 118 79 79 6 11 solid Transition Metal 21 6 80 | 80 Hg 121 80 80 6 12 liq Transition Metal 26 6 81 | 81 Tl 123 81 81 6 13 solid Metal 28 6 82 | 82 Pb 125 82 82 6 14 solid Metal 29 6 83 | 83 Bi 126 83 83 6 15 solid Metal 19 6 84 | 84 Po 126 84 84 6 16 solid Metalloid 34 6 85 | 85 At 125 85 85 6 17 solid Noble Gas 21 6 86 | 86 Rn 136 86 86 6 18 gas Alkali Metal 20 6 -------------------------------------------------------------------------------- /natlog/natprogs/emu.nat: -------------------------------------------------------------------------------- 1 | % engine based alternative primitive implementations 2 | 3 | if_ C Y N : eng C C E, ask E R, stop E, pick_ R C Y N. 4 | 5 | not_ G : if_ G (fail) (true). 6 | 7 | once_ G : if_ G (true) (fail). 8 | 9 | pick_ (the C) C Y _N : call Y. 10 | pick_ no _C _Y N : call N. 11 | 12 | findall_ X G Xs : eng X G E, ask E Y, collect_all_ E Y Xs. 13 | 14 | collect_all_ _ no (). 15 | collect_all_ E (the X) (X Xs) : ask E Y, collect_all_ E Y Xs. 16 | 17 | copy_term_ T CT : eng T (true) E, ask E (the CT), stop E. 18 | 19 | var_ X : not_ (not_ (eq X 1)), not_ (not_ (eq X 2)). 20 | 21 | nonvar_ X: not_ (var_ X). 22 | 23 | emu1: 24 | if_ (eq 1 2) (eq X yes) (eq X no), 25 | #print X, 26 | fail. 27 | 28 | emu2: 29 | findall_ X (between 1 5 X) Xs, 30 | #print Xs, 31 | fail. 32 | 33 | emu3: 34 | not_ (eq 1 2). 35 | 36 | emu4 : var_ X, nonvar_ a. 37 | -------------------------------------------------------------------------------- /natlog/natprogs/family.nat: -------------------------------------------------------------------------------- 1 | male 'Adam'. 2 | male 'Bill'. 3 | male 'Paul'. 4 | male 'Brandon'. 5 | male 'Peter'. 6 | male 'Dylan'. 7 | 8 | female 'Brenda'. 9 | female 'Sandy'. 10 | female 'Siena'. 11 | female 'Mary'. 12 | female 'Anna'. 13 | 14 | parent of 'Adam' 'Brandon'. 15 | parent of 'Adam' 'Sandy'. 16 | parent of 'Dylan' 'Brenda'. 17 | parent of 'Dylan' 'Paul' . 18 | parent of 'Siena' 'Brenda'. 19 | parent of 'Siena' 'Paul'. 20 | parent of 'Brenda' 'Bill'. 21 | parent of 'Brenda' 'Mary'. 22 | parent of 'Sandy' 'Bill'. 23 | parent of 'Sandy' 'Mary'. 24 | parent of 'Paul' 'Anna'. 25 | parent of 'Paul' 'Peter'. 26 | 27 | sibling of X S: parent of X P, parent of S P, distinct S X. 28 | 29 | brother of X B: sibling of X B, male B. 30 | 31 | sister of X S: sibling of X S, female S. 32 | 33 | mother of X M: parent of X M, female M. 34 | 35 | father of X M: parent of X M, male M. 36 | 37 | grand parent of X GP: parent of X P, parent of P GP. 38 | 39 | ancestor of X A : parent of X P, parent or ancestor P A. 40 | 41 | parent or ancestor P P. 42 | parent or ancestor P A : ancestor of P A. 
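% Illustrative queries (a sketch based only on the facts above; recall that
% 'parent of X P' reads "P is a parent of X"):
% ?- grand parent of 'Adam' GP ?      % should yield GP = 'Bill' and GP = 'Mary'
% ?- ancestor of 'Adam' A, female A ? % should yield A = 'Sandy' and A = 'Mary'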
43 | 44 | cousin of X C: 45 | parent of X P, 46 | parent of P GP, 47 | parent of Q GP, 48 | distinct P Q, 49 | parent of C Q, 50 | distinct X C. 51 | 52 | -------------------------------------------------------------------------------- /natlog/natprogs/gcol.nat: -------------------------------------------------------------------------------- 1 | node 1 (2 3 5) &C1. 2 | node 2 (3 5) &C2. 3 | node 3 (4) &C3. 4 | node 4 (5) &C4. 5 | node 5 () &C5. 6 | 7 | edge_col CX CY : node X Ys CX, argx _ Ys Y, node Y _ CY. 8 | 9 | color C : argx _ (r g b) C. 10 | 11 | nodes Ns : findall (N C) (node N _ C) Ns. 12 | 13 | edges Es : findall (CX CY) (edge_col CX CY) Es. 14 | 15 | 16 | color_graph NCs : 17 | nodes NCs, 18 | edges Es, 19 | color_edges Es. 20 | 21 | color_edges () . 22 | color_edges ((CX CY) Es) : 23 | color CX, 24 | color CY, 25 | distinct CX CY, 26 | color_edges Es. 27 | 28 | 29 | go NCs : color_graph NCs. 30 | 31 | -------------------------------------------------------------------------------- /natlog/natprogs/gram.nat: -------------------------------------------------------------------------------- 1 | sent End => @a smart,noun,verb, @on a nice, place, @End. 2 | 3 | noun => @cat. 4 | noun => @dog. 5 | 6 | verb => @sits. 7 | 8 | place => @mat. 9 | place => @bed. 10 | 11 | % @ X (X Xs) Xs. 12 | 13 | goal Xs : $eos Dot, sent Dot Xs (). 14 | 15 | go : goal Xs, to_tuple Xs T, #writeln T, fail. 16 | 17 | -------------------------------------------------------------------------------- /natlog/natprogs/interclausal.nat: -------------------------------------------------------------------------------- 1 | a X: &P X, b X. 2 | b 0. 3 | b 1. 4 | c : eq &P b. 5 | 6 | go X: c, a X. 7 | -------------------------------------------------------------------------------- /natlog/natprogs/lib.nat: -------------------------------------------------------------------------------- 1 | true. 2 | 3 | eq X X. 4 | 5 | eq_ X Y : unify_with_occurs_check X Y. 6 | 7 | the X G : call G. 8 | 9 | member X (X _). 10 | member X (_ Xs) : member X Xs. 11 | 12 | append () Xs Xs. 13 | append (X Xs) Ys (X Zs) : append Xs Ys Zs. 14 | 15 | permute () (). 16 | permute (X Xs) Zs : permute Xs Ys, insert X Ys Zs. 17 | 18 | insert X Xs (X Xs). 19 | insert X (Y Xs) (Y Ys) : insert X Xs Ys. 20 | 21 | select X Xs Ys : insert X Ys Xs. 22 | 23 | 24 | % metacall operations require arguments parenthesized. 25 | 26 | or X _ : call X. 27 | or _ Y : call Y. 28 | 29 | and X Y: call X, call Y. 30 | 31 | not X : if X (fail) (true). 32 | 33 | once X : if X (true) (fail). 34 | 35 | distinct X Y : not (eq X Y). 36 | 37 | var X: `type _ T, `isinstance X T R, `int R 1. 38 | 39 | compound X : distinct X (), `type () T, `isinstance X T R, `int R 1. 40 | 41 | atomic X : not (var X), not (compound X). 42 | 43 | maplist _ (). 44 | maplist F (X Xs) : call F X , maplist F Xs. 45 | 46 | maplist _ () (). 47 | maplist F (X Xs) (Y Ys) : call F X Y, maplist F Xs Ys. 48 | 49 | maplist _ () () (). 50 | maplist F (X Xs) (Y Ys) (Z Zs) : call F X Y Z, maplist F Xs Ys Zs. 51 | 52 | nth 0 (X _) X . 53 | nth N (_ Xs) R : when N > 0, with N - 1 as M , nth M Xs R. 54 | 55 | max X Y Z : `max X Y Z. 56 | min X Y Z : `min X Y Z. 57 | sum X Y Z : `add X Y Z. 58 | 59 | between A B X : with B + 1 as SB, `` range A SB X. 60 | 61 | arg I T X : `arg T I X. 62 | setarg I T X : #setarg T I X. 63 | 64 | argx I T X: `len T L, ``range 0 L I, `arg T I X. 65 | 66 | 67 | to_tuple Xs T : `from_cons_list_as_tuple Xs T. 68 | 69 | to_list Xs T : `from_cons_list Xs T. 
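% Note (illustrative): lists here are nested cons pairs, e.g. (1 (2 (3 ())))
% is the cons form of the flat tuple (1 2 3). to_tuple / to_list convert from
% the cons form, and to_cons_list (below) converts back, so a round trip such as
%   to_cons_list (1 2 3) Xs, to_tuple Xs T
% should leave T = (1 2 3).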
70 | 71 | 72 | to_cons_list T Xs : `to_cons_list T Xs. 73 | 74 | 75 | to_dict Xs D : `dict Xs D. 76 | from_dict D Xs : `from_dict D Xs. 77 | in_dict D K_V : ``in_dict D K_V. 78 | 79 | 80 | setprop O K V : #setattr O K V. 81 | getprop O K V : `getattr O K V. 82 | 83 | stack S : `list S. 84 | push S X : #meth_call S append (X). 85 | pop S X : `meth_call S pop () X. 86 | 87 | copy_term X CX : `copy_term X CX. 88 | 89 | 90 | findall X G Xs: listof X G S, to_cons_list S Xs. 91 | 92 | listof X G S: stack S, collect_ X G S. 93 | 94 | collect_ X G S : call G, `copy_term X CX, push S CX, fail. 95 | collect_ _X _G _S. 96 | 97 | 98 | stop E : getprop E stop S, #S. 99 | 100 | in E X : ask E A, more in E A X. 101 | 102 | more in E (the X) X. 103 | more in E (the _) X : in E X. 104 | 105 | enum X G (N R) : eng X G E, enum from E R -1 N. 106 | 107 | enum from E R I1 I3 : ask E A, with I1 + 1 as I2, enum more E A R I2 I3. 108 | 109 | enum more E (the X) X I I. 110 | enum more E (the _) X I1 I2 : enum from E X I1 I2. 111 | 112 | 113 | first_sols N G R: eng G G E, enum from E R 0 K, if (when K0, ask E X, with K - 1 as K1, take_more X K1 E XXs. 117 | 118 | take_more no _K E () : stop E. 119 | take_more (the X) K E (X Xs) : take K E Xs. 120 | 121 | loop N N. 122 | loop N X : with N + 1 as M, ^trust loop M X. 123 | 124 | repeat X : call X. 125 | repeat X : ^trust repeat X. 126 | 127 | % arithmetics 128 | 129 | % defs that have a var as 1-st arg will start with when or with 130 | 131 | with X + Y as Z : `add X Y Z. 132 | with X - Y as Z : `sub X Y Z. 133 | with X * Y as Z : `mul X Y Z. 134 | with X / Y as Z : `truediv X Y Z. 135 | with X // Y as Z : `floordiv X Y Z. 136 | 137 | when X < Y : `lt X Y R, `int R 1. 138 | when X > Y : `gt X Y R, `int R 1. 139 | when X <= Y : `le X Y R, `int R 1. 140 | when X >= Y : `ge X Y R, `int R 1. 141 | when X <> Y : `ne X Y R, `int R 1. 142 | when X == Y : `eq X Y R, `int R 1. 143 | 144 | writeln X : #print X. 145 | nl : #print. 146 | 147 | % exceptions with engines 148 | 149 | throw E : ^(exception E), fail. 150 | 151 | catch Goal Catcher Recovery: 152 | eng (just Goal) Goal Engine, 153 | in Engine Answer, 154 | maybe_caught Answer Catcher Goal Recovery. 155 | 156 | maybe_caught (exception C) C _ Recovery : call Recovery, ^trust true. 157 | maybe_caught (exception C) Catcher _ _ : distinct C Catcher, throw C. 158 | maybe_caught (just G) _ G _ . 159 | 160 | is Term Type : `has_type Term Type 1. 161 | 162 | % quick exception throwing test 163 | 164 | exception_test Kind X: 165 | catch (thrower X) (Kind X) (#print recovering from Kind X), 166 | #print got Kind X. 167 | 168 | 169 | thrower X: #print X is fine so far. 170 | thrower X : throw (bad X), #print never here. 171 | thrower X : #print X missed. 172 | 173 | etest1 : exception_test bad ball. 174 | etest2 : exception_test other ball. 175 | etest3 : catch (eq X 1) fail fail, #print X. 176 | etest4 : findall X (eq X 1) R,#print R. 177 | -------------------------------------------------------------------------------- /natlog/natprogs/lib_tests.nat: -------------------------------------------------------------------------------- 1 | lib_t00: `numlist 1 3 Xs, maplist (`add 10) Xs Ys, #print Ys, fail. 2 | lib_t01 : `numlist 1 3 Xs, maplist (permute) (Xs (Xs ())) Yss, #print Yss, fail. 3 | lib_t02 : `numlist 1 5 X, `numlist 11 15 Y, maplist (sum) X Y Z, #print Z, fail. 4 | lib_t03 : `numlist 1 5 X, nth 2 X R, #print R ,fail. 5 | lib_t04 : list (1 2 3) Xs, permute Xs Ys, tuple Ys T, #print T, fail. 
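% (Presumably lib_t04 above backtracks over every permutation of (1 2 3),
% printing each one as a tuple before failing into the next solution.)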
6 | 7 | lib_t05 : list (1 2 3 4) Xs, #print Xs, eng X (member X Xs) E, #print E, 8 | ask E R1, ask E R2, ask E R3, ask E R4, #print R1 R2 R3 R4, fail. 9 | 10 | p 1. 11 | p 2. 12 | p 3. 13 | p 4. 14 | 15 | 16 | lib_t06 : eng X (p X) E, #print engine E, 17 | ask E R1, ask E R2, ask E R3, ask E R4, #print results R1 R2 R3 R4, fail. 18 | 19 | lib_t07: eng X (between 1 5 X) E, in E A , writeln A, fail. 20 | 21 | lib_t08: eng X true E, ask E R, writeln R, fail. 22 | 23 | lib_t09: eq G (p X), eng G G E, ask E A, ask E B, #print A B, fail. 24 | 25 | lib_t10: eq G (p X), eng G G E, in E A, #print answer A, fail. 26 | 27 | 28 | lib_t11: if fail (eq X 0) (eq X 1), # print X, fail. 29 | 30 | lib_t12 : 31 | eng X (between 1 5 X) E, stop E, ask E A, #print here A, fail. 32 | 33 | lib_t13 : eng X (loop 0 X) E, ask E A, ask E B, #print A B, fail. 34 | 35 | 36 | lib_t14: eng X (loop 0 X) E, take 5 E Xs, tuple Xs T, #print T, fail. 37 | 38 | lib_t15: eq N 10, eng X (loop 0 X) E, take N E Xs, nth 5 Xs X, #print X,fail ? 39 | 40 | lib_t16 : first_sols 5 (loop 0 X) R, #print R, fail. 41 | 42 | 43 | fibo N Xs : eng X (slide_fibo 1 1) E, take N E Xs. 44 | 45 | slide_fibo X Y : with X+Y as Z, ^X, slide_fibo Y Z. 46 | -------------------------------------------------------------------------------- /natlog/natprogs/loop.nat: -------------------------------------------------------------------------------- 1 | loop. 2 | loop : loop. 3 | 4 | goal(X) : loop. 5 | -------------------------------------------------------------------------------- /natlog/natprogs/meta.nat: -------------------------------------------------------------------------------- 1 | metaint (). % no more goals left, succeed 2 | metaint (G Gs) : % unify the first goal with the head of a clause 3 | cls (G Bs) Gs, % build a new list of goals from the body of the 4 | % clause extended with the remaining goals as tail 5 | metaint Bs. % interpret the extended body 6 | 7 | 8 | cls 9 | ((add 0 X X) Tail) 10 | Tail. 11 | cls ( 12 | (add (s X) Y (s Z)) 13 | ( 14 | (add X Y Z) Tail 15 | ) 16 | ) 17 | Tail. 18 | 19 | cls ((goal R) ((add (s (s 0)) (s (s 0)) R) Tail)) 20 | Tail. 21 | 22 | 23 | % ?- metaint ((goal R) ())? 24 | % ANSWER: {'R': ('s', ('s', ('s', ('s', 0))))} 25 | -------------------------------------------------------------------------------- /natlog/natprogs/nrev.nat: -------------------------------------------------------------------------------- 1 | app () Ys Ys. 2 | app (X Xs) Ys (X Zs) : app Xs Ys Zs. 3 | 4 | nrev () (). 5 | nrev (X Xs) Zs : nrev Xs Ys, app Ys (X ()) Zs. 6 | 7 | goal Xs : nrev (1 (2 (3 ()))) Xs. 8 | -------------------------------------------------------------------------------- /natlog/natprogs/perm.nat: -------------------------------------------------------------------------------- 1 | perm () (). 2 | perm (X Xs) Zs : perm Xs Ys, ins X Ys Zs. 3 | 4 | ins X Xs (X Xs). 5 | ins X (Y Xs) (Y Ys) : ins X Xs Ys. 6 | 7 | goal Ps : perm (1 (2 (3 ()))) Ps. 8 | -------------------------------------------------------------------------------- /natlog/natprogs/pets.nat: -------------------------------------------------------------------------------- 1 | pet : dog. 2 | pet : cat. 3 | pet : snake. 4 | 5 | dog : barks, walks, bites. 6 | 7 | cat : purrs, walks, hisses. 8 | 9 | snake: hisses, slither, bites. 10 | 11 | walks : true. 12 | purrs : true. 13 | bites : true. 14 | hisses : true. 15 | 16 | slither : false. 17 | bites : false. 18 | barks : false. 
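% Illustrative reading (an assumption about the intent of this toy program):
% each trait holds only if it has a 'true' clause, so
% ?- pet ?
% should succeed through the cat branch (purrs, walks and hisses all hold),
% while dog fails on barks and snake fails on slither, whose only clauses
% bottom out in the undefined 'false'.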
19 | -------------------------------------------------------------------------------- /natlog/natprogs/pro.nat: -------------------------------------------------------------------------------- 1 | eq X X . 2 | 3 | sel X ( X Xs ) Xs . 4 | 5 | sel X ( Y Xs ) ( Y Ys ) : 6 | sel X Xs Ys . 7 | 8 | perm () () . 9 | 10 | perm ( X Xs ) Zs : 11 | perm Xs Ys , 12 | sel X Zs Ys . 13 | 14 | app () Xs Xs . 15 | 16 | app ( X Xs ) Ys ( X Zs ) : 17 | app Xs Ys Zs . 18 | 19 | nrev () () . 20 | 21 | nrev ( X Xs ) Zs : 22 | nrev Xs Ys , 23 | app Ys ( X () ) Zs . 24 | 25 | input ( 1 ( 2 ( 3 ( 4 ( 5 ( 6 ( 7 () ) ) ) ) ) ) ) . 26 | 27 | goal Y : 28 | input X , 29 | nrev X Y , 30 | call ( perm X Y ) , 31 | perm Y X . 32 | 33 | -------------------------------------------------------------------------------- /natlog/natprogs/pro2nat.pro: -------------------------------------------------------------------------------- 1 | c:-make. 2 | 3 | go:-pro2nat('sudoku4'),shell('cat sudoku4.nat'). 4 | 5 | pro2nat(F):- 6 | pl2nl(F), 7 | writeln(done). 8 | 9 | 10 | pl2nl(F):- 11 | atomic_list_concat(['prolog_progs/',F,'.pro'],PL), 12 | atom_concat(F,'.nat',NL), 13 | pl2nl(PL,NL). 14 | 15 | pl2nl(PL,NL):- 16 | see(PL), 17 | tell(NL), 18 | repeat, 19 | clause2sent(EOF), 20 | EOF==yes, 21 | !, 22 | seen, 23 | told. 24 | 25 | read_with_names(T,T0):- 26 | read_term(T0,[variable_names(Es)]), 27 | copy_term(T0,T), 28 | maplist(call,Es). 29 | 30 | writes(':'):-!,write((':')),nl,write(' '). 31 | writes(','):-!,write(','),nl,write(' '). 32 | writes(('.')):-!,write(('.')),nl. 33 | writes(W):-write(W),write(' '). 34 | 35 | clause2sent(EOF):- 36 | read_with_names(T,T0), 37 | ( 38 | T==end_of_file->EOF=yes 39 | ; 40 | EOF=no, 41 | cls2nat(T,Ns), 42 | T=T0, 43 | Ns=Xs, 44 | maplist(writes,Xs),nl 45 | ). 46 | 47 | cls2nat(C,Es):- 48 | cls2eqs(C,Es). 49 | 50 | cls2eqs(C,Rs):- 51 | (C=(H:-Bs)->true;C=H,Bs=true), 52 | cls2list(H,Bs,Ts), 53 | maplist(term2list,Ts,[Hs|Bss]), 54 | add_commas(Bss,Cs), 55 | (Cs=[]->Neck=[];Neck=[':']), 56 | append([Hs,Neck,Cs,['.']],Rs). 57 | 58 | add_commas([],[]). 59 | add_commas([Xs],Xs):-!. 60 | add_commas([Xs|Xss],Rs):-add_commas(Xss,Rs1),append(Xs,[','|Rs1],Rs). 61 | 62 | cls2list(H,Bs,Cs):- 63 | body2list((H,Bs),Cs). 64 | 65 | body2list(B,R):-var(B),!,R=[call(B)]. 66 | body2list((B,Cs),[B|Bs]):-!,body2list(Cs,Bs). 67 | body2list(true,[]):-!. 68 | body2list(C,[C]). 69 | 70 | 71 | term2list(T,Zs):-term2list(T,Xs,[]),Xs=[_|Ys],append(Zs,[_],Ys),!. 72 | 73 | term2list(A)-->{var(A)},!,[A]. 74 | term2list([])-->!,['()']. 75 | term2list(A)-->{atomic(A)},!,[A]. 76 | term2list([X|Xs])-->!,['('],term2list(X),term2list(Xs),[')']. 77 | term2list(T)-->{T=..[F|Xs]},['(',F],term2tuple(Xs),[')']. 78 | 79 | term2tuple([])-->[]. 80 | term2tuple([X|Xs])-->term2list(X),term2tuple(Xs). 81 | 82 | 83 | 84 | -------------------------------------------------------------------------------- /natlog/natprogs/prolog_progs/pro.pro: -------------------------------------------------------------------------------- 1 | eq(X,X). 2 | 3 | sel(X,[X|Xs],Xs). 4 | sel(X,[Y|Xs],[Y|Ys]):-sel(X,Xs,Ys). 5 | 6 | perm([],[]). 7 | perm([X|Xs],Zs):- 8 | perm(Xs,Ys), 9 | sel(X,Zs,Ys). 10 | 11 | app([],Xs,Xs). 12 | app([X|Xs],Ys,[X|Zs]):-app(Xs,Ys,Zs). 13 | 14 | nrev([],[]). 15 | nrev([X|Xs],Zs):-nrev(Xs,Ys),app(Ys,[X],Zs). 16 | 17 | % goal(X):-eq(X,[1,2,3,4,5,6,7,8,9]),perm(X,P),nrev(P,X). 18 | 19 | % goal(P):-eq(X,[1,2,3,4,5,6,7,8,9,10,11]),nrev(X,P),perm(X,P). 20 | 21 | 22 | input([1,2,3,4,5,6,7]). 
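% Note: this looks like the Prolog source of natprogs/pro.nat above;
% pl2nl/1 in pro2nat.pro translates prolog_progs/<F>.pro into <F>.nat
% clause by clause.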
23 | 24 | goal(Y):-input(X),nrev(X,Y),call(perm(X,Y)),perm(Y,X). 25 | -------------------------------------------------------------------------------- /natlog/natprogs/prolog_progs/sudoku4.pro: -------------------------------------------------------------------------------- 1 | goal(Xss):-sudoku(Xss). 2 | 3 | first_of([X|_Xs],X). 4 | 5 | sudoku(Xss):- 6 | s4x4(Xsss), 7 | first_of(Xsss,Xss), 8 | fill_out_blocks(Xsss). 9 | 10 | fill_out_rows([]). 11 | fill_out_rows([P|Ps]):- 12 | permute([1,2,3,4],P), 13 | fill_out_rows(Ps). 14 | 15 | fill_out_blocks([]). 16 | fill_out_blocks([B|Bs]):- 17 | fill_out_rows(B), 18 | fill_out_blocks(Bs). 19 | 20 | s4x4([ 21 | [ 22 | [S11,S12, S13,S14], 23 | [S21,S22, S23,S24], 24 | 25 | [S31,S32, S33,S34], 26 | [S41,S42, S43,S44] 27 | ], 28 | [ 29 | [S11,S21, S31,S41], 30 | [S12,S22, S32,S42], 31 | 32 | [S13,S23, S33,S43], 33 | [S14,S24, S34,S44] 34 | ], 35 | [ 36 | [S11,S12, S21,S22], 37 | [S13,S14, S23,S24], 38 | 39 | [S31,S32, S41,S42], 40 | [S33,S34, S43,S44] 41 | ] 42 | ]). 43 | 44 | 45 | /* 46 | % in lib.nat 47 | 48 | permute([],[]). 49 | permute([X|Xs],Zs):-permute(Xs,Ys),ins(X,Ys,Zs). 50 | 51 | ins(X,Xs,[X|Xs]). 52 | ins(X,[Y|Xs],[Y|Ys]):-ins(X,Xs,Ys). 53 | */ 54 | 55 | /* 56 | 57 | ?- go 58 | | . 59 | 60 | [1,2,3,4] 61 | [3,4,1,2] 62 | [2,3,4,1] 63 | [4,1,2,3] 64 | 65 | [1,2,3,4] 66 | [3,4,1,2] 67 | [2,1,4,3] 68 | [4,3,2,1] 69 | 70 | [1,2,3,4] 71 | [3,4,1,2] 72 | [4,1,2,3] 73 | [2,3,4,1] 74 | 75 | [1,2,3,4] 76 | [3,4,1,2] 77 | [4,3,2,1] 78 | [2,1,4,3] 79 | 80 | [1,2,3,4] 81 | [3,4,2,1] 82 | [2,1,4,3] 83 | [4,3,1,2] 84 | 85 | [1,2,3,4] 86 | [3,4,2,1] 87 | [4,3,1,2] 88 | [2,1,4,3] 89 | 90 | [1,2,3,4] 91 | [4,3,1,2] 92 | [3,4,2,1] 93 | [2,1,4,3] 94 | 95 | [1,2,3,4] 96 | [4,3,1,2] 97 | [2,1,4,3] 98 | [3,4,2,1] 99 | 100 | [1,2,3,4] 101 | [4,3,2,1] 102 | [3,1,4,2] 103 | [2,4,1,3] 104 | 105 | [1,2,3,4] 106 | [4,3,2,1] 107 | [3,4,1,2] 108 | [2,1,4,3] 109 | 110 | [1,2,3,4] 111 | [4,3,2,1] 112 | [2,1,4,3] 113 | [3,4,1,2] 114 | 115 | [1,2,3,4] 116 | [4,3,2,1] 117 | [2,4,1,3] 118 | [3,1,4,2] 119 | 120 | [2,1,3,4] 121 | [3,4,1,2] 122 | [1,2,4,3] 123 | [4,3,2,1] 124 | 125 | [2,1,3,4] 126 | [3,4,1,2] 127 | [4,3,2,1] 128 | [1,2,4,3] 129 | 130 | [2,1,3,4] 131 | [3,4,2,1] 132 | [1,3,4,2] 133 | [4,2,1,3] 134 | 135 | [2,1,3,4] 136 | [3,4,2,1] 137 | [1,2,4,3] 138 | [4,3,1,2] 139 | 140 | [2,1,3,4] 141 | [3,4,2,1] 142 | [4,2,1,3] 143 | [1,3,4,2] 144 | 145 | [2,1,3,4] 146 | [3,4,2,1] 147 | [4,3,1,2] 148 | [1,2,4,3] 149 | 150 | [2,1,3,4] 151 | [4,3,1,2] 152 | [3,2,4,1] 153 | [1,4,2,3] 154 | 155 | [2,1,3,4] 156 | [4,3,1,2] 157 | [3,4,2,1] 158 | [1,2,4,3] 159 | 160 | [2,1,3,4] 161 | [4,3,1,2] 162 | [1,2,4,3] 163 | [3,4,2,1] 164 | 165 | [2,1,3,4] 166 | [4,3,1,2] 167 | [1,4,2,3] 168 | [3,2,4,1] 169 | 170 | [2,1,3,4] 171 | [4,3,2,1] 172 | [3,4,1,2] 173 | [1,2,4,3] 174 | 175 | [2,1,3,4] 176 | [4,3,2,1] 177 | [1,2,4,3] 178 | [3,4,1,2] 179 | 180 | [2,3,1,4] 181 | [1,4,2,3] 182 | [3,2,4,1] 183 | [4,1,3,2] 184 | 185 | [2,3,1,4] 186 | [1,4,2,3] 187 | [3,1,4,2] 188 | [4,2,3,1] 189 | 190 | [2,3,1,4] 191 | [1,4,2,3] 192 | [4,2,3,1] 193 | [3,1,4,2] 194 | 195 | [2,3,1,4] 196 | [1,4,2,3] 197 | [4,1,3,2] 198 | [3,2,4,1] 199 | 200 | [2,3,1,4] 201 | [4,1,2,3] 202 | [3,2,4,1] 203 | [1,4,3,2] 204 | 205 | [2,3,1,4] 206 | [4,1,2,3] 207 | [1,4,3,2] 208 | [3,2,4,1] 209 | 210 | [2,3,1,4] 211 | [1,4,3,2] 212 | [3,2,4,1] 213 | [4,1,2,3] 214 | 215 | [2,3,1,4] 216 | [1,4,3,2] 217 | [4,1,2,3] 218 | [3,2,4,1] 219 | 220 | [2,3,1,4] 221 | [4,1,3,2] 222 | [3,2,4,1] 223 | [1,4,2,3] 224 | 225 | [2,3,1,4] 226 | 
[4,1,3,2] 227 | [3,4,2,1] 228 | [1,2,4,3] 229 | 230 | [2,3,1,4] 231 | [4,1,3,2] 232 | [1,2,4,3] 233 | [3,4,2,1] 234 | 235 | [2,3,1,4] 236 | [4,1,3,2] 237 | [1,4,2,3] 238 | [3,2,4,1] 239 | 240 | [2,3,4,1] 241 | [1,4,2,3] 242 | [3,2,1,4] 243 | [4,1,3,2] 244 | 245 | [2,3,4,1] 246 | [1,4,2,3] 247 | [4,1,3,2] 248 | [3,2,1,4] 249 | 250 | [2,3,4,1] 251 | [4,1,2,3] 252 | [1,2,3,4] 253 | [3,4,1,2] 254 | 255 | [2,3,4,1] 256 | [4,1,2,3] 257 | [3,2,1,4] 258 | [1,4,3,2] 259 | 260 | [2,3,4,1] 261 | [4,1,2,3] 262 | [3,4,1,2] 263 | [1,2,3,4] 264 | 265 | [2,3,4,1] 266 | [4,1,2,3] 267 | [1,4,3,2] 268 | [3,2,1,4] 269 | 270 | [2,3,4,1] 271 | [1,4,3,2] 272 | [3,1,2,4] 273 | [4,2,1,3] 274 | 275 | [2,3,4,1] 276 | [1,4,3,2] 277 | [3,2,1,4] 278 | [4,1,2,3] 279 | 280 | [2,3,4,1] 281 | [1,4,3,2] 282 | [4,1,2,3] 283 | [3,2,1,4] 284 | 285 | [2,3,4,1] 286 | [1,4,3,2] 287 | [4,2,1,3] 288 | [3,1,2,4] 289 | 290 | [2,3,4,1] 291 | [4,1,3,2] 292 | [3,2,1,4] 293 | [1,4,2,3] 294 | 295 | [2,3,4,1] 296 | [4,1,3,2] 297 | [1,4,2,3] 298 | [3,2,1,4] 299 | 300 | [1,3,2,4] 301 | [2,4,1,3] 302 | [3,2,4,1] 303 | [4,1,3,2] 304 | 305 | [1,3,2,4] 306 | [2,4,1,3] 307 | [3,1,4,2] 308 | [4,2,3,1] 309 | 310 | [1,3,2,4] 311 | [2,4,1,3] 312 | [4,2,3,1] 313 | [3,1,4,2] 314 | 315 | [1,3,2,4] 316 | [2,4,1,3] 317 | [4,1,3,2] 318 | [3,2,4,1] 319 | 320 | [1,3,2,4] 321 | [2,4,3,1] 322 | [3,1,4,2] 323 | [4,2,1,3] 324 | 325 | [1,3,2,4] 326 | [2,4,3,1] 327 | [4,2,1,3] 328 | [3,1,4,2] 329 | 330 | [1,3,2,4] 331 | [4,2,1,3] 332 | [3,1,4,2] 333 | [2,4,3,1] 334 | 335 | [1,3,2,4] 336 | [4,2,1,3] 337 | [2,4,3,1] 338 | [3,1,4,2] 339 | 340 | [1,3,2,4] 341 | [4,2,3,1] 342 | [3,1,4,2] 343 | [2,4,1,3] 344 | 345 | [1,3,2,4] 346 | [4,2,3,1] 347 | [3,4,1,2] 348 | [2,1,4,3] 349 | 350 | [1,3,2,4] 351 | [4,2,3,1] 352 | [2,1,4,3] 353 | [3,4,1,2] 354 | 355 | [1,3,2,4] 356 | [4,2,3,1] 357 | [2,4,1,3] 358 | [3,1,4,2] 359 | 360 | [3,1,2,4] 361 | [2,4,1,3] 362 | [1,3,4,2] 363 | [4,2,3,1] 364 | 365 | [3,1,2,4] 366 | [2,4,1,3] 367 | [4,2,3,1] 368 | [1,3,4,2] 369 | 370 | [3,1,2,4] 371 | [2,4,3,1] 372 | [1,3,4,2] 373 | [4,2,1,3] 374 | 375 | [3,1,2,4] 376 | [2,4,3,1] 377 | [1,2,4,3] 378 | [4,3,1,2] 379 | 380 | [3,1,2,4] 381 | [2,4,3,1] 382 | [4,2,1,3] 383 | [1,3,4,2] 384 | 385 | [3,1,2,4] 386 | [2,4,3,1] 387 | [4,3,1,2] 388 | [1,2,4,3] 389 | 390 | [3,1,2,4] 391 | [4,2,1,3] 392 | [2,3,4,1] 393 | [1,4,3,2] 394 | 395 | [3,1,2,4] 396 | [4,2,1,3] 397 | [1,3,4,2] 398 | [2,4,3,1] 399 | 400 | [3,1,2,4] 401 | [4,2,1,3] 402 | [2,4,3,1] 403 | [1,3,4,2] 404 | 405 | [3,1,2,4] 406 | [4,2,1,3] 407 | [1,4,3,2] 408 | [2,3,4,1] 409 | 410 | [3,1,2,4] 411 | [4,2,3,1] 412 | [1,3,4,2] 413 | [2,4,1,3] 414 | 415 | [3,1,2,4] 416 | [4,2,3,1] 417 | [2,4,1,3] 418 | [1,3,4,2] 419 | 420 | [3,2,1,4] 421 | [1,4,2,3] 422 | [2,3,4,1] 423 | [4,1,3,2] 424 | 425 | [3,2,1,4] 426 | [1,4,2,3] 427 | [4,1,3,2] 428 | [2,3,4,1] 429 | 430 | [3,2,1,4] 431 | [4,1,2,3] 432 | [2,3,4,1] 433 | [1,4,3,2] 434 | 435 | [3,2,1,4] 436 | [4,1,2,3] 437 | [1,3,4,2] 438 | [2,4,3,1] 439 | 440 | [3,2,1,4] 441 | [4,1,2,3] 442 | [2,4,3,1] 443 | [1,3,4,2] 444 | 445 | [3,2,1,4] 446 | [4,1,2,3] 447 | [1,4,3,2] 448 | [2,3,4,1] 449 | 450 | [3,2,1,4] 451 | [1,4,3,2] 452 | [2,3,4,1] 453 | [4,1,2,3] 454 | 455 | [3,2,1,4] 456 | [1,4,3,2] 457 | [2,1,4,3] 458 | [4,3,2,1] 459 | 460 | [3,2,1,4] 461 | [1,4,3,2] 462 | [4,1,2,3] 463 | [2,3,4,1] 464 | 465 | [3,2,1,4] 466 | [1,4,3,2] 467 | [4,3,2,1] 468 | [2,1,4,3] 469 | 470 | [3,2,1,4] 471 | [4,1,3,2] 472 | [2,3,4,1] 473 | [1,4,2,3] 474 | 475 | [3,2,1,4] 476 | [4,1,3,2] 477 | [1,4,2,3] 478 | [2,3,4,1] 479 | 480 | 
[3,2,4,1] 481 | [1,4,2,3] 482 | [2,1,3,4] 483 | [4,3,1,2] 484 | 485 | [3,2,4,1] 486 | [1,4,2,3] 487 | [2,3,1,4] 488 | [4,1,3,2] 489 | 490 | [3,2,4,1] 491 | [1,4,2,3] 492 | [4,1,3,2] 493 | [2,3,1,4] 494 | 495 | [3,2,4,1] 496 | [1,4,2,3] 497 | [4,3,1,2] 498 | [2,1,3,4] 499 | 500 | [3,2,4,1] 501 | [4,1,2,3] 502 | [2,3,1,4] 503 | [1,4,3,2] 504 | 505 | [3,2,4,1] 506 | [4,1,2,3] 507 | [1,4,3,2] 508 | [2,3,1,4] 509 | 510 | [3,2,4,1] 511 | [1,4,3,2] 512 | [2,3,1,4] 513 | [4,1,2,3] 514 | 515 | [3,2,4,1] 516 | [1,4,3,2] 517 | [4,1,2,3] 518 | [2,3,1,4] 519 | 520 | [3,2,4,1] 521 | [4,1,3,2] 522 | [2,3,1,4] 523 | [1,4,2,3] 524 | 525 | [3,2,4,1] 526 | [4,1,3,2] 527 | [1,3,2,4] 528 | [2,4,1,3] 529 | 530 | [3,2,4,1] 531 | [4,1,3,2] 532 | [2,4,1,3] 533 | [1,3,2,4] 534 | 535 | [3,2,4,1] 536 | [4,1,3,2] 537 | [1,4,2,3] 538 | [2,3,1,4] 539 | 540 | [1,3,4,2] 541 | [2,4,1,3] 542 | [3,1,2,4] 543 | [4,2,3,1] 544 | 545 | [1,3,4,2] 546 | [2,4,1,3] 547 | [4,2,3,1] 548 | [3,1,2,4] 549 | 550 | [1,3,4,2] 551 | [2,4,3,1] 552 | [3,1,2,4] 553 | [4,2,1,3] 554 | 555 | [1,3,4,2] 556 | [2,4,3,1] 557 | [3,2,1,4] 558 | [4,1,2,3] 559 | 560 | [1,3,4,2] 561 | [2,4,3,1] 562 | [4,1,2,3] 563 | [3,2,1,4] 564 | 565 | [1,3,4,2] 566 | [2,4,3,1] 567 | [4,2,1,3] 568 | [3,1,2,4] 569 | 570 | [1,3,4,2] 571 | [4,2,1,3] 572 | [2,1,3,4] 573 | [3,4,2,1] 574 | 575 | [1,3,4,2] 576 | [4,2,1,3] 577 | [3,1,2,4] 578 | [2,4,3,1] 579 | 580 | [1,3,4,2] 581 | [4,2,1,3] 582 | [3,4,2,1] 583 | [2,1,3,4] 584 | 585 | [1,3,4,2] 586 | [4,2,1,3] 587 | [2,4,3,1] 588 | [3,1,2,4] 589 | 590 | [1,3,4,2] 591 | [4,2,3,1] 592 | [3,1,2,4] 593 | [2,4,1,3] 594 | 595 | [1,3,4,2] 596 | [4,2,3,1] 597 | [2,4,1,3] 598 | [3,1,2,4] 599 | 600 | [3,1,4,2] 601 | [2,4,1,3] 602 | [1,2,3,4] 603 | [4,3,2,1] 604 | 605 | [3,1,4,2] 606 | [2,4,1,3] 607 | [1,3,2,4] 608 | [4,2,3,1] 609 | 610 | [3,1,4,2] 611 | [2,4,1,3] 612 | [4,2,3,1] 613 | [1,3,2,4] 614 | 615 | [3,1,4,2] 616 | [2,4,1,3] 617 | [4,3,2,1] 618 | [1,2,3,4] 619 | 620 | [3,1,4,2] 621 | [2,4,3,1] 622 | [1,3,2,4] 623 | [4,2,1,3] 624 | 625 | [3,1,4,2] 626 | [2,4,3,1] 627 | [4,2,1,3] 628 | [1,3,2,4] 629 | 630 | [3,1,4,2] 631 | [4,2,1,3] 632 | [1,3,2,4] 633 | [2,4,3,1] 634 | 635 | [3,1,4,2] 636 | [4,2,1,3] 637 | [2,4,3,1] 638 | [1,3,2,4] 639 | 640 | [3,1,4,2] 641 | [4,2,3,1] 642 | [2,3,1,4] 643 | [1,4,2,3] 644 | 645 | [3,1,4,2] 646 | [4,2,3,1] 647 | [1,3,2,4] 648 | [2,4,1,3] 649 | 650 | [3,1,4,2] 651 | [4,2,3,1] 652 | [2,4,1,3] 653 | [1,3,2,4] 654 | 655 | [3,1,4,2] 656 | [4,2,3,1] 657 | [1,4,2,3] 658 | [2,3,1,4] 659 | 660 | [3,4,1,2] 661 | [1,2,3,4] 662 | [2,3,4,1] 663 | [4,1,2,3] 664 | 665 | [3,4,1,2] 666 | [1,2,3,4] 667 | [2,1,4,3] 668 | [4,3,2,1] 669 | 670 | [3,4,1,2] 671 | [1,2,3,4] 672 | [4,1,2,3] 673 | [2,3,4,1] 674 | 675 | [3,4,1,2] 676 | [1,2,3,4] 677 | [4,3,2,1] 678 | [2,1,4,3] 679 | 680 | [3,4,1,2] 681 | [2,1,3,4] 682 | [1,2,4,3] 683 | [4,3,2,1] 684 | 685 | [3,4,1,2] 686 | [2,1,3,4] 687 | [4,3,2,1] 688 | [1,2,4,3] 689 | 690 | [3,4,1,2] 691 | [1,2,4,3] 692 | [2,1,3,4] 693 | [4,3,2,1] 694 | 695 | [3,4,1,2] 696 | [1,2,4,3] 697 | [4,3,2,1] 698 | [2,1,3,4] 699 | 700 | [3,4,1,2] 701 | [2,1,4,3] 702 | [1,2,3,4] 703 | [4,3,2,1] 704 | 705 | [3,4,1,2] 706 | [2,1,4,3] 707 | [1,3,2,4] 708 | [4,2,3,1] 709 | 710 | [3,4,1,2] 711 | [2,1,4,3] 712 | [4,2,3,1] 713 | [1,3,2,4] 714 | 715 | [3,4,1,2] 716 | [2,1,4,3] 717 | [4,3,2,1] 718 | [1,2,3,4] 719 | 720 | [3,4,2,1] 721 | [1,2,3,4] 722 | [2,1,4,3] 723 | [4,3,1,2] 724 | 725 | [3,4,2,1] 726 | [1,2,3,4] 727 | [4,3,1,2] 728 | [2,1,4,3] 729 | 730 | [3,4,2,1] 731 | [2,1,3,4] 732 | [1,3,4,2] 733 | 
[4,2,1,3] 734 | 735 | [3,4,2,1] 736 | [2,1,3,4] 737 | [1,2,4,3] 738 | [4,3,1,2] 739 | 740 | [3,4,2,1] 741 | [2,1,3,4] 742 | [4,2,1,3] 743 | [1,3,4,2] 744 | 745 | [3,4,2,1] 746 | [2,1,3,4] 747 | [4,3,1,2] 748 | [1,2,4,3] 749 | 750 | [3,4,2,1] 751 | [1,2,4,3] 752 | [2,1,3,4] 753 | [4,3,1,2] 754 | 755 | [3,4,2,1] 756 | [1,2,4,3] 757 | [2,3,1,4] 758 | [4,1,3,2] 759 | 760 | [3,4,2,1] 761 | [1,2,4,3] 762 | [4,1,3,2] 763 | [2,3,1,4] 764 | 765 | [3,4,2,1] 766 | [1,2,4,3] 767 | [4,3,1,2] 768 | [2,1,3,4] 769 | 770 | [3,4,2,1] 771 | [2,1,4,3] 772 | [1,2,3,4] 773 | [4,3,1,2] 774 | 775 | [3,4,2,1] 776 | [2,1,4,3] 777 | [4,3,1,2] 778 | [1,2,3,4] 779 | 780 | [1,2,4,3] 781 | [3,4,1,2] 782 | [2,1,3,4] 783 | [4,3,2,1] 784 | 785 | [1,2,4,3] 786 | [3,4,1,2] 787 | [4,3,2,1] 788 | [2,1,3,4] 789 | 790 | [1,2,4,3] 791 | [3,4,2,1] 792 | [2,1,3,4] 793 | [4,3,1,2] 794 | 795 | [1,2,4,3] 796 | [3,4,2,1] 797 | [2,3,1,4] 798 | [4,1,3,2] 799 | 800 | [1,2,4,3] 801 | [3,4,2,1] 802 | [4,1,3,2] 803 | [2,3,1,4] 804 | 805 | [1,2,4,3] 806 | [3,4,2,1] 807 | [4,3,1,2] 808 | [2,1,3,4] 809 | 810 | [1,2,4,3] 811 | [4,3,1,2] 812 | [2,1,3,4] 813 | [3,4,2,1] 814 | 815 | [1,2,4,3] 816 | [4,3,1,2] 817 | [3,1,2,4] 818 | [2,4,3,1] 819 | 820 | [1,2,4,3] 821 | [4,3,1,2] 822 | [3,4,2,1] 823 | [2,1,3,4] 824 | 825 | [1,2,4,3] 826 | [4,3,1,2] 827 | [2,4,3,1] 828 | [3,1,2,4] 829 | 830 | [1,2,4,3] 831 | [4,3,2,1] 832 | [2,1,3,4] 833 | [3,4,1,2] 834 | 835 | [1,2,4,3] 836 | [4,3,2,1] 837 | [3,4,1,2] 838 | [2,1,3,4] 839 | 840 | [2,1,4,3] 841 | [3,4,1,2] 842 | [1,2,3,4] 843 | [4,3,2,1] 844 | 845 | [2,1,4,3] 846 | [3,4,1,2] 847 | [1,3,2,4] 848 | [4,2,3,1] 849 | 850 | [2,1,4,3] 851 | [3,4,1,2] 852 | [4,2,3,1] 853 | [1,3,2,4] 854 | 855 | [2,1,4,3] 856 | [3,4,1,2] 857 | [4,3,2,1] 858 | [1,2,3,4] 859 | 860 | [2,1,4,3] 861 | [3,4,2,1] 862 | [1,2,3,4] 863 | [4,3,1,2] 864 | 865 | [2,1,4,3] 866 | [3,4,2,1] 867 | [4,3,1,2] 868 | [1,2,3,4] 869 | 870 | [2,1,4,3] 871 | [4,3,1,2] 872 | [1,2,3,4] 873 | [3,4,2,1] 874 | 875 | [2,1,4,3] 876 | [4,3,1,2] 877 | [3,4,2,1] 878 | [1,2,3,4] 879 | 880 | [2,1,4,3] 881 | [4,3,2,1] 882 | [1,2,3,4] 883 | [3,4,1,2] 884 | 885 | [2,1,4,3] 886 | [4,3,2,1] 887 | [3,2,1,4] 888 | [1,4,3,2] 889 | 890 | [2,1,4,3] 891 | [4,3,2,1] 892 | [3,4,1,2] 893 | [1,2,3,4] 894 | 895 | [2,1,4,3] 896 | [4,3,2,1] 897 | [1,4,3,2] 898 | [3,2,1,4] 899 | 900 | [2,4,1,3] 901 | [1,3,2,4] 902 | [3,2,4,1] 903 | [4,1,3,2] 904 | 905 | [2,4,1,3] 906 | [1,3,2,4] 907 | [3,1,4,2] 908 | [4,2,3,1] 909 | 910 | [2,4,1,3] 911 | [1,3,2,4] 912 | [4,2,3,1] 913 | [3,1,4,2] 914 | 915 | [2,4,1,3] 916 | [1,3,2,4] 917 | [4,1,3,2] 918 | [3,2,4,1] 919 | 920 | [2,4,1,3] 921 | [3,1,2,4] 922 | [1,3,4,2] 923 | [4,2,3,1] 924 | 925 | [2,4,1,3] 926 | [3,1,2,4] 927 | [4,2,3,1] 928 | [1,3,4,2] 929 | 930 | [2,4,1,3] 931 | [1,3,4,2] 932 | [3,1,2,4] 933 | [4,2,3,1] 934 | 935 | [2,4,1,3] 936 | [1,3,4,2] 937 | [4,2,3,1] 938 | [3,1,2,4] 939 | 940 | [2,4,1,3] 941 | [3,1,4,2] 942 | [1,2,3,4] 943 | [4,3,2,1] 944 | 945 | [2,4,1,3] 946 | [3,1,4,2] 947 | [1,3,2,4] 948 | [4,2,3,1] 949 | 950 | [2,4,1,3] 951 | [3,1,4,2] 952 | [4,2,3,1] 953 | [1,3,2,4] 954 | 955 | [2,4,1,3] 956 | [3,1,4,2] 957 | [4,3,2,1] 958 | [1,2,3,4] 959 | 960 | [2,4,3,1] 961 | [1,3,2,4] 962 | [3,1,4,2] 963 | [4,2,1,3] 964 | 965 | [2,4,3,1] 966 | [1,3,2,4] 967 | [4,2,1,3] 968 | [3,1,4,2] 969 | 970 | [2,4,3,1] 971 | [3,1,2,4] 972 | [1,3,4,2] 973 | [4,2,1,3] 974 | 975 | [2,4,3,1] 976 | [3,1,2,4] 977 | [1,2,4,3] 978 | [4,3,1,2] 979 | 980 | [2,4,3,1] 981 | [3,1,2,4] 982 | [4,2,1,3] 983 | [1,3,4,2] 984 | 985 | [2,4,3,1] 986 | [3,1,2,4] 987 | 
[4,3,1,2] 988 | [1,2,4,3] 989 | 990 | [2,4,3,1] 991 | [1,3,4,2] 992 | [3,1,2,4] 993 | [4,2,1,3] 994 | 995 | [2,4,3,1] 996 | [1,3,4,2] 997 | [3,2,1,4] 998 | [4,1,2,3] 999 | 1000 | [2,4,3,1] 1001 | [1,3,4,2] 1002 | [4,1,2,3] 1003 | [3,2,1,4] 1004 | 1005 | [2,4,3,1] 1006 | [1,3,4,2] 1007 | [4,2,1,3] 1008 | [3,1,2,4] 1009 | 1010 | [2,4,3,1] 1011 | [3,1,4,2] 1012 | [1,3,2,4] 1013 | [4,2,1,3] 1014 | 1015 | [2,4,3,1] 1016 | [3,1,4,2] 1017 | [4,2,1,3] 1018 | [1,3,2,4] 1019 | 1020 | [1,4,2,3] 1021 | [2,3,1,4] 1022 | [3,2,4,1] 1023 | [4,1,3,2] 1024 | 1025 | [1,4,2,3] 1026 | [2,3,1,4] 1027 | [3,1,4,2] 1028 | [4,2,3,1] 1029 | 1030 | [1,4,2,3] 1031 | [2,3,1,4] 1032 | [4,2,3,1] 1033 | [3,1,4,2] 1034 | 1035 | [1,4,2,3] 1036 | [2,3,1,4] 1037 | [4,1,3,2] 1038 | [3,2,4,1] 1039 | 1040 | [1,4,2,3] 1041 | [2,3,4,1] 1042 | [3,2,1,4] 1043 | [4,1,3,2] 1044 | 1045 | [1,4,2,3] 1046 | [2,3,4,1] 1047 | [4,1,3,2] 1048 | [3,2,1,4] 1049 | 1050 | [1,4,2,3] 1051 | [3,2,1,4] 1052 | [2,3,4,1] 1053 | [4,1,3,2] 1054 | 1055 | [1,4,2,3] 1056 | [3,2,1,4] 1057 | [4,1,3,2] 1058 | [2,3,4,1] 1059 | 1060 | [1,4,2,3] 1061 | [3,2,4,1] 1062 | [2,1,3,4] 1063 | [4,3,1,2] 1064 | 1065 | [1,4,2,3] 1066 | [3,2,4,1] 1067 | [2,3,1,4] 1068 | [4,1,3,2] 1069 | 1070 | [1,4,2,3] 1071 | [3,2,4,1] 1072 | [4,1,3,2] 1073 | [2,3,1,4] 1074 | 1075 | [1,4,2,3] 1076 | [3,2,4,1] 1077 | [4,3,1,2] 1078 | [2,1,3,4] 1079 | 1080 | [4,1,2,3] 1081 | [2,3,1,4] 1082 | [3,2,4,1] 1083 | [1,4,3,2] 1084 | 1085 | [4,1,2,3] 1086 | [2,3,1,4] 1087 | [1,4,3,2] 1088 | [3,2,4,1] 1089 | 1090 | [4,1,2,3] 1091 | [2,3,4,1] 1092 | [1,2,3,4] 1093 | [3,4,1,2] 1094 | 1095 | [4,1,2,3] 1096 | [2,3,4,1] 1097 | [3,2,1,4] 1098 | [1,4,3,2] 1099 | 1100 | [4,1,2,3] 1101 | [2,3,4,1] 1102 | [3,4,1,2] 1103 | [1,2,3,4] 1104 | 1105 | [4,1,2,3] 1106 | [2,3,4,1] 1107 | [1,4,3,2] 1108 | [3,2,1,4] 1109 | 1110 | [4,1,2,3] 1111 | [3,2,1,4] 1112 | [2,3,4,1] 1113 | [1,4,3,2] 1114 | 1115 | [4,1,2,3] 1116 | [3,2,1,4] 1117 | [1,3,4,2] 1118 | [2,4,3,1] 1119 | 1120 | [4,1,2,3] 1121 | [3,2,1,4] 1122 | [2,4,3,1] 1123 | [1,3,4,2] 1124 | 1125 | [4,1,2,3] 1126 | [3,2,1,4] 1127 | [1,4,3,2] 1128 | [2,3,4,1] 1129 | 1130 | [4,1,2,3] 1131 | [3,2,4,1] 1132 | [2,3,1,4] 1133 | [1,4,3,2] 1134 | 1135 | [4,1,2,3] 1136 | [3,2,4,1] 1137 | [1,4,3,2] 1138 | [2,3,1,4] 1139 | 1140 | [4,2,1,3] 1141 | [1,3,2,4] 1142 | [3,1,4,2] 1143 | [2,4,3,1] 1144 | 1145 | [4,2,1,3] 1146 | [1,3,2,4] 1147 | [2,4,3,1] 1148 | [3,1,4,2] 1149 | 1150 | [4,2,1,3] 1151 | [3,1,2,4] 1152 | [2,3,4,1] 1153 | [1,4,3,2] 1154 | 1155 | [4,2,1,3] 1156 | [3,1,2,4] 1157 | [1,3,4,2] 1158 | [2,4,3,1] 1159 | 1160 | [4,2,1,3] 1161 | [3,1,2,4] 1162 | [2,4,3,1] 1163 | [1,3,4,2] 1164 | 1165 | [4,2,1,3] 1166 | [3,1,2,4] 1167 | [1,4,3,2] 1168 | [2,3,4,1] 1169 | 1170 | [4,2,1,3] 1171 | [1,3,4,2] 1172 | [2,1,3,4] 1173 | [3,4,2,1] 1174 | 1175 | [4,2,1,3] 1176 | [1,3,4,2] 1177 | [3,1,2,4] 1178 | [2,4,3,1] 1179 | 1180 | [4,2,1,3] 1181 | [1,3,4,2] 1182 | [3,4,2,1] 1183 | [2,1,3,4] 1184 | 1185 | [4,2,1,3] 1186 | [1,3,4,2] 1187 | [2,4,3,1] 1188 | [3,1,2,4] 1189 | 1190 | [4,2,1,3] 1191 | [3,1,4,2] 1192 | [1,3,2,4] 1193 | [2,4,3,1] 1194 | 1195 | [4,2,1,3] 1196 | [3,1,4,2] 1197 | [2,4,3,1] 1198 | [1,3,2,4] 1199 | 1200 | [4,2,3,1] 1201 | [1,3,2,4] 1202 | [3,1,4,2] 1203 | [2,4,1,3] 1204 | 1205 | [4,2,3,1] 1206 | [1,3,2,4] 1207 | [3,4,1,2] 1208 | [2,1,4,3] 1209 | 1210 | [4,2,3,1] 1211 | [1,3,2,4] 1212 | [2,1,4,3] 1213 | [3,4,1,2] 1214 | 1215 | [4,2,3,1] 1216 | [1,3,2,4] 1217 | [2,4,1,3] 1218 | [3,1,4,2] 1219 | 1220 | [4,2,3,1] 1221 | [3,1,2,4] 1222 | [1,3,4,2] 1223 | [2,4,1,3] 1224 | 1225 | 
[4,2,3,1] 1226 | [3,1,2,4] 1227 | [2,4,1,3] 1228 | [1,3,4,2] 1229 | 1230 | [4,2,3,1] 1231 | [1,3,4,2] 1232 | [3,1,2,4] 1233 | [2,4,1,3] 1234 | 1235 | [4,2,3,1] 1236 | [1,3,4,2] 1237 | [2,4,1,3] 1238 | [3,1,2,4] 1239 | 1240 | [4,2,3,1] 1241 | [3,1,4,2] 1242 | [2,3,1,4] 1243 | [1,4,2,3] 1244 | 1245 | [4,2,3,1] 1246 | [3,1,4,2] 1247 | [1,3,2,4] 1248 | [2,4,1,3] 1249 | 1250 | [4,2,3,1] 1251 | [3,1,4,2] 1252 | [2,4,1,3] 1253 | [1,3,2,4] 1254 | 1255 | [4,2,3,1] 1256 | [3,1,4,2] 1257 | [1,4,2,3] 1258 | [2,3,1,4] 1259 | 1260 | [1,4,3,2] 1261 | [2,3,1,4] 1262 | [3,2,4,1] 1263 | [4,1,2,3] 1264 | 1265 | [1,4,3,2] 1266 | [2,3,1,4] 1267 | [4,1,2,3] 1268 | [3,2,4,1] 1269 | 1270 | [1,4,3,2] 1271 | [2,3,4,1] 1272 | [3,1,2,4] 1273 | [4,2,1,3] 1274 | 1275 | [1,4,3,2] 1276 | [2,3,4,1] 1277 | [3,2,1,4] 1278 | [4,1,2,3] 1279 | 1280 | [1,4,3,2] 1281 | [2,3,4,1] 1282 | [4,1,2,3] 1283 | [3,2,1,4] 1284 | 1285 | [1,4,3,2] 1286 | [2,3,4,1] 1287 | [4,2,1,3] 1288 | [3,1,2,4] 1289 | 1290 | [1,4,3,2] 1291 | [3,2,1,4] 1292 | [2,3,4,1] 1293 | [4,1,2,3] 1294 | 1295 | [1,4,3,2] 1296 | [3,2,1,4] 1297 | [2,1,4,3] 1298 | [4,3,2,1] 1299 | 1300 | [1,4,3,2] 1301 | [3,2,1,4] 1302 | [4,1,2,3] 1303 | [2,3,4,1] 1304 | 1305 | [1,4,3,2] 1306 | [3,2,1,4] 1307 | [4,3,2,1] 1308 | [2,1,4,3] 1309 | 1310 | [1,4,3,2] 1311 | [3,2,4,1] 1312 | [2,3,1,4] 1313 | [4,1,2,3] 1314 | 1315 | [1,4,3,2] 1316 | [3,2,4,1] 1317 | [4,1,2,3] 1318 | [2,3,1,4] 1319 | 1320 | [4,1,3,2] 1321 | [2,3,1,4] 1322 | [3,2,4,1] 1323 | [1,4,2,3] 1324 | 1325 | [4,1,3,2] 1326 | [2,3,1,4] 1327 | [3,4,2,1] 1328 | [1,2,4,3] 1329 | 1330 | [4,1,3,2] 1331 | [2,3,1,4] 1332 | [1,2,4,3] 1333 | [3,4,2,1] 1334 | 1335 | [4,1,3,2] 1336 | [2,3,1,4] 1337 | [1,4,2,3] 1338 | [3,2,4,1] 1339 | 1340 | [4,1,3,2] 1341 | [2,3,4,1] 1342 | [3,2,1,4] 1343 | [1,4,2,3] 1344 | 1345 | [4,1,3,2] 1346 | [2,3,4,1] 1347 | [1,4,2,3] 1348 | [3,2,1,4] 1349 | 1350 | [4,1,3,2] 1351 | [3,2,1,4] 1352 | [2,3,4,1] 1353 | [1,4,2,3] 1354 | 1355 | [4,1,3,2] 1356 | [3,2,1,4] 1357 | [1,4,2,3] 1358 | [2,3,4,1] 1359 | 1360 | [4,1,3,2] 1361 | [3,2,4,1] 1362 | [2,3,1,4] 1363 | [1,4,2,3] 1364 | 1365 | [4,1,3,2] 1366 | [3,2,4,1] 1367 | [1,3,2,4] 1368 | [2,4,1,3] 1369 | 1370 | [4,1,3,2] 1371 | [3,2,4,1] 1372 | [2,4,1,3] 1373 | [1,3,2,4] 1374 | 1375 | [4,1,3,2] 1376 | [3,2,4,1] 1377 | [1,4,2,3] 1378 | [2,3,1,4] 1379 | 1380 | [4,3,1,2] 1381 | [1,2,3,4] 1382 | [3,4,2,1] 1383 | [2,1,4,3] 1384 | 1385 | [4,3,1,2] 1386 | [1,2,3,4] 1387 | [2,1,4,3] 1388 | [3,4,2,1] 1389 | 1390 | [4,3,1,2] 1391 | [2,1,3,4] 1392 | [3,2,4,1] 1393 | [1,4,2,3] 1394 | 1395 | [4,3,1,2] 1396 | [2,1,3,4] 1397 | [3,4,2,1] 1398 | [1,2,4,3] 1399 | 1400 | [4,3,1,2] 1401 | [2,1,3,4] 1402 | [1,2,4,3] 1403 | [3,4,2,1] 1404 | 1405 | [4,3,1,2] 1406 | [2,1,3,4] 1407 | [1,4,2,3] 1408 | [3,2,4,1] 1409 | 1410 | [4,3,1,2] 1411 | [1,2,4,3] 1412 | [2,1,3,4] 1413 | [3,4,2,1] 1414 | 1415 | [4,3,1,2] 1416 | [1,2,4,3] 1417 | [3,1,2,4] 1418 | [2,4,3,1] 1419 | 1420 | [4,3,1,2] 1421 | [1,2,4,3] 1422 | [3,4,2,1] 1423 | [2,1,3,4] 1424 | 1425 | [4,3,1,2] 1426 | [1,2,4,3] 1427 | [2,4,3,1] 1428 | [3,1,2,4] 1429 | 1430 | [4,3,1,2] 1431 | [2,1,4,3] 1432 | [1,2,3,4] 1433 | [3,4,2,1] 1434 | 1435 | [4,3,1,2] 1436 | [2,1,4,3] 1437 | [3,4,2,1] 1438 | [1,2,3,4] 1439 | 1440 | [4,3,2,1] 1441 | [1,2,3,4] 1442 | [3,1,4,2] 1443 | [2,4,1,3] 1444 | 1445 | [4,3,2,1] 1446 | [1,2,3,4] 1447 | [3,4,1,2] 1448 | [2,1,4,3] 1449 | 1450 | [4,3,2,1] 1451 | [1,2,3,4] 1452 | [2,1,4,3] 1453 | [3,4,1,2] 1454 | 1455 | [4,3,2,1] 1456 | [1,2,3,4] 1457 | [2,4,1,3] 1458 | [3,1,4,2] 1459 | 1460 | [4,3,2,1] 1461 | [2,1,3,4] 
1462 | [3,4,1,2] 1463 | [1,2,4,3] 1464 | 1465 | [4,3,2,1] 1466 | [2,1,3,4] 1467 | [1,2,4,3] 1468 | [3,4,1,2] 1469 | 1470 | [4,3,2,1] 1471 | [1,2,4,3] 1472 | [2,1,3,4] 1473 | [3,4,1,2] 1474 | 1475 | [4,3,2,1] 1476 | [1,2,4,3] 1477 | [3,4,1,2] 1478 | [2,1,3,4] 1479 | 1480 | [4,3,2,1] 1481 | [2,1,4,3] 1482 | [1,2,3,4] 1483 | [3,4,1,2] 1484 | 1485 | [4,3,2,1] 1486 | [2,1,4,3] 1487 | [3,2,1,4] 1488 | [1,4,3,2] 1489 | 1490 | [4,3,2,1] 1491 | [2,1,4,3] 1492 | [3,4,1,2] 1493 | [1,2,3,4] 1494 | 1495 | [4,3,2,1] 1496 | [2,1,4,3] 1497 | [1,4,3,2] 1498 | [3,2,1,4] 1499 | 1500 | */ 1501 | -------------------------------------------------------------------------------- /natlog/natprogs/py_call.nat: -------------------------------------------------------------------------------- 1 | a 1. 2 | a 2. 3 | 4 | b X : a X. 5 | b X : `sum (1 2 3) X, #print sum is X, c(X). 6 | 7 | c 1 : ^just yielding 42 . 8 | c 3. 9 | 10 | good h. 11 | good o. 12 | 13 | goal X : b X, #print 'printing b =' X, c X. 14 | goal X : ``iter hello X, good X. 15 | goal X : `` range 1000 1005 X. 16 | goal X : `` range 3 X. 17 | 18 | goal no_more : #print done. 19 | -------------------------------------------------------------------------------- /natlog/natprogs/py_call1.nat: -------------------------------------------------------------------------------- 1 | goal it : #print 42. 2 | goal done. 3 | 4 | -------------------------------------------------------------------------------- /natlog/natprogs/queens.nat: -------------------------------------------------------------------------------- 1 | place_queen I (I _A) (I _B) (I _C). 2 | place_queen I (_A Cs) (_B Us) (_C Ds) : place_queen I Cs Us Ds. 3 | 4 | place_queens () _A _B _C. 5 | place_queens (I Is) Cs Us (_A Ds) : 6 | place_queens Is Cs (_B Us) Ds, 7 | place_queen I Cs Us Ds. 8 | 9 | gen_places () (). 10 | gen_places (_A Qs) (_B Ps) : gen_places Qs Ps. 11 | 12 | qs Qs Ps : gen_places Qs Ps , place_queens Qs Ps _A _B. 13 | 14 | goal8 Qs : qs (1 (2 (3 (4 (5 (6 (7 (8 ())))))))) Qs. 15 | 16 | goal9 Qs : qs (1 (2 (3 (4 (5 (6 (7 (8 (9 ()))))))))) Qs. 17 | 18 | goal10 Qs : qs (1 (2 (3 (4 (5 (6 (7 (8 (9 (10 ())))))))))) Qs. 19 | 20 | goal11 Qs : qs (1 (2 (3 (4 (5 (6 (7 (8 (9 (10 (11 ()))))))))))) Qs. 21 | 22 | goal12 Qs : qs (1 (2 (3 (4 (5 (6 (7 (8 (9 (10 (11 (12 ())))))))))))) Qs. 23 | -------------------------------------------------------------------------------- /natlog/natprogs/queens.pro: -------------------------------------------------------------------------------- 1 | goal(Ps):-qs([1,2,3,4,5,6,7,8,9,10],Ps). 2 | 3 | qs(Qs,Ps):-gen_places(Qs,Ps),place_queens(Qs,Ps,_,_). 4 | 5 | gen_places([],[]). 6 | gen_places([_|Qs],[_|Ps]):-gen_places(Qs,Ps). 7 | 8 | place_queen(I,[I|_],[I|_],[I|_]). 9 | place_queen(I,[_|Cs],[_|Us],[_|Ds]):-place_queen(I,Cs,Us,Ds). 10 | 11 | place_queens([],_,_,_). 12 | place_queens([I|Is],Cs,Us,[_|Ds]):- 13 | place_queens(Is,Cs,[_|Us],Ds), 14 | place_queen(I,Cs,Us,Ds). 15 | -------------------------------------------------------------------------------- /natlog/natprogs/story.nat: -------------------------------------------------------------------------------- 1 | story about 'Alice' and 'Bob'. 2 | 3 | find Matches : ~txt ('Alice' 'Bob') Matches. 
4 | -------------------------------------------------------------------------------- /natlog/natprogs/story.txt: -------------------------------------------------------------------------------- 1 | ChatGPT wrote this short story, with not more than 10 sentences about Alice and Bob discovering that encrypting messages between them is too much of a pain and not worth doing it as nobody is listening to what they are saying to each other. 2 | Alice and Bob were good friends and enjoyed chatting with each other. 3 | They heard about encryption and decided to try it out to keep their messages secure. 4 | They downloaded a secure messaging app and began to encrypt their conversations, but soon found it to be too much of a hassle. 5 | They had to constantly enter passwords and decipher each other's messages, which made their conversations less enjoyable. 6 | One day, they decided to skip the encryption and just chat freely. 7 | As it turned out, nobody was listening in on their conversations in the first place, so there was no need for encryption. 8 | They laughed about the whole thing and continued to chat without any worries. 9 | From then on, they realized that sometimes it's better to keep things simple and not overcomplicate things. 10 | -------------------------------------------------------------------------------- /natlog/natprogs/sudoku4.nat: -------------------------------------------------------------------------------- 1 | goal Xss : 2 | sudoku Xss . 3 | 4 | first_of ( X _Xs ) X . 5 | 6 | sudoku Xss : 7 | s4x4 Xsss , 8 | first_of Xsss Xss , 9 | fill_out_blocks Xsss . 10 | 11 | fill_out_rows () . 12 | 13 | fill_out_rows ( P Ps ) : 14 | permute ( 1 ( 2 ( 3 ( 4 () ) ) ) ) P , 15 | fill_out_rows Ps . 16 | 17 | fill_out_blocks () . 18 | 19 | fill_out_blocks ( B Bs ) : 20 | fill_out_rows B , 21 | fill_out_blocks Bs . 22 | 23 | s4x4 ( ( ( S11 ( S12 ( S13 ( S14 () ) ) ) ) ( ( S21 ( S22 ( S23 ( S24 () ) ) ) ) ( ( S31 ( S32 ( S33 ( S34 () ) ) ) ) ( ( S41 ( S42 ( S43 ( S44 () ) ) ) ) () ) ) ) ) ( ( ( S11 ( S21 ( S31 ( S41 () ) ) ) ) ( ( S12 ( S22 ( S32 ( S42 () ) ) ) ) ( ( S13 ( S23 ( S33 ( S43 () ) ) ) ) ( ( S14 ( S24 ( S34 ( S44 () ) ) ) ) () ) ) ) ) ( ( ( S11 ( S12 ( S21 ( S22 () ) ) ) ) ( ( S13 ( S14 ( S23 ( S24 () ) ) ) ) ( ( S31 ( S32 ( S41 ( S42 () ) ) ) ) ( ( S33 ( S34 ( S43 ( S44 () ) ) ) ) () ) ) ) ) () ) ) ) . 24 | -------------------------------------------------------------------------------- /natlog/natprogs/tc.nat: -------------------------------------------------------------------------------- 1 | cat is feline. 2 | tiger is feline. 3 | mouse is rodent. 4 | feline is mammal. 5 | rodent is mammal. 6 | snake is reptile. 7 | mammal is animal. 8 | reptile is animal. 9 | 10 | tc A Rel C : A Rel B, tc1 B Rel C. 11 | 12 | tc1 B _Rel B. 13 | tc1 B Rel C : tc B Rel C. 14 | 15 | goal Who : tc Who is animal. 
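16 | 17 | % Usage sketch (mirrors t2() in natlog/test/tests.py): 18 | % from natlog.natlog import Natlog, natprogs 19 | % n = Natlog(file_name=natprogs() + "tc.nat") 20 | % n.query("tc Who is animal ?")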
21 | -------------------------------------------------------------------------------- /natlog/ndb.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pickle 3 | 4 | from sklearn.ensemble import RandomForestClassifier 5 | from sklearn.neural_network import MLPClassifier 6 | import numpy as np 7 | import os 8 | import pickle 9 | 10 | from .db import * 11 | 12 | 13 | # simple multi-layer perceptron 14 | def neural_learner(): 15 | return MLPClassifier( 16 | hidden_layer_sizes=(16, 32, 16), 17 | random_state=1234, 18 | verbose=1, 19 | # activation='relu', 20 | max_iter=2000 21 | ) 22 | 23 | 24 | def rf_learner(): 25 | return RandomForestClassifier(random_state=1234) # alternative 26 | 27 | 28 | def set2bits(n, xs): 29 | """ 30 | turns a set into a one-hot encoded bit list 31 | """ 32 | return [1 if x in xs else 0 for x in range(n)] 33 | 34 | 35 | def bits2set(bs): 36 | """ 37 | turns a bit list into a set of natural numbers 38 | """ 39 | return [i for i, b in enumerate(bs) if b == 1] 40 | 41 | 42 | def seq2nums(xs): 43 | """ 44 | maps each symbol in a sequence to a natural number 45 | """ 46 | d, i = dict(), 0 47 | for x in xs: 48 | if x not in d: 49 | d[x] = i 50 | i += 1 51 | return d 52 | 53 | 54 | def exists_file(fname): 55 | return os.path.exists(fname) 56 | 57 | 58 | def to_pickle(obj, fname): 59 | """ 60 | serializes an object to a .pickle file 61 | """ 62 | with open(fname, "wb") as outf: 63 | pickle.dump(obj, outf) 64 | 65 | 66 | def from_pickle(fname): 67 | """ 68 | deserializes an object from a pickle file 69 | """ 70 | with open(fname, "rb") as inf: 71 | return pickle.load(inf) 72 | 73 | 74 | class Ndb(Db): 75 | """ 76 | replaces indexing in Db with a machine-learned equivalent 77 | """ 78 | 79 | def __init__(self, learner=neural_learner): 80 | super().__init__() 81 | self.learner_name=learner.__name__ 82 | self.learner = learner() 83 | 84 | def to_model_name(self, fname): 85 | return fname + "."+self.learner_name+".pickle" 86 | 87 | def load(self, fname): 88 | """ 89 | overrides loading mechanism to fit learner 90 | """ 91 | model_name = self.to_model_name(fname) 92 | if exists_file(model_name): 93 | self.learner, self.db_const_dict, self.css = from_pickle(model_name) 94 | return 95 | 96 | super().load(fname) 97 | db_const_dict = seq2nums(self.index) # assuming dict ordered 98 | # create diagonal numpy matrix, one row for each constant 99 | X = np.eye(len(db_const_dict), dtype=int) 100 | val_count = len(self.css) 101 | y = np.array([set2bits(val_count, xs) for xs in self.index.values()]) 102 | print('X:', X.shape, '\n', X) 103 | print('\ny:', y.shape, '\n', y, '\n') 104 | self.learner.fit(X, y) 105 | self.db_const_dict = db_const_dict 106 | to_pickle((self.learner, db_const_dict, self.css), model_name) 107 | 108 | def ground_match_of(self, query_tuple): 109 | """ 110 | overrides database matching with learned predictions 111 | """ 112 | query_consts = path_of(query_tuple) 113 | query_consts_nums = \ 114 | [self.db_const_dict[c] for c in query_consts if c in self.db_const_dict] 115 | db_const_count = len(self.db_const_dict) 116 | qs = np.array([set2bits(db_const_count, query_consts_nums)]) 117 | rs = self.learner.predict(qs) 118 | matches = bits2set(list(rs[0])) 119 | #print('!!!!!!:',matches,self.css) 120 | return matches 121 | -------------------------------------------------------------------------------- /natlog/neural_natlog.py: -------------------------------------------------------------------------------- 1 | from .natlog import
Natlog,natprogs 2 | from .ndb import * 3 | 4 | 5 | class NeuralNatlog(Natlog): 6 | """ 7 | overrrides Natlog's database constructor 8 | to use a neurally indexed nd instead of Db 9 | """ 10 | 11 | def db_init(self): 12 | self.db = Ndb() 13 | 14 | 15 | def nconsult(fname): 16 | nname = natprogs() + fname + ".nat" 17 | dname = natprogs() + fname + ".tsv" 18 | print('consulted:',nname,dname) 19 | n = NeuralNatlog(file_name=nname,db_name=dname) 20 | 21 | n.repl() 22 | -------------------------------------------------------------------------------- /natlog/parser.py: -------------------------------------------------------------------------------- 1 | from operator import * 2 | 3 | from .scanner import Scanner, VarNum 4 | 5 | trace = 0 6 | 7 | 8 | def rp(LP): 9 | return ')' if LP == '(' else ']' 10 | 11 | 12 | def from_none(LP, w): 13 | if w is None: 14 | if LP == '(': return () 15 | if LP == '[': return [] 16 | return w 17 | 18 | 19 | # simple LL(1) recursive descent Parser 20 | # supporting parenthesized tuples 21 | # scanned from whitespace separated tokens 22 | class Parser: 23 | def __init__(self, words): 24 | words = list(reversed(words)) 25 | self.words = words 26 | 27 | def get(self): 28 | if self.words: 29 | w = self.words.pop() 30 | return w 31 | else: 32 | return None 33 | 34 | def peek(self): 35 | if self.words: 36 | w = self.words[-1] 37 | return w 38 | else: 39 | return None 40 | 41 | def par(self, LP, RP): 42 | w = self.get() 43 | assert w == LP 44 | return self.pars(LP, RP) 45 | 46 | def pars(self, LP, RP): 47 | w = self.peek() 48 | if w == RP: 49 | self.get() 50 | return from_none(LP, None) 51 | elif w == LP: 52 | t = self.par(LP, RP) 53 | ts = self.pars(LP, RP) 54 | ts = from_none(LP, ts) 55 | return (t, ts) if LP == '(' else [t] + ts 56 | elif w == '(' or w == '[' and w != LP: 57 | t = self.par(w, rp(w)) 58 | ts = self.pars(LP, RP) 59 | ts = from_none(LP, ts) 60 | return (t, ts) if LP == '(' else [t] + ts 61 | else: 62 | self.get() 63 | ts = self.pars(LP, RP) 64 | ts = from_none(LP, ts) 65 | return (w, ts) if LP == '(' else [w] + ts 66 | 67 | def run(self): 68 | ls = sum(1 for x in self.words if x == '(') 69 | rs = sum(1 for x in self.words if x == ')') 70 | assert ls == rs 71 | ls = sum(1 for x in self.words if x == '[') 72 | rs = sum(1 for x in self.words if x == ']') 73 | assert ls == rs 74 | t = self.par('(', ')') 75 | t = to_tuple(t) 76 | if trace: print("PARSED", t) 77 | return t 78 | 79 | 80 | # extracts a Prolog-like clause made of tuples 81 | def to_clause(xs): 82 | if not (':' in xs or '=>' in xs): return xs, () 83 | if "=>" in xs: 84 | sep = '=>' 85 | else: 86 | sep = ':' 87 | neck = xs.index(sep) 88 | head = xs[:neck] 89 | body = xs[neck + 1:] 90 | 91 | if sep == ':': 92 | if ',' not in xs: 93 | res = head, (body,) 94 | else: 95 | bss = [] 96 | bs = [] 97 | for b in body: 98 | if b == ',': 99 | bss.append(tuple(bs)) 100 | bs = [] 101 | else: 102 | bs.append(b) 103 | bss.append(tuple(bs)) 104 | 105 | res = head, tuple(bss) 106 | return res 107 | if sep == '=>': 108 | n0 = 100 109 | n = n0 110 | if ',' not in xs: 111 | vs = (VarNum(n), VarNum(n + 1)) 112 | res = head + vs, (body + vs,) 113 | else: 114 | bss = [] 115 | bs = [] 116 | for b in body: 117 | if b == ',': 118 | vs = VarNum(n), VarNum(n + 1) 119 | n += 1 120 | bs = tuple(bs) + vs 121 | bss.append(bs) 122 | bs = [] 123 | else: 124 | bs.append(b) 125 | 126 | vs = VarNum(n), VarNum(n + 1) 127 | n += 1 128 | bs = tuple(bs) + vs 129 | bss.append(bs) 130 | head = head + (VarNum(n0), VarNum(n)) 131 | 132 | res = 
head, tuple(bss) 133 | return res 134 | 135 | 136 | # main exported Parser + Scanner 137 | def parse(text, gsyms=dict(), gixs=dict(), ground=False, rule=False): 138 | text = clean_comments(text) 139 | s = Scanner(text, gsyms=gsyms, gixs=gixs, ground=ground) 140 | for ws in s.run(): 141 | if not rule: ws = ('head_', ':') + ws 142 | ws = ("(",) + ws + (")",) 143 | p = Parser(ws) 144 | r = p.run() 145 | r = to_clause(r) 146 | if not rule: r = to_cons_list(r[1]) 147 | if not rule and ground: r = (r[0],) # db fact 148 | 149 | yield r, s.names 150 | 151 | 152 | def mparse(text, ground=False, rule=False): 153 | for r, ixs in parse(text, ground=ground, rule=rule): 154 | yield r 155 | 156 | 157 | # turns cons-like tuples into long tuples 158 | # do not change, deep recursion needed 159 | def to_tuple(xy): 160 | if xy is None or xy == (): 161 | return () 162 | elif isinstance(xy, list): 163 | return [to_tuple(x) for x in xy] 164 | elif not isinstance(xy, tuple): 165 | return xy 166 | else: # tuple 167 | x, y = xy 168 | t = to_tuple(x) 169 | ts = to_tuple(y) 170 | return (t,) + ts 171 | 172 | def from_cons_list_as_tuple(xs): 173 | return tuple(from_cons_list(xs)) 174 | 175 | 176 | def from_cons_list(xs): 177 | rs = [] 178 | while xs: 179 | x, xs = xs 180 | rs.append(x) 181 | return rs 182 | 183 | 184 | def to_cons_list(ts,end=()): 185 | gs = end 186 | for g in reversed(ts): 187 | gs = (g, gs) 188 | return gs 189 | 190 | def to_dif_list(ts,end): 191 | return to_cons_list(ts,end=end) 192 | 193 | def q(xs): 194 | rs = [] 195 | while xs: 196 | x, xs = xs 197 | rs.append(x) 198 | return rs 199 | 200 | 201 | def numlist(n, m): 202 | return to_cons_list(range(n, m)) 203 | 204 | 205 | def clean_comments(text): 206 | lines = text.split('\n') 207 | cleaned = [] 208 | for line in lines: 209 | parts = line.split("%") 210 | if len(parts) > 1: 211 | line = parts[0] 212 | cleaned.append(line) 213 | text = "\n".join(cleaned) 214 | #print('>>> ???',text) 215 | return text 216 | 217 | 218 | # tests 219 | 220 | def ptest(): 221 | text = """ 222 | app () Ys Ys. 223 | app (X Xs) Ys (X Zs) : 224 | app Xs Ys Zs. 225 | 226 | nrev () (). 227 | nrev (X Xs) Zs : nrev Xs Ys, app Ys (X) Zs. 228 | """ 229 | for c in mparse(text, ground=True): 230 | print(c) 231 | print('') 232 | for c in mparse(text, ground=False, rule=True): 233 | print(c) 234 | print('') 235 | ptest1() 236 | 237 | 238 | def ptest1(): 239 | xs = ('a', 0, 1, 2, ':', 'b', 0, ',', 'c', 0, 1, ',', 'd', 1, 2) 240 | print(to_clause(xs)) 241 | 242 | 243 | def ptest2(): 244 | ws = "( x y [ a ( b [ c 1 2 ] ) d ] ( xx yy ) )".split() 245 | # ws = "( 1 [ 2 3 4 ] 5 6 )".split() 246 | 247 | p = Parser(ws) 248 | print('WS:', ws) 249 | r = p.par('(', ')') 250 | print('R:', r) 251 | # return 252 | t = to_tuple(r) 253 | print('T:', t) 254 | print('WR:', p.words) 255 | print('RES:', Parser(ws).run()) 256 | 257 | 258 | def ptest3(): 259 | text = """ 260 | sent => a,noun,verb, @on, @a, place. 261 | 262 | noun => @cat. 263 | noun => @dog. 264 | 265 | verb => @sits. 266 | 267 | place => @mat. 268 | place => @bed. 269 | 270 | @ X (X Xs) Xs. 271 | 272 | goal Xs : sent Xs (). 273 | 274 | """ 275 | 276 | r = parse(text, ground=False, rule=True) 277 | print(list(r)) 278 | 279 | 280 | def ptest4(): 281 | r = parse('a [].') 282 | print(list(r)) 283 | 284 | 285 | def clean_test(): 286 | text = """ 287 | a b c % d e 288 | mmm nn pp 289 | xx yyyy % a % b 290 | 291 | % zzz zz z 292 | more 293 | 294 | % aaa 295 | 296 | boo. 
297 | 298 | """ 299 | print(text) 300 | print('-----') 301 | print(clean_comments(text)) 302 | 303 | 304 | if __name__ == '__main__': 305 | ptest4() 306 | clean_test() 307 | -------------------------------------------------------------------------------- /natlog/requirements.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /natlog/scanner.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | 4 | # wrapper around int to be used in data fields 5 | # as actual int works as the type of variables 6 | # for simplicity and efficiency 7 | 8 | 9 | class VarNum(int): 10 | """ 11 | def __hash__(self): 12 | return super().__hash__() 13 | 14 | def __eq__(self,other): 15 | return isinstance(other,VarNum) and int(self)==int(other) 16 | """ 17 | 18 | def __repr__(self): 19 | return "_" + str(int(self)) 20 | 21 | 22 | class Var: 23 | def __init__(self): 24 | self.val = None 25 | 26 | def bind(self, val, trail): 27 | if self==val: return 28 | self.val = val 29 | trail.append(self) 30 | 31 | def unbind(self): 32 | self.val = None 33 | 34 | def __repr__(self): 35 | v = deref(self) 36 | if isinstance(v, Var) and v.val is None: 37 | return "_" + str(id(v)) 38 | else: 39 | return repr(v) 40 | 41 | 42 | def deref(v): 43 | while isinstance(v, Var): 44 | if v.val is None: 45 | return v 46 | v = v.val 47 | return v 48 | 49 | 50 | class GVar(Var): 51 | def __repr__(self): 52 | v = deref(self) 53 | if isinstance(v, GVar) and v.val is None: 54 | return "&" + str(id(v)) 55 | else: 56 | return repr(v) 57 | 58 | 59 | def qtrim(s): 60 | return s[1:-1] 61 | 62 | 63 | class Scanner: 64 | def __init__(self, text, gsyms=dict(), gixs=dict(), ground=True): 65 | self.text = text 66 | self.varcount = 0 67 | self.initsyms() 68 | self.ground = ground 69 | self.gsyms = gsyms 70 | self.gixs = gixs 71 | self.Scanner = re.Scanner([ 72 | (r"[-+]?\d+\.\d+", lambda sc, tok: ("FLOAT", float(tok))), 73 | (r"[-+]?\d+", lambda sc, tok: ("INT", int(tok))), 74 | (r"[a-z]+[\w]*", lambda sc, tok: ("ID", tok)), 75 | (r"'[\w\s\-\.\/,%=!\+\(\)]+'", lambda sc, tok: ("ID", qtrim(tok))), 76 | (r"[_]+[\w]*", lambda sc, tok: ("VAR", self.sym(tok + self.ctr()))), 77 | (r"[A-Z_]+[\w]*", lambda sc, tok: ("VAR", self.sym(tok))), 78 | (r"[\&]+[\w]*", lambda sc, tok: ("GVAR", self.gsym(tok))), 79 | (r"[(]", lambda sc, tok: ("LPAR", tok)), 80 | (r"[)]", lambda sc, tok: ("RPAR", tok)), 81 | (r"[\[]", lambda sc, tok: ("LPAR_", tok)), 82 | (r"[\]]", lambda sc, tok: ("RPAR_", tok)), 83 | (r"[.?]", lambda sc, tok: ("END", self.newsyms())), 84 | (r":", lambda sc, tok: ("IF", tok)), 85 | (r"=>", lambda sc, tok: ("REW", tok)), 86 | (r"[,]", lambda sc, tok: ("AND", tok)), 87 | (r"~|``|`|\^|\$|#|@|%|;|<=|>=|//|==|\->|\+|\-|\*|/|=|<|>|!", lambda sc, tok: ("OP", tok)), 88 | # (r"[;]", lambda sc, tok: ("OR", tok)), 89 | (r"\s+", None), # None == skip tok. 90 | ]) 91 | 92 | def ctr(self): 93 | s = str(self.varcount) 94 | self.varcount += 1 95 | return s 96 | 97 | def initsyms(self): 98 | self.syms = dict() 99 | self.ixs = [] 100 | 101 | def newsyms(self): 102 | self.names = self.ixs 103 | self.initsyms() 104 | return "." 
105 | 106 | def sym(self, w): 107 | if self.ground: return w 108 | i = self.syms.get(w) 109 | if i is None: 110 | i = len(self.syms) 111 | self.syms[w] = i 112 | self.ixs.append(w) 113 | return VarNum(i) 114 | 115 | def gsym(self, w): 116 | if self.ground: return w 117 | v = self.gsyms.get(w) 118 | if v is None: 119 | v = GVar() 120 | self.gsyms[w] = v 121 | self.gixs[v] = w 122 | 123 | return v 124 | 125 | def run(self): 126 | toks, _ = self.Scanner.scan(self.text) 127 | ts = [] 128 | for (_, x) in toks: 129 | if x == '.': 130 | yield tuple(ts) 131 | ts = [] 132 | else: 133 | ts.append(x) 134 | 135 | 136 | # tests 137 | 138 | def stest(): 139 | sent = \ 140 | "(The ~ cat -42) (~ 'sits on' [the mat 0.42]). \n the ` Dog _barks . (` a `` b) and (`b `a) ." 141 | s = Scanner(sent, ground=False) 142 | print(list(s.run())) 143 | 144 | 145 | def gtest(): 146 | sent = """ 147 | sent => a,noun,verb, @ on a,place. 148 | 149 | noun => @ cat. 150 | noun => @ dog. 151 | 152 | verb => @sits. 153 | 154 | place => @ mat. 155 | place => @ bed. 156 | """ 157 | s = Scanner(sent, ground=False) 158 | print(list(s.run())) 159 | 160 | 161 | def ivtest(): 162 | sent = """ 163 | node 1 &C1. 164 | node 2 &C2. 165 | 166 | edge &C1 &C2. 167 | """ 168 | s = Scanner(sent, ground=False) 169 | print(list(s.run())) 170 | print('GVAR names:', s.gixs) 171 | 172 | 173 | if __name__ == '__main__': 174 | # stest() 175 | # gtest() 176 | ivtest() 177 | -------------------------------------------------------------------------------- /natlog/tdb.py: -------------------------------------------------------------------------------- 1 | from .db import * 2 | 3 | 4 | def const_of(t): 5 | def scan(t): 6 | if isinstance(t, Var): 7 | pass 8 | elif isinstance(t, tuple): 9 | for x in t: 10 | for c in scan(x): 11 | yield c 12 | else: 13 | yield t 14 | 15 | qs = set(scan(t)) 16 | return qs 17 | 18 | 19 | class Tdb(Db): 20 | """ 21 | specializes to db derived from text 22 | assumes .txt file with one sentence per line 23 | ending with '.' or '?' 
and 24 | white space separated words 25 | """ 26 | 27 | def __init__(self): 28 | super().__init__() 29 | self.index_source = const_of 30 | 31 | 32 | def digest(self, text): 33 | sents=text.split('\n') 34 | for sent in sents: 35 | ws=sent.split(' ') 36 | if len(ws) < 2: continue 37 | ws=[w for w in ws if w.isalpha()] 38 | self.add_clause(ws) 39 | 40 | def load_txt(self, fname): 41 | """ assuming text tokenized, one sentence per line, 42 | single white space separated 43 | """ 44 | with open(fname) as f: 45 | lines = f.read().split('\n') 46 | for line in lines: 47 | if len(line) < 2: continue 48 | line = line.strip() 49 | ws = line.split(' ') 50 | ws = [w for w in ws if w.isalpha()] 51 | self.add_clause(('txt', tuple(ws),)) 52 | 53 | 54 | def unify_with_fact(self, h, trail): 55 | _txt, key, val = h 56 | ms = self.ground_match_of(key) 57 | for i in ms: 58 | h0 = self.css[i] 59 | u = unify(val, h0[1], trail) 60 | yield u 61 | -------------------------------------------------------------------------------- /natlog/test/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ptarau/natlog/156f6df660459b03b79d3deec6dc8a5437d78f41/natlog/test/__init__.py -------------------------------------------------------------------------------- /natlog/test/benchmark.py: -------------------------------------------------------------------------------- 1 | import timeit 2 | import sys 3 | from natlog.natlog import * 4 | 5 | sys.setrecursionlimit(1 << 28) 6 | 7 | def time_of(f, x, times=1): 8 | res = None 9 | start_time = timeit.default_timer() 10 | for i in range(times): 11 | res = f(x) 12 | if i == times - 1: print(x) 13 | end_time = timeit.default_timer() 14 | print(x, '==>', 'res = ', res) 15 | total_time=end_time - start_time 16 | print('time = ', total_time) 17 | print('') 18 | return total_time 19 | 20 | 21 | my_text = """ 22 | app () Ys Ys. 23 | app (X Xs) Ys (X Zs) : 24 | app Xs Ys Zs. 25 | 26 | nrev () (). 27 | nrev (X Xs) Zs : nrev Xs Ys, app Ys (X ()) Zs. 28 | 29 | goal N Ys : 30 | `numlist 0 N Xs, 31 | nrev Xs Ys. 
32 | """ 33 | 34 | 35 | def bm1(): 36 | n = Natlog(text=my_text) 37 | print('NREV STARTING:') 38 | n.query("goal 10 L?") 39 | time_of(n.count, "goal 16 L?", times=512) 40 | time_of(n.count, "goal 32 L?", times=256) 41 | time_of(n.count, "goal 64 L?", times=64) 42 | t=time_of(n.count, "goal 128 L?", times=32) 43 | lips = 128*129//2*32/t 44 | print('LIPS:',lips) 45 | #time_of(n.count, "goal 256 L?", times=1) 46 | #time_of(n.count, "goal 512 L?", times=1) 47 | #time_of(n.count, "goal 1024 L?", times=1) 48 | print('') 49 | 50 | 51 | def bm(): 52 | print('N-QUEENS STARTING:') 53 | n = Natlog(file_name=natprogs()+"queens.nat") 54 | time_of(n.count, "goal8 Queens?", times=9) 55 | time_of(n.count, "goal9 Queens?") 56 | time_of(n.count, "goal10 Queens?") 57 | # return # runs, but quite a bit longer 58 | time_of(n.count, "goal11 Queens?") 59 | time_of(n.count, "goal12 Queens?") 60 | 61 | 62 | def prof(): 63 | import cProfile 64 | p = cProfile.Profile() 65 | 66 | n = Natlog(file_name=natprogs()+"queens.nat") 67 | 68 | def fun(): 69 | n.count('goal10 L?') 70 | 71 | print('PROFILING STARTED') 72 | p.runcall(fun) 73 | p.print_stats(sort=1) 74 | 75 | 76 | def run_all(): 77 | bm1() 78 | bm() 79 | prof() 80 | 81 | if __name__ == "__main__": 82 | run_all() 83 | 84 | 85 | -------------------------------------------------------------------------------- /natlog/test/tests.py: -------------------------------------------------------------------------------- 1 | from natlog.natlog import * 2 | from natlog.unify import * 3 | from natlog.neural_natlog import * 4 | from natlog.textual_natlog import * 5 | 6 | NATPROGS = natprogs() 7 | 8 | my_text = """ 9 | app () Ys Ys. 10 | app (X Xs) Ys (X Zs) : 11 | app Xs Ys Zs. 12 | 13 | nrev () (). 14 | nrev (X Xs) Zs : nrev Xs Ys, app Ys (X ()) Zs. 15 | 16 | goal N N : 17 | `numlist 0 N Xs, 18 | nrev Xs Ys. 19 | """ 20 | 21 | 22 | def test_generators(): 23 | prog = """ 24 | good 'l'. 25 | good 'o'. 26 | goal X : ``iter hello X, good X. 27 | goal X : ``range 1000 1005 X. 28 | """ 29 | n = Natlog(text=prog) 30 | for answer in n.solve("goal R?"): 31 | print(answer) 32 | 33 | 34 | def test_answer_stream(): 35 | prog = """ 36 | perm () (). 37 | perm (X Xs) Zs : perm Xs Ys, ins X Ys Zs. 38 | 39 | ins X Xs (X Xs). 40 | ins X (Y Xs) (Y Ys) : ins X Xs Ys. 41 | """ 42 | n = Natlog(text=prog) 43 | for answer in n.solve("perm (a (b (c ()))) P?"): 44 | print(answer) 45 | 46 | 47 | def yield_test(): 48 | prog = """ 49 | worm : ^o, worm. 
50 | """ 51 | n = Natlog(text=prog) 52 | for i, answer in enumerate(n.solve("worm ?")): 53 | print(answer[0], end='') 54 | if i > 42: break 55 | print('') 56 | 57 | 58 | # testing with string text 59 | def t1(): 60 | n = Natlog(text=my_text) 61 | n.query("nrev (a (b (c (d ())))) R ?") 62 | n.query("goal 10 L?") 63 | 64 | 65 | # testing with some .nat files 66 | 67 | def t2(): 68 | n = Natlog(file_name=NATPROGS + "tc.nat") 69 | print(n) 70 | n.query("tc Who is animal ?") 71 | # n.query("tc Who is What ?") 72 | 73 | 74 | def t4(): 75 | n = Natlog(file_name=NATPROGS + "perm.nat") 76 | n.query("perm (1 (2 (3 ()))) Ps?") 77 | 78 | 79 | def t3(): 80 | n = Natlog(file_name=NATPROGS + "arith.nat") 81 | print(n) 82 | n.query("goal R ?") 83 | 84 | 85 | # longer output: 8 queens 86 | def t5(): 87 | n = Natlog(file_name=NATPROGS + "queens.nat") 88 | # print(n) 89 | n.query("goal8 Queens?") 90 | # n.repl() 91 | 92 | 93 | def t6(): 94 | n = Natlog(file_name=NATPROGS + "family.nat", with_lib=NATPROGS + "lib.nat") 95 | # print(n) 96 | n.query("grand parent of 'Adam' GP ?") 97 | 98 | 99 | def t7(): 100 | n = Natlog(file_name=NATPROGS + "family.nat", with_lib=NATPROGS + "lib.nat") 101 | n.query("cousin of X B?") 102 | 103 | 104 | def t8(): 105 | n = Natlog(file_name=NATPROGS + "lib.nat") 106 | n.query('`numlist 1 5 Xs, findall X (member X Xs) Ys.') 107 | # n.repl() 108 | 109 | 110 | def t9(): 111 | n = Natlog(file_name=NATPROGS + "pro.nat") 112 | n.repl() 113 | 114 | 115 | def t10(): 116 | print('it takes a while to start') 117 | n = Natlog(file_name=NATPROGS + "sudoku4.nat", with_lib=NATPROGS + "lib.nat") 118 | # n.repl() 119 | n.query("goal Xss, nl, member Xs Xss, tuple Xs T, writeln T, fail?") 120 | 121 | 122 | def fam_repl(): 123 | n = Natlog(file_name=NATPROGS + "family.nat", with_lib=LIB) 124 | print('Enter some queries_text!') 125 | n.repl() 126 | 127 | 128 | def lib(): 129 | n = Natlog(file_name=NATPROGS + "lib.nat") 130 | n.repl() 131 | 132 | 133 | def loop(): 134 | n = Natlog(file_name=NATPROGS + "loop.nat") 135 | print(n) 136 | n.query("goal X?") 137 | 138 | 139 | def db_test(): 140 | nd = Natlog( 141 | file_name=NATPROGS + "dbtc.nat", 142 | db_name=NATPROGS + "db.nat") 143 | print('RULES') 144 | print(nd) 145 | print('DB FACTS') 146 | print(nd.db) 147 | print('QUERY:') 148 | nd.query("tc Who is_a animal ?") 149 | # nd.repl() 150 | 151 | 152 | def tdb_test(): 153 | td = TextualNatlog( 154 | file_name=NATPROGS + "story.nat", 155 | db_name=NATPROGS + "story.txt") 156 | print('RULES') 157 | print(td) 158 | print('DB FACTS') 159 | print(td.db) 160 | print('QUERY:') 161 | td.query("~txt ('Alice' 'Bob') X?") 162 | # td.repl() 163 | 164 | 165 | def ndb_test(): 166 | nd = NeuralNatlog(file_name=NATPROGS + "dbtc.nat", db_name=NATPROGS + "db.nat") 167 | print('RULES') 168 | print(nd) 169 | print('DB FACTS') 170 | print(nd.db) 171 | nd.query("tc Who is_a animal ?") 172 | 173 | 174 | def db_chem(): 175 | nd = Natlog( 176 | file_name=NATPROGS + "elements.nat", 177 | db_name=NATPROGS + "elements.tsv" 178 | ) 179 | print('RULES') 180 | print(nd) 181 | # print('DB FACTS');print(nd.db) 182 | print('SIZE:', nd.db.size(), 'LEN:', len(nd.db.css[0])) 183 | nd.query("an_el Num Element ?") 184 | nd.query("gases Num Element ?") 185 | 186 | 187 | def ndb_chem(): 188 | nd = NeuralNatlog( 189 | file_name=NATPROGS + "elements.nat", 190 | db_name=NATPROGS + "elements.tsv" 191 | ) 192 | print('RULES') 193 | print(nd) 194 | print('DB FACTS') 195 | print(nd.db) 196 | nd.query("gases Num Element ?") 197 | 198 | 199 | def 
py_test(): 200 | nd = Natlog(file_name=NATPROGS + "py_call.nat") 201 | print('RULES') 202 | # print(nd) 203 | nd.query("goal X?") 204 | 205 | 206 | def py_test1(): 207 | nd = Natlog(file_name=NATPROGS + "py_call1.nat") 208 | print('RULES') 209 | # print(nd) 210 | nd.query("goal X?") 211 | 212 | 213 | def dtest1(): 214 | c1 = ('a', 1, 'car', 'a') 215 | c2 = ('a', 2, 'horse', 'aa') 216 | c3 = ('b', 1, 'horse', 'b') 217 | c4 = ('b', 2, 'car', 'bb') 218 | 219 | g1 = ('a', Var(), Var(), Var()) 220 | g2 = (Var(), Var(), 'car', Var()) 221 | g3 = (Var(), Var(), Var(), Var()) 222 | 223 | print(c1, '\n<-const:', list(path_of(c1))) 224 | d = Db() 225 | for cs in [c1, c2, c3, c4]: 226 | d.add_clause(cs) 227 | print('\nindex') 228 | for xv in d.index.items(): 229 | print(xv) 230 | 231 | print('\ncss') 232 | for cs in d.css: 233 | print(cs) 234 | print('Gmatch', g1, list(d.ground_match_of(g1))) 235 | print('Vmatch', g1, list(d.match_of(g1))) 236 | print('Gmatch', g2, list(d.ground_match_of(g2))) 237 | print('Vmatch', g2, list(d.match_of(g2))) 238 | print('Gmatch', g3, list(d.ground_match_of(g3))) 239 | print('Vmatch', g3, list(d.match_of(g3))) 240 | 241 | 242 | # Db built form text 243 | def dtest(): 244 | text = """ 245 | John has (a car). 246 | Mary has (a bike). 247 | Mary is (a student). 248 | John is (a pilot). 249 | """ 250 | print(text) 251 | d = Db() 252 | d.digest(text) 253 | print(d) 254 | print('') 255 | query = "Who has (a What)?" 256 | d.ask(query) 257 | 258 | query = "Who is (a pilot)?" 259 | d.ask(query) 260 | 261 | query = "'Mary' is What?" 262 | d.ask(query) 263 | 264 | query = "'John' is (a What)?" 265 | d.ask(query) 266 | 267 | query = "Who is What?" 268 | d.ask(query) 269 | 270 | 271 | # Db from a .nat file 272 | def dtestf(): 273 | fname = NATPROGS + 'db.tsv' 274 | d = Db() 275 | d.load(fname) 276 | print(d) 277 | print('LOADED:', fname) 278 | d.ask("Who is mammal?") 279 | 280 | 281 | # Db from a json file 282 | def dtestj(): 283 | fname = NATPROGS + 'db' 284 | jname = fname + '.json' 285 | nname = fname + '.nat' 286 | d = Db() 287 | d.load(nname) 288 | d.save(jname) 289 | d = Db() 290 | d.load(jname) 291 | # print(d) 292 | print('LOADED:', jname) 293 | print("") 294 | query = "Who is What?" 295 | d.ask(query) 296 | 297 | 298 | def big_db(): 299 | prog = """ 300 | quest X Y : ~ (text_term (give X Y)) ? 301 | """ 302 | n = Natlog(text=prog, db_name=NATPROGS + 'facts.nat') 303 | # print(n) 304 | print('SIZE:', n.db.size(), 'LEN:', len(n.db.css[0])) 305 | # print(n.db.css[0]) 306 | n.query("quest X Y?") 307 | # n.repl() 308 | 309 | 310 | def big_ndb(): 311 | prog = """ 312 | quest X Y : ~ (text_term (give X Y)) ? 
313 | """ 314 | n = NeuralNatlog(text=prog, db_name=NATPROGS + 'facts.nat') 315 | # print(n) 316 | print('SIZE:', n.db.size(), 'LEN:', len(n.db.css[0])) 317 | # print(n.db.css[0]) 318 | n.query("quest X Y?") 319 | # n.repl() 320 | 321 | 322 | def libtest(): 323 | n = Natlog(file_name=NATPROGS + 'emu.nat', with_lib=NATPROGS + "lib.nat") 324 | n.repl() 325 | 326 | 327 | def gramtest(): 328 | n = Natlog(file_name=NATPROGS + 'dall_e.nat') 329 | print(n) 330 | n.query("go.") 331 | # n.repl() 332 | 333 | 334 | def meta_test(): 335 | n = Natlog(file_name=NATPROGS + 'meta.nat') 336 | n.query("metaint ((go R) ()) ?") 337 | # n.repl() 338 | 339 | 340 | def ivtest1(): 341 | n = Natlog(file_name=NATPROGS + 'gcol.nat', with_lib=NATPROGS + "lib.nat") 342 | # print(n) 343 | n.query("go Colors?") 344 | # n.repl() 345 | 346 | 347 | def ivtest(): 348 | n = Natlog(file_name=NATPROGS + 'interclausal.nat', with_lib=NATPROGS + "lib.nat") 349 | n.query("go X?") 350 | # n.repl() 351 | 352 | 353 | def go(): 354 | ts = [dtest1, 355 | dtest, 356 | dtestf, 357 | test_generators, 358 | test_answer_stream, 359 | t1, 360 | t2, 361 | t3, 362 | t4, 363 | t5, 364 | t6, 365 | t7, 366 | db_test, db_chem, py_test, py_test1, 367 | big_db, 368 | gramtest, 369 | meta_test, 370 | ivtest, 371 | tdb_test 372 | ] 373 | for t in ts: 374 | print('\n\n', '*' * 20, t.__name__, '*' * 20, '\n') 375 | t() 376 | 377 | 378 | def runtest(): 379 | xs = natrun(natprogs() + 'family', 'brother of X Y.') 380 | print(xs) 381 | 382 | 383 | if __name__ == "__main__": 384 | pass 385 | go() 386 | # ndb_test() 387 | # libtest() 388 | # gramtest() 389 | # meta_test() 390 | # ivtest() 391 | # lconsult('gram') 392 | # runtest() 393 | # tdb_test() 394 | -------------------------------------------------------------------------------- /natlog/textual_natlog.py: -------------------------------------------------------------------------------- 1 | from .natlog import Natlog, natprogs 2 | from .tdb import * 3 | 4 | 5 | class TextualNatlog(Natlog): 6 | """ 7 | overrrides Natlog's database constructor 8 | to use an indexed text seen as a set of ground db facts 9 | """ 10 | 11 | def db_init(self): 12 | self.db = Tdb() 13 | 14 | 15 | def xconsult(fname): 16 | nname = natprogs() + fname + ".nat" 17 | dname = natprogs() + fname + ".txt" 18 | print('consulted:', nname, dname) 19 | n = TextualNatlog(file_name=nname, db_name=dname) 20 | n.repl() 21 | -------------------------------------------------------------------------------- /natlog/tools.py: -------------------------------------------------------------------------------- 1 | from .unify import Var, GVar, deref 2 | 3 | 4 | def copy_term(t0): 5 | def ct(t): 6 | t = deref(t) 7 | if isinstance(t, GVar): 8 | return t 9 | if isinstance(t, Var): 10 | return d.setdefault(t, Var()) 11 | if not isinstance(t, tuple): 12 | return t 13 | return tuple(map(ct, t)) 14 | 15 | d = dict() 16 | # print('CT <<<',t0) 17 | r = ct(t0) 18 | # print('CT >>>', r) 19 | return r 20 | 21 | 22 | def arg(x, i): 23 | return x[i] 24 | 25 | 26 | def setarg(x, i, v): 27 | x[i] = v 28 | 29 | 30 | def crop(a, l1, l2): 31 | return a[l1:l2] 32 | 33 | 34 | def to_dict(kvs): 35 | return dict((kv[0], kv[1]) for kv in kvs) 36 | 37 | 38 | def from_dict(d): 39 | return tuple(d.items()) 40 | 41 | 42 | def in_dict(d): 43 | yield from d.items() 44 | 45 | 46 | def meth_call(o, f, xs): 47 | m = getattr(o, f) 48 | return m(*xs) 49 | 50 | 51 | def write(args): 52 | print(*args, end=' ') 53 | 54 | 55 | def nl(): 56 | print() 57 | 58 | 59 | def writeln(args): 60 | 
write(args) 61 | nl() 62 | 63 | 64 | def has_type(x, tname): 65 | t = eval(tname), 66 | return int(isinstance(x, t)) 67 | 68 | 69 | # globals vars for "." and "?" escapes 70 | # refer to them as $eos Dot,$eoq Qest. 71 | 72 | eos = '.' 73 | eoq = '?' 74 | -------------------------------------------------------------------------------- /natlog/unify.py: -------------------------------------------------------------------------------- 1 | from .scanner import VarNum, Var, GVar, deref 2 | 3 | 4 | def unify(x, y, trail, occ=False): 5 | ustack = [] 6 | ustack.append(y) 7 | ustack.append(x) 8 | while ustack: 9 | x1 = deref(ustack.pop()) 10 | x2 = deref(ustack.pop()) 11 | 12 | if isinstance(x1, GVar) and isinstance(x2, Var): 13 | x2.bind(x1, trail) 14 | elif isinstance(x2, GVar) and isinstance(x1, Var): 15 | x1.bind(x2, trail) 16 | elif isinstance(x1, Var): 17 | if occ and occurs(x1, x2): 18 | return False 19 | x1.bind(x2, trail) 20 | elif isinstance(x2, Var): 21 | if occ and occurs(x2, x1): 22 | return False 23 | x2.bind(x1, trail) 24 | elif isinstance(x2, tuple) and isinstance(x1, tuple): 25 | arity = len(x1) 26 | if len(x2) != arity: 27 | return False 28 | for i in range(arity - 1, -1, -1): 29 | ustack.append(x2[i]) 30 | ustack.append(x1[i]) 31 | elif x1 == x2 and type(x1) == type(x2): 32 | continue 33 | else: 34 | return False 35 | return True 36 | 37 | 38 | def new_var(t, d): 39 | v = d.get(t, None) 40 | if v is None: 41 | v = Var() 42 | d[t] = v 43 | return v 44 | 45 | def lazy_unify(x, y, trail, d): 46 | ustack = [] 47 | ustack.append(y) 48 | ustack.append(x) 49 | while ustack: 50 | x1 = deref(ustack.pop()) 51 | x2 = deref(ustack.pop()) 52 | 53 | if isinstance(x1, GVar) and isinstance(x2, Var): 54 | x2.bind(x1, trail) 55 | elif isinstance(x2, GVar) and isinstance(x1, Var): 56 | x1.bind(x2, trail) 57 | elif isinstance(x1, Var): 58 | x1.bind(x2, trail) 59 | elif isinstance(x2, Var): 60 | x1 = activate(x1, d) 61 | x2.bind(x1, trail) 62 | elif type(x1) != type(x2): 63 | # this should be before next 64 | return False 65 | elif isinstance(x2, tuple): # and isinstance(x1, tuple): 66 | arity = len(x2) 67 | if len(x1) != arity: 68 | return False 69 | for i in range(arity - 1, -1, -1): 70 | ustack.append(x2[i]) 71 | ustack.append(activate(x1[i], d)) 72 | elif isinstance(x1, VarNum): 73 | # conflating int and VarNum not possible now 74 | x1 = new_var(x1, d) 75 | x1.bind(x2, trail) 76 | elif x1 == x2: 77 | # not tuples, should be other objects 78 | continue 79 | else: 80 | return False 81 | return True 82 | 83 | 84 | def activate(t, d): 85 | if isinstance(t, VarNum): 86 | return new_var(t, d) 87 | elif not isinstance(t, tuple): 88 | return t 89 | else: 90 | return tuple(activate(x, d) for x in t) 91 | 92 | 93 | def extractTerm(t): 94 | t = deref(t) 95 | if isinstance(t, Var): 96 | return t 97 | elif not isinstance(t, tuple): 98 | return t 99 | else: 100 | return tuple(map(extractTerm, t)) 101 | 102 | 103 | def occurs(x0, t0): 104 | def occ(t): 105 | t = deref(t) 106 | if x == t: 107 | return True 108 | if not isinstance(t, tuple): 109 | return False 110 | return any(map(occ, t)) 111 | 112 | x = deref(x0) 113 | return occ(t0) 114 | 115 | def test_unify(): 116 | a, b, c, d, e, f, g = "abcdefg" 117 | x, y, z = VarNum(0), VarNum(1), VarNum(2) 118 | t = (f, a, (g, (b, x, (e, b, c, y)), d)) 119 | for p in path_of(t): print('PATH:', p) 120 | 121 | c = activate(t, dict()) 122 | 123 | print('ORIG:', t) 124 | print('COPY:', c) 125 | 126 | z = activate(z, dict()) 127 | print('Z:', z) 128 | 129 | t1 = (f, z) 130 | 
t2 = z 131 | 132 | print('unif occ:', unify(t1, t2, [], occ=True)) 133 | print('unif nocc:', unify(t1, t2, [], occ=False)) 134 | 135 | 136 | if __name__ == "__main__": 137 | test_unify() 138 | -------------------------------------------------------------------------------- /run/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ptarau/natlog/156f6df660459b03b79d3deec6dc8a5437d78f41/run/__init__.py -------------------------------------------------------------------------------- /run/app.sh: -------------------------------------------------------------------------------- 1 | streamlit run ../natlog/app/natapp.py 2 | -------------------------------------------------------------------------------- /run/bm.py: -------------------------------------------------------------------------------- 1 | from natlog.test.benchmark import * 2 | 3 | if __name__ == "__main__": 4 | run_all() 5 | -------------------------------------------------------------------------------- /run/bm.sh: -------------------------------------------------------------------------------- 1 | # echo make sure natlog is installed as a pypy package 2 | # echo for taht, type: pypy -m pip install -e . 3 | time pypy3 -O bm.py 4 | -------------------------------------------------------------------------------- /run/t0.txt: -------------------------------------------------------------------------------- 1 | QUERY: bug 0? 2 | ACT<<<: (('bug', 0), ()) {} 3 | ACT<<<: ('bug', 0) {} 4 | ACT<<<: bug {} 5 | ACT>>>: bug {} 6 | ACT<<<: 0 {} 7 | ACT>>>: 0 {} 8 | ACT>>>: ('bug', 0) {} 9 | ACT<<<: () {} 10 | ACT>>>: () {} 11 | ACT>>>: (('bug', 0), ()) {} 12 | ;;; ('bug', 0) 13 | BEFORE: {} ('bug', _0) ('bug', 0) 14 | DICT: {} ('bug', _0) ('bug', 0) 15 | ACT<<<: ('#', 'print', _0) {} 16 | ACT<<<: # {} 17 | ACT>>>: # {} 18 | ACT<<<: print {} 19 | ACT>>>: print {} 20 | ACT<<<: _0 {} 21 | ACT>>>: _140305444268736 {_0: _140305444268736} 22 | ACT>>>: ('#', 'print', _140305444268736) {_0: _140305444268736} 23 | GS: (('#', 'print', _140305444268736), ()) 24 | _140305444268736 25 | ANSWER: {} 26 | BEFORE: {} ('t14',) ('bug', 0) 27 | -------------------------------------------------------------------------------- /run/t1.txt: -------------------------------------------------------------------------------- 1 | QUERY: bug 1? 
2 | ACT<<<: (('bug', 1), ()) {} 3 | ACT<<<: ('bug', 1) {} 4 | ACT<<<: bug {} 5 | ACT>>>: bug {} 6 | ACT<<<: 1 {} 7 | ACT>>>: 1 {} 8 | ACT>>>: ('bug', 1) {} 9 | ACT<<<: () {} 10 | ACT>>>: () {} 11 | ACT>>>: (('bug', 1), ()) {} 12 | ;;; ('bug', 1) 13 | BEFORE: {} ('bug', _0) ('bug', 1) 14 | ACT<<<: _0 {} 15 | ACT>>>: _140305712765008 {_0: _140305712765008} 16 | ACT<<<: bug {_0: _140305712765008} 17 | ACT>>>: bug {_0: _140305712765008} 18 | DICT: {_0: 1} ('bug', _0) ('bug', 1) 19 | ACT<<<: ('#', 'print', _0) {_0: 1} 20 | ACT<<<: # {_0: 1} 21 | ACT>>>: # {_0: 1} 22 | ACT<<<: print {_0: 1} 23 | ACT>>>: print {_0: 1} 24 | ACT<<<: _0 {_0: 1} 25 | ACT>>>: 1 {_0: 1} 26 | ACT>>>: ('#', 'print', 1) {_0: 1} 27 | GS: (('#', 'print', 1), ()) 28 | 1 29 | ANSWER: {} 30 | BEFORE: {} ('t14',) ('bug', 1) 31 | -------------------------------------------------------------------------------- /run/tests.py: -------------------------------------------------------------------------------- 1 | from natlog.test.tests import * 2 | 3 | if __name__ == "__main__": 4 | libtest() 5 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | import setuptools 3 | 4 | import natlog 5 | 6 | #with open('natlog/requirements.txt') as f: 7 | # required = f.read().splitlines() 8 | required = [] 9 | with open("README.md", "r") as f: 10 | long_description = f.read() 11 | 12 | version = natlog.__version__ 13 | setup(name='natlog', 14 | version=version, 15 | description='Prolog-like interpreter and tuple store', 16 | long_description=long_description, 17 | long_description_content_type='text/markdown', 18 | url='https://github.com/ptarau/natlog.git', 19 | author='Paul Tarau', 20 | author_email='paul.tarau@gmail.com', 21 | license='Apache', 22 | packages=setuptools.find_packages(), 23 | package_data={'natlog': [ 24 | 'natlog/requirements.txt', 25 | 'natprogs/*.nat', 26 | 'natprogs/*.tsv', 27 | 'natprogs/*.pro', 28 | 'natprogs/*.json' 29 | ] 30 | }, 31 | include_package_data=True, 32 | install_requires=required, 33 | zip_safe=False 34 | ) 35 | -------------------------------------------------------------------------------- /softlog/requirements.txt: -------------------------------------------------------------------------------- 1 | natlog 2 | sentence_store 3 | 4 | -------------------------------------------------------------------------------- /softlog/softdb.py: -------------------------------------------------------------------------------- 1 | from natlog.db import * 2 | from sentence_store.main import Embedder 3 | 4 | 5 | class SoftDB(Db): 6 | """ 7 | specializes to db derived from text 8 | """ 9 | 10 | def __repr__(self): 11 | return 'SoftDB' 12 | 13 | def initalize_store(self, cache_name): 14 | self.emb = Embedder(cache_name) 15 | self.emb.clear() 16 | self.abduced_clauses = dict() 17 | 18 | def digest(self, text): 19 | 20 | self.initalize_store(cache_name="soft_db_cache") 21 | self.emb.store_text(text, clean=False) 22 | 23 | def load_txt(self, doc_name, doc_type='txt'): 24 | # can be 'url', 'wikipage', 'txt', 'pdf' 25 | cache_name = "".join(c for c in doc_name if c.isalpha()) 26 | self.initalize_store(cache_name=cache_name) 27 | 28 | self.emb.store_doc(doc_type, doc_name, clean=True) 29 | 30 | def unify_with_fact(self, goal, trail): 31 | assert len(goal) == 4, goal 32 | # q = query goal to be matched 33 | # k = number of knns to be returned 34 | # d = minimum knn distance 35 | 
# v = variable to be unified with the matches 36 | q, k, d, v = goal 37 | d = float(d) / 100 38 | # _knn_pairs: pairs of the form i=sent index,r=distance 39 | _knn_pairs, answers = self.emb.knn_query(q, k) 40 | for sent, dist in answers: 41 | if dist <= d: 42 | self.abduced_clauses[(q, sent)] = dist 43 | yield unify(v, sent, trail) 44 | 45 | 46 | def test_softdb(): 47 | sents = [ 48 | "The cat sits on the mat.", 49 | "The dog barks at a cat.", 50 | "The dog barks at the moon.", 51 | "The pirate travels the oceans.", 52 | "The phone rings with a musical tone.", 53 | "The man watches the bright moon." 54 | ] 55 | v = Var() 56 | quest = ('Who barks out there', 3, 99, v) 57 | text = "\n".join(sents) 58 | sdb = SoftDB() 59 | sdb.digest(text) 60 | for _ in sdb.unify_with_fact(quest, []): 61 | # print(a, '-->', v) 62 | pass 63 | print('THE ABDUCED CLAUSES ARE:') 64 | for (h, b), r in sdb.abduced_clauses.items(): 65 | if b.endswith('.'): b = b[0:-1] 66 | print(f"'{h}' : '{b}'. % {r}") 67 | 68 | 69 | if __name__ == "__main__": 70 | test_softdb() 71 | 72 | """ 73 | Matches are found with knn against the fact base. 74 | A minimum threshold is needed for a match to be accepted. 75 | We use sentence-store as the backend. 76 | 77 | To remember the result as a Natlog program, 78 | each time a match t is found for head h, 79 | the clause h:-t is generated. 80 | 81 | That also gives a log of the interaction with the softdb. 82 | """ 83 | -------------------------------------------------------------------------------- /softlog/softlog.py: -------------------------------------------------------------------------------- 1 | from natlog.natlog import Natlog, natprogs 2 | from softdb import * 3 | 4 | 5 | class SoftNatlog(Natlog): 6 | """ 7 | overrides Natlog's database constructor 8 | to use semantic similarity when unifying with facts 9 | """ 10 | 11 | def db_init(self): 12 | self.db = SoftDB() 13 | 14 | def repl(self): 15 | """ 16 | read-eval-print-loop 17 | """ 18 | print("Type ENTER to quit.") 19 | while True: 20 | q = input('?- ') 21 | if not q: return 22 | try: 23 | self.query(q, in_repl=True) 24 | print('ABDUCED CLAUSES') 25 | for (h,b),r in self.db.abduced_clauses.items(): 26 | print(f"{h} : {b} % distance={r}") 27 | print() 28 | 29 | except Exception as e: 30 | print('EXCEPTION:', type(e).__name__, e.args) 31 | raise e 32 | 33 | 34 | def xconsult(fname): 35 | nname = natprogs() + fname + ".nat" 36 | dname = natprogs() + fname + ".txt" 37 | print('consulted:', nname, dname) 38 | n = SoftNatlog(file_name=nname, db_name=dname) 39 | n.repl() 40 | -------------------------------------------------------------------------------- /softlog/softprog.nat: -------------------------------------------------------------------------------- 1 | knn 3. 2 | threshold 70. 3 | 4 | quest Quest Answer: 5 | knn K, 6 | threshold D, 7 | ~ Quest K D Answer. 8 | 9 | go X : quest 'What happens if you do not know where you go' X? 10 | tempt X : quest 'What did Wilde say about temptation' X? 11 | alice X : quest 'What did Alice say about following advice' X?
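12 | 13 | % Usage sketch (mirrors softlog/softprog.py, which loads this program with ../docs/quotes.txt as its soft fact base): 14 | % n = SoftNatlog(file_name="softprog.nat", db_name='../docs/quotes.txt') 15 | % n.repl() then, at the ?- prompt, try a query such as: go X?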
16 | -------------------------------------------------------------------------------- /softlog/softprog.py: -------------------------------------------------------------------------------- 1 | from natlog.natlog import Natlog, natprogs 2 | from softlog import SoftNatlog 3 | 4 | 5 | def start(): 6 | nname = "softprog.nat" 7 | dname = '../docs/quotes.txt' 8 | 9 | n = SoftNatlog(file_name=nname, db_name=dname) 10 | n.repl() 11 | 12 | 13 | if __name__ == "__main__": 14 | start() 15 | 16 | # ?- quest 'What happens if you do not know where you go' X? 17 | # see softprog.nat for more 18 | -------------------------------------------------------------------------------- /upload.sh: -------------------------------------------------------------------------------- 1 | ./clean.sh 2 | python3 setup.py sdist 3 | python3 -m twine upload dist/* 4 | --------------------------------------------------------------------------------