├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── LICENSE.txt ├── README.md ├── docs ├── 0.bootstrap.js ├── 1.bootstrap.js ├── 2414d062704a789a55f4.module.wasm ├── 27165cf271ab18793319.module.wasm ├── 348268e5d1020bef0ddc.module.wasm ├── 47adc18454c2103ab4e3.module.wasm ├── 72781fd812bf9904c49f.module.wasm ├── 7acebe2d4953fb30d48b.module.wasm ├── 899d0917e928127d6651.module.wasm ├── a6104829f4332c103d4a.module.wasm ├── aafab6b59ffbb7de2ba6.module.wasm ├── b20b6cee0c4d9b8d1fa0.module.wasm ├── bf6707ca7f83163df97e.module.wasm ├── bootstrap.js ├── ce7c89fb119b238127ec.module.wasm ├── dabc5a981d33955a4a8e.module.wasm ├── dd54eb58002ccc760544.module.wasm └── index.html ├── examples ├── abox.ttl ├── abox2.ttl ├── deeptaxomy │ ├── abox.ttl │ ├── rdfs-subClassOf.n3 │ ├── test-dl-100.n3 │ ├── test-dl.ttl │ └── test-dt.n3 ├── rsp │ └── location_update_stream.nt ├── rules.n3 └── rules2.n3 ├── js ├── Cargo.toml ├── package-lock.json ├── roxi-js.iml ├── src │ ├── lib.rs │ └── utils.rs └── web │ ├── .travis.yml │ ├── LICENSE-APACHE │ ├── LICENSE-MIT │ ├── README.md │ ├── bootstrap.js │ ├── index.html │ ├── package-lock.json │ ├── package.json │ ├── src │ ├── index.js │ └── tabs │ │ ├── reasoning.js │ │ ├── reasoningAndQuery.js │ │ └── rsp.js │ └── webpack.config.js ├── lib ├── Cargo.toml ├── benches │ ├── bench.rs │ └── hierarchies.rs ├── flamegraph.svg ├── minimal.iml └── src │ ├── backwardchaining.rs │ ├── bindings.rs │ ├── csprite.rs │ ├── dred.rs │ ├── encoding.rs │ ├── imars_reasoner.rs │ ├── imars_triple.rs │ ├── imars_window.rs │ ├── lib.rs │ ├── observer.rs │ ├── parser.rs │ ├── parser │ ├── n3.pest │ └── n3rule_parser.rs │ ├── pipeline.rs │ ├── queryengine.rs │ ├── reasoner.rs │ ├── rsp.rs │ ├── rsp │ ├── r2r.rs │ ├── r2s.rs │ └── s2r.rs │ ├── ruleindex.rs │ ├── service_composition.rs │ ├── sparql.rs │ ├── time_window.rs │ ├── tripleindex.rs │ ├── triples.rs │ └── utils.rs ├── scripts └── buildWebsiteDocs.sh └── server ├── Cargo.toml ├── server.iml └── src └── main.rs /.gitignore: -------------------------------------------------------------------------------- 1 | js/web/dist 2 | js/web/node_modules 3 | js/web/.bin 4 | node_modules/ 5 | .idea/ 6 | /target 7 | .DS_Store 8 | .idea/ 9 | target/ 10 | js/pkg 11 | *.iml 12 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | 3 | members =[ 4 | "lib", 5 | "server", 6 | "js" 7 | ] 8 | [profile.bench] 9 | debug = true 10 | opt-level = 3 11 | 12 | #[profile.test] 13 | #opt-level = 3 -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright © 2022–now Pieter Bonte 4 | Ghent University – imec, Belgium 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in 14 | all copies or substantial portions of the Software. 
15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 22 | THE SOFTWARE. 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # RoXi 2 | 3 | RoXi provides a uniform framework for Reactive Reasoning applications, including: 4 | - Incremental maintenance 5 | - RDF Stream Processing 6 | - Temporal Reasoning (TODO) 7 | 8 | RoXi uses some of the internals of [oxigraph](https://github.com/oxigraph/oxigraph), including [spargebra](https://crates.io/crates/spargebra) and [OxRDF](https://crates.io/crates/oxrdf). 9 | 10 | 11 | RoXi can be included as a library, run in server mode through the CLI, or run in the browser using WebAssembly. 12 | You can try it out in your [own browser](https://pbonte.github.io/roxi/index.html)! 13 | 14 | RoXi supports Datalog reasoning using [N3](https://w3c.github.io/N3/spec/) syntax. The full N3 spec is not yet supported (this is work in progress); currently, rules with plain Datalog expressivity are supported. 15 | 16 | ## RoXi Server 17 | 18 | How to build and run RoXi in server mode: 19 | ``` 20 | cd server 21 | cargo build --release 22 | cd .. 23 | ./target/release/server --abox <abox_file> --tbox <tbox_file> 24 | ``` 25 | The following parameters can be defined: 26 | 1. `--abox`: file location of the ABox statements. Files in TTL format (.ttl) are supported. 27 | 2. `--tbox`: file location of the TBox statements. Files in TTL format (.ttl) and N3 Logic (.n3) are supported. 28 | 3. `--trace`: [optional] boolean for printing reasoning traces 29 | 30 | For example: 31 | ``` 32 | ./target/release/server --abox examples/abox.ttl --tbox examples/rules.n3 33 | ``` 34 | 35 | ## Using RoXi with Javascript/Typescript from NPM in your Node project 36 | 37 | You can find the Javascript bindings directly on [NPM](https://www.npmjs.com/package/roxi-js?activeTab=readme): 38 | `npm i roxi-js` 39 | 40 | See the examples below on how to use RoXi in JS mode. 41 | 42 | ## Building RoXi for Javascript/Typescript usage 43 | 44 | Make sure that you have `wasm-pack`, `cargo-generate` and `npm` installed. Instructions to install them can be found [here](https://rustwasm.github.io/book/game-of-life/setup.html). 45 | 46 | You can use RoXi both inside a browser and as a Node.js module. 47 | 48 | ### Using RoXi inside a browser 49 | 50 | ``` 51 | cd js 52 | wasm-pack build 53 | ``` 54 | 55 | A `pkg` folder will be generated containing the WebAssembly modules that can be used in the browser. You can install the package inside your application with `npm install --save-dev /path/to/roxi/js/pkg`. More information and a tutorial on using WebAssembly within webpages using webpack can be found [here](https://rustwasm.github.io/book/game-of-life/hello-world.html#putting-it-into-a-web-page). 56 | 57 | ### Using RoXi as a Node module 58 | 59 | ``` 60 | cd js 61 | wasm-pack build --target nodejs 62 | ``` 63 | You can install the package with `npm install --save-dev path/to/roxi/js/pkg`.
The package will be found in your dependencies as, 64 | ``` 65 | "dependencies": { 66 | "roxi-js": "file:../roxi/js/pkg" 67 | } 68 | ``` 69 | 70 | Go to package.json of your project and add: 71 | 72 | ``` 73 | "type": "module" 74 | ``` 75 | 76 | ## Examples of RoXi in JS/TS mode 77 | 78 | Example usage when using the static reasoner: 79 | 80 | ```javascript 81 | import {RoxiReasoner} from "roxi-js"; 82 | // create the reasoner 83 | const reasoner = RoxiReasoner.new(); 84 | // add ABox 85 | reasoner.add_abox(" ."); 86 | // Add rules 87 | reasoner.add_rules("@prefix test: .\n @prefix rdf: .\n {?s rdf:type test:SubClass. }=>{?s rdf:type test:SuperType.}"); 88 | // perform materialization through forward chaining 89 | reasoner.materialize(); 90 | // log a dump of the materialized abox 91 | console.log(reasoner.get_abox_dump()); 92 | 93 | ``` 94 | Example usage when using the RSP engine: 95 | 96 | ```javascript 97 | import {JSRSPEngine} from "roxi-js"; 98 | // callback function 99 | function callback(val) { 100 | console.log(val); 101 | } 102 | let width = 10; 103 | let slide = 2; 104 | let rules = "@prefix test: .\n @prefix rdf: .\n {?x test:isIn ?y. ?y test:isIn ?z. }=>{?x test:isIn ?z.}"; 105 | let abox = ""; 106 | let query = "Select * WHERE{ ?x ?y}"; 107 | // create the engine 108 | let rspEngine = JSRSPEngine.new(width,slide,rules,abox,query,callback); 109 | // add some data 110 | let event = " ."; 111 | let currentTimeStamp = 0; 112 | rspEngine.add(event, currentTimeStamp); 113 | ... 114 | let event = ... ; 115 | currentTimeStamp += 1; 116 | rspEngine.add(event, currentTimeStamp); 117 | 118 | ``` 119 | 120 | 121 | 122 | 123 | -------------------------------------------------------------------------------- /docs/2414d062704a789a55f4.module.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pbonte/roxi/85e29b484fc99798983499978fdae14eb2558e59/docs/2414d062704a789a55f4.module.wasm -------------------------------------------------------------------------------- /docs/27165cf271ab18793319.module.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pbonte/roxi/85e29b484fc99798983499978fdae14eb2558e59/docs/27165cf271ab18793319.module.wasm -------------------------------------------------------------------------------- /docs/348268e5d1020bef0ddc.module.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pbonte/roxi/85e29b484fc99798983499978fdae14eb2558e59/docs/348268e5d1020bef0ddc.module.wasm -------------------------------------------------------------------------------- /docs/47adc18454c2103ab4e3.module.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pbonte/roxi/85e29b484fc99798983499978fdae14eb2558e59/docs/47adc18454c2103ab4e3.module.wasm -------------------------------------------------------------------------------- /docs/72781fd812bf9904c49f.module.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pbonte/roxi/85e29b484fc99798983499978fdae14eb2558e59/docs/72781fd812bf9904c49f.module.wasm -------------------------------------------------------------------------------- /docs/7acebe2d4953fb30d48b.module.wasm: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/pbonte/roxi/85e29b484fc99798983499978fdae14eb2558e59/docs/7acebe2d4953fb30d48b.module.wasm -------------------------------------------------------------------------------- /docs/899d0917e928127d6651.module.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pbonte/roxi/85e29b484fc99798983499978fdae14eb2558e59/docs/899d0917e928127d6651.module.wasm -------------------------------------------------------------------------------- /docs/a6104829f4332c103d4a.module.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pbonte/roxi/85e29b484fc99798983499978fdae14eb2558e59/docs/a6104829f4332c103d4a.module.wasm -------------------------------------------------------------------------------- /docs/aafab6b59ffbb7de2ba6.module.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pbonte/roxi/85e29b484fc99798983499978fdae14eb2558e59/docs/aafab6b59ffbb7de2ba6.module.wasm -------------------------------------------------------------------------------- /docs/b20b6cee0c4d9b8d1fa0.module.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pbonte/roxi/85e29b484fc99798983499978fdae14eb2558e59/docs/b20b6cee0c4d9b8d1fa0.module.wasm -------------------------------------------------------------------------------- /docs/bf6707ca7f83163df97e.module.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pbonte/roxi/85e29b484fc99798983499978fdae14eb2558e59/docs/bf6707ca7f83163df97e.module.wasm -------------------------------------------------------------------------------- /docs/ce7c89fb119b238127ec.module.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pbonte/roxi/85e29b484fc99798983499978fdae14eb2558e59/docs/ce7c89fb119b238127ec.module.wasm -------------------------------------------------------------------------------- /docs/dabc5a981d33955a4a8e.module.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pbonte/roxi/85e29b484fc99798983499978fdae14eb2558e59/docs/dabc5a981d33955a4a8e.module.wasm -------------------------------------------------------------------------------- /docs/dd54eb58002ccc760544.module.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pbonte/roxi/85e29b484fc99798983499978fdae14eb2558e59/docs/dd54eb58002ccc760544.module.wasm -------------------------------------------------------------------------------- /docs/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | RoXi Reasoner 6 | 7 | 8 | 9 | 81 | 82 | 83 | 84 | 85 | 86 | 87 |
88 |
89 |

90 | RoXi 91 |

92 |

93 | Reasoning on Web scale 94 |

95 |
96 |
97 |
98 | 103 |
104 |
105 |
106 |
107 | 108 | 109 | 110 | 111 |
112 | 113 |
114 | 115 | 116 |
117 | Execution Time: 118 |
119 |
120 | Number Of Triples: 121 |
122 |
123 | 124 | 125 |
126 | 129 |
130 |
131 |
132 |
133 | 134 | 135 | 136 |
137 |
149 |
150 |
151 | 152 |
153 |
154 | 155 |
156 |
157 |
158 |

159 | 160 |

161 |
162 |
163 |
164 | 165 |
166 |
167 |
168 |

169 | 170 |

171 |
172 |
173 |
174 | 175 |
176 |
177 | 178 |
179 |
180 |
181 |

182 | 183 |

184 |
185 |
186 |
187 | 188 |
189 |
190 |
191 |

192 | 193 |

194 |
195 |
196 |
197 | 198 |
199 | 200 |
201 | 202 |
203 |
204 | 205 | 206 |
207 | 210 |
211 |
212 |
213 |
214 | 215 | 216 | 217 | 218 | 219 |
220 | 221 | 222 |
223 | 224 | 228 | 229 |
230 | 231 |
232 | 233 |
234 | 235 |
236 | 237 |
238 | Reasoning Execution Time: 239 |
240 |
241 | 242 | 243 |
244 | 247 |
248 |
249 |
250 | 258 | 259 | 339 | 340 | -------------------------------------------------------------------------------- /examples/abox.ttl: -------------------------------------------------------------------------------- 1 | . 2 | 3 | -------------------------------------------------------------------------------- /examples/abox2.ttl: -------------------------------------------------------------------------------- 1 | @prefix : . 2 | @prefix rdf: . 3 | 4 | :ind rdf:type :N0. 5 | -------------------------------------------------------------------------------- /examples/deeptaxomy/abox.ttl: -------------------------------------------------------------------------------- 1 | @prefix rdf: . 2 | @prefix rdfs: . 3 | @prefix : . 4 | 5 | :TestVariable a :A2. 6 | -------------------------------------------------------------------------------- /examples/deeptaxomy/rdfs-subClassOf.n3: -------------------------------------------------------------------------------- 1 | @prefix rdf: . 2 | @prefix rdfs: . 3 | {?C rdfs:subClassOf ?D. ?X rdf:type ?C} => {?X rdf:type ?D}. 4 | {?C rdfs:subClassOf ?D. ?D rdfs:subClassOf ?E} => {?C rdfs:subClassOf ?E}. 5 | -------------------------------------------------------------------------------- /examples/deeptaxomy/test-dl-100.n3: -------------------------------------------------------------------------------- 1 | @prefix rdf: . 2 | @prefix rdfs: . 3 | @prefix : . 4 | 5 | :ind a :N0. 6 | :N0 rdfs:subClassOf :N1. 7 | :N0 rdfs:subClassOf :I1. 8 | :N0 rdfs:subClassOf :J1. 9 | :N1 rdfs:subClassOf :N2. 10 | :N1 rdfs:subClassOf :I2. 11 | :N1 rdfs:subClassOf :J2. 12 | :N2 rdfs:subClassOf :N3. 13 | :N2 rdfs:subClassOf :I3. 14 | :N2 rdfs:subClassOf :J3. 15 | :N3 rdfs:subClassOf :N4. 16 | :N3 rdfs:subClassOf :I4. 17 | :N3 rdfs:subClassOf :J4. 18 | :N4 rdfs:subClassOf :N5. 19 | :N4 rdfs:subClassOf :I5. 20 | :N4 rdfs:subClassOf :J5. 21 | :N5 rdfs:subClassOf :N6. 22 | :N5 rdfs:subClassOf :I6. 23 | :N5 rdfs:subClassOf :J6. 24 | :N6 rdfs:subClassOf :N7. 25 | :N6 rdfs:subClassOf :I7. 26 | :N6 rdfs:subClassOf :J7. 27 | :N7 rdfs:subClassOf :N8. 28 | :N7 rdfs:subClassOf :I8. 29 | :N7 rdfs:subClassOf :J8. 30 | :N8 rdfs:subClassOf :N9. 31 | :N8 rdfs:subClassOf :I9. 32 | :N8 rdfs:subClassOf :J9. 33 | :N9 rdfs:subClassOf :N10. 34 | :N9 rdfs:subClassOf :I10. 35 | :N9 rdfs:subClassOf :J10. 36 | :N10 rdfs:subClassOf :N11. 37 | :N10 rdfs:subClassOf :I11. 38 | :N10 rdfs:subClassOf :J11. 39 | :N11 rdfs:subClassOf :N12. 40 | :N11 rdfs:subClassOf :I12. 41 | :N11 rdfs:subClassOf :J12. 42 | :N12 rdfs:subClassOf :N13. 43 | :N12 rdfs:subClassOf :I13. 44 | :N12 rdfs:subClassOf :J13. 45 | :N13 rdfs:subClassOf :N14. 46 | :N13 rdfs:subClassOf :I14. 47 | :N13 rdfs:subClassOf :J14. 48 | :N14 rdfs:subClassOf :N15. 49 | :N14 rdfs:subClassOf :I15. 50 | :N14 rdfs:subClassOf :J15. 51 | :N15 rdfs:subClassOf :N16. 52 | :N15 rdfs:subClassOf :I16. 53 | :N15 rdfs:subClassOf :J16. 54 | :N16 rdfs:subClassOf :N17. 55 | :N16 rdfs:subClassOf :I17. 56 | :N16 rdfs:subClassOf :J17. 57 | :N17 rdfs:subClassOf :N18. 58 | :N17 rdfs:subClassOf :I18. 59 | :N17 rdfs:subClassOf :J18. 60 | :N18 rdfs:subClassOf :N19. 61 | :N18 rdfs:subClassOf :I19. 62 | :N18 rdfs:subClassOf :J19. 63 | :N19 rdfs:subClassOf :N20. 64 | :N19 rdfs:subClassOf :I20. 65 | :N19 rdfs:subClassOf :J20. 66 | :N20 rdfs:subClassOf :N21. 67 | :N20 rdfs:subClassOf :I21. 68 | :N20 rdfs:subClassOf :J21. 69 | :N21 rdfs:subClassOf :N22. 70 | :N21 rdfs:subClassOf :I22. 71 | :N21 rdfs:subClassOf :J22. 72 | :N22 rdfs:subClassOf :N23. 73 | :N22 rdfs:subClassOf :I23. 
74 | :N22 rdfs:subClassOf :J23. 75 | :N23 rdfs:subClassOf :N24. 76 | :N23 rdfs:subClassOf :I24. 77 | :N23 rdfs:subClassOf :J24. 78 | :N24 rdfs:subClassOf :N25. 79 | :N24 rdfs:subClassOf :I25. 80 | :N24 rdfs:subClassOf :J25. 81 | :N25 rdfs:subClassOf :N26. 82 | :N25 rdfs:subClassOf :I26. 83 | :N25 rdfs:subClassOf :J26. 84 | :N26 rdfs:subClassOf :N27. 85 | :N26 rdfs:subClassOf :I27. 86 | :N26 rdfs:subClassOf :J27. 87 | :N27 rdfs:subClassOf :N28. 88 | :N27 rdfs:subClassOf :I28. 89 | :N27 rdfs:subClassOf :J28. 90 | :N28 rdfs:subClassOf :N29. 91 | :N28 rdfs:subClassOf :I29. 92 | :N28 rdfs:subClassOf :J29. 93 | :N29 rdfs:subClassOf :N30. 94 | :N29 rdfs:subClassOf :I30. 95 | :N29 rdfs:subClassOf :J30. 96 | :N30 rdfs:subClassOf :N31. 97 | :N30 rdfs:subClassOf :I31. 98 | :N30 rdfs:subClassOf :J31. 99 | :N31 rdfs:subClassOf :N32. 100 | :N31 rdfs:subClassOf :I32. 101 | :N31 rdfs:subClassOf :J32. 102 | :N32 rdfs:subClassOf :N33. 103 | :N32 rdfs:subClassOf :I33. 104 | :N32 rdfs:subClassOf :J33. 105 | :N33 rdfs:subClassOf :N34. 106 | :N33 rdfs:subClassOf :I34. 107 | :N33 rdfs:subClassOf :J34. 108 | :N34 rdfs:subClassOf :N35. 109 | :N34 rdfs:subClassOf :I35. 110 | :N34 rdfs:subClassOf :J35. 111 | :N35 rdfs:subClassOf :N36. 112 | :N35 rdfs:subClassOf :I36. 113 | :N35 rdfs:subClassOf :J36. 114 | :N36 rdfs:subClassOf :N37. 115 | :N36 rdfs:subClassOf :I37. 116 | :N36 rdfs:subClassOf :J37. 117 | :N37 rdfs:subClassOf :N38. 118 | :N37 rdfs:subClassOf :I38. 119 | :N37 rdfs:subClassOf :J38. 120 | :N38 rdfs:subClassOf :N39. 121 | :N38 rdfs:subClassOf :I39. 122 | :N38 rdfs:subClassOf :J39. 123 | :N39 rdfs:subClassOf :N40. 124 | :N39 rdfs:subClassOf :I40. 125 | :N39 rdfs:subClassOf :J40. 126 | :N40 rdfs:subClassOf :N41. 127 | :N40 rdfs:subClassOf :I41. 128 | :N40 rdfs:subClassOf :J41. 129 | :N41 rdfs:subClassOf :N42. 130 | :N41 rdfs:subClassOf :I42. 131 | :N41 rdfs:subClassOf :J42. 132 | :N42 rdfs:subClassOf :N43. 133 | :N42 rdfs:subClassOf :I43. 134 | :N42 rdfs:subClassOf :J43. 135 | :N43 rdfs:subClassOf :N44. 136 | :N43 rdfs:subClassOf :I44. 137 | :N43 rdfs:subClassOf :J44. 138 | :N44 rdfs:subClassOf :N45. 139 | :N44 rdfs:subClassOf :I45. 140 | :N44 rdfs:subClassOf :J45. 141 | :N45 rdfs:subClassOf :N46. 142 | :N45 rdfs:subClassOf :I46. 143 | :N45 rdfs:subClassOf :J46. 144 | :N46 rdfs:subClassOf :N47. 145 | :N46 rdfs:subClassOf :I47. 146 | :N46 rdfs:subClassOf :J47. 147 | :N47 rdfs:subClassOf :N48. 148 | :N47 rdfs:subClassOf :I48. 149 | :N47 rdfs:subClassOf :J48. 150 | :N48 rdfs:subClassOf :N49. 151 | :N48 rdfs:subClassOf :I49. 152 | :N48 rdfs:subClassOf :J49. 153 | :N49 rdfs:subClassOf :N50. 154 | :N49 rdfs:subClassOf :I50. 155 | :N49 rdfs:subClassOf :J50. 156 | :N50 rdfs:subClassOf :N51. 157 | :N50 rdfs:subClassOf :I51. 158 | :N50 rdfs:subClassOf :J51. 159 | :N51 rdfs:subClassOf :N52. 160 | :N51 rdfs:subClassOf :I52. 161 | :N51 rdfs:subClassOf :J52. 162 | :N52 rdfs:subClassOf :N53. 163 | :N52 rdfs:subClassOf :I53. 164 | :N52 rdfs:subClassOf :J53. 165 | :N53 rdfs:subClassOf :N54. 166 | :N53 rdfs:subClassOf :I54. 167 | :N53 rdfs:subClassOf :J54. 168 | :N54 rdfs:subClassOf :N55. 169 | :N54 rdfs:subClassOf :I55. 170 | :N54 rdfs:subClassOf :J55. 171 | :N55 rdfs:subClassOf :N56. 172 | :N55 rdfs:subClassOf :I56. 173 | :N55 rdfs:subClassOf :J56. 174 | :N56 rdfs:subClassOf :N57. 175 | :N56 rdfs:subClassOf :I57. 176 | :N56 rdfs:subClassOf :J57. 177 | :N57 rdfs:subClassOf :N58. 178 | :N57 rdfs:subClassOf :I58. 179 | :N57 rdfs:subClassOf :J58. 180 | :N58 rdfs:subClassOf :N59. 181 | :N58 rdfs:subClassOf :I59. 
182 | :N58 rdfs:subClassOf :J59. 183 | :N59 rdfs:subClassOf :N60. 184 | :N59 rdfs:subClassOf :I60. 185 | :N59 rdfs:subClassOf :J60. 186 | :N60 rdfs:subClassOf :N61. 187 | :N60 rdfs:subClassOf :I61. 188 | :N60 rdfs:subClassOf :J61. 189 | :N61 rdfs:subClassOf :N62. 190 | :N61 rdfs:subClassOf :I62. 191 | :N61 rdfs:subClassOf :J62. 192 | :N62 rdfs:subClassOf :N63. 193 | :N62 rdfs:subClassOf :I63. 194 | :N62 rdfs:subClassOf :J63. 195 | :N63 rdfs:subClassOf :N64. 196 | :N63 rdfs:subClassOf :I64. 197 | :N63 rdfs:subClassOf :J64. 198 | :N64 rdfs:subClassOf :N65. 199 | :N64 rdfs:subClassOf :I65. 200 | :N64 rdfs:subClassOf :J65. 201 | :N65 rdfs:subClassOf :N66. 202 | :N65 rdfs:subClassOf :I66. 203 | :N65 rdfs:subClassOf :J66. 204 | :N66 rdfs:subClassOf :N67. 205 | :N66 rdfs:subClassOf :I67. 206 | :N66 rdfs:subClassOf :J67. 207 | :N67 rdfs:subClassOf :N68. 208 | :N67 rdfs:subClassOf :I68. 209 | :N67 rdfs:subClassOf :J68. 210 | :N68 rdfs:subClassOf :N69. 211 | :N68 rdfs:subClassOf :I69. 212 | :N68 rdfs:subClassOf :J69. 213 | :N69 rdfs:subClassOf :N70. 214 | :N69 rdfs:subClassOf :I70. 215 | :N69 rdfs:subClassOf :J70. 216 | :N70 rdfs:subClassOf :N71. 217 | :N70 rdfs:subClassOf :I71. 218 | :N70 rdfs:subClassOf :J71. 219 | :N71 rdfs:subClassOf :N72. 220 | :N71 rdfs:subClassOf :I72. 221 | :N71 rdfs:subClassOf :J72. 222 | :N72 rdfs:subClassOf :N73. 223 | :N72 rdfs:subClassOf :I73. 224 | :N72 rdfs:subClassOf :J73. 225 | :N73 rdfs:subClassOf :N74. 226 | :N73 rdfs:subClassOf :I74. 227 | :N73 rdfs:subClassOf :J74. 228 | :N74 rdfs:subClassOf :N75. 229 | :N74 rdfs:subClassOf :I75. 230 | :N74 rdfs:subClassOf :J75. 231 | :N75 rdfs:subClassOf :N76. 232 | :N75 rdfs:subClassOf :I76. 233 | :N75 rdfs:subClassOf :J76. 234 | :N76 rdfs:subClassOf :N77. 235 | :N76 rdfs:subClassOf :I77. 236 | :N76 rdfs:subClassOf :J77. 237 | :N77 rdfs:subClassOf :N78. 238 | :N77 rdfs:subClassOf :I78. 239 | :N77 rdfs:subClassOf :J78. 240 | :N78 rdfs:subClassOf :N79. 241 | :N78 rdfs:subClassOf :I79. 242 | :N78 rdfs:subClassOf :J79. 243 | :N79 rdfs:subClassOf :N80. 244 | :N79 rdfs:subClassOf :I80. 245 | :N79 rdfs:subClassOf :J80. 246 | :N80 rdfs:subClassOf :N81. 247 | :N80 rdfs:subClassOf :I81. 248 | :N80 rdfs:subClassOf :J81. 249 | :N81 rdfs:subClassOf :N82. 250 | :N81 rdfs:subClassOf :I82. 251 | :N81 rdfs:subClassOf :J82. 252 | :N82 rdfs:subClassOf :N83. 253 | :N82 rdfs:subClassOf :I83. 254 | :N82 rdfs:subClassOf :J83. 255 | :N83 rdfs:subClassOf :N84. 256 | :N83 rdfs:subClassOf :I84. 257 | :N83 rdfs:subClassOf :J84. 258 | :N84 rdfs:subClassOf :N85. 259 | :N84 rdfs:subClassOf :I85. 260 | :N84 rdfs:subClassOf :J85. 261 | :N85 rdfs:subClassOf :N86. 262 | :N85 rdfs:subClassOf :I86. 263 | :N85 rdfs:subClassOf :J86. 264 | :N86 rdfs:subClassOf :N87. 265 | :N86 rdfs:subClassOf :I87. 266 | :N86 rdfs:subClassOf :J87. 267 | :N87 rdfs:subClassOf :N88. 268 | :N87 rdfs:subClassOf :I88. 269 | :N87 rdfs:subClassOf :J88. 270 | :N88 rdfs:subClassOf :N89. 271 | :N88 rdfs:subClassOf :I89. 272 | :N88 rdfs:subClassOf :J89. 273 | :N89 rdfs:subClassOf :N90. 274 | :N89 rdfs:subClassOf :I90. 275 | :N89 rdfs:subClassOf :J90. 276 | :N90 rdfs:subClassOf :N91. 277 | :N90 rdfs:subClassOf :I91. 278 | :N90 rdfs:subClassOf :J91. 279 | :N91 rdfs:subClassOf :N92. 280 | :N91 rdfs:subClassOf :I92. 281 | :N91 rdfs:subClassOf :J92. 282 | :N92 rdfs:subClassOf :N93. 283 | :N92 rdfs:subClassOf :I93. 284 | :N92 rdfs:subClassOf :J93. 285 | :N93 rdfs:subClassOf :N94. 286 | :N93 rdfs:subClassOf :I94. 287 | :N93 rdfs:subClassOf :J94. 288 | :N94 rdfs:subClassOf :N95. 
289 | :N94 rdfs:subClassOf :I95. 290 | :N94 rdfs:subClassOf :J95. 291 | :N95 rdfs:subClassOf :N96. 292 | :N95 rdfs:subClassOf :I96. 293 | :N95 rdfs:subClassOf :J96. 294 | :N96 rdfs:subClassOf :N97. 295 | :N96 rdfs:subClassOf :I97. 296 | :N96 rdfs:subClassOf :J97. 297 | :N97 rdfs:subClassOf :N98. 298 | :N97 rdfs:subClassOf :I98. 299 | :N97 rdfs:subClassOf :J98. 300 | :N98 rdfs:subClassOf :N99. 301 | :N98 rdfs:subClassOf :I99. 302 | :N98 rdfs:subClassOf :J99. 303 | :N99 rdfs:subClassOf :N100. 304 | :N99 rdfs:subClassOf :I100. 305 | :N99 rdfs:subClassOf :J100. 306 | 307 | -------------------------------------------------------------------------------- /examples/rsp/location_update_stream.nt: -------------------------------------------------------------------------------- 1 | . 2 | . 3 | . 4 | . 5 | . 6 | . 7 | -------------------------------------------------------------------------------- /examples/rules.n3: -------------------------------------------------------------------------------- 1 | @prefix test: . 2 | @prefix rdf: . 3 | {?s rdf:type test:SubClass. }=>{?s rdf:type test:SuperType.} 4 | -------------------------------------------------------------------------------- /examples/rules2.n3: -------------------------------------------------------------------------------- 1 | @prefix rdf: . 2 | @prefix : . 3 | 4 | {?V0 rdf:type :N0} => {?V0 rdf:type :N1}. 5 | {?V0 rdf:type :N1} => {?V0 rdf:type :N2}. 6 | -------------------------------------------------------------------------------- /js/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "roxi-js" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [lib] 7 | crate-type = ["cdylib", "rlib"] 8 | 9 | [features] 10 | default = ["console_error_panic_hook"] 11 | 12 | [dependencies] 13 | wasm-bindgen = "0.2.63" 14 | cfg-if = "0.1" 15 | roxi = { version = "0.1.0", path = "../lib" } 16 | getrandom = { version = "0.2.6", features = ["js"] } 17 | js-sys = "0.3.60" 18 | log = { version = "0.4"} 19 | wasm-logger = "0.2.0" 20 | 21 | # The `console_error_panic_hook` crate provides better debugging of panics by 22 | # logging them with `console.error`. This is great for development, but requires 23 | # all the `std::fmt` and `std::panicking` infrastructure, so isn't great for 24 | # code size when deploying. 25 | console_error_panic_hook = { version = "0.1.6", optional = true } 26 | 27 | # `wee_alloc` is a tiny allocator for wasm that is only ~1K in code size 28 | # compared to the default allocator's ~10K. It is slower than the default 29 | # allocator, however. 30 | # 31 | # Unfortunately, `wee_alloc` requires nightly Rust when targeting wasm for now. 32 | wee_alloc = { version = "0.4.5", optional = true } 33 | 34 | [dev-dependencies] 35 | wasm-bindgen-test = "0.3.13" 36 | 37 | [profile.release] 38 | # Tell `rustc` to optimize for small code size. 
39 | opt-level = "s" 40 | -------------------------------------------------------------------------------- /js/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "roxi-js", 3 | "lockfileVersion": 2, 4 | "requires": true, 5 | "packages": {} 6 | } 7 | -------------------------------------------------------------------------------- /js/roxi-js.iml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /js/src/lib.rs: -------------------------------------------------------------------------------- 1 | extern crate cfg_if; 2 | extern crate wasm_bindgen; 3 | 4 | mod utils; 5 | 6 | use std::fmt::format; 7 | use std::sync::{Arc, Mutex}; 8 | use std::thread; 9 | use cfg_if::cfg_if; 10 | use js_sys::{Array, Function}; 11 | use log::error; 12 | use wasm_bindgen::JsCast; 13 | use wasm_bindgen::prelude::*; 14 | use roxi::parser::Syntax; 15 | use roxi::TripleStore; 16 | use roxi::rsp::{OperationMode, ResultConsumer, RSPBuilder, RSPEngine, SimpleR2R}; 17 | use roxi::rsp::r2s::StreamOperator; 18 | use roxi::rsp::s2r::{ReportStrategy, Tick, WindowTriple}; 19 | use roxi::sparql::Binding; 20 | 21 | cfg_if! { 22 | // When the `wee_alloc` feature is enabled, use `wee_alloc` as the global 23 | // allocator. 24 | if #[cfg(feature = "wee_alloc")] { 25 | extern crate wee_alloc; 26 | #[global_allocator] 27 | static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; 28 | } 29 | } 30 | 31 | #[wasm_bindgen] 32 | pub struct RoxiReasoner{ 33 | reasoner: TripleStore 34 | } 35 | #[wasm_bindgen] 36 | impl RoxiReasoner{ 37 | pub fn new() -> RoxiReasoner{ 38 | wasm_logger::init(wasm_logger::Config::default()); 39 | RoxiReasoner{reasoner: TripleStore::new()} 40 | } 41 | pub fn add_abox(&mut self, abox:String){ 42 | match self.reasoner.load_triples(abox.as_ref(), Syntax::Turtle){ 43 | Err(error) => {error!("{}", error);}, 44 | _ => () 45 | } 46 | } 47 | pub fn add_rules(&mut self, rules:String){ 48 | self.reasoner.load_rules(rules.as_str()); 49 | } 50 | pub fn len_abox(&self)->usize{ 51 | self.reasoner.len() 52 | } 53 | pub fn materialize(&mut self){ 54 | self.reasoner.materialize(); 55 | } 56 | pub fn get_abox_dump(& self)->String{ 57 | self.reasoner.content_to_string() 58 | } 59 | pub fn query(&self, query: String)->Array { 60 | match self.reasoner.query(query.as_str()) { 61 | Ok(queryresult) => { 62 | queryresult.into_iter().map(|row| 63 | { 64 | let js_bindings: Vec = row.into_iter().map(|b| JSBinding { var: b.var, val: b.val }.into()).collect(); 65 | let js_array = JsValue::from(js_bindings.into_iter().collect::()); 66 | js_array 67 | } 68 | ).collect::() 69 | }, 70 | Err(error_string) => { 71 | error!("{}", error_string); 72 | Array::new() 73 | } 74 | } 75 | } 76 | } 77 | 78 | #[wasm_bindgen] 79 | pub struct JSRSPEngine{ 80 | engine: RSPEngine> 81 | } 82 | struct Test{ 83 | f: js_sys::Function 84 | } 85 | unsafe impl Send for Test {} 86 | 87 | #[wasm_bindgen] 88 | #[derive(Debug)] 89 | pub struct JSBinding{ 90 | val:String, var:String 91 | } 92 | #[wasm_bindgen] 93 | impl JSBinding{ 94 | pub fn getValue(&self) -> String{ 95 | self.val.clone() 96 | } 97 | pub fn getVar(&self) -> String { 98 | self.var.clone() 99 | } 100 | pub fn toString(&self) -> String{ 101 | format!("Binding{{{:?}: {:?}}}",self.var.clone(), self.val.clone()) 102 | } 103 | } 104 | #[wasm_bindgen] 105 | impl JSRSPEngine{ 106 | pub 
fn new(width: usize, slide: usize, rules: String, abox: String, query: String, f: &js_sys::Function) -> JSRSPEngine { 107 | let t = Arc::new(Mutex::new(Test{f: f.clone()})); 108 | let t2 = t.clone(); 109 | let function: Box)-> () + Send + Sync> = Box::new(move |r|{ 110 | //for x in r{ 111 | let this = JsValue::null(); 112 | //convert to JSBindings and JSValues 113 | let r_js: Vec = r.into_iter().map(|binding|JSBinding{var: binding.var, val: binding.val}.into()).collect(); 114 | // convert to JS Array 115 | let x = JsValue::from(r_js.into_iter().collect::()); 116 | let f = t2.lock().unwrap(); 117 | let _ = f.f.call1(&this, &x); 118 | //} 119 | (); 120 | }); 121 | 122 | let result_consumer : ResultConsumer> = ResultConsumer{function: Arc::new(function)}; 123 | let r2r = Box::new(SimpleR2R{item: TripleStore::new()}); 124 | let mut engine = RSPBuilder::new(width,slide) 125 | .add_tick(Tick::TimeDriven) 126 | .add_report_strategy(ReportStrategy::OnWindowClose) 127 | .add_triples(&abox) 128 | .add_syntax(Syntax::NTriples) 129 | .add_rules(&rules) 130 | .add_query(&query) 131 | .add_consumer(result_consumer) 132 | .add_r2r(r2r) 133 | .add_r2s(StreamOperator::RSTREAM) 134 | .set_operation_mode(OperationMode::SingleThread) 135 | .build(); 136 | JSRSPEngine{engine} 137 | 138 | } 139 | pub fn add(&mut self, triple_string: String, ts: usize){ 140 | let mut triple_string = triple_string.clone(); 141 | if triple_string.ends_with("."){ 142 | triple_string = triple_string[..triple_string.len() - 1].to_string(); 143 | } 144 | let mut triple_string = triple_string.split(" "); 145 | 146 | let triple = WindowTriple{s:triple_string.next().unwrap().trim().to_string(), 147 | p:triple_string.next().unwrap().trim().to_string(), 148 | o: triple_string.next().unwrap().trim().to_string(),}; 149 | 150 | self.engine.add(triple,ts); 151 | } 152 | } 153 | #[cfg(test)] 154 | mod test{ 155 | use crate::RoxiReasoner; 156 | 157 | #[test] 158 | fn test_js_load_reasoner(){ 159 | let mut reasoner = RoxiReasoner::new(); 160 | reasoner.add_abox("test".to_string()); 161 | } 162 | } -------------------------------------------------------------------------------- /js/src/utils.rs: -------------------------------------------------------------------------------- 1 | pub fn set_panic_hook() { 2 | // When the `console_error_panic_hook` feature is enabled, we can call the 3 | // `set_panic_hook` function at least once during initialization, and then 4 | // we will get better error messages if our code ever panics. 5 | // 6 | // For more details see 7 | // https://github.com/rustwasm/console_error_panic_hook#readme 8 | #[cfg(feature = "console_error_panic_hook")] 9 | console_error_panic_hook::set_once(); 10 | } 11 | -------------------------------------------------------------------------------- /js/web/.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | node_js: "10" 3 | 4 | script: 5 | - ./node_modules/.bin/webpack 6 | -------------------------------------------------------------------------------- /js/web/LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 
11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /js/web/LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Copyright (c) [year] [name] 2 | 3 | Permission is hereby granted, free of charge, to any 4 | person obtaining a copy of this software and associated 5 | documentation files (the "Software"), to deal in the 6 | Software without restriction, including without 7 | limitation the rights to use, copy, modify, merge, 8 | publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software 10 | is furnished to do so, subject to the following 11 | conditions: 12 | 13 | The above copyright notice and this permission notice 14 | shall be included in all copies or substantial portions 15 | of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 18 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 19 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 20 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 21 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 22 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 23 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 24 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 25 | DEALINGS IN THE SOFTWARE. 26 | -------------------------------------------------------------------------------- /js/web/README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 |

create-wasm-app

4 | 5 | An npm init template for kick-starting a project that uses NPM packages containing Rust-generated WebAssembly and bundles them with Webpack. 6 | 7 |

8 | Build Status 9 |

10 | 11 |

12 | Usage 13 | | 14 | Chat 15 |

16 | 17 | Built with 🦀🕸 by The Rust and WebAssembly Working Group 18 |
19 | 20 | ## About 21 | 22 | This template is designed for depending on NPM packages that contain 23 | Rust-generated WebAssembly and using them to create a Website. 24 | 25 | * Want to create an NPM package with Rust and WebAssembly? [Check out 26 | `wasm-pack-template`.](https://github.com/rustwasm/wasm-pack-template) 27 | * Want to make a monorepo-style Website without publishing to NPM? Check out 28 | [`rust-webpack-template`](https://github.com/rustwasm/rust-webpack-template) 29 | and/or 30 | [`rust-parcel-template`](https://github.com/rustwasm/rust-parcel-template). 31 | 32 | ## 🚴 Usage 33 | 34 | ``` 35 | npm init wasm-app 36 | ``` 37 | 38 | ## 🔋 Batteries Included 39 | 40 | - `.gitignore`: ignores `node_modules` 41 | - `LICENSE-APACHE` and `LICENSE-MIT`: most Rust projects are licensed this way, so these are included for you 42 | - `README.md`: the file you are reading now! 43 | - `index.html`: a bare bones html document that includes the webpack bundle 44 | - `index.js`: example js file with a comment showing how to import and use a wasm pkg 45 | - `package.json` and `package-lock.json`: 46 | - pulls in devDependencies for using webpack: 47 | - [`webpack`](https://www.npmjs.com/package/webpack) 48 | - [`webpack-cli`](https://www.npmjs.com/package/webpack-cli) 49 | - [`webpack-dev-server`](https://www.npmjs.com/package/webpack-dev-server) 50 | - defines a `start` script to run `webpack-dev-server` 51 | - `webpack.config.js`: configuration file for bundling your js with webpack 52 | 53 | ## License 54 | 55 | Licensed under either of 56 | 57 | * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) 58 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) 59 | 60 | at your option. 61 | 62 | ### Contribution 63 | 64 | Unless you explicitly state otherwise, any contribution intentionally 65 | submitted for inclusion in the work by you, as defined in the Apache-2.0 66 | license, shall be dual licensed as above, without any additional terms or 67 | conditions. 68 | -------------------------------------------------------------------------------- /js/web/bootstrap.js: -------------------------------------------------------------------------------- 1 | // A dependency graph that contains any wasm must all be imported 2 | // asynchronously. This `bootstrap.js` file does the single async import, so 3 | // that no one else needs to worry about it again. 4 | import("./src/index") 5 | .catch(e => console.error("Error importing `index.js`:", e)); 6 | 7 | 8 | -------------------------------------------------------------------------------- /js/web/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | RoXi Reasoner 6 | 7 | 8 | 9 | 81 | 82 | 83 | 84 | 85 | 86 | 87 |
88 |
89 |

90 | RoXi 91 |

92 |

93 | Reasoning on Web scale 94 |

95 |
96 |
97 |
98 | 103 |
104 |
105 |
106 |
107 | 108 | 109 | 110 | 111 |
112 | 113 |
114 | 115 | 116 |
117 | Execution Time: 118 |
119 |
120 | Number Of Triples: 121 |
122 |
123 | 124 | 125 |
126 | 129 |
130 |
131 |
132 |
133 | 134 | 135 | 136 |
137 |
149 |
150 |
151 | 152 |
153 |
154 | 155 |
156 |
157 |
158 |

159 | 160 |

161 |
162 |
163 |
164 | 165 |
166 |
167 |
168 |

169 | 170 |

171 |
172 |
173 |
174 | 175 |
176 |
177 | 178 |
179 |
180 |
181 |

182 | 183 |

184 |
185 |
186 |
187 | 188 |
189 |
190 |
191 |

192 | 193 |

194 |
195 |
196 |
197 | 198 |
199 | 200 |
201 | 202 |
203 |
204 | 205 | 206 |
207 | 210 |
211 |
212 |
213 |
214 | 215 | 216 | 217 | 218 | 219 |
220 | 221 | 222 |
223 | 224 | 228 | 229 |
230 | 231 |
232 | 233 |
234 | 235 |
236 | 237 |
238 | Reasoning Execution Time: 239 |
240 |
241 | 242 | 243 |
244 | 247 |
248 |
249 |
250 | 258 | 259 | 339 | 340 | -------------------------------------------------------------------------------- /js/web/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "create-wasm-app", 3 | "version": "0.1.0", 4 | "description": "create an app to consume rust-generated wasm packages", 5 | "main": "src/index.js", 6 | "bin": { 7 | "create-wasm-app": ".bin/create-wasm-app.js" 8 | }, 9 | "scripts": { 10 | "build": "webpack --config webpack.config.js", 11 | "start": "webpack-dev-server" 12 | }, 13 | "repository": { 14 | "type": "git", 15 | "url": "git+https://github.com/rustwasm/create-wasm-app.git" 16 | }, 17 | "keywords": [ 18 | "webassembly", 19 | "wasm", 20 | "rust", 21 | "webpack" 22 | ], 23 | "author": "Ashley Williams ", 24 | "license": "(MIT OR Apache-2.0)", 25 | "bugs": { 26 | "url": "https://github.com/rustwasm/create-wasm-app/issues" 27 | }, 28 | "homepage": "https://github.com/rustwasm/create-wasm-app#readme", 29 | "devDependencies": { 30 | "copy-webpack-plugin": "^5.0.0", 31 | "hello-wasm-pack": "^0.1.0", 32 | "roxi": "file:../pkg", 33 | "webpack": "^4.29.3", 34 | "webpack-cli": "^3.1.0", 35 | "webpack-dev-server": "^3.1.5" 36 | }, 37 | "dependencies": { 38 | "@triply/yasgui": "^4.2.28", 39 | "bulma": "^0.9.3" 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /js/web/src/index.js: -------------------------------------------------------------------------------- 1 | require("./tabs/reasoning"); 2 | require("./tabs/rsp"); 3 | require("./tabs/reasoningAndQuery"); 4 | const {yasqeRSP} = require("./tabs/rsp"); 5 | const {yasqeQ} = require("./tabs/reasoningAndQuery"); 6 | 7 | function gup( name, url ) { 8 | if (!url) url = location.href; 9 | name = name.replace(/[\[]/,"\\\[").replace(/[\]]/,"\\\]"); 10 | var regexS = "[\\?&]"+name+"=([^&#]*)"; 11 | var regex = new RegExp( regexS ); 12 | var results = regex.exec( url ); 13 | return results == null ? 
null : results[1]; 14 | } 15 | 16 | function decodeAndAssign(toDecode,elementID){ 17 | if(toDecode){ 18 | try { 19 | let decoded = decodeURIComponent(toDecode); 20 | document.getElementById(elementID).value = decoded; 21 | } catch (e) { 22 | console.error(e); 23 | } 24 | } 25 | } 26 | 27 | function start(){ 28 | let view = gup('view', window.location.href); 29 | if( view === 'reasoning'){ 30 | openTab(event,'reasoning'); 31 | decodeAndAssign(gup('abox', window.location.href),'aboxContentR'); 32 | decodeAndAssign(gup('rules', window.location.href),'rulesContentR'); 33 | 34 | } 35 | else if (view === 'rsp') { 36 | openTab(event,'rsp'); 37 | decodeAndAssign(gup('rules', window.location.href),'rulesContentRSP'); 38 | decodeAndAssign(gup('windowWidth', window.location.href),'windowWidth'); 39 | decodeAndAssign(gup('windowSlide', window.location.href),'windowSlide'); 40 | decodeAndAssign(gup('eventID', window.location.href),'eventID'); 41 | decodeAndAssign(gup('timestamp', window.location.href),'timestamp'); 42 | try { 43 | let decoded = decodeURIComponent(gup('query', window.location.href)); 44 | yasqeRSP.setValue(decoded); 45 | } catch (e) { 46 | console.error(e); 47 | } 48 | } 49 | else if(view === 'rq') { 50 | openTab(event,'rq'); 51 | decodeAndAssign(gup('abox', window.location.href),'aboxContentQ'); 52 | decodeAndAssign(gup('rules', window.location.href),'rulesContentQ'); 53 | try { 54 | let decoded = decodeURIComponent(gup('query', window.location.href)); 55 | yasqeQ.setValue(decoded); 56 | } catch (e) { 57 | console.error(e); 58 | } 59 | } 60 | else { 61 | openTab(event,'reasoning'); 62 | decodeAndAssign(gup('abox', window.location.href),'aboxContentR'); 63 | decodeAndAssign(gup('rules', window.location.href),'rulesContentR'); 64 | } 65 | } 66 | start(); 67 | -------------------------------------------------------------------------------- /js/web/src/tabs/reasoning.js: -------------------------------------------------------------------------------- 1 | import {RoxiReasoner} from "roxi"; 2 | 3 | const aboxInitialContents = " ."; 4 | const tboxInitialContents = "@prefix test: .\n@prefix rdf: .\n{?s rdf:type test:SubClass. 
}=>{?s rdf:type test:SuperType.}"; 5 | 6 | const abox = document.getElementById('aboxContentR'); 7 | const tbox = document.getElementById('rulesContentR'); 8 | const reasoningShareButton = document.getElementById("shareReasoningR"); 9 | 10 | abox.value = aboxInitialContents; 11 | tbox.value = tboxInitialContents; 12 | 13 | const tripleRegex = new RegExp(/((<[^>]*>)?|(.+:.+)?)(\ )+((<[^>]*>)?|(.*:.+)?)(\ )+((<[^>]*>)?|(".+"(\^\^<.+>)?)?|(.*:.+)?)(\ )*\./, "gm"); 14 | const startReasoning = () => { 15 | const reasoner = RoxiReasoner.new(); 16 | 17 | const startTime = performance.now(); 18 | 19 | reasoner.add_abox(abox.value); 20 | reasoner.add_rules(tbox.value); 21 | reasoner.materialize(); 22 | 23 | const endTime = performance.now(); 24 | const difftime = endTime-startTime ; 25 | 26 | const materializedTriples = reasoner.get_abox_dump(); 27 | 28 | document.getElementById('resultsR').value = materializedTriples; 29 | document.getElementById('timeResultsR').innerHTML = difftime + " ms"; 30 | document.getElementById('numberOfTriplesR').innerHTML = (materializedTriples.match(tripleRegex) || []).length.toString(); 31 | }; 32 | 33 | const shareReasoning = () =>{ 34 | let host = window.location.href.split('?')[0]; 35 | let encodedAbox = encodeURIComponent(abox.value); 36 | let encodedRules = encodeURIComponent(tbox.value); 37 | 38 | let result = host +'?view=reasoning&abox='+encodedAbox+'&rules='+encodedRules; 39 | 40 | navigator.clipboard 41 | .writeText(result) 42 | .then( 43 | success => { 44 | reasoningShareButton.style.backgroundColor = "#43b343"; 45 | document.getElementById("shareReasoningTextR").style.opacity = "1"; 46 | setTimeout(()=>{ 47 | reasoningShareButton.style.backgroundColor = ""; 48 | document.getElementById("shareReasoningTextR").style.opacity = "0"; 49 | }, 1000); 50 | }, 51 | err => { 52 | reasoningShareButton.style.backgroundColor = "#e83131"; 53 | setTimeout(()=>{ 54 | reasoningShareButton.style.backgroundColor = ""; 55 | }, 1000); 56 | // activate share text area 57 | document.getElementById('shareIDR').style.display = "block"; 58 | // display the url 59 | document.getElementById('shareBoxR').value = result; 60 | } 61 | ); 62 | } 63 | 64 | document.getElementById("startReasoningR").addEventListener("click", event => { 65 | startReasoning(); 66 | }); 67 | 68 | reasoningShareButton.addEventListener("click", event => { 69 | shareReasoning(); 70 | }); 71 | -------------------------------------------------------------------------------- /js/web/src/tabs/reasoningAndQuery.js: -------------------------------------------------------------------------------- 1 | import {RoxiReasoner, JSBinding} from "roxi"; 2 | import Yasqe from "@triply/yasqe"; 3 | import Yasr from "@triply/yasr"; 4 | 5 | const aboxInitialContents = " ."; 6 | const tboxInitialContents = "@prefix test: .\n@prefix rdf: .\n{?s rdf:type test:SubClass. 
}=>{?s rdf:type test:SuperType.}"; 7 | const queryInitialContents = "SELECT * WHERE {\n\t?s ?p ?o.\n}"; 8 | 9 | 10 | const aboxElement = document.getElementById('aboxContentQ'); 11 | const tboxElement = document.getElementById('rulesContentQ'); 12 | const reasoningSwitch = document.getElementById("reasoningSwitchQ"); 13 | const reasoningShareButton = document.getElementById("shareReasoningQ"); 14 | 15 | export const yasqeQ = new Yasqe( 16 | document.getElementById('queryQ') 17 | ); 18 | 19 | const yasr = new Yasr( 20 | document.getElementById('resultsQ') 21 | ); 22 | 23 | yasr.setResponse({head:{vars:[""]},results:{bindings:[{"":{type:"literal",value: ""}}]}}); 24 | 25 | reasoningSwitch.checked = true; 26 | aboxElement.value = aboxInitialContents; 27 | tboxElement.value = tboxInitialContents; 28 | yasqeQ.setValue(queryInitialContents); 29 | 30 | const urlRegex = new RegExp(/?/); 31 | 32 | const startReasoning = () => { 33 | const reasoner = RoxiReasoner.new(); 34 | 35 | const startTime = performance.now(); 36 | 37 | reasoner.add_abox(aboxElement.value); 38 | reasoner.add_rules(tboxElement.value); 39 | 40 | if (reasoningSwitch.checked) { 41 | reasoner.materialize(); 42 | } 43 | 44 | const endTime = performance.now(); 45 | const difftime = endTime-startTime ; 46 | 47 | const result = reasoner.query(yasqeQ.getValue().toString()); 48 | const results = []; 49 | let temp = {}; 50 | let headVars = new Map(); 51 | for (const row of result){ 52 | temp = {}; 53 | for(const binding of row){ 54 | headVars.set(binding.getVar(), binding.getVar()); 55 | const regexArray = urlRegex.exec(binding.getValue()); 56 | if (regexArray == null) { 57 | temp[binding.getVar()] = {type:"literal",value: binding.getValue()}; 58 | } 59 | else { 60 | temp[binding.getVar()] = {type:"uri",value: regexArray[1]}; 61 | } 62 | } 63 | results.push(temp) 64 | } 65 | const response={head:{vars:Array.from(headVars.keys())},results:{bindings:results}}; 66 | yasr.setResponse(response); 67 | document.getElementById('timeResultsQ').innerHTML = difftime + " ms"; 68 | }; 69 | 70 | const shareReasoning = () =>{ 71 | let host = window.location.href.split('?')[0]; 72 | let encodedAbox = encodeURIComponent(aboxElement.value); 73 | let encodedRules = encodeURIComponent(tboxElement.value); 74 | let encodedQuery = encodeURIComponent(yasqeQ.getValue()); 75 | 76 | let result = host +'?view=rq&abox='+encodedAbox+'&rules='+encodedRules+'&query='+encodedQuery; 77 | 78 | navigator.clipboard 79 | .writeText(result) 80 | .then( 81 | success => { 82 | reasoningShareButton.style.backgroundColor = "#43b343"; 83 | document.getElementById("shareReasoningTextQ").style.opacity = "1"; 84 | setTimeout(()=>{ 85 | reasoningShareButton.style.backgroundColor = ""; 86 | document.getElementById("shareReasoningTextQ").style.opacity = "0"; 87 | }, 1000); 88 | }, 89 | err => { 90 | reasoningShareButton.style.backgroundColor = "#e83131"; 91 | setTimeout(()=>{ 92 | reasoningShareButton.style.backgroundColor = ""; 93 | }, 1000); 94 | // activate share text area 95 | document.getElementById('shareIDQ').style.display = "block"; 96 | // display the url 97 | document.getElementById('shareBoxQ').value = result; 98 | } 99 | ); 100 | } 101 | 102 | document.getElementById("startReasoningQ").addEventListener("click", event => { 103 | startReasoning(); 104 | }); 105 | 106 | reasoningShareButton.addEventListener("click", event => { 107 | shareReasoning(); 108 | }); 109 | -------------------------------------------------------------------------------- /js/web/src/tabs/rsp.js: 
-------------------------------------------------------------------------------- 1 | import {JSRSPEngine, JSBinding} from "roxi"; 2 | import Yasqe from "@triply/yasqe"; 3 | import Yasr from "@triply/yasr"; 4 | 5 | const rules = "@prefix test: .\n@prefix rdf: .\n{?x test:isIn ?y. ?y test:isIn ?z. }=>{?x test:isIn ?z.}"; 6 | const query = "Select * WHERE {\n\t?x ?y.\n}"; 7 | 8 | const tboxElement = document.getElementById('rulesContentRSP'); 9 | const windowWidthElement = document.getElementById('windowWidth'); 10 | const windowSlideElement = document.getElementById('windowSlide'); 11 | const eventIDElement = document.getElementById('eventID'); 12 | const timestampElement = document.getElementById('timestamp'); 13 | const reasoningShareButton = document.getElementById("shareReasoningRSP"); 14 | const rspButton = document.getElementById("startRSP"); 15 | 16 | export const yasqeRSP = new Yasqe( 17 | document.getElementById('queryRSP') 18 | ); 19 | 20 | const yasr = new Yasr( 21 | document.getElementById('resultsRSP') 22 | ); 23 | 24 | yasr.setResponse({head:{vars:[""]},results:{bindings:[{"":{type:"literal",value: ""}}]}}); 25 | 26 | tboxElement.value = rules; 27 | yasqeRSP.setValue(query); 28 | let currentTs = 0; 29 | let rspEngine = null; 30 | let results = []; 31 | 32 | const urlRegex = new RegExp(/?/); 33 | 34 | // callback function 35 | function callback(val) { 36 | let headVars = new Map(); 37 | const temp = {}; 38 | headVars.set("Timestamp (not a binding)", "Timestamp (not a binding)"); 39 | temp["Timestamp (not a binding)"] = {type:"literal",value: currentTs.toString()}; 40 | for (const binding of val) { 41 | headVars.set(binding.getVar(), binding.getVar()); 42 | const regexArray = urlRegex.exec(binding.getValue()); 43 | if (regexArray == null) { 44 | temp[binding.getVar()] = {type:"literal",value: binding.getValue()}; 45 | } 46 | else { 47 | temp[binding.getVar()] = {type:"uri",value: regexArray[1]}; 48 | } 49 | } 50 | results.push(temp) 51 | const response={head:{vars:Array.from(headVars.keys())},results:{bindings:results}}; 52 | yasr.setResponse(response); 53 | } 54 | 55 | const startRSP = () => { 56 | if(rspEngine == null){ 57 | console.log("starting"); 58 | let tbox_new = tboxElement.value; 59 | tboxElement.setAttribute('disabled', ''); 60 | 61 | let abox = ""; 62 | let query = yasqeRSP.getValue(); 63 | document.getElementById("disableQueryRSP").style.display = "block"; 64 | 65 | let width = windowWidthElement.value; 66 | windowWidthElement.setAttribute('disabled', ''); 67 | let slide = windowSlideElement.value; 68 | windowSlideElement.setAttribute('disabled', ''); 69 | rspEngine = JSRSPEngine.new(width,slide,tbox_new,abox,query,callback); 70 | } 71 | currentTs+=1; 72 | let event = eventIDElement.value; 73 | rspEngine.add(event, currentTs); 74 | timestampElement.value = currentTs; 75 | eventIDElement.value = " ."; 76 | 77 | console.log("stopped"); 78 | } 79 | 80 | const shareReasoning = () =>{ 81 | let host = window.location.href.split('?')[0]; 82 | let encodedRules = encodeURIComponent(tboxElement.value); 83 | let encodedQuery = encodeURIComponent(yasqeRSP.getValue()); 84 | let encodedWindowWidth = encodeURIComponent(windowWidthElement.value); 85 | let encodedWindowSlide = encodeURIComponent(windowSlideElement.value); 86 | let encodedEventID = encodeURIComponent(eventIDElement.value); 87 | let encodedTimestamp = encodeURIComponent(timestampElement.value); 88 | 89 | let result = host + 90 | '?view=rsp' + 91 | '&rules=' + encodedRules + 92 | '&query=' + encodedQuery + 93 | 
'&windowWidth=' + encodedWindowWidth + 94 | '&windowSlide=' + encodedWindowSlide + 95 | '&eventID=' + encodedEventID + 96 | '&timestamp=' + encodedTimestamp; 97 | 98 | navigator.clipboard 99 | .writeText(result) 100 | .then( 101 | success => { 102 | reasoningShareButton.style.backgroundColor = "#43b343"; 103 | document.getElementById("shareReasoningTextRSP").style.opacity = "1"; 104 | setTimeout(()=>{ 105 | reasoningShareButton.style.backgroundColor = ""; 106 | document.getElementById("shareReasoningTextRSP").style.opacity = "0"; 107 | }, 1000); 108 | }, 109 | err => { 110 | reasoningShareButton.style.backgroundColor = "#e83131"; 111 | setTimeout(()=>{ 112 | reasoningShareButton.style.backgroundColor = ""; 113 | }, 1000); 114 | // activate share text area 115 | document.getElementById('shareIDR').style.display = "block"; 116 | // display the url 117 | document.getElementById('shareBoxR').value = result; 118 | } 119 | ); 120 | } 121 | 122 | rspButton.addEventListener("click", event => { 123 | startRSP(); 124 | }); 125 | 126 | reasoningShareButton.addEventListener("click", event => { 127 | shareReasoning(); 128 | }); 129 | -------------------------------------------------------------------------------- /js/web/webpack.config.js: -------------------------------------------------------------------------------- 1 | const CopyWebpackPlugin = require("copy-webpack-plugin"); 2 | const path = require('path'); 3 | 4 | module.exports = { 5 | entry: "./bootstrap.js", 6 | output: { 7 | path: path.resolve(__dirname, "dist"), 8 | filename: "bootstrap.js", 9 | }, 10 | mode: "development", 11 | plugins: [ 12 | new CopyWebpackPlugin(['index.html']) 13 | ], 14 | }; 15 | -------------------------------------------------------------------------------- /lib/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "roxi" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | [dependencies] 8 | deepmesa = "0.9.0" 9 | bencher = "0.1.5" 10 | log = { version = "0.4"} 11 | env_logger = "0.9.0" 12 | spargebra = "0.2.8" 13 | sparesults = "0.1.8" 14 | rio_api = "0.7.1" 15 | rio_turtle = "0.7.1" 16 | pest = "2.0" 17 | pest_derive = "2.0" 18 | either = "1.8.0" 19 | once_cell = "1.16.0" 20 | 21 | 22 | [[bench]] 23 | name = "bench" 24 | harness = false 25 | 26 | [[bench]] 27 | name = "hierarchies" 28 | harness = false -------------------------------------------------------------------------------- /lib/benches/bench.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate bencher; 3 | 4 | use std::cell::RefCell; 5 | use std::rc::Rc; 6 | use bencher::Bencher; 7 | 8 | use minimal::imars_window::{ImarsWindow, SimpleWindowConsumer}; 9 | use minimal::time_window::TimeWindow; 10 | use minimal::parser::Parser; 11 | use minimal::pipeline::WindowReasoner; 12 | use minimal::TripleStore; 13 | 14 | fn create_window(width:i32, slide:i32) -> ImarsWindow { 15 | let mut window :ImarsWindow = ImarsWindow::new(width,slide); 16 | let consumer = Rc::new(RefCell::new(SimpleWindowConsumer::new())); 17 | window.register_consumer(consumer.clone()); 18 | window 19 | } 20 | fn add(window: &mut ImarsWindow, size:i32){ 21 | for i in 1..size{ 22 | window.add(i,i); 23 | } 24 | } 25 | fn start_add_test(bench: &mut Bencher, width: i32, slide:i32, size:i32){ 26 | let mut window = create_window(width,slide); 27 | bench.iter(|| { 28 | add(&mut
window,size) 29 | }); 30 | } 31 | fn add_100(bench: &mut Bencher) { 32 | start_add_test(bench,100,10,100000); 33 | } 34 | fn add_1000(bench: &mut Bencher) { 35 | start_add_test(bench,1000,10,100000); 36 | 37 | } 38 | fn add_10000(bench: &mut Bencher) { 39 | start_add_test(bench,10000,10,100000); 40 | } 41 | 42 | 43 | fn update(window: &mut ImarsWindow, width:i32, size:i32){ 44 | for i in 1..size{ 45 | if i / width > 0 && (i % width) == width - width/10 { 46 | window.add(i-width/2,i); 47 | }else{ 48 | window.add(i,i); 49 | } 50 | } 51 | } 52 | fn start_update_test(bench: &mut Bencher, width: i32, slide:i32, size:i32){ 53 | let mut window = create_window(width,slide); 54 | bench.iter(|| { 55 | update(&mut window,width,size); 56 | }); 57 | } 58 | fn update_100(bench: &mut Bencher) { 59 | start_update_test(bench,100,10,100000); 60 | } 61 | fn update_1000(bench: &mut Bencher) { 62 | start_update_test(bench,1000,10,100000); 63 | 64 | } 65 | fn update_10000(bench: &mut Bencher) { 66 | start_update_test(bench,10000,10,100000); 67 | } 68 | 69 | fn pipeline(bench: &mut Bencher){ 70 | bench.iter(|| { 71 | let mut window = TimeWindow::new(10, 5); 72 | 73 | let mut data = "{?a in ?b.?b in ?c}=>{?a in ?c}\n".to_owned(); 74 | for i in 0..500{ 75 | data += format!(":{} in :{}.\n",i+1,i).as_str(); 76 | } 77 | 78 | 79 | 80 | let mut reasoner = WindowReasoner::new(); 81 | 82 | let (mut content, mut rules) = Parser::parse(data.to_string(), &mut reasoner.store.encoder); 83 | reasoner.store.add_rules(rules); 84 | let mut consumer = Rc::new(RefCell::new(reasoner)); 85 | window.register_consumer(consumer.clone()); 86 | 87 | 88 | content.into_iter().enumerate().for_each(|(i, t)| window.add(t, i as i32)); 89 | }); 90 | 91 | } 92 | fn test_transitive_rule(bench: &mut Bencher){ 93 | bench.iter(|| { 94 | let mut data = "{?a in ?b.?b in ?c}=>{?a in ?c}\n".to_owned(); 95 | for i in 0..100 { 96 | data += format!(":{} in :{}.\n", i + 1, i).as_str(); 97 | } 98 | let mut store = TripleStore::from(data.as_str()); 99 | store.materialize(); 100 | }); 101 | } 102 | benchmark_group!(benches, test_transitive_rule); 103 | benchmark_main!(benches); -------------------------------------------------------------------------------- /lib/benches/hierarchies.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate bencher; 3 | 4 | use std::cell::RefCell; 5 | use std::rc::Rc; 6 | use bencher::Bencher; 7 | use minimal::encoding::Encoder; 8 | 9 | use minimal::imars_window::{ImarsWindow, SimpleWindowConsumer}; 10 | use minimal::time_window::TimeWindow; 11 | use minimal::parser::Parser; 12 | use minimal::pipeline::WindowReasoner; 13 | use minimal::ruleindex::RuleIndex; 14 | use minimal::tripleindex::TripleIndex; 15 | use minimal::triples::{Rule, Triple, VarOrTerm}; 16 | use minimal::TripleStore; 17 | 18 | fn infer_hierarchy(max_depth: i32) { 19 | let mut data = ":a a :U0\n".to_owned(); 20 | for i in 0..max_depth { 21 | data += format!("{{?a a :U{}}}=>{{?a a :U{}}}\n", i, i + 1).as_str(); 22 | data += format!("{{?a a :U{}}}=>{{?a a :J{}}}\n", i, i + 1).as_str(); 23 | data += format!("{{?a a :U{}}}=>{{?a a :Q{}}}\n", i, i + 1).as_str(); 24 | } 25 | let mut store = TripleStore::from(data.as_str()); 26 | store.materialize(); 27 | } 28 | fn infer_hierarchy_rdf_rule(max_depth: i32) { 29 | let mut data = ":a a :U0\n\ 30 | {?a :subClassOf ?b.?b :subClassOf ?c}=>{?a :subClassOf ?c}\n".to_owned(); 31 | for i in 0..max_depth { 32 | data += format!(":U{} :subClassOf :U{}.\n", i, i + 
1).as_str(); 33 | data += format!(":U{} :subClassOf :J{}.\n", i, i + 1).as_str(); 34 | data += format!(":U{} :subClassOf :Q{}.\n", i, i + 1).as_str(); 35 | } 36 | let mut store = TripleStore::from(data.as_str()); 37 | store.materialize(); 38 | } 39 | fn test_hierarchy_10000(bench: &mut Bencher){ 40 | bench.iter(|| { 41 | let max_depth = 10000; 42 | infer_hierarchy(max_depth); 43 | }); 44 | } 45 | fn test_hierarchy_1000(bench: &mut Bencher){ 46 | bench.iter(|| { 47 | let max_depth = 1000; 48 | infer_hierarchy(max_depth); 49 | }); 50 | } 51 | fn test_hierarchy_100(bench: &mut Bencher){ 52 | bench.iter(|| { 53 | let max_depth = 100; 54 | infer_hierarchy(max_depth); 55 | }); 56 | } 57 | fn test_hierarchy_10(bench: &mut Bencher){ 58 | bench.iter(|| { 59 | let max_depth = 10; 60 | infer_hierarchy(max_depth); 61 | }); 62 | } 63 | fn test_rdf_hierarchy_10000(bench: &mut Bencher){ 64 | bench.iter(|| { 65 | let max_depth = 10000; 66 | infer_hierarchy_rdf_rule(max_depth); 67 | }); 68 | } 69 | fn test_rdf_hierarchy_1000(bench: &mut Bencher){ 70 | bench.iter(|| { 71 | let max_depth = 1000; 72 | infer_hierarchy_rdf_rule(max_depth); 73 | }); 74 | } 75 | fn test_rdf_hierarchy_100(bench: &mut Bencher){ 76 | bench.iter(|| { 77 | let max_depth = 100; 78 | infer_hierarchy_rdf_rule(max_depth); 79 | }); 80 | } 81 | fn test_rdf_hierarchy_10(bench: &mut Bencher){ 82 | bench.iter(|| { 83 | let max_depth = 10; 84 | infer_hierarchy_rdf_rule(max_depth); 85 | }); 86 | } 87 | benchmark_group!(benches, test_hierarchy_10,test_rdf_hierarchy_10,test_hierarchy_100,test_rdf_hierarchy_100,test_hierarchy_1000,test_rdf_hierarchy_1000); 88 | benchmark_main!(benches); -------------------------------------------------------------------------------- /lib/minimal.iml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | -------------------------------------------------------------------------------- /lib/src/backwardchaining.rs: -------------------------------------------------------------------------------- 1 | use std::rc::Rc; 2 | use crate::{Binding, Rule, RuleIndex, Triple, TripleIndex, VarOrTerm, Encoder,TripleStore}; 3 | #[cfg(not(test))] 4 | use log::{info, warn,trace, debug}; // Use log crate when building application 5 | 6 | #[cfg(test)] 7 | use std::{println as info, println as warn, println as trace, println as debug}; 8 | 9 | pub struct BackwardChainer; 10 | 11 | impl BackwardChainer { 12 | pub fn eval_backward(triple_index: &TripleIndex, rule_index: &RuleIndex, rule_head: &Triple) -> Binding { 13 | let sub_rules: Vec<(Rc, Vec<(usize, usize)>)> = Self::find_subrules(rule_index, rule_head); 14 | let mut all_bindings = Binding::new(); 15 | for (sub_rule, var_subs) in sub_rules.into_iter() { 16 | debug!("Backchainging rule: {:?}",TripleStore::decode_rule(&sub_rule)); 17 | let mut rule_bindings = Binding::new(); 18 | for rule_atom in &sub_rule.body { 19 | debug!("Matching body: {:?}",TripleStore::decode_triple(rule_atom)); 20 | 21 | if let Some(result_bindings) = triple_index.query(rule_atom, None) { 22 | debug!(" Found matching body: {:?}",TripleStore::decode_bindings(&result_bindings)); 23 | 24 | rule_bindings = rule_bindings.join(&result_bindings); 25 | 26 | } 27 | //recursive call 28 | let recursive_bindings = Self::eval_backward(triple_index, rule_index, rule_atom); 29 | rule_bindings.combine(recursive_bindings); 30 | } 31 | //rename variables 32 | let renamed = rule_bindings.rename(var_subs); 33 | all_bindings.combine(renamed); 34 | } 35 | 
all_bindings 36 | } 37 | //todo create index on rule heads 38 | pub fn find_subrules(rules_index: &RuleIndex, rule_head: &Triple) -> Vec<(Rc, Vec<(usize, usize)>)> { 39 | let mut rule_matches = Vec::new(); 40 | for rule in rules_index.rules.iter() { 41 | let head: &Triple = &rule.head; 42 | let mut var_names_subs: Vec::<(usize, usize)> = Vec::new(); 43 | if Self::eval_triple_element(&head.s, &rule_head.s, &mut var_names_subs) && 44 | Self::eval_triple_element(&head.p, &rule_head.p, &mut var_names_subs) && 45 | Self::eval_triple_element(&head.o, &rule_head.o, &mut var_names_subs) { 46 | rule_matches.push((rule.clone(), var_names_subs)); 47 | } 48 | } 49 | rule_matches 50 | } 51 | fn eval_triple_element(left: &VarOrTerm, right: &VarOrTerm, var_names_sub: &mut Vec<(usize, usize)>) -> bool { 52 | if let (VarOrTerm::Var(left_name), VarOrTerm::Var(right_name)) = (left, right) { 53 | var_names_sub.push((left_name.name, right_name.name)); 54 | true 55 | } else { 56 | left.eq(right) 57 | } 58 | } 59 | } 60 | 61 | #[cfg(test)] 62 | mod tests { 63 | use std::collections::HashMap; 64 | use crate::{BackwardChainer, Encoder, Syntax, Triple, TripleStore, VarOrTerm}; 65 | 66 | #[test] 67 | fn test(){ 68 | let triples = "@prefix rdf: . 69 | @prefix rdfs: . 70 | @prefix : . 71 | :sensor1 rdf:type :Sensor. 72 | :sensor1 :observes :temp. 73 | :temp rdf:type :Temp. 74 | :obs rdf:type :Observation. 75 | :obs :madeBySensor :sensor1. 76 | :obs :observedProperty :temp. 77 | "; 78 | 79 | let rules ="@prefix : . 80 | @prefix rdf: . 81 | {?x rdf:type :Observation. ?x :madeBySensor ?y. ?y rdf:type :TempSensor}=>{?x rdf:type :TempObservation.} 82 | {?x rdf:type :Sensor. ?x :observes ?y. ?y rdf:type :Temp}=>{?x rdf:type :TempSensor.}. 83 | {?x rdf:type :TempObservation} => {?x rdf:type :EnvironmentObservation.}. 
84 | "; 85 | 86 | let mut store = TripleStore::new(); 87 | store.load_triples(triples, Syntax::Turtle); 88 | store.load_rules(rules); 89 | 90 | //backward head 91 | let backward_head = Triple::from("?x".to_string(),"".to_string(),"".to_string()); 92 | let var_encoded= Encoder::add("x".to_string()); 93 | let result_encoded = Encoder::add("".to_string()); 94 | 95 | let bindings = BackwardChainer::eval_backward(&store.triple_index, &store.rules_index, &backward_head); 96 | let result_bindings = HashMap::from([ 97 | (var_encoded, Vec::from([result_encoded])) 98 | ]); 99 | 100 | assert_eq!(1,bindings.len()); 101 | assert_eq!(result_bindings.get(&var_encoded), bindings.get(&var_encoded)); 102 | } 103 | #[test] 104 | fn test_eval_backward_rule(){ 105 | let data=" a test:SubClass.\n\ 106 | test:hasRef .\n\ 107 | test:hasRef .\n\ 108 | a test:SubClass.\n\ 109 | {?s a test:SubClass.}=>{?s a test:SubClass2.}\n 110 | {?s a test:SubClass2.?s test:hasRef ?b.?b test:hasRef ?c.?c a test:SubClass2.}=>{?s a test:SuperType.}"; 111 | let mut store = TripleStore::from(data); 112 | let backward_head = Triple{s:VarOrTerm::new_var("?newVar".to_string()),p:VarOrTerm::new_term("a".to_string()),o:VarOrTerm::new_term("test:SuperType".to_string()), g: None}; 113 | let var_encoded= Encoder::add("?newVar".to_string()); 114 | let result_encoded = Encoder::add("".to_string()); 115 | 116 | let bindings = BackwardChainer::eval_backward(&store.triple_index, &store.rules_index, &backward_head); 117 | let result_bindings = HashMap::from([ 118 | (var_encoded, Vec::from([result_encoded])) 119 | ]); 120 | assert_eq!(result_bindings.get(&12), bindings.get(&12)); 121 | } 122 | } 123 | 124 | -------------------------------------------------------------------------------- /lib/src/bindings.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::path::Iter; 3 | 4 | #[derive(Debug, Clone, Eq, PartialEq)] 5 | pub struct Binding { 6 | bindings: HashMap>, 7 | } 8 | impl Binding { 9 | pub fn new() -> Binding { 10 | Binding { bindings: HashMap::new() } 11 | } 12 | pub fn add(& mut self, var_name: &usize, term: usize) { 13 | if !self.bindings.contains_key(var_name){ 14 | self.bindings.insert(*var_name, Vec::new()); 15 | } 16 | let mut binding_values= self.bindings.get_mut(var_name).unwrap(); 17 | binding_values.push(term); 18 | } 19 | pub fn len(&self) -> usize{ 20 | if let Some(values) = self.bindings.values().into_iter().next(){ 21 | return values.len(); 22 | } 23 | 0 24 | } 25 | pub fn iter(&self) -> std::collections::hash_map::Iter<'_, usize, Vec> { 26 | self.bindings.iter() 27 | } 28 | pub fn get(&self,key:&usize)->Option<&Vec>{ 29 | self.bindings.get(key) 30 | } 31 | pub fn join(& self, join_binding: & Binding) -> Binding { 32 | let mut left = self; 33 | let mut right = join_binding; 34 | if left.len() == 0 {return right.clone();} 35 | if right.len() == 0 {return left.clone();} 36 | let mut result = Binding::new(); 37 | if left.len()= left.bindings.keys().into_iter().filter(|k|right.bindings.contains_key(*k)).collect(); 43 | 44 | for left_c in 0..left.len(){ 45 | for right_c in 0..right.len(){ 46 | // iterate over all join keys 47 | let mut match_keys=true; 48 | for join_key in &join_keys{ 49 | let left_term = left.bindings.get(*join_key).unwrap().get(left_c).unwrap(); 50 | let right_term = right.bindings.get(*join_key).unwrap().get(right_c).unwrap(); 51 | if left_term != right_term{ 52 | match_keys = false; 53 | break; 54 | } 55 | } 56 | if match_keys{ 57 | 
left.bindings.keys().into_iter() 58 | .for_each(|k|result.add(k,left.bindings.get(k).unwrap().get(left_c).unwrap().clone())); 59 | //add right data (without the current key 60 | right.bindings.keys().into_iter() 61 | .filter(|k|!left.bindings.contains_key(*k)) 62 | .for_each(|k|result.add(k,right.bindings.get(k).unwrap().get(right_c).unwrap().clone())); 63 | } 64 | } 65 | } 66 | result 67 | } 68 | pub fn combine(&mut self, to_combine: Binding) { 69 | let binding_size = self.bindings.values().map(|v|v.len()).max().unwrap_or(1); 70 | for (k,v) in to_combine.bindings{ 71 | if !self.bindings.contains_key(&k){ 72 | self.bindings.insert(k,Vec::new()); 73 | } 74 | let mut add_vec = self.bindings.get_mut(&k).unwrap(); 75 | for value in v{ 76 | for _ in 0..binding_size { 77 | add_vec.push(value); 78 | } 79 | } 80 | } 81 | } 82 | pub fn rename(&self, var_subs: Vec<(usize, usize)>) -> Binding { 83 | let mut renamed = Binding::new(); 84 | for (orig_name, new_name) in var_subs{ 85 | if let Some(bound_value) = self.bindings.get(&orig_name){ 86 | renamed.bindings.insert(new_name,bound_value.clone()); 87 | } 88 | 89 | } 90 | renamed 91 | } 92 | pub fn remove_vars(&mut self, var_names: &[usize]) { 93 | for var_name in var_names{ 94 | self.bindings.remove(var_name); 95 | } 96 | } 97 | pub fn retain_vars(&mut self, var_names: &[usize]) { 98 | self.bindings.retain(|k,_|var_names.contains(k)); 99 | } 100 | pub fn vars (&self) -> Vec<&usize>{ 101 | self.bindings.keys().collect() 102 | } 103 | 104 | } -------------------------------------------------------------------------------- /lib/src/csprite.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashSet; 2 | use std::rc::Rc; 3 | use crate::{BackwardChainer, Encoder, Reasoner, Rule, RuleIndex, Triple, TripleIndex, TripleStore}; 4 | #[cfg(not(test))] 5 | use log::{info, warn,trace}; // Use log crate when building application 6 | use std::fmt::Write; 7 | 8 | #[cfg(test)] 9 | use std::{println as info, println as warn, println as trace}; 10 | use std::cell::RefCell; 11 | use crate::imars_window::{ImarsWindow, WindowConsumer}; 12 | use crate::reasoner::CSpriteReasoner; 13 | 14 | pub struct CSprite{ 15 | pub rules: Vec, 16 | pub rules_index: RuleIndex, 17 | pub triple_index : TripleIndex, 18 | window_reasoner: CSpriteReasoner, 19 | reasoner: Reasoner, 20 | imars: ImarsWindow 21 | } 22 | 23 | impl CSprite{ 24 | pub fn new() -> CSprite{ 25 | CSprite{rules: Vec::new(), rules_index: RuleIndex::new(), triple_index: TripleIndex::new(), window_reasoner: CSpriteReasoner{ } , reasoner: Reasoner{} , imars: ImarsWindow::new_no_window() } 26 | 27 | } 28 | pub fn from(data:&str) -> CSprite{ 29 | let triple_store = TripleStore::from(&data); 30 | CSprite{rules: triple_store.rules, rules_index: triple_store.rules_index , triple_index: triple_store.triple_index, window_reasoner: CSpriteReasoner{ }, reasoner: Reasoner{},imars: ImarsWindow::new_no_window() } 31 | } 32 | pub fn window_update(&mut self, new_data: Vec<(i32, Rc)>, old_data: Vec<(i32, Rc)>, last_ts:&i32){ 33 | println!("New data: {:?}",Self::decode_triples(&new_data)); 34 | println!("Old data: {:?}",Self::decode_triples(&old_data)); 35 | 36 | //remove expired data 37 | let old_items = self.imars.remove_old_elements(last_ts); 38 | println!("Deleting expired: {:?}",Self::decode_triples(&old_items)); 39 | 40 | old_items.into_iter().for_each(|(_ts,item)|self.triple_index.remove_ref(&item)); 41 | 42 | //add new data 43 | 44 | new_data.iter().for_each(|(ts, triple)|{ 45 
| self.imars.add_without_update(triple.clone(),*ts); 46 | self.add_ref(triple.clone()); 47 | }); 48 | let materialization = self.window_reasoner.materialize(&new_data,&mut self.triple_index,&self.rules_index,&mut self.imars); 49 | println!("inferred data: {:?}",Self::decode_triples(&materialization)); 50 | 51 | //add materialization to maintenance program 52 | //materialization.into_iter().for_each(|(ts,t)|self.imars.add_without_update(t,ts)); 53 | } 54 | fn decode_triples(triples: &Vec<(i32,Rc)>) -> String { 55 | let mut res = String::new(); 56 | for (ts,triple) in triples { 57 | let decoded_s = Encoder::decode(&triple.s.to_encoded()).unwrap(); 58 | let decoded_p = Encoder::decode(&triple.p.to_encoded()).unwrap(); 59 | let decoded_o = Encoder::decode(&triple.o.to_encoded()).unwrap(); 60 | 61 | write!(&mut res, "{} {} {} @ {}.\n", decoded_s, decoded_p, decoded_o,ts).unwrap(); 62 | } 63 | res 64 | } 65 | 66 | pub fn add(&mut self, triple: Triple){ 67 | trace!{"Adding triple: {:?}", self.decode_triple(&triple) } 68 | self.triple_index.add(triple); 69 | } 70 | pub fn add_ref(&mut self, triple: Rc){ 71 | trace!{"Adding triple: {:?}", self.decode_triple(triple.as_ref()) } 72 | self.triple_index.add_ref(triple); 73 | } 74 | pub fn remove_ref(&mut self, triple: Rc){ 75 | trace!{"Removing triple: {:?}", self.decode_triple(triple.as_ref()) } 76 | self.triple_index.remove_ref(&triple); 77 | } 78 | pub fn add_rules(&mut self, rules: Vec) { 79 | rules.into_iter().for_each(|rule|self.rules_index.add(rule)); 80 | } 81 | pub fn len(&self) -> usize{ 82 | self.triple_index.len() 83 | } 84 | fn decode_triple(&self, triple: &Triple) -> String { 85 | let s = Encoder::decode(&triple.s.to_encoded()).unwrap(); 86 | let p = Encoder::decode(&triple.p.to_encoded()).unwrap(); 87 | let o = Encoder::decode(&triple.o.to_encoded()).unwrap(); 88 | format!("{} {} {}",s,p,o) 89 | } 90 | pub fn materialize_window(&mut self, window: Rc>>) -> Vec<(i32, Triple)>{ 91 | //self.window_reasoner.materialize(&mut self.triple_index, &self.rules_index, &self.imars) 92 | Vec::new() 93 | } 94 | pub fn materialize(&mut self) -> Vec{ 95 | self.reasoner.materialize(&mut self.triple_index, &self.rules_index) 96 | } 97 | pub fn clear(&mut self){ 98 | self.triple_index.clear(); 99 | 100 | } 101 | pub(crate) fn compute_sprite(&mut self, query: &Triple) { 102 | let (backward_rules, hierarcies) = self.eval_backward_csprite( query); 103 | 104 | // new rules 105 | let mut new_rules: Vec> = backward_rules.into_iter().filter(|r|r.body.len()>1).collect(); 106 | for hierarchy in hierarcies{ 107 | let rewritten_hierarchy = Self::rewrite_hierarchy(&hierarchy); 108 | rewritten_hierarchy.into_iter().for_each(|r|new_rules.push(Rc::new(r))); 109 | } 110 | 111 | // new rule index 112 | let mut parsed_rules_index = RuleIndex::new(); 113 | for rule in new_rules.iter(){ 114 | parsed_rules_index.add_ref(rule); 115 | } 116 | self.rules_index = parsed_rules_index; 117 | } 118 | fn eval_backward_csprite(&self, rule_head: &Triple)->(HashSet>, Vec>>){ 119 | //TODO check cycles 120 | let mut matched_rules = HashSet::new(); 121 | let mut hierarchies = Vec::new(); 122 | self.eval_backward_csprite_helper(rule_head,&mut matched_rules,false, &mut hierarchies); 123 | (matched_rules, hierarchies) 124 | //self.eval_backward_csprite_helper_with_stack(rule_head) 125 | } 126 | fn eval_backward_csprite_helper(&self, rule_head: &Triple, matched_rules: &mut HashSet>, hierarchy:bool, hierarchies: &mut Vec>>){ 127 | //TODO check cycles 128 | let sub_rules : Vec<(Rc, Vec<(usize, 
usize)>)> = BackwardChainer::find_subrules(&self.rules_index,rule_head); 129 | let mut current_hierarchy= false; 130 | for (sub_rule,var_subs) in sub_rules.into_iter(){ 131 | if matched_rules.insert(sub_rule.clone()) { 132 | if sub_rule.body.len() == 1{ 133 | //hierarchy candidate 134 | if hierarchy{ 135 | if let Some(current_hierarchy) = hierarchies.last_mut(){ 136 | current_hierarchy.push(sub_rule.clone()); 137 | } 138 | } 139 | else{ 140 | hierarchies.push(Vec::from([sub_rule.clone()])); 141 | } 142 | current_hierarchy = true; 143 | } 144 | for rule_atom in &sub_rule.body { 145 | //recursive call 146 | self.eval_backward_csprite_helper(rule_atom,matched_rules,current_hierarchy, hierarchies); 147 | } 148 | } 149 | 150 | } 151 | } 152 | fn eval_backward_csprite_helper_with_stack(&self, rule_head: &Triple)->(HashSet>, Vec>>){ 153 | //TODO check cycles 154 | let mut stack = Vec::from([rule_head.clone()]); //TODO add initial size & pointers instead of triples 155 | let mut matched_rules = HashSet::new(); 156 | let mut hierarchies: Vec>> = Vec::new(); 157 | let mut hierarchy = false; 158 | while !stack.is_empty() { 159 | let current_head = stack.pop().unwrap(); 160 | let sub_rules: Vec<(Rc, Vec<(usize, usize)>)> = BackwardChainer::find_subrules(&self.rules_index,¤t_head); 161 | let mut current_hierarchy = false; 162 | for (sub_rule, var_subs) in sub_rules.into_iter() { 163 | if matched_rules.insert(sub_rule.clone()) { 164 | if sub_rule.body.len() == 1 { 165 | //hierarchy candidate 166 | if hierarchy { 167 | if let Some(current_hierarchy) = hierarchies.last_mut() { 168 | current_hierarchy.push(sub_rule.clone()); 169 | } 170 | } else { 171 | hierarchies.push(Vec::from([sub_rule.clone()])); 172 | } 173 | current_hierarchy = true; 174 | } 175 | for rule_atom in &sub_rule.body { 176 | //recursive call 177 | //self.eval_backward_csprite_helper(rule_atom,matched_rules,current_hierarchy, hierarchies); 178 | stack.push(rule_atom.clone()); 179 | hierarchy = current_hierarchy; 180 | } 181 | } 182 | } 183 | } 184 | (matched_rules, hierarchies) 185 | } 186 | fn rewrite_hierarchy(rules: &Vec>) -> Vec{ 187 | let mut new_rules = Vec::new(); 188 | if rules.len() >0 { 189 | let new_head = &rules.get(0).unwrap().head; 190 | for rule in rules.iter(){ 191 | new_rules.push(Rule{body: rule.body.clone(), head: new_head.clone()}) 192 | } 193 | } 194 | 195 | new_rules 196 | } 197 | } 198 | #[cfg(test)] 199 | mod tests { 200 | use std::collections::HashMap; 201 | use std::rc::Rc; 202 | use crate::{Rule, Triple, TripleStore, TermImpl, VarOrTerm, RuleIndex, TripleIndex, Encoder, SimpleQueryEngine, QueryEngine, Parser, BackwardChainer}; 203 | use crate::csprite::CSprite; 204 | use crate::reasoner::Reasoner; 205 | 206 | #[test] 207 | fn test_sprite_compute(){ 208 | let data=" a test:SubClass.\n\ 209 | test:hasRef .\n\ 210 | test:hasRef .\n\ 211 | a test:SubClassH1.\n\ 212 | {?s a test:SubClass.}=>{?s a test:SubClass2.}\n\ 213 | {?s a test:SubClass2.}=>{?s a test:SubClass.}\n\ 214 | {?s a test:SubClass0.}=>{?s a test:SubClass2.}\n\ 215 | {?s a test:SubClass01.}=>{?s a test:SubClass0.}\n\ 216 | {?s a test:SubClassH1.}=>{?s a test:SubClassH.}\n\ 217 | {?s a test:SubClassH2.}=>{?s a test:SubClassH1.}\n\ 218 | {?s a test:SubClassH22.}=>{?s a test:SubClassH1.}\n\ 219 | {?s a test:SubClass2.?s test:hasRef ?b.?b test:hasRef ?c.?c a test:SubClassH.}=>{?s a test:SuperType.}\n\ 220 | {?super a test:SuperType.}=>{?super a test:SuperType3.}"; 221 | let mut store = CSprite::from(data); 222 | 223 | let backward_head = Triple { s: 
VarOrTerm::new_var("?newVar".to_string()), p: VarOrTerm::new_term("a".to_string()), o: VarOrTerm::new_term("test:SuperType".to_string()), g:None }; 224 | 225 | 226 | //assert_eq!(4,store.len()); 227 | let validation_triple = Triple { s: VarOrTerm::new_term("".to_string()), p: VarOrTerm::new_term("a".to_string()), o: VarOrTerm::new_term("test:SuperType".to_string()), g: None }; 228 | 229 | store.compute_sprite(&backward_head); 230 | store.materialize(); 231 | assert_eq!(true, store.triple_index.contains(&validation_triple)); 232 | assert_eq!(7,store.len()); 233 | 234 | } 235 | //todo move to benchmark 236 | #[test] 237 | fn test_sprite_compute_hierarchy(){ 238 | let timer_load = ::std::time::Instant::now(); 239 | 240 | let size = 10; 241 | let mut data = String::new(); 242 | for i in 0..size{ 243 | data += &format!(" a test:SubClass0.\n",i); 244 | data += &format!("{{?s a test:SubClass{}.}}=>{{?s a test:SubClass{}.}}\n",i,(i+1)); 245 | } 246 | let mut store = CSprite::from(data.as_str()); 247 | 248 | let backward_head = Triple{s:VarOrTerm::new_var("?newVar".to_string()),p:VarOrTerm::new_term("a".to_string()),o:VarOrTerm::new_term(format!("test:SubClass{}", size)), g: None}; 249 | 250 | let load_time = timer_load.elapsed(); 251 | println!("Load Time: {:.2?}", load_time); 252 | assert_eq!(size,store.len()); 253 | let timer_load = ::std::time::Instant::now(); 254 | store.compute_sprite(&backward_head); 255 | let csprite_time = timer_load.elapsed(); 256 | println!("CSprite Time: {:.2?}", csprite_time); 257 | let timer_load = ::std::time::Instant::now(); 258 | store.materialize(); 259 | assert_eq!(2*size,store.len()); 260 | let load_time = timer_load.elapsed(); 261 | println!("Materialization Time: {:.2?}", load_time); 262 | 263 | } 264 | 265 | #[test] 266 | fn test_rewrite_hierarchy_csprite(){ 267 | let data=" a test:SubClass.\n\ 268 | {?s a test:SubClassH1.}=>{?s a test:SubClassH.}\n\ 269 | {?s a test:SubClassH2.}=>{?s a test:SubClassH1.}\n\ 270 | {?s a test:SubClassH3.}=>{?s a test:SubClassH2.}"; 271 | let ( _content, rules) = Parser::parse(data.to_string()); 272 | println!("{:?}",rules); 273 | 274 | let rc_rules = rules.into_iter().map(|x|Rc::new(x)).collect(); 275 | let rewritten_rules = CSprite::rewrite_hierarchy(&rc_rules); 276 | println!("{:?}",rewritten_rules); 277 | } 278 | // #[test] 279 | // fn test_transitive(){ 280 | // let rules ="{?a in ?b.?b in ?c}=>{?a in ?c}"; 281 | // let data =":1 in :0.\n\ 282 | // :2 in :1.\n\ 283 | // :3 in :2.\n\ 284 | // :4 in :3.\n\ 285 | // :5 in :4.\n\ 286 | // :6 in :5"; 287 | // let csprite = CSprite::from_with_window(rules, 4, 2); 288 | // let (mut content, mut rules) = Parser::parse(data.to_string(), &mut csprite.borrow_mut().encoder); 289 | // 290 | // 291 | // 292 | // 293 | // 294 | // content.into_iter().enumerate().for_each(|(i, t)| csprite.borrow_mut().window.add(t, i as i32)); 295 | // 296 | // //contains 4 triples and 1 inferred triple 297 | // assert_eq!(19, csprite.borrow_mut().window.len()); 298 | // } 299 | } -------------------------------------------------------------------------------- /lib/src/dred.rs: -------------------------------------------------------------------------------- 1 | use crate::{BackwardChainer, Binding, Encoder, QueryEngine, Reasoner, Rule, RuleIndex, SimpleQueryEngine, Triple, TripleIndex, TripleStore, VarOrTerm}; 2 | #[cfg(not(test))] 3 | use log::{info, warn,trace}; // Use log crate when building application 4 | use std::fmt::Write; 5 | 6 | #[cfg(test)] 7 | use std::{println as info, println as warn, println 
as trace}; 8 | use std::rc::Rc; 9 | use crate::utils::Utils; 10 | 11 | pub struct DRed{ 12 | pub rules: Vec, 13 | pub rules_index: RuleIndex, 14 | pub triple_index : TripleIndex, 15 | reasoner: Reasoner, 16 | } 17 | 18 | impl DRed{ 19 | fn new() -> Self{ 20 | Self{rules: Vec::new(), rules_index: RuleIndex::new(), triple_index: TripleIndex::new(), reasoner: Reasoner{} } 21 | } 22 | pub fn from(data:&str) -> Self{ 23 | let triple_store = TripleStore::from(&data); 24 | Self{rules: triple_store.rules, rules_index: triple_store.rules_index , triple_index: triple_store.triple_index, reasoner: Reasoner{} } 25 | } 26 | pub fn add(&mut self, triple: Triple){ 27 | trace!{"Adding triple: {:?}", Utils::decode_triple(&triple) } 28 | self.triple_index.add(triple); 29 | } 30 | pub fn add_ref(&mut self, triple: Rc){ 31 | trace!{"Adding triple: {:?}", Utils::decode_triple(triple.as_ref()) } 32 | self.triple_index.add_ref(triple); 33 | } 34 | pub fn remove_ref(&mut self, triple: Rc){ 35 | //println!("{:?}",self.encoder); 36 | 37 | trace!{"Removing triple: {:?}", Utils::decode_triple(triple.as_ref()) } 38 | // over delete 39 | let mut over_deletion = Vec::new(); 40 | let mut stack = Vec::from([triple.as_ref().clone()]); 41 | 42 | while let Some(current_triple) = stack.pop(){ 43 | println!("Investigating deletion {:?}",Utils::decode_triple(¤t_triple)); 44 | 45 | let matching_rules = self.rules_index.find_match(¤t_triple); 46 | let matching_rules: Vec = matching_rules.clone().into_iter().flat_map(|r| Reasoner::substitute_rule(¤t_triple, r)).collect(); 47 | matching_rules.iter().map(|r|Utils::decode_rule(r)).for_each(|r|println!("Matching rules {:?}",r)); 48 | 49 | let delete_triples = Reasoner::infer_rule_heads(&self.triple_index, None, matching_rules); 50 | delete_triples.into_iter().for_each(|t| { 51 | println!("Marked head for deletion {:?}",Utils::decode_triple(&t)); 52 | if ! over_deletion.contains(&t){ 53 | stack.push(t.clone()); 54 | over_deletion.push(t); 55 | } 56 | }); 57 | 58 | } 59 | 60 | over_deletion.iter().map(|t|Utils::decode_triple(t)).for_each(|t|println!("Overdeleted {:?}",t)); 61 | // delete overdeletion 62 | over_deletion.iter().for_each(|t|self.triple_index.remove_ref(t)); 63 | // delete E- 64 | self.triple_index.remove_ref(&triple); 65 | 66 | let mut test = ("123",true); 67 | let mut ref_test = &mut test; 68 | ref_test.1=false; 69 | let delete_list : Vec<(Triple,bool)>= over_deletion.into_iter().map(|t|(t,false)).collect(); 70 | // Rederivation step 71 | let mut delete_num = delete_list.len()+1; 72 | let mut prev_delete_num = delete_num+1; 73 | while delete_num < prev_delete_num{ 74 | prev_delete_num = delete_num; 75 | for (delete_triple, mut delete_status) in &delete_list { 76 | if !delete_status { 77 | println!("Trying redirive {:?}", Utils::decode_triple( &delete_triple)); 78 | 79 | let matched_rules = Self::find_rules_by_head(&self.rules_index, &delete_triple); 80 | for (matched_rule, bindings) in matched_rules { 81 | println!("\t matched rule {:?}", Utils::decode_rule( &matched_rule)); 82 | 83 | println!("\tBindings {:?}", bindings); 84 | let substitute_rule = Reasoner::substitute_rule_body_with_binding(&matched_rule, &bindings); 85 | substitute_rule.iter().for_each(|r| println!("\t subsitute rule_item {:?}", Utils::decode_triple( r))); 86 | if let Some(_) = SimpleQueryEngine::query(&self.triple_index, &substitute_rule, None) { 87 | if ! 
self.triple_index.contains(delete_triple) { 88 | println!("\t Reinsert {:?}", Utils::decode_triple( &delete_triple)); 89 | self.triple_index.add(delete_triple.clone()); 90 | delete_num -= 1; 91 | delete_status = true; 92 | break; 93 | } 94 | } 95 | } 96 | } 97 | } 98 | } 99 | 100 | } 101 | //todo create index on rule heads 102 | pub(crate) fn find_rules_by_head(rules_index: &RuleIndex, head_triple: &Triple) -> Vec<(Rc, Binding)> { 103 | let mut rule_matches = Vec::new(); 104 | for rule in rules_index.rules.iter() { 105 | let head: &Triple = &rule.head; 106 | let mut binding = Binding::new(); 107 | if Self::eval_triple_element(&head.s, &head_triple.s, &mut binding) && 108 | Self::eval_triple_element(&head.p, &head_triple.p, &mut binding) && 109 | Self::eval_triple_element(&head.o, &head_triple.o, &mut binding) { 110 | rule_matches.push((rule.clone(), binding)); 111 | } 112 | } 113 | rule_matches 114 | } 115 | //todo check if code can be reused 116 | fn eval_triple_element(left: &VarOrTerm, right: &VarOrTerm, bindings: &mut Binding) -> bool { 117 | if let (VarOrTerm::Var(left_name), VarOrTerm::Term(right_name)) = (left, right) { 118 | bindings.add(&left_name.name, right_name.iri); 119 | true 120 | } else { 121 | left.eq(right) 122 | } 123 | } 124 | 125 | pub fn materialize(&mut self) -> Vec{ 126 | self.reasoner.materialize(&mut self.triple_index, &self.rules_index) 127 | } 128 | } 129 | mod test{ 130 | use std::rc::Rc; 131 | use crate::dred::DRed; 132 | use crate::{Triple, VarOrTerm}; 133 | use crate::utils::Utils; 134 | 135 | #[test] 136 | fn test(){ 137 | let data=":john :teaches :math.\n\ 138 | :peter :teaches :math.\n\ 139 | :john :teaches :physics.\n\ 140 | {?s :teaches ?y.}=>{?s a :Person.}\n\ 141 | {?s :teaches ?y.}=>{?y a :Course.}\n\ 142 | {?s a :TA.}=>{?s a :Person.}\n\ 143 | {?s a :Person.?s :teaches ?y.?y a :Course.}=>{?s a :TA.}"; 144 | let mut dred = DRed::from(data); 145 | let inferred = dred.materialize(); 146 | inferred.iter().for_each(|t|println!("{:?}",Utils::decode_triple(t))); 147 | println!("{:?}", inferred); 148 | assert_eq!(9, dred.triple_index.len()); 149 | 150 | let remove_triple = Triple{s:VarOrTerm::new_term(":john".to_string()),p:VarOrTerm::new_term(":teaches".to_string()),o:VarOrTerm::new_term(":math".to_string()), g: None}; 151 | 152 | dred.remove_ref(Rc::new(remove_triple)); 153 | assert_eq!(8, dred.triple_index.len()); 154 | 155 | } 156 | } -------------------------------------------------------------------------------- /lib/src/encoding.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::sync::Mutex; 3 | use once_cell::sync::Lazy; 4 | 5 | 6 | static GLOBAL_ENCODER: Lazy> = Lazy::new(|| {Mutex::new(InternalEncoder::new())}); 7 | 8 | #[derive(Debug, Clone, Eq, PartialEq)] 9 | pub struct InternalEncoder{ 10 | encoded: HashMap, 11 | decoded: HashMap, 12 | counter: usize 13 | } 14 | 15 | 16 | impl InternalEncoder{ 17 | fn new() -> InternalEncoder{ 18 | InternalEncoder{encoded: HashMap::new(), decoded: HashMap::new(), counter:0} 19 | } 20 | fn add(&mut self, uri:String) -> usize{ 21 | if let Some(encoded_uri) = self.encoded.get(&uri){ 22 | return *encoded_uri; 23 | }else{ 24 | self.encoded.insert(uri.clone(),self.counter); 25 | self.decoded.insert(self.counter,uri); 26 | self.counter+=1; 27 | self.counter -1 28 | } 29 | 30 | } 31 | fn get(&self, uri:&str) -> Option{ 32 | if let Some(encoded_uri) = self.encoded.get(uri){ 33 | return Some(*encoded_uri); 34 | }else{ 35 | None 36 | } 37 | 
} 38 | 39 | fn decode(&self, encoded: &usize)->Option<&String>{ 40 | self.decoded.get(encoded) 41 | } 42 | } 43 | #[derive(Debug)] 44 | pub struct Encoder{} 45 | impl Encoder{ 46 | 47 | pub fn add(uri:String) -> usize{ 48 | let mut encoder = GLOBAL_ENCODER.lock().unwrap(); 49 | if let Some(encoded_uri) = encoder.encoded.get(&uri){ 50 | return *encoded_uri; 51 | }else{ 52 | let current_counter = encoder.counter; 53 | encoder.encoded.insert(uri.clone(),current_counter); 54 | encoder.decoded.insert(current_counter,uri); 55 | encoder.counter+=1; 56 | encoder.counter -1 57 | } 58 | 59 | } 60 | pub fn get(uri:&str) -> Option{ 61 | let encoder = GLOBAL_ENCODER.lock().unwrap(); 62 | if let Some(encoded_uri) = encoder.encoded.get(uri){ 63 | return Some(*encoded_uri); 64 | }else{ 65 | None 66 | } 67 | } 68 | 69 | pub fn decode(encoded: &usize)->Option{ 70 | let encoder = GLOBAL_ENCODER.lock().unwrap(); 71 | let decoded = encoder.decoded.get(encoded); 72 | if let Some(decoded_value) = decoded{ 73 | Some(decoded_value.clone()) 74 | }else{ 75 | None 76 | } 77 | } 78 | 79 | } 80 | 81 | #[test] 82 | fn test_encoding(){ 83 | let mut encoder = InternalEncoder::new(); 84 | let _encoded1 = encoder.add("http://test/1".to_string()); 85 | let encoded2 = encoder.add("http://test/2".to_string()); 86 | let encoded3 = encoder.add("http://test/3".to_string()); 87 | let decoded2 = encoder.decode(&encoded2); 88 | let decoded2_again = encoder.decode(&encoded2); 89 | assert_eq!("http://test/2", decoded2.unwrap()); 90 | assert_eq!("http://test/2", decoded2_again.unwrap()); 91 | assert_eq!(2,encoded3); 92 | } -------------------------------------------------------------------------------- /lib/src/imars_reasoner.rs: -------------------------------------------------------------------------------- 1 | use std::cell::RefCell; 2 | use std::rc::{Rc, Weak}; 3 | use crate::imars_window::{ImarsWindow, WindowConsumer}; 4 | use crate::{Encoder, Parser, Triple, TripleStore}; 5 | use crate::csprite::CSprite; 6 | 7 | 8 | struct ImarsReasoner { 9 | store: CSprite, 10 | new: Vec<(i32, Rc)>, 11 | old: Vec<(i32, Rc)>, 12 | window: RefCell>>> 13 | } 14 | impl ImarsReasoner { 15 | pub fn new() -> ImarsReasoner{ 16 | ImarsReasoner{ store: CSprite::new(), new: vec![], old: vec![], window: RefCell::new(Weak::new())} 17 | } 18 | } 19 | 20 | impl WindowConsumer for ImarsReasoner { 21 | 22 | fn update(&mut self, new: Vec<(i32, Rc)>, old: Vec<(i32, Rc)>, ts: i32) -> Vec<(i32, Triple)>{ 23 | println!("Received new: {:?}, old: {:?}", new.len(), old.len()); 24 | new.into_iter().for_each(|(ts, triple)|self.store.add_ref(triple)); 25 | let mat_triples = self.store.materialize_window(self.window.borrow().upgrade().unwrap()); 26 | old.into_iter().for_each(|(ts,t)|self.store.remove_ref(t)); 27 | mat_triples 28 | } 29 | } 30 | 31 | #[test] 32 | #[ignore] 33 | fn test_integration(){ 34 | let window = Rc::new(RefCell::new(ImarsWindow::new(4, 2))); 35 | 36 | let data="{?a a :C9}=>{?a a :C10}\n\ 37 | {?a a :C4}=>{?a a :C10}\n\ 38 | :a a :C0.\n\ 39 | :b a :C1.\n\ 40 | :c a :C2.\n\ 41 | :d a :C3.\n\ 42 | :e a :C4.\n\ 43 | :f a :C5.\n\ 44 | :g a :C6.\n\ 45 | :i a :C7.\n\ 46 | :j a :C8.\n\ 47 | :k a :C9."; 48 | let mut reasoner = ImarsReasoner::new(); 49 | 50 | let (content, rules) = Parser::parse(data.to_string()); 51 | reasoner.store.add_rules(rules); 52 | let consumer = Rc::new(RefCell::new(reasoner)); 53 | *consumer.borrow_mut().window.borrow_mut() = Rc::downgrade(&window); 54 | window.borrow_mut().register_consumer(consumer.clone()); 55 | 56 | 57 | 
content.into_iter().enumerate().for_each(|(i, t)| window.borrow_mut().add(t, i as i32)); 58 | 59 | //contains 4 triples and 1 inferred triple 60 | assert_eq!(5, window.borrow_mut().len()); 61 | 62 | } 63 | #[test] 64 | #[ignore] 65 | fn test_transitive(){ 66 | let window = Rc::new(RefCell::new(ImarsWindow::new(4, 2))); 67 | 68 | let data ="{?a in ?b.?b in ?c}=>{?a in ?c}\n\ 69 | :1 in :0.\n\ 70 | :2 in :1.\n\ 71 | :3 in :2.\n\ 72 | :4 in :3.\n\ 73 | :5 in :4.\n\ 74 | :6 in :5"; 75 | // let csprite = CSprite::from_with_window(rules, 4, 2); 76 | // let (mut content, mut rules) = Parser::parse(data.to_string(), &mut csprite.borrow_mut().encoder); 77 | // 78 | // 79 | // 80 | // 81 | // 82 | // content.into_iter().enumerate().for_each(|(i, t)| csprite.borrow_mut().window.add(t, i as i32)); 83 | // 84 | // //contains 4 triples and 1 inferred triple 85 | // assert_eq!(19, csprite.borrow_mut().window.len()); 86 | let mut reasoner = ImarsReasoner::new(); 87 | 88 | let (content, rules) = Parser::parse(data.to_string()); 89 | reasoner.store.add_rules(rules); 90 | let consumer = Rc::new(RefCell::new(reasoner)); 91 | *consumer.borrow_mut().window.borrow_mut() = Rc::downgrade(&window); 92 | window.borrow_mut().register_consumer(consumer.clone()); 93 | 94 | 95 | content.into_iter().enumerate().for_each(|(i, t)| window.borrow_mut().add(t, i as i32)); 96 | 97 | //contains 4 triples and 1 inferred triple 98 | assert_eq!(5, window.borrow_mut().len()); 99 | } 100 | -------------------------------------------------------------------------------- /lib/src/imars_triple.rs: -------------------------------------------------------------------------------- 1 | use std::cell::RefCell; 2 | use deepmesa::lists::LinkedList; 3 | use std::cmp; 4 | use std::collections::HashMap; 5 | use std::hash::Hash; 6 | use std::rc::Rc; 7 | use deepmesa::lists::linkedlist::Node; 8 | use crate::csprite::CSprite; 9 | use crate::Triple; 10 | 11 | 12 | /// A generic Windowing operator that implements IMaRs functionality. 13 | /// 14 | /// Each window is time-based and has a width and slide parameter to define its size. 15 | /// The window assigner does not duplicate the items in the window across multiple windows but maintains 16 | /// the state of a single window, adding and removing items based on their timestamps. 17 | /// # Examples 18 | /// ``` 19 | /// use lib::imars::ImarsWindow; 20 | /// let mut window :ImarsWindow = ImarsWindow::new(2,2); 21 | /// window.add(100,0); 22 | /// window.add(101,1); 23 | /// window.add(102,2); 24 | /// window.add(103,3); 25 | /// assert_eq!(2,window.len()); 26 | /// ``` 27 | /// IMaRs allows values to be updated with newer timestamps (used for reasoning): 28 | /// 29 | /// ``` 30 | /// use lib::imars::ImarsWindow; 31 | /// let mut window :ImarsWindow = ImarsWindow::new(4,2); 32 | /// window.add(100,0); 33 | /// window.add(101,1); 34 | /// window.add(102,2); 35 | /// window.add(103,3); 36 | /// assert_eq!(4,window.len()); 37 | /// window.add(100,4); 38 | /// assert_eq!(4,window.len()); 39 | /// ``` 40 | /// Consumers can be added to consume the data when the window triggers, i.e.
when the temporal bounds 41 | /// of the window are reached: 42 | /// ``` 43 | /// use std::cell::RefCell; 44 | /// use std::rc::Rc; 45 | /// use lib::imars::{ImarsWindow, SimpleWindowConsumer}; 46 | /// 47 | /// let mut window :ImarsWindow = ImarsWindow::new(2,2); 48 | /// let consumer = Rc::new(RefCell::new(SimpleWindowConsumer::new())); 49 | /// window.register_consumer(consumer.clone()); 50 | /// ``` 51 | 52 | pub struct ImarsWindowTriple { 53 | content: LinkedList<(i32, Rc)>, 54 | consumers: Vec>, 55 | width: i32, 56 | slide: i32, 57 | time: i32, 58 | pending_adds: Vec<(i32,Rc)>, 59 | index: HashMap,Node<(i32,Rc)>> 60 | } 61 | 62 | impl ImarsWindowTriple{ 63 | /// Creates a new time-based window with a certain width and slide 64 | pub fn new(width: i32, slide: i32) -> ImarsWindowTriple{ 65 | ImarsWindowTriple{content: LinkedList::new(), consumers: Vec::new(), width, slide, time: 0, pending_adds: Vec::new(), index: HashMap::new()} 66 | } 67 | /// Adds an item to the window and updates its content. This will either: 68 | /// - Add the item to the window and do nothing more when the new timestamp does not exceed the bounds of the current window 69 | /// - Add the item and update the window, i.e. remove old items that have expired based on their timestamp 70 | /// - Update the item in place when it is already in the window but has an updated timestamp 71 | pub fn add(&mut self, item:Triple, ts:i32) { 72 | 73 | let rc_item = Rc::new(item.clone()); 74 | //check if item is already present 75 | if self.index.contains_key(&rc_item) { 76 | //update the item 77 | self.update(rc_item,ts); 78 | } else { 79 | //add the item 80 | self.pending_adds.push((ts, rc_item.clone())); 81 | self.add_to_list_and_index(rc_item, ts); 82 | } 83 | if self.does_window_trigger(ts){ 84 | self.update_window_open_time(ts); 85 | let old_values = self.progress_time_and_delete_old(&ts); 86 | let consumers = self.consumers.clone(); 87 | consumers.iter().for_each(|mut item|{ 88 | let mut reference = item; 89 | let updates = reference.update(self.pending_adds.clone(), old_values.clone(), ts); 90 | updates.into_iter().for_each(|t| self.add(t,ts)); 91 | }); 92 | 93 | self.pending_adds.clear(); 94 | } 95 | } 96 | fn update(&mut self, item:Rc, ts:i32){ 97 | if let Some(node) = self.index.get(&item){ 98 | // cut node from middle 99 | if let Some(content) = self.content.pop_node(&node){ 100 | //add it to end with updated timestamp 101 | let updated_node_ref = self.content.push_tail((ts,content.1)); 102 | //update the index 103 | self.index.insert(item,updated_node_ref); 104 | } 105 | 106 | } 107 | } 108 | /// Returns the length of the content of the window 109 | pub fn len(&self) -> usize{ 110 | self.content.len() 111 | } 112 | fn add_to_list_and_index(&mut self, item:Rc, ts:i32){ 113 | let node_ref = self.content.push_tail((ts,item.clone())); 114 | //add to index 115 | self.index.insert(item,node_ref); 116 | } 117 | fn get_last_valid_time_for(&self, new_time: &i32) -> i32{ 118 | cmp::max(0,*new_time - self.width) 119 | } 120 | fn does_window_trigger(&mut self, ts: i32) -> bool { 121 | if ts > self.time + self.width { 122 | true 123 | }else{ 124 | false 125 | } 126 | } 127 | fn update_window_open_time(&mut self, ts: i32){ 128 | let mut residue = (ts - self.width)/self.slide; 129 | if (ts - self.width) % self.slide != 0{ 130 | residue +=1; 131 | } 132 | self.time = residue * self.slide; 133 | } 134 | fn progress_time_and_delete_old(&mut self, ts: &i32) -> Vec<(i32,Rc)>{ 135 | let mut old_values = Vec::new(); 136 | let mut
peek = self.content.front(); 137 | while let Some((timestamp, item)) = peek{ 138 | if *timestamp<= self.get_last_valid_time_for(ts){ 139 | if let Some(old_val) = self.content.pop_front(){ 140 | //remove from index 141 | self.index.remove(&old_val.1); 142 | old_values.push(old_val); 143 | } 144 | peek = self.content.front(); 145 | 146 | }else{ 147 | break; 148 | } 149 | } 150 | old_values 151 | } 152 | /// Adds a consumer that will be notified with updates 153 | pub fn register_consumer(&mut self, consumer: Rc) { 154 | self.consumers.push(consumer); 155 | } 156 | } 157 | 158 | 159 | 160 | 161 | 162 | 163 | -------------------------------------------------------------------------------- /lib/src/observer.rs: -------------------------------------------------------------------------------- 1 | //! Observer is a behavioral design pattern that allows some objects to notify other objects about changes in their state. 2 | 3 | trait IObserver { 4 | fn update(&self, new: Vec) -> Vec; 5 | } 6 | 7 | trait ISubject<'a, T: IObserver> { 8 | fn attach(&mut self, observer: &'a T); 9 | fn detach(&mut self, observer: &'a T); 10 | fn notify_observers(&mut self); 11 | fn add_data(&mut self, data: i32); 12 | } 13 | 14 | struct Subject<'a, T: IObserver> { 15 | observers: Vec<&'a T>, 16 | data: Vec 17 | } 18 | impl<'a, T: IObserver + PartialEq> Subject<'a, T> { 19 | fn new() -> Subject<'a, T> { 20 | Subject { 21 | observers: Vec::new(), 22 | data: Vec::new() 23 | } 24 | } 25 | } 26 | 27 | impl<'a, T: IObserver + PartialEq> ISubject<'a, T> for Subject<'a, T> { 28 | fn attach(&mut self, observer: &'a T) { 29 | self.observers.push(observer); 30 | } 31 | fn detach(&mut self, observer: &'a T) { 32 | if let Some(idx) = self.observers.iter().position(|x| *x == observer) { 33 | self.observers.remove(idx); 34 | } 35 | } 36 | fn notify_observers(&mut self) { 37 | for item in self.observers.iter() { 38 | let updates_received = item.update(self.data.clone()); 39 | updates_received.into_iter().for_each(|u| self.data.push(u)); 40 | } 41 | } 42 | 43 | fn add_data(&mut self, data: i32) { 44 | self.data.push(data); 45 | } 46 | } 47 | 48 | #[derive(PartialEq)] 49 | struct ConcreteObserver { 50 | id: i32, 51 | data: i32 52 | } 53 | impl IObserver for ConcreteObserver { 54 | fn update(&self, new: Vec) -> Vec{ 55 | println!("Observer id:{} received event with data {:?}!", self.id, new); 56 | vec![self.data] 57 | } 58 | 59 | } 60 | impl ConcreteObserver{ 61 | fn update_data(&mut self, new_data:i32){ 62 | self.data = new_data; 63 | } 64 | } 65 | #[test] 66 | fn test_observer() { 67 | let mut subject = Subject::new(); 68 | let observer_a = ConcreteObserver { id: 1, data:0 }; 69 | let observer_b = ConcreteObserver { id: 2 , data:0}; 70 | 71 | subject.attach(&observer_a); 72 | subject.attach(&observer_b); 73 | subject.add_data(1337); 74 | subject.notify_observers(); 75 | subject.add_data(1339); 76 | subject.detach(&observer_b); 77 | subject.notify_observers(); 78 | 79 | } -------------------------------------------------------------------------------- /lib/src/parser.rs: -------------------------------------------------------------------------------- 1 | use crate::{Encoder, Rule, Triple, VarOrTerm}; 2 | use rio_turtle::{NTriplesParser, TurtleError,TurtleParser, TriGParser, NQuadsParser}; 3 | use rio_api::parser::{QuadsParser, TriplesParser}; 4 | use rio_api::model::NamedNode; 5 | 6 | mod n3rule_parser; 7 | 8 | pub struct Parser; 9 | #[derive(PartialEq)] 10 | pub enum Syntax {NTriples, Turtle, TriG, NQuads} 11 | 12 | impl Default for Syntax{
13 | fn default() -> Self { 14 | Syntax::NTriples 15 | } 16 | } 17 | 18 | impl Parser { 19 | pub fn parse_triples(data: &str, syntax: Syntax) -> Result, String>{ 20 | if syntax == Syntax::Turtle || syntax == Syntax::NTriples { 21 | Self::parse_triples_helper(data,syntax) 22 | }else{ 23 | Self::parse_quads_helper(data, syntax) 24 | } 25 | 26 | } 27 | fn parse_quads_helper(data: &str, syntax: Syntax) -> Result, String> { 28 | let mut triples = Vec::new(); 29 | let closure_quad = &mut |t: rio_api::model::Quad| { 30 | let s = VarOrTerm::new_term(t.subject.to_string()); 31 | let p = VarOrTerm::new_term(t.predicate.to_string()); 32 | let o = VarOrTerm::new_term(t.object.to_string()); 33 | let g = t.graph_name.map(|g|VarOrTerm::new_term(g.to_string())); 34 | triples.push(Triple { s, p, o, g }); 35 | Ok(()) as Result<(), TurtleError> 36 | }; 37 | 38 | let result = match syntax { 39 | Syntax::TriG => TriGParser::new(data.as_ref(), None).parse_all(closure_quad), 40 | Syntax::NQuads => NQuadsParser::new(data.as_ref()).parse_all(closure_quad), 41 | _ => NQuadsParser::new(data.as_ref()).parse_all(closure_quad) 42 | }; 43 | match result { 44 | Ok(_) =>Ok(triples), 45 | Err(parsing_error) => Err(format!("Parsing error! {:?}", parsing_error.to_string())) 46 | } 47 | 48 | 49 | } 50 | fn parse_triples_helper(data: &str, syntax: Syntax) -> Result, String>{ 51 | let mut triples = Vec::new(); 52 | let closure_triple = &mut |t: rio_api::model::Triple| { 53 | let s = VarOrTerm::new_term(t.subject.to_string()); 54 | let p = VarOrTerm::new_term(t.predicate.to_string()); 55 | let o = VarOrTerm::new_term(t.object.to_string()); 56 | triples.push(Triple { s, p, o, g: None }); 57 | Ok(()) as Result<(), TurtleError> 58 | }; 59 | let result = match syntax { 60 | Syntax::NTriples => NTriplesParser::new(data.as_ref()).parse_all(closure_triple), 61 | Syntax::Turtle => TurtleParser::new(data.as_ref(), None).parse_all(closure_triple), 62 | _=> NTriplesParser::new(data.as_ref()).parse_all(closure_triple), 63 | }; 64 | match result { 65 | Ok(_) =>Ok(triples), 66 | Err(parsing_error) => Err(format!("Parsing error! 
{:?}", parsing_error.to_string())) 67 | } 68 | } 69 | fn parse_triple(data: &str) -> Triple { 70 | let items: Vec<&str> = data.split(" ").collect(); 71 | let s = items.get(0).unwrap(); 72 | let p = items.get(1).unwrap(); 73 | 74 | let o = if items.get(2).unwrap().ends_with(".") { 75 | let mut o_chars = items.get(2).unwrap().chars(); 76 | o_chars.next_back(); 77 | o_chars.as_str() 78 | } else { 79 | items.get(2).unwrap() 80 | }; 81 | let mut convert_item = |item: &&str| { if item.starts_with("?") { VarOrTerm::new_var(item.to_string()) } else { VarOrTerm::new_term(item.to_string()) } }; 82 | let s = convert_item(s); 83 | let p = convert_item(p); 84 | let o = convert_item(&o); 85 | Triple { s, p, o, g: None } 86 | } 87 | fn rem_first_and_last(value: &str) -> &str { 88 | let mut chars = value.chars(); 89 | chars.next(); 90 | chars.next_back(); 91 | chars.as_str() 92 | } 93 | pub fn parse(data: String) -> (Vec, Vec) { 94 | let mut rules = Vec::new(); 95 | let mut content = Vec::new(); 96 | //line by line 97 | for line in data.split("\n") { 98 | if line.contains("=>") { 99 | //process rule 100 | let rule: Vec<&str> = line.split("=>").collect(); 101 | let body = Self::rem_first_and_last(rule.get(0).unwrap()); 102 | let head = Self::rem_first_and_last(rule.get(1).unwrap()); 103 | let head_triple = Self::parse_triple(head); 104 | let mut body_triples = Vec::new(); 105 | for body_triple in body.split(".") { 106 | if body_triple.trim().len() > 0 { 107 | body_triples.push(Self::parse_triple(body_triple.trim())); 108 | } 109 | } 110 | rules.push(Rule { head: head_triple, body: body_triples }) 111 | } else { 112 | //process triple 113 | if line.len() > 0 { 114 | let triple = Self::parse_triple(line); 115 | content.push(triple); 116 | } 117 | } 118 | } 119 | (content, rules) 120 | } 121 | pub fn parse_rules(parse_string: &str) -> Result,&'static str>{ 122 | n3rule_parser::parse(parse_string) 123 | } 124 | } 125 | 126 | mod test{ 127 | use super::*; 128 | #[test] 129 | fn test_parsing(){ 130 | let ntriples_file = " . 131 | \"Foo\" . 132 | . 133 | \"Bar\" ."; 134 | let triples = Parser::parse_triples(ntriples_file, Syntax::NTriples).unwrap(); 135 | assert_eq!(4, triples.len()); 136 | 137 | let trig_file = " , . 138 | \"Foo\" . 139 | ; \"Bar\" ."; 140 | let triples = Parser::parse_triples(trig_file, Syntax::TriG).unwrap(); 141 | assert_eq!(5, triples.len()); 142 | 143 | let turtle = "@prefix schema: . 144 | a schema:Person ; 145 | schema:name \"Foo\" . 146 | a schema:Person ; 147 | schema:name \"Bar\" ."; 148 | let triples = Parser::parse_triples(turtle, Syntax::Turtle).unwrap(); 149 | assert_eq!(4, triples.len()); 150 | 151 | let nquads = " . 152 | \"Foo\" . 153 | . 
154 | \"Bar\" ."; 155 | let triples = Parser::parse_triples(nquads, Syntax::NQuads).unwrap(); 156 | assert_eq!(4, triples.len()); 157 | 158 | let parsing_error = " http://www.w3.org/1999/02/22-rdf-syntax-ns#typehema.org/Person"; 159 | let triples = Parser::parse_triples(parsing_error , Syntax::NQuads); 160 | assert_eq!(true,triples.is_err()); 161 | 162 | } 163 | #[test] 164 | fn test_empty_abox_parsing(){ 165 | let ntriples_file = ""; 166 | let triples = Parser::parse_triples(ntriples_file,Syntax::NTriples).unwrap(); 167 | assert_eq!(0, triples.len()); 168 | } 169 | 170 | #[test] 171 | fn test_error_abox_parsing(){ 172 | let ntriples_file = "asdfadsf"; 173 | match Parser::parse_triples(ntriples_file,Syntax::NTriples){ 174 | Ok(result)=>assert_eq!(0, 1), 175 | Err(err)=>assert_eq!(0, 0) 176 | } 177 | 178 | } 179 | #[test] 180 | fn test_syntactic_sugar_rdf_type(){ 181 | let ntriples_file = " a ."; 182 | match Parser::parse_triples(ntriples_file,Syntax::Turtle){ 183 | Ok(result)=>assert_eq!(1, result.len()), 184 | Err(err)=>assert_eq!(0, 1) 185 | } 186 | 187 | } 188 | #[test] 189 | fn test_white_space_in_rules(){ 190 | let rules = "{?source a test:Source. }=>{?source a test:NeededInput.}"; 191 | match Parser::parse_rules(rules){ 192 | Ok(result)=>assert_eq!(1, result.len()), 193 | Err(err)=>assert_eq!(0, 1) 194 | } 195 | 196 | } 197 | 198 | } -------------------------------------------------------------------------------- /lib/src/parser/n3.pest: -------------------------------------------------------------------------------- 1 | Iri = @{ IriScheme ~ ":" ~ IriHierPart ~ ("?" ~ IriQuery)? ~ ("#" ~ IriFragment)? } 2 | 3 | // NB(@althonos): IriPathAbEmpty is not mandatory if we reach the end of input 4 | // here to allow URLs to hosts without trailing slashes, e.g. 5 | /// 'http://example.com', which would be rejected otherwise. 6 | IriHierPart = { 7 | ("//" ~ IriAuthority ~ (IriPathAbempty | EOI) ) 8 | | IriPathAbsolute 9 | | IriPathRootless 10 | | IriPathEmpty 11 | } 12 | 13 | IriAuthority = ${ (IriUserInfo ~ "@")? ~ IriHost ~ (":" ~ IriPort)?} 14 | IriUserInfo = ${ (IriUnreserved | IriPctEncoded | IriSubDelims | ":")* } 15 | IriHost = ${ IriIpLiteral | IriIpv4Address | IriRegName } 16 | IriRegName = ${ (IriUnreserved | IriPctEncoded | IriSubDelims)* } 17 | 18 | IriPath = ${IriPathAbempty | IriPathAbsolute | IriPathNoScheme | IriPathRootless | IriPathEmpty} 19 | IriPathAbempty = ${ ("/" ~ IriSegment)+ } 20 | IriPathAbsolute = ${ "/" ~ (IriSegmentNz ~ ("/" ~ IriSegment)* )? } 21 | IriPathNoScheme = ${ IriSegmentNzNc ~ ("/" ~ IriSegment)* } 22 | IriPathRootless = ${ IriSegmentNz ~ ("/" ~ IriSegment)* } 23 | IriPathEmpty = ${ "" } 24 | 25 | IriSegment = @{ IriIpChar* } 26 | IriSegmentNz = @{ IriIpChar+ } 27 | IriSegmentNzNc = @{ (IriUnreserved | IriPctEncoded | IriSubDelims | "@")+ } 28 | 29 | IriQuery = @{ (IriIpChar | IriPrivate | "/" | "?")* } 30 | IriFragment = @{ (IriIpChar | "/" | "?")* } 31 | 32 | IriScheme = @{ ASCII_ALPHA ~ (ASCII_ALPHA | ASCII_DIGIT | "+" | "-" | ".")* } 33 | IriPort = @{ ASCII_DIGIT* } 34 | 35 | IriPrivate = ${ '\u{E000}'..'\u{F8FF}' | '\u{F0000}'..'\u{FFFFD}' | '\u{100000}'..'\u{10FFFD}' } 36 | IriPctEncoded = ${ "%" ~ ASCII_HEX_DIGIT ~ ASCII_HEX_DIGIT } 37 | IriUnreserved = @{ ("\\" ~ IriReserved) | ASCII_ALPHA | ASCII_DIGIT | "-" | "." | "_" | "~" } 38 | IriReserved = @{ IriGenDelims | IriSubDelims } 39 | IriGenDelims = @{":" | "/" | "?" | "#" | "[" | "]" | "@"} 40 | IriSubDelims = @{"!" 
| "$" | "&" | "'" | "(" | ")" | "*" | "+" | "," | ";" | "="} 41 | IriDecOctet = ${ 42 | ASCII_DIGIT 43 | | (('1' .. '9') ~ ASCII_DIGIT) 44 | | ("1" ~ ASCII_DIGIT ~ ASCII_DIGIT) 45 | | ("2" ~ ('0' .. '4') ~ ASCII_DIGIT) 46 | | ("25" ~ ('0' .. '5')) 47 | } 48 | 49 | IriIpChar = { IriUnreserved | IriPctEncoded | IriSubDelims | ":" | "@" } 50 | IriIpLiteral = { "[" ~ (IriIpv6Address ~ IriIpvFutureAddress)* ~ "]" } 51 | 52 | IriIpv6H16 = ${ ASCII_HEX_DIGIT{1,4} } 53 | IriIpv6Ls32 = { (IriIpv6H16 ~ ":" ~ IriIpv6H16) | IriIpv4Address } 54 | 55 | IriIpv4Address = ${ IriDecOctet ~ "." ~ IriDecOctet ~ "." ~ IriDecOctet ~ "." ~ IriDecOctet } 56 | IriIpvFutureAddress = ${ "v" ~ ASCII_HEX_DIGIT+ ~ "." ~ (IriUnreserved | IriSubDelims | ":")+ } 57 | IriIpv6Address = ${ 58 | ( (IriIpv6H16 ~ ":"){6} ~ IriIpv6Ls32 ) 59 | | ( "::" ~ (IriIpv6H16 ~ ":"){5} ~ IriIpv6Ls32 ) 60 | | ( IriIpv6H16? ~ "::" ~ (IriIpv6H16 ~ ":"){4} ~ IriIpv6Ls32 ) 61 | | ( ((IriIpv6H16 ~ ":"){1} ~ IriIpv6H16)? ~ "::" ~ (IriIpv6H16 ~ ":"){3} ~ IriIpv6Ls32) 62 | | ( ((IriIpv6H16 ~ ":"){2} ~ IriIpv6H16)? ~ "::" ~ (IriIpv6H16 ~ ":"){2} ~ IriIpv6Ls32) 63 | | ( ((IriIpv6H16 ~ ":"){3} ~ IriIpv6H16)? ~ "::" ~ IriIpv6H16 ~ ":" ~ IriIpv6Ls32) 64 | | ( ((IriIpv6H16 ~ ":"){4} ~ IriIpv6H16)? ~ "::" ~ IriIpv6Ls32) 65 | | ( ((IriIpv6H16 ~ ":"){5} ~ IriIpv6H16)? ~ "::" ~ IriIpv6H16) 66 | | ( ((IriIpv6H16 ~ ":"){6} ~ IriIpv6H16)? ~ "::") 67 | } 68 | 69 | 70 | // N3 rule 71 | //WHITESPACE = _{ " " | "\t" | "\r" | "\n" } 72 | WS = { " " | "\t" | "\r" | "\n" } 73 | SPACE = { " " | "\t"} 74 | NewLine = { "\r" | "\n" } 75 | Var = {"?"~ASCII_ALPHA+~ASCII_ALPHANUMERIC*} 76 | Prefix = {"@prefix"~SPACE+~PrefixIdentifier~":"~SPACE+~"<"~Iri~">"~WS* ~"."~WS*} 77 | PrefixIdentifier = {ASCII_ALPHANUMERIC*} 78 | LocalNameChar = {ASCII_ALPHANUMERIC | "_"} 79 | Prefixed = {PrefixIdentifier ~":"~LocalNameChar+} 80 | Term = {"<"~Iri~">" | Prefixed} 81 | RdfType= {"a"} 82 | varOrTerm = { Term | Var} 83 | Subject = { Term | Var} 84 | Property = { Term | Var | RdfType} 85 | Object = { Term | Var} 86 | TP = { WS*~Subject ~WS+~ Property ~WS+~ Object ~WS* ~"."? ~WS* } 87 | Body = {"{"~TP+~"}"} 88 | Head = {"{"~WS*~TP~WS*~"}"} 89 | rule = { Body~WS*~"=>"~WS*~Head~WS*~"."? 
~WS*} 90 | document = {Prefix* ~ NewLine* ~rule*} -------------------------------------------------------------------------------- /lib/src/parser/n3rule_parser.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use crate::{Encoder, Rule as ReasonerRule, Triple, VarOrTerm}; 3 | 4 | use std::fmt::Error; 5 | use pest::iterators::{Pair, Pairs}; 6 | use pest::Parser; 7 | 8 | #[derive(Debug, Clone)] 9 | pub struct PrefixMapper{ 10 | prefixes: HashMap 11 | } 12 | 13 | impl PrefixMapper{ 14 | pub fn new() -> PrefixMapper{ 15 | PrefixMapper{prefixes:HashMap::new()} 16 | } 17 | pub fn add(&mut self, prefix: String, full_name:String){ 18 | self.prefixes.insert(prefix,full_name); 19 | } 20 | pub fn expand(&self, prefixed:String) -> String{ 21 | if prefixed.eq("a") { 22 | return "".to_string(); 23 | } 24 | 25 | let mut split = prefixed.split(":"); 26 | let vec: Vec<&str> = split.collect(); 27 | if vec.len() >= 2 { 28 | let t = vec.get(0); 29 | if let Some(expanded_prefix) = self.prefixes.get(*vec.get(0).unwrap()) { 30 | let remainder_uri = *vec.get(1).unwrap(); 31 | format!("{}{}", expanded_prefix, remainder_uri) 32 | }else{ 33 | prefixed 34 | } 35 | }else { 36 | prefixed 37 | } 38 | } 39 | } 40 | 41 | #[derive(Parser)] 42 | #[grammar = "parser/n3.pest"] 43 | pub struct CSVParser; 44 | 45 | fn parse_term(tp_term: Pair) { 46 | match tp_term.as_rule(){ 47 | Rule::Var=> println!("Var{:?}", tp_term.as_str()), 48 | Rule::Term=> println!("Term{:?}", tp_term.as_str()), 49 | 50 | Rule::EOI => (), 51 | _ => (), 52 | } 53 | } 54 | 55 | fn parse_tp(pair: Pairs<'_, Rule>, prefixes : &PrefixMapper) -> Triple{ 56 | let mut subject_str="".to_string(); 57 | let mut property_str = "".to_string(); 58 | let mut object_str = "".to_string(); 59 | for sub_rule in pair { 60 | match sub_rule.as_rule() { 61 | Rule::Subject => subject_str= prefixes.expand(sub_rule.as_str().to_string()), 62 | Rule::Property => property_str= prefixes.expand(sub_rule.as_str().to_string()), 63 | Rule::Object => object_str= prefixes.expand(sub_rule.as_str().to_string()), 64 | Rule::EOI => (), 65 | _ => (), 66 | } 67 | } 68 | Triple::from(subject_str,property_str,object_str) 69 | } 70 | 71 | pub fn parse(parse_string: &str) -> Result,&'static str>{ 72 | let mut rules = Vec::new(); 73 | let mut prefix_mapper = PrefixMapper::new(); 74 | let mut unparsed = CSVParser::parse(Rule::document, parse_string).expect("Unable to read") 75 | .next(); 76 | match unparsed { 77 | None => return Err("Parsing failed"), 78 | Some(parsed) => { 79 | for line in parsed.into_inner() { 80 | //println!("{:?}",line); 81 | match line.as_rule() { 82 | Rule::Prefix => { 83 | let mut name = line.into_inner(); 84 | let mut prefix_name = ""; 85 | let mut prefix_iri = ""; 86 | for prefix_sub in name { 87 | match prefix_sub.as_rule() { 88 | Rule::PrefixIdentifier => prefix_name = prefix_sub.as_str(), 89 | Rule::Iri => prefix_iri = prefix_sub.as_str(), 90 | Rule::EOI => (), 91 | _ => (), 92 | } 93 | } 94 | prefix_mapper.add(prefix_name.to_string(), prefix_iri.to_string()); 95 | } 96 | Rule::rule => { 97 | let mut sub_rules = line.into_inner(); 98 | //todo fix unneeded triple allocation 99 | let mut head: Triple = Triple::from("?s".to_string(), "?p".to_string(), "?o".to_string()); 100 | let mut body: Vec = Vec::new(); 101 | for sub_rule in sub_rules { 102 | match sub_rule.as_rule() { 103 | Rule::Body => { 104 | let mut rules = sub_rule.into_inner(); 105 | for rule in rules { 106 | 
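// Each triple pattern (TP) in the rule body is parsed on its own; parse_tp
// expands any prefixed names to full IRIs through the PrefixMapper before the
// pattern is added to the body of the rule under construction.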
body.push(parse_tp(rule.into_inner(), &prefix_mapper)); 107 | } 108 | }, 109 | Rule::Head => { 110 | for head_item in sub_rule.into_inner(){ 111 | match head_item.as_rule(){ 112 | Rule::TP =>{ head = parse_tp(head_item.into_inner(), &prefix_mapper);}, 113 | _ => () 114 | } 115 | 116 | 117 | } 118 | 119 | }, 120 | 121 | Rule::EOI => (), 122 | _ => (), 123 | } 124 | } 125 | rules.push(ReasonerRule { body: body, head: head }) 126 | } 127 | // Rule::Var => { 128 | // println!("Var{}", line.as_str()); 129 | // } 130 | Rule::EOI => (), 131 | _ => println!("not Found {}", line.as_str()), 132 | } 133 | } 134 | return Ok(rules); 135 | } 136 | } 137 | } 138 | 139 | #[cfg(test)] 140 | mod tests { 141 | use crate::TripleStore; 142 | use super::*; 143 | #[test] 144 | fn parse_tp() { 145 | let rules = parse("@prefix log: .\n{?VaRr0 ?lastVar. ?VaRr0 log:type ?lastVar.}=>{?VaRr0 ssn:HasValue ?lastVar.}").unwrap(); 146 | println!("{:?}",rules); 147 | assert_eq!(rules.get(0).unwrap().body.len(), 2); 148 | } 149 | #[test] 150 | fn parse_multiple_prefixes() { 151 | 152 | let rules = parse("@prefix log: .\n @prefix log2: .\n {?VaRr0 ?lastVar. ?VaRr0 log:type ?lastVar.}=>{?VaRr0 ssn:HasValue ?lastVar.}").unwrap(); 153 | println!("{:?}",rules); 154 | assert_eq!(rules.get(0).unwrap().body.len(), 2); 155 | } 156 | #[test] 157 | fn parse_multiple_rules() { 158 | let rules = parse("@prefix log: .\n @prefix log2: .\n {?VaRr0 ?lastVar. ?VaRr0 log:type ?lastVar.}=>{?VaRr0 ssn:HasValue ?lastVar.}\n{?s ?o.}=>{?s ssn:HasValue ?o.}").unwrap(); 159 | println!("{:?}",rules); 160 | assert_eq!(rules.len(), 2); 161 | } 162 | #[test] 163 | fn parse_multiple_rulese_ending_with_dot() { 164 | let rules = parse("@prefix rdf: .\n@prefix : .\n{?V0 rdf:type :N0} => {?V0 rdf:type :N1}.\n{?V0 rdf:type :N1} => {?V0 rdf:type :N2}.").unwrap(); 165 | println!("{:?}",rules); 166 | assert_eq!(rules.len(), 2); 167 | 168 | } 169 | #[test] 170 | fn parse_empty_rule() { 171 | let rules = parse("").unwrap(); 172 | println!("{:?}",rules); 173 | assert_eq!(0,rules.len()); 174 | } 175 | #[test] 176 | fn parse_rule_with_multiple_spaces() { 177 | let input_rule = "{ ?VaRr0 ?lastVar.\n ?VaRr0 ?lastVar.\n}=>{ ?VaRr0 ?lastVar .\n}.\n"; 178 | let rules = parse(input_rule).unwrap(); 179 | 180 | let rule = rules.get(0).unwrap(); 181 | let str_rule = TripleStore::decode_rule(rule); 182 | println!("{:?}",str_rule); 183 | assert_eq!(input_rule.replace("?","").replace(" ",""), str_rule.replace(" ","")); 184 | } 185 | 186 | #[test] 187 | fn parse_rule_with_a_syntactic_sugar() { 188 | let input_rule = "{?VaRr0 ?lastVar.\n ?VaRr0 a ?lastVar.\n}=>{?VaRr0 ?lastVar.\n}.\n"; 189 | let expected_rule = "{?VaRr0 ?lastVar.\n?VaRr0 ?lastVar.\n}=>{?VaRr0 ?lastVar.\n}.\n"; 190 | 191 | let rules = parse(input_rule).unwrap(); 192 | 193 | let rule = rules.get(0).unwrap(); 194 | let str_rule = TripleStore::decode_rule(rule); 195 | println!("{:?}",str_rule); 196 | assert_eq!(expected_rule.replace("?",""), str_rule); 197 | } 198 | } -------------------------------------------------------------------------------- /lib/src/pipeline.rs: -------------------------------------------------------------------------------- 1 | use std::cell::RefCell; 2 | use std::rc::Rc; 3 | use crate::csprite::CSprite; 4 | use crate::{Parser, Triple}; 5 | use crate::time_window::{TimeWindow, TimeWindowConsumer}; 6 | 7 | pub struct WindowReasoner { 8 | pub store: CSprite, 9 | prev: Vec<(i32, Rc)> 10 | } 11 | impl WindowReasoner { 12 | pub fn new() -> WindowReasoner{ 13 | WindowReasoner{ store: CSprite::new(), 
prev: Vec::new()} 14 | } 15 | fn compute_diff(new : &Vec<(i32, Rc)>, old: &Vec<(i32, Rc)>) -> (Vec<(i32, Rc)>,Vec<(i32, Rc)>){ 16 | if old.is_empty(){ 17 | return (new.clone(),Vec::new()); 18 | } 19 | if new.is_empty(){ 20 | return (Vec::new(), old.clone()); 21 | } 22 | let mut additions = Vec::new(); 23 | let mut removals = Vec::new(); 24 | //compute additions 25 | let mut found = false; 26 | for item in new.iter(){ 27 | if let Some(old_item) = old.last(){ 28 | //println!("Additions Old: {:?}, new {:?}", old_item, item); 29 | if item.eq(old_item){ 30 | found = true; 31 | }else if found{ 32 | additions.push(item.clone()); 33 | } 34 | } 35 | } 36 | //compute removals 37 | for old_item in old.iter() { 38 | if let Some(new_item) = new.first(){ 39 | //println!("Removals Old: {:?}, new {:?}", old_item, new_item); 40 | 41 | if !new_item.eq(old_item){ 42 | removals.push(old_item.clone()); 43 | }else{ 44 | break; 45 | } 46 | } 47 | } 48 | 49 | (additions,removals) 50 | } 51 | } 52 | 53 | impl TimeWindowConsumer for WindowReasoner { 54 | 55 | fn update(&mut self, data: Vec<(i32, Rc)>, ts:&i32){ 56 | println!("Received data: {:?},", data.len()); 57 | let (new, old) = WindowReasoner::compute_diff(&data,&self.prev); 58 | //self.store.clear(); 59 | self.store.window_update(new,old,ts); 60 | self.prev = data; 61 | } 62 | } 63 | 64 | #[test] 65 | fn test_transitive(){ 66 | 67 | let mut window = TimeWindow::new(4,2); 68 | 69 | let data ="{?a in ?b.?b in ?c}=>{?a in ?c}\n\ 70 | :1 in :0.\n\ 71 | :2 in :1.\n\ 72 | :3 in :2.\n\ 73 | :4 in :3.\n\ 74 | :5 in :4.\n\ 75 | :6 in :5"; 76 | 77 | let mut reasoner = WindowReasoner::new(); 78 | 79 | let (content, rules) = Parser::parse(data.to_string()); 80 | reasoner.store.add_rules(rules); 81 | let consumer = Rc::new(RefCell::new(reasoner)); 82 | window.register_consumer(consumer.clone()); 83 | 84 | 85 | content.into_iter().enumerate().for_each(|(i, t)| window.add(t, i as i32)); 86 | 87 | //contains 4 triples and 1 inferred triple 88 | assert_eq!(10, consumer.borrow_mut().store.len()); 89 | } 90 | #[test] 91 | fn test_compute_diff(){ 92 | let mut window = TimeWindow::new(4,2); 93 | 94 | let data ="{?a in ?b.?b in ?c}=>{?a in ?c}\n\ 95 | :1 in :0.\n\ 96 | :2 in :1.\n\ 97 | :3 in :2.\n\ 98 | :4 in :3.\n\ 99 | :5 in :4.\n\ 100 | :6 in :5.\n\ 101 | :7 in :6.\n\ 102 | :8 in :7.\n\ 103 | :9 in :8"; 104 | 105 | let mut reasoner = WindowReasoner::new(); 106 | 107 | let (content, rules) = Parser::parse(data.to_string()); 108 | reasoner.store.add_rules(rules); 109 | let consumer = Rc::new(RefCell::new(reasoner)); 110 | window.register_consumer(consumer.clone()); 111 | 112 | 113 | content.into_iter().enumerate().for_each(|(i, t)| window.add(t, i as i32)); 114 | 115 | //contains 4 triples and 1 inferred triple 116 | assert_eq!(10, consumer.borrow_mut().store.len()); 117 | } -------------------------------------------------------------------------------- /lib/src/queryengine.rs: -------------------------------------------------------------------------------- 1 | use std::rc::Rc; 2 | use crate::{Binding, Triple, TripleIndex}; 3 | 4 | pub trait QueryEngine{ 5 | fn query(data: &TripleIndex, query_triples:&Vec::,triple_counter : Option) -> Option; 6 | } 7 | pub struct SimpleQueryEngine; 8 | 9 | 10 | impl QueryEngine for SimpleQueryEngine { 11 | fn query(data: &TripleIndex, query_triples: &Vec::, triple_counter: Option) -> Option { 12 | let mut bindings = Binding::new(); 13 | for query_triple in query_triples { 14 | //let current_bindings = self.query(query_triple,triple_counter); 15 
| if let Some(current_bindings) = data.query(query_triple, triple_counter) { 16 | bindings = bindings.join(¤t_bindings); 17 | } else { 18 | return None; 19 | } 20 | } 21 | Some(bindings) 22 | } 23 | } 24 | // pub fn query(&self, query_triple:&Triple, triple_counter : Option) -> Binding{ 25 | // let mut bindings = Binding::new(); 26 | // let mut counter = if let Some(size) = triple_counter{size} else {self.triple_index.len()}; 27 | // for Triple{s,p,o} in self.triple_index.triples.iter().take(counter){ 28 | // match &query_triple.s{ 29 | // VarOrTerm::Var(s_var)=> bindings.add(&s_var.name,s.as_Term().iri), 30 | // VarOrTerm::Term(s_term)=>if let (TermImpl{iri}, TermImpl{iri:iri2})= (s_term,s.as_Term()) { 31 | // if !iri.eq(iri2){break;} 32 | // } 33 | // } 34 | // match &query_triple.p{ 35 | // VarOrTerm::Var(p_var)=> bindings.add(&p_var.name,p.as_Term().iri), 36 | // VarOrTerm::Term(p_term)=>if let (TermImpl{iri}, TermImpl{iri:iri2})= (p_term,p.as_Term()) { 37 | // if !iri.eq(iri2){break;} 38 | // } 39 | // } 40 | // match &query_triple.o{ 41 | // VarOrTerm::Var(o_var)=> bindings.add(&o_var.name,o.as_Term().iri), 42 | // VarOrTerm::Term(o_term)=>if let (TermImpl{iri}, TermImpl{iri:iri2})= (o_term,o.as_Term()) { 43 | // if !iri.eq(iri2){break;} 44 | // } 45 | // } 46 | // } 47 | // bindings 48 | // } 49 | // fn find_matching_rules(&self, triple: &Triple) -> Vec<&Rule> { 50 | // let mut matching_rules = Vec::new(); 51 | // for rule in self.rules.iter(){ 52 | // for body_item in rule.body.iter(){ 53 | // if let Triple{s,p,o} = triple{ 54 | // match &body_item.s{ 55 | // VarOrTerm::Term(s_term)=>if let (TermImpl{iri}, TermImpl{iri:iri2})= (s_term,s.as_Term()) { 56 | // if !iri.eq(iri2){break;} 57 | // }, 58 | // _ => () 59 | // } 60 | // match &body_item.p{ 61 | // VarOrTerm::Term(p_term)=>if let (TermImpl{iri}, TermImpl{iri:iri2})= (p_term,p.as_Term()) { 62 | // if !iri.eq(iri2){break;} 63 | // }, 64 | // _ => () 65 | // } 66 | // match &body_item.o{ 67 | // VarOrTerm::Term(o_term)=>if let (TermImpl{iri}, TermImpl{iri:iri2})= (o_term,o.as_Term()) { 68 | // if !iri.eq(iri2){break;} 69 | // }, 70 | // _ => () 71 | // } 72 | // if !matching_rules.contains(&rule){ 73 | // matching_rules.push(rule); 74 | // 75 | // } 76 | // } 77 | // } 78 | // } 79 | // matching_rules 80 | // } -------------------------------------------------------------------------------- /lib/src/rsp/r2r.rs: -------------------------------------------------------------------------------- 1 | use spargebra::Query; 2 | use crate::Syntax; 3 | 4 | pub trait R2ROperator: Send{ 5 | fn load_triples(&mut self, data: &str, syntax: Syntax) -> Result<(),String>; 6 | fn load_rules(&mut self, data: &str) -> Result<(),&'static str>; 7 | fn add(&mut self, data: I); 8 | fn remove(&mut self, data: &I); 9 | fn materialize(&mut self) -> Vec; 10 | fn execute_query(&self,query: &Query) -> Vec; 11 | } 12 | 13 | -------------------------------------------------------------------------------- /lib/src/rsp/r2s.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashSet; 2 | use std::hash::Hash; 3 | use std::mem; 4 | 5 | pub enum StreamOperator{ 6 | RSTREAM, ISTREAM, DSTREAM 7 | } 8 | 9 | impl Default for StreamOperator{ 10 | fn default() -> Self { 11 | StreamOperator::RSTREAM 12 | } 13 | } 14 | pub struct Relation2StreamOperator { 15 | stream_operator: StreamOperator, 16 | old_result: HashSet, 17 | new_result: HashSet, 18 | ts: usize 19 | } 20 | 21 | impl Relation2StreamOperator where O: Clone 
+ Hash + Eq { 22 | pub fn new(stream_operator: StreamOperator, start_time: usize) -> Relation2StreamOperator { 23 | match stream_operator { 24 | StreamOperator::RSTREAM => Relation2StreamOperator {stream_operator, old_result: HashSet::with_capacity(0), new_result: HashSet::with_capacity(0),ts: start_time}, 25 | _ => Relation2StreamOperator {stream_operator, old_result: HashSet::new(), new_result: HashSet::new(),ts: start_time} 26 | } 27 | 28 | } 29 | pub fn eval(&mut self, new_response: Vec, ts: usize) -> Vec{ 30 | match self.stream_operator { 31 | StreamOperator::RSTREAM => new_response, 32 | StreamOperator::ISTREAM => { 33 | let to_compare = new_response.clone(); 34 | self.prepare_compare(new_response, ts); 35 | to_compare.into_iter().filter(|b| !self.old_result.contains(b)).collect() 36 | }, 37 | StreamOperator::DSTREAM => { 38 | self.prepare_compare(new_response, ts); 39 | let to_compare = self.old_result.clone(); 40 | to_compare.into_iter().filter(|b| !self.new_result.contains(b)).collect() 41 | } 42 | } 43 | } 44 | 45 | fn prepare_compare(&mut self, new_repsonse: Vec, ts: usize) { 46 | if self.ts < ts { 47 | mem::swap(&mut self.new_result, &mut self.old_result); 48 | self.new_result.clear(); 49 | self.ts = ts; 50 | } 51 | new_repsonse.into_iter().for_each(|v| { 52 | self.new_result.insert(v); 53 | () 54 | }); 55 | } 56 | } 57 | #[cfg(test)] 58 | mod tests{ 59 | use crate::rsp::r2s::Relation2StreamOperator; 60 | use crate::rsp::r2s::StreamOperator::{DSTREAM, ISTREAM, RSTREAM}; 61 | use crate::sparql::Binding; 62 | 63 | #[test] 64 | fn test_rstream(){ 65 | let new_result = vec!( 66 | vec!(Binding{var:"?1".to_string(),val:"1".to_string()}, 67 | Binding{var:"?2".to_string(),val:"2".to_string()}), 68 | vec!(Binding{var:"?1".to_string(),val:"1.2".to_string()}, 69 | Binding{var:"?2".to_string(),val:"2.2".to_string()}) 70 | ); 71 | let mut s2r: Relation2StreamOperator> = Relation2StreamOperator::new(RSTREAM, 0); 72 | let expected_result = new_result.clone(); 73 | 74 | assert_eq!(expected_result,s2r.eval(new_result,1)); 75 | } 76 | #[test] 77 | fn test_dstream(){ 78 | let old_result = vec!( 79 | vec!(Binding{var:"?1".to_string(),val:"1".to_string()}, 80 | Binding{var:"?2".to_string(),val:"2".to_string()}), 81 | vec!(Binding{var:"?1".to_string(),val:"1.2".to_string()}, 82 | Binding{var:"?2".to_string(),val:"2.2".to_string()}) 83 | ); 84 | let new_result = vec!( 85 | vec!(Binding{var:"?1".to_string(),val:"1".to_string()}, 86 | Binding{var:"?2".to_string(),val:"2".to_string()}), 87 | vec!(Binding{var:"?1".to_string(),val:"1.3".to_string()}, 88 | Binding{var:"?2".to_string(),val:"2.3".to_string()}) 89 | ); 90 | let expected_deletion = vec!( 91 | vec!(Binding{var:"?1".to_string(),val:"1.2".to_string()}, 92 | Binding{var:"?2".to_string(),val:"2.2".to_string()}) 93 | ); 94 | let mut s2r: Relation2StreamOperator> = Relation2StreamOperator::new(DSTREAM, 0); 95 | s2r.eval(old_result,1); 96 | 97 | assert_eq!(expected_deletion,s2r.eval(new_result,2)); 98 | } 99 | #[test] 100 | fn test_istream(){ 101 | let old_result = vec!( 102 | vec!(Binding{var:"?1".to_string(),val:"1".to_string()}, 103 | Binding{var:"?2".to_string(),val:"2".to_string()}), 104 | vec!(Binding{var:"?1".to_string(),val:"1.2".to_string()}, 105 | Binding{var:"?2".to_string(),val:"2.2".to_string()}) 106 | ); 107 | let new_result = vec!( 108 | vec!(Binding{var:"?1".to_string(),val:"1".to_string()}, 109 | Binding{var:"?2".to_string(),val:"2".to_string()}), 110 | vec!(Binding{var:"?1".to_string(),val:"1.3".to_string()}, 111 | 
Binding{var:"?2".to_string(),val:"2.3".to_string()}) 112 | ); 113 | let expected_deletion = vec!( 114 | vec!(Binding{var:"?1".to_string(),val:"1.3".to_string()}, 115 | Binding{var:"?2".to_string(),val:"2.3".to_string()}) 116 | ); 117 | let mut s2r: Relation2StreamOperator> = Relation2StreamOperator::new(ISTREAM, 0); 118 | s2r.eval(old_result,1); 119 | 120 | assert_eq!(expected_deletion,s2r.eval(new_result,2)); 121 | } 122 | } -------------------------------------------------------------------------------- /lib/src/rsp/s2r.rs: -------------------------------------------------------------------------------- 1 | use std::collections::{HashMap, HashSet}; 2 | use std::{f64, mem}; 3 | use crate::Triple; 4 | use std::sync::mpsc::{channel, Sender}; 5 | use std::thread; 6 | use std::time::Duration; 7 | use std::sync::mpsc::Receiver; 8 | #[cfg(not(test))] 9 | use log::{info, warn, trace, debug}; // Use log crate when building application 10 | #[cfg(test)] 11 | use std::{println as info, println as warn, println as trace, println as debug}; 12 | use std::collections::hash_set::{IntoIter, Iter}; 13 | use std::fmt::Debug; 14 | use std::hash::Hash; 15 | use std::sync::{Arc, Mutex}; 16 | 17 | pub enum ReportStrategy { 18 | NonEmptyContent, 19 | OnContentChange, 20 | OnWindowClose, 21 | Periodic(usize), 22 | } 23 | impl Default for ReportStrategy{ 24 | fn default() -> Self { ReportStrategy::OnWindowClose } 25 | } 26 | pub enum Tick { 27 | TimeDriven, 28 | TupleDriven, 29 | BatchDriven, 30 | } 31 | impl Default for Tick{ 32 | fn default() -> Self { Tick::TimeDriven } 33 | } 34 | 35 | pub struct Report where I: Eq + PartialEq + Clone + Debug + Hash + Send{ 36 | strategies: Vec, 37 | last_change: ContentContainer, 38 | } 39 | 40 | impl Report 41 | where I: Eq + PartialEq + Clone + Debug + Hash + Send{ 42 | pub fn new() -> Report { 43 | Report { strategies: Vec::new(), last_change: ContentContainer::new() } 44 | } 45 | pub fn add(&mut self, strategy: ReportStrategy) { 46 | self.strategies.push(strategy); 47 | } 48 | pub fn report(&mut self, window: &Window, content: &ContentContainer, ts: usize) -> bool { 49 | self.strategies.iter().all(|strategy| { 50 | match strategy { 51 | ReportStrategy::NonEmptyContent => content.len() > 0, 52 | ReportStrategy::OnContentChange => { 53 | let comp = content.eq(&self.last_change); 54 | self.last_change = content.clone(); 55 | comp 56 | } 57 | ReportStrategy::OnWindowClose => window.close < ts, 58 | ReportStrategy::Periodic(period) => ts % period == 0 59 | } 60 | }) 61 | } 62 | } 63 | 64 | #[derive(Eq, Hash, PartialEq, Debug, Clone)] 65 | pub struct Window { 66 | open: usize, 67 | close: usize, 68 | } 69 | 70 | #[derive(Eq, PartialEq, Clone, Debug)] 71 | pub struct ContentContainer where I: Eq + PartialEq + Clone + Debug + Hash + Send{ 72 | elements: HashSet, 73 | last_timestamp_changed: usize, 74 | } 75 | 76 | impl ContentContainer 77 | where I: Eq + PartialEq + Clone + Debug + Hash + Send{ 78 | fn new() -> ContentContainer { 79 | ContentContainer { elements: HashSet::new(), last_timestamp_changed: 0 } 80 | } 81 | fn len(&self) -> usize { 82 | self.elements.len() 83 | } 84 | fn add(&mut self, triple: I, ts: usize) { 85 | self.elements.insert(triple); 86 | self.last_timestamp_changed = ts; 87 | } 88 | pub fn get_last_timestamp_changed(&self) -> usize { 89 | self.last_timestamp_changed 90 | } 91 | 92 | pub fn iter(&self) -> Iter<'_, I> { 93 | self.elements.iter() 94 | } 95 | pub fn into_iter(mut self) -> IntoIter { 96 | let map = mem::take(&mut self.elements); 97 | 
map.into_iter() 98 | } 99 | } 100 | 101 | 102 | pub struct CSPARQLWindow where I: Eq + PartialEq + Clone + Debug + Hash + Send{ 103 | width: usize, 104 | slide: usize, 105 | t_0: usize, 106 | active_windows: HashMap>, 107 | report: Report, 108 | tick: Tick, 109 | app_time: usize, 110 | consumer: Option>>, 111 | call_back: Option)->()>> 112 | } 113 | 114 | 115 | impl CSPARQLWindow where I: Eq + PartialEq + Clone + Debug + Hash + Send{ 116 | pub fn new(width:usize, slide: usize, report: Report, tick: Tick)-> CSPARQLWindow{ 117 | CSPARQLWindow{slide, width, t_0: 0, app_time:0, report,consumer: None, active_windows: HashMap::new(),tick, call_back: None} 118 | } 119 | pub fn add_to_window(&mut self, event_item: I, ts: usize) { 120 | let event_time = ts; 121 | self.scope(&event_time); 122 | 123 | let test = self.active_windows.clone().into_iter().filter_map(|(window, mut content)| { 124 | debug!("Processing Window [{:?}, {:?}) for element ({:?},{:?})", window.open, window.close,event_item, ts); 125 | if window.open <= event_time && event_time <= window.close { 126 | debug!("Adding element [{:?}] to Window [{:?},{:?})",event_item, window.open, window.close); 127 | content.add(event_item.clone(), ts); 128 | Some((window, content)) 129 | } else { 130 | debug!("Scheduling for Eviction [{:?},{:?})", window.open, window.close); 131 | None 132 | } 133 | }).collect::>>(); 134 | 135 | 136 | let max = self.active_windows.iter() 137 | .filter(|(window, content)| self.report.report(window, content, ts)) 138 | .max_by(|(w1, c1), (w2, c2)| w1.close.cmp(&w2.close)); 139 | if let Some(max_window) = max { 140 | match self.tick { 141 | Tick::TimeDriven => { 142 | if ts > self.app_time { 143 | self.app_time = ts; 144 | // notify consumers 145 | debug!("Window triggers! {:?}", max_window); 146 | // multithreaded consumer using channel 147 | if let Some(sender) = &self.consumer{ 148 | sender.send(max_window.1.clone()); 149 | } 150 | // single threaded consumer using callback 151 | if let Some(call_back) = &mut self.call_back{ 152 | (call_back)(max_window.1.clone()); 153 | } 154 | } 155 | } 156 | _ => () 157 | }; 158 | } 159 | 160 | self.active_windows = test; 161 | } 162 | fn scope(&mut self, event_time: &usize) { 163 | // long c_sup = (long) Math.ceil(((double) Math.abs(t_e - t0) / (double) slide)) * slide; 164 | let temp = (*event_time as f64 - self.t_0 as f64).abs(); 165 | let temp = ((*event_time as f64 - self.t_0 as f64).abs() / (self.slide as f64)).ceil(); 166 | let c_sup = ((*event_time as f64 - self.t_0 as f64).abs() / (self.slide as f64)).ceil() * self.slide as f64; 167 | // long o_i = c_sup - width; 168 | let mut o_i = c_sup - self.width as f64; 169 | debug!("Calculating the Windows to Open. First one opens at [{:?}] and closes at [{:?}]", o_i, c_sup); 170 | // log.debug("Calculating the Windows to Open. 
First one opens at [" + o_i + "] and closes at [" + c_sup + "]"); 171 | // 172 | loop { 173 | debug!("Computing Window [{:?},{:?}) if absent", o_i, (o_i +self.width as f64)); 174 | let window = Window { open: o_i as usize, close: (o_i + self.width as f64) as usize }; 175 | if let None = self.active_windows.get(&window) { 176 | self.active_windows.insert(window, ContentContainer::new()); 177 | } 178 | o_i += self.slide as f64; 179 | if o_i > *event_time as f64 { break; } 180 | } 181 | } 182 | pub fn register(&mut self)-> Receiver> { 183 | let (send, recv) = channel::>(); 184 | self.consumer.replace(send); 185 | recv 186 | } 187 | pub fn register_callback(&mut self, function: Box) -> ()>) { 188 | self.call_back.replace(function); 189 | } 190 | pub fn stop(&mut self){ 191 | self.consumer.take(); 192 | 193 | } 194 | } 195 | struct ConsumerInner where I: Eq + PartialEq + Clone + Debug + Hash + Send{ 196 | data: Mutex>> 197 | } 198 | struct Consumer where I: Eq + PartialEq + Clone + Debug + Hash + Send{ 199 | inner: Arc> 200 | } 201 | impl Consumer where I: Eq + PartialEq + Clone + Debug + Hash + Send + 'static{ 202 | fn new() -> Consumer { 203 | Consumer{inner: Arc::new(ConsumerInner{data: Mutex::new(Vec::new())})} 204 | } 205 | fn start(&self,receiver: Receiver>){ 206 | let consumer_temp = self.inner.clone(); 207 | thread::spawn(move||{ 208 | loop{ 209 | match receiver.recv(){ 210 | Ok(content)=> { 211 | debug!("Found graph {:?}", content); 212 | consumer_temp.data.lock().unwrap().push(content); 213 | }, 214 | Err(_) => { 215 | debug!("Shutting down!"); 216 | break; 217 | } 218 | } 219 | } 220 | }); 221 | } 222 | fn len(&self)->usize{ 223 | self.inner.data.lock().unwrap().len() 224 | } 225 | } 226 | #[derive(Eq, PartialEq, Clone, Debug, Hash)] 227 | pub struct WindowTriple{ 228 | pub s: String, 229 | pub p: String, 230 | pub o: String 231 | } 232 | #[cfg(test)] 233 | mod tests { 234 | use std::cell::RefCell; 235 | use std::fmt::format; 236 | use std::rc::Rc; 237 | use std::sync::{Arc, Mutex}; 238 | use std::thread::Thread; 239 | use crate::Encoder; 240 | use super::*; 241 | 242 | #[test] 243 | fn test_window() { 244 | let mut report = Report::new(); 245 | report.add(ReportStrategy::OnWindowClose); 246 | let mut window = CSPARQLWindow { width: 10, slide: 2, app_time: 0, t_0: 0, active_windows: HashMap::new(), report, tick: Tick::TimeDriven, consumer: None, call_back: None }; 247 | let receiver = window.register(); 248 | let consumer = Consumer::new(); 249 | consumer.start(receiver); 250 | 251 | for i in 0..10 { 252 | let triple = WindowTriple{s: format!("s{}", i), p: "p".to_string(), o: "o".to_string()}; 253 | window.add_to_window(triple, i); 254 | } 255 | 256 | window.stop(); 257 | thread::sleep(Duration::from_secs(1)); 258 | assert_eq!(5, consumer.len()); 259 | 260 | } 261 | #[test] 262 | fn test_window_with_call_back() { 263 | let mut report = Report::new(); 264 | report.add(ReportStrategy::OnWindowClose); 265 | let mut window = CSPARQLWindow { width: 10, slide: 2, app_time: 0, t_0: 0, active_windows: HashMap::new(), report, tick: Tick::TimeDriven, consumer: None, call_back: None }; 266 | let mut recieved_data = Rc::new(RefCell::new(Vec::new())); 267 | let data_clone = recieved_data.clone(); 268 | let call_back = move| content|{println!("Content: {:?}",content); recieved_data.borrow_mut().push(content);}; 269 | window.register_callback(Box::new(call_back)); 270 | 271 | 272 | for i in 0..10 { 273 | let triple = WindowTriple{s: format!("s{}", i), p: "p".to_string(), o: "o".to_string()}; 274 | 
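// With width 10 and slide 2, windows closing at times 0, 2, 4, 6 and 8 all
// expire within this loop; OnWindowClose reports each of them on the first
// event past its close, so the callback collects five window contents for
// timestamps 0..9 (matching the assertion below).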
window.add_to_window(triple, i); 275 | } 276 | 277 | window.stop(); 278 | assert_eq!(5, (*data_clone.borrow_mut()).len()); 279 | 280 | } 281 | 282 | 283 | } -------------------------------------------------------------------------------- /lib/src/ruleindex.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::rc::Rc; 3 | use crate::{Rule, Triple}; 4 | 5 | 6 | pub struct RuleIndex { 7 | pub rules: Vec>, 8 | spo:Vec>, 9 | s:HashMap>>, 10 | p:HashMap>>, 11 | o:HashMap>>, 12 | sp:HashMap>>>, 13 | po:HashMap>>>, 14 | so:HashMap>>>, 15 | spo_all:HashMap>>>>, 16 | } 17 | 18 | 19 | 20 | impl RuleIndex { 21 | pub fn len(&self) -> usize { 22 | self.spo.len() + self.s.len() + self.o.len() + self.p.len() + 23 | self.sp.len() + self.po.len() + self.so.len() 24 | } 25 | pub fn new() -> RuleIndex{ 26 | RuleIndex{ 27 | rules: Vec::new(), 28 | s:HashMap::new(), 29 | p:HashMap::new(), 30 | o:HashMap::new(), 31 | so:HashMap::new(), 32 | po:HashMap::new(), 33 | sp:HashMap::new(), 34 | spo:Vec::new(), 35 | spo_all: HashMap::new()} 36 | } 37 | fn add_rc(&mut self, rule: Rc){ 38 | self.rules.push(rule.clone()); 39 | for Triple{s ,p,o,.. } in rule.body.iter(){ 40 | //s match 41 | if s.is_term() && p.is_var() && o.is_var(){ 42 | if !self.s.contains_key(&s.to_encoded()){ 43 | self.s.insert(s.to_encoded(), Vec::new()); 44 | } 45 | if let Some(mut rules) = self.s.get_mut(&s.to_encoded()){ 46 | if !rules.contains(&rule) {rules.push(rule.clone())}; 47 | } 48 | // self.s.get(&s.to_string()).unwrap().push(rule.clone()); 49 | } 50 | //p match 51 | if s.is_var() && p.is_term() && o.is_var(){ 52 | if !self.p.contains_key(&p.to_encoded()){ 53 | self.p.insert(p.to_encoded(), Vec::new()); 54 | } 55 | //self.p.get_mut(&p.to_string()).unwrap().push(rule.clone()); 56 | if let Some(mut rules) = self.p.get_mut(&p.to_encoded()){ 57 | if !rules.contains(&rule) {rules.push(rule.clone())}; 58 | } 59 | } 60 | //o match 61 | if s.is_var() && p.is_var() && o.is_term(){ 62 | if !self.o.contains_key(&o.to_encoded()){ 63 | self.o.insert(o.to_encoded(), Vec::new()); 64 | } 65 | //self.o.get_mut(&o.to_string()).unwrap().push(rule.clone()); 66 | if let Some(mut rules) = self.o.get_mut(&o.to_encoded()){ 67 | if !rules.contains(&rule) {rules.push(rule.clone())}; 68 | } 69 | } 70 | //sp 71 | if s.is_term() && p.is_term() && o.is_var(){ 72 | if !self.sp.contains_key(&s.to_encoded()){ 73 | self.sp.insert(s.to_encoded(), HashMap::new()); 74 | } 75 | if !self.sp.get(&s.to_encoded()).unwrap().contains_key(&p.to_encoded()){ 76 | self.sp.get_mut(&s.to_encoded()).unwrap().insert(p.to_encoded(), Vec::new()); 77 | } 78 | //self.sp.get_mut(&sp_str).unwrap().push(rule.clone()); 79 | if let Some(mut rules) = self.sp.get_mut(&s.to_encoded()).unwrap().get_mut(&p.to_encoded()){ 80 | if !rules.contains(&rule) {rules.push(rule.clone())}; 81 | } 82 | } 83 | //so 84 | if s.is_term() && p.is_var() && o.is_term(){ 85 | if !self.so.contains_key(&s.to_encoded()){ 86 | self.so.insert(s.to_encoded(), HashMap::new()); 87 | } 88 | if !self.so.get(&s.to_encoded()).unwrap().contains_key(&o.to_encoded()){ 89 | self.so.get_mut(&s.to_encoded()).unwrap().insert(o.to_encoded(), Vec::new()); 90 | } 91 | //self.sp.get_mut(&sp_str).unwrap().push(rule.clone()); 92 | if let Some(mut rules) = self.so.get_mut(&s.to_encoded()).unwrap().get_mut(&o.to_encoded()){ 93 | if !rules.contains(&rule) {rules.push(rule.clone())}; 94 | } 95 | } 96 | //po 97 | if s.is_var() && p.is_term() && o.is_term(){ 98 | if 
!self.po.contains_key(&p.to_encoded()){ 99 | self.po.insert(p.to_encoded(), HashMap::new()); 100 | } 101 | if !self.po.get(&p.to_encoded()).unwrap().contains_key(&o.to_encoded()){ 102 | self.po.get_mut(&p.to_encoded()).unwrap().insert(o.to_encoded(), Vec::new()); 103 | } 104 | //self.sp.get_mut(&sp_str).unwrap().push(rule.clone()); 105 | if let Some(mut rules) = self.po.get_mut(&p.to_encoded()).unwrap().get_mut(&o.to_encoded()){ 106 | if !rules.contains(&rule) {rules.push(rule.clone())}; 107 | } 108 | } 109 | //spo 110 | if s.is_term() && p.is_term() && o.is_term(){ 111 | if !self.spo_all.contains_key(&s.to_encoded()){ 112 | self.spo_all.insert(s.to_encoded(), HashMap::new()); 113 | } 114 | if !self.spo_all.get(&s.to_encoded()).unwrap().contains_key(&p.to_encoded()){ 115 | self.spo_all.get_mut(&s.to_encoded()).unwrap().insert(p.to_encoded(), HashMap::new()); 116 | } 117 | if !self.spo_all.get(&s.to_encoded()).unwrap().get(&p.to_encoded()).unwrap().contains_key(&o.to_encoded()){ 118 | self.spo_all.get_mut(&s.to_encoded()).unwrap().get_mut(&p.to_encoded()).unwrap().insert(o.to_encoded(), Vec::new()); 119 | } 120 | //self.sp.get_mut(&sp_str).unwrap().push(rule.clone()); 121 | if let Some(mut rules) = self.spo_all.get_mut(&s.to_encoded()).unwrap().get_mut(&p.to_encoded()).unwrap().get_mut(&o.to_encoded()){ 122 | if !rules.contains(&rule) {rules.push(rule.clone())}; 123 | } 124 | } 125 | //?s?p?o 126 | if s.is_var() && p.is_var() && o.is_var() { 127 | //self.spo.push(rule.clone()); 128 | if !self.spo.contains(&rule) {self.spo.push(rule.clone())}; 129 | 130 | } 131 | 132 | } 133 | } 134 | pub fn add(&mut self, rule: Rule){ 135 | let clone_rule = Rc::new(rule); 136 | self.add_rc(clone_rule); 137 | } 138 | pub fn add_ref(&mut self, rule: & Rule ){ 139 | let clone_rule = Rc::new(rule.clone()); 140 | self.add_rc(clone_rule); 141 | } 142 | 143 | pub fn find_match(&self, triple: &Triple) ->Vec<&Rule>{ 144 | let mut matched_triples: Vec<&Rule> = Vec::new(); 145 | //check s 146 | if let Some(rule) = self.s.get(&triple.s.to_encoded()){ 147 | rule.iter().for_each(|r|matched_triples.push(r)); 148 | } 149 | //check p 150 | if let Some(rule) = self.p.get(&triple.p.to_encoded()){ 151 | rule.iter().for_each(|r|matched_triples.push(r)); 152 | } 153 | //check o 154 | if let Some(rule) = self.o.get(&triple.o.to_encoded()){ 155 | rule.iter().for_each(|r|matched_triples.push(r)); 156 | } 157 | //check so 158 | if let Some(s_rules) = self.so.get(&triple.s.to_encoded()){ 159 | if let Some(rules) = s_rules.get(&triple.o.to_encoded()) { 160 | rules.iter().for_each(|r| matched_triples.push(r)); 161 | } 162 | } 163 | //check po 164 | if let Some(p_rules) = self.po.get(&triple.p.to_encoded()){ 165 | if let Some(rules) = p_rules.get(&triple.o.to_encoded()) { 166 | rules.iter().for_each(|r| matched_triples.push(r)); 167 | } 168 | } 169 | //check sp 170 | if let Some(s_rules) = self.sp.get(&triple.s.to_encoded()){ 171 | if let Some(rules) = s_rules.get(&triple.p.to_encoded()) { 172 | rules.iter().for_each(|r| matched_triples.push(r)); 173 | } 174 | } 175 | //check spo 176 | if let Some(s_rules) = self.spo_all.get(&triple.s.to_encoded()){ 177 | if let Some(p_rules) = s_rules.get(&triple.p.to_encoded()) { 178 | if let Some(rules) = p_rules.get(&triple.o.to_encoded()) { 179 | rules.iter().for_each(|r| matched_triples.push(r)); 180 | } 181 | } 182 | } 183 | self.spo.iter().for_each(|r| matched_triples.push(r)); 184 | 185 | matched_triples 186 | } 187 | } 188 | 
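// Usage sketch (illustrative only, not part of the original ruleindex.rs): it
// assumes the `Triple::from` constructor and the public `Rule { body, head }`
// fields shown in lib/src/triples.rs, and the example.org IRIs are made up.
// A body pattern with a variable subject, a concrete predicate and a variable
// object is indexed under its encoded predicate, so `find_match` returns the
// rule for any data triple that shares that predicate.
#[cfg(test)]
mod rule_index_usage_sketch {
    use super::*;
    use crate::{Rule, Triple};

    #[test]
    fn find_match_by_predicate() {
        let mut index = RuleIndex::new();
        // {?s <http://example.org/worksFor> ?o} => {?s <http://example.org/memberOf> ?o}
        let body = vec![Triple::from(
            "?s".to_string(),
            "<http://example.org/worksFor>".to_string(),
            "?o".to_string(),
        )];
        let head = Triple::from(
            "?s".to_string(),
            "<http://example.org/memberOf>".to_string(),
            "?o".to_string(),
        );
        index.add(Rule { body, head });

        // The data triple uses the same predicate IRI, so its encoded predicate
        // matches the key under which the rule was indexed.
        let data = Triple::from(
            "<http://example.org/alice>".to_string(),
            "<http://example.org/worksFor>".to_string(),
            "<http://example.org/acme>".to_string(),
        );
        assert_eq!(1, index.find_match(&data).len());
    }
}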
-------------------------------------------------------------------------------- /lib/src/service_composition.rs: -------------------------------------------------------------------------------- 1 | 2 | #[cfg(test)] 3 | mod tests { 4 | use std::collections::HashMap; 5 | use std::rc::Rc; 6 | use crate::{Rule, Triple, TripleStore, TermImpl, VarOrTerm, RuleIndex, TripleIndex, Encoder, SimpleQueryEngine, QueryEngine, Parser, BackwardChainer}; 7 | use crate::csprite::CSprite; 8 | use crate::reasoner::Reasoner; 9 | 10 | #[test] 11 | fn test_eval_backward_rule(){ 12 | let data="@prefix test: \n\ 13 | a test:MLModel.\n\ 14 | a test:Action.\n\ 15 | test:hasInput .\n\ 16 | test:requirement .\n\ 17 | a test:MLModel.\n\ 18 | test:hasInput .\n\ 19 | test:hasOutput .\n\ 20 | test:hasFunction .\n\ 21 | a test:Source.\n\ 22 | test:hasOutput .\n\ 23 | {?source a test:Source. ?source test:hasOutput ?output. }=>{?source a test:Test. }.\n\ 24 | {?x test:hasInput ?input. ?x test:requirement ?req. ?y test:hasOutput ?input. ?y test:hasFunction ?req.}=>{?y test:hasInput ?input.}\n\ 25 | {?source a test:Source. ?source test:hasOutput ?output. ?y test:hasInput ?output }=>{?source a test:NeededInput. }."; 26 | 27 | let mut store = TripleStore::from(data); 28 | let backward_head = Triple{s:VarOrTerm::new_var("?newVar".to_string()),p:VarOrTerm::new_term("a".to_string()),o:VarOrTerm::new_term("test:Test".to_string()), g: None}; 29 | let var_encoded= Encoder::add("?newVar".to_string()); 30 | let result_encoded = Encoder::add("".to_string()); 31 | 32 | let bindings = BackwardChainer::eval_backward(&store.triple_index, &store.rules_index, &backward_head); 33 | println!("Bindings {}", TripleStore::decode_bindings(&bindings)); 34 | let result_bindings = HashMap::from([ 35 | (var_encoded, Vec::from([result_encoded])) 36 | ]); 37 | assert_eq!(result_bindings.get(&12), bindings.get(&12)); 38 | } 39 | #[test] 40 | fn test_eval_forward_rule(){ 41 | let data="@prefix test: \n\ 42 | a test:MLModel.\n\ 43 | a test:Action.\n\ 44 | test:hasInput .\n\ 45 | test:requirement .\n\ 46 | a test:MLModel.\n\ 47 | test:hasInput .\n\ 48 | test:hasOutput .\n\ 49 | test:hasFunction .\n\ 50 | a test:Source.\n\ 51 | test:hasOutput .\n\ 52 | {?source a test:Source. ?source test:hasOutput ?output. }=>{?source a test:Test. }.\n\ 53 | {?x test:hasInput ?input. ?x test:requirement ?req. ?y test:hasOutput ?input. ?y test:hasFunction ?req.}=>{?y test:hasInput ?input.}\n\ 54 | {?source a test:Source. ?source test:hasOutput ?output. ?y test:hasInput ?output }=>{?source . 
}."; 55 | let mut store = TripleStore::from(data); 56 | println!("store size {:?}", store.len()); 57 | store.materialize(); 58 | println!("store size {:?}", store.len()); 59 | match store.query("Select * WHERE{?s ?o}"){ 60 | Ok(result)=>assert_ne!(0, result.len()), 61 | Err(err)=>assert_eq!(0, 1) 62 | } 63 | 64 | } 65 | } -------------------------------------------------------------------------------- /lib/src/time_window.rs: -------------------------------------------------------------------------------- 1 | use std::cell::RefCell; 2 | use deepmesa::lists::LinkedList; 3 | use std::cmp; 4 | use std::collections::HashMap; 5 | use std::hash::Hash; 6 | use std::rc::Rc; 7 | use deepmesa::lists::linkedlist::Node; 8 | 9 | /// A consumer for retrieving new and expired data from the window 10 | pub trait TimeWindowConsumer{ 11 | fn update(&mut self, data: Vec<(i32, Rc)>, ts:&i32); 12 | } 13 | 14 | 15 | pub struct SimpleWindowConsumer{ 16 | windows: Vec>>, 17 | data: Vec<(i32, Rc)> 18 | 19 | } 20 | impl SimpleWindowConsumer{ 21 | pub fn new() -> SimpleWindowConsumer{ 22 | SimpleWindowConsumer{windows: Vec::new(), data: vec![] } 23 | } 24 | } 25 | impl TimeWindowConsumer for SimpleWindowConsumer { 26 | 27 | fn update(&mut self, data: Vec<(i32, Rc)>,_ts:&i32) { 28 | //println!("Received new: {:?}, old: {:?}", new.len(), old.len()); 29 | self.data = data; 30 | } 31 | } 32 | 33 | pub struct TimeWindow { 34 | content: LinkedList<(i32, Rc)>, 35 | consumers: Vec>>>, 36 | width: i32, 37 | slide: i32, 38 | time: i32, 39 | pending_adds: Vec<(i32,Rc)>, 40 | } 41 | 42 | 43 | impl TimeWindow< T> where T: Eq + Hash{ 44 | /// Creates a new time-based window with a certain width and slide 45 | pub fn new(width: i32, slide: i32) -> TimeWindow< T>{ 46 | TimeWindow {content: LinkedList::new(), consumers: Vec::new(), width, slide, time: 0, pending_adds: Vec::new()} 47 | } 48 | /// Adds an item to the window and updates its content, this can either be: 49 | /// - Add the item to the window and to nothing when the new timestamp does not exceed the bounds of the current window 50 | /// - Add the item and update the window, i.e. 
remove old items that have expired based on their timestamp 51 | /// - The item is already in the window but has an updated timestamp, this will update the current item 52 | pub fn add(&mut self, item:T, ts:i32) { 53 | 54 | let rc_item = Rc::new(item.clone()); 55 | 56 | //add the item 57 | self.content.push_tail((ts,rc_item.clone())); 58 | 59 | if self.does_window_trigger(ts){ 60 | self.update_window_open_time(ts); 61 | let window_content = self.progress_time_and_extract_content(&ts); 62 | 63 | let consumers = self.consumers.clone(); 64 | let last_valid_ts = self.get_last_valid_time_for(&ts); 65 | consumers.iter().for_each(|mut item|{ 66 | let mut reference = item.borrow_mut(); 67 | reference.update(window_content.clone(),&last_valid_ts); 68 | }); 69 | 70 | self.pending_adds.clear(); 71 | } 72 | } 73 | 74 | /// Returns the length of the content of the window 75 | pub fn len(&self) -> usize{ 76 | self.content.len() 77 | } 78 | 79 | fn get_last_valid_time_for(&self, new_time: &i32) -> i32{ 80 | cmp::max(0,*new_time - self.width) 81 | } 82 | fn does_window_trigger(&mut self, ts: i32) -> bool { 83 | if ts > self.time + self.width { 84 | true 85 | }else{ 86 | false 87 | } 88 | } 89 | fn update_window_open_time(&mut self, ts: i32){ 90 | let mut residue = (ts - self.width)/self.slide; 91 | if (ts - self.width) % self.slide != 0{ 92 | residue +=1; 93 | } 94 | self.time = residue * self.slide; 95 | } 96 | fn progress_time_and_extract_content(&mut self, ts: &i32) -> Vec<(i32, Rc)>{ let mut window_content = Vec::new(); 97 | while let Some((timestamp, item)) = self.content.front(){ 98 | if *timestamp<= self.get_last_valid_time_for(ts){ 99 | self.content.pop_front(); 100 | }else{ 101 | break; 102 | } 103 | } 104 | self.content.iter().for_each(|(ts,item)| window_content.push((*ts,item.clone()))); 105 | window_content 106 | } 107 | 108 | /// Adds consumer that can be notified with updates 109 | pub fn register_consumer(&mut self, consumer: Rc>>) { 110 | self.consumers.push(consumer); 111 | } 112 | } 113 | 114 | 115 | 116 | #[test] 117 | fn test_new_window(){ 118 | let window : TimeWindow = TimeWindow::new(5, 2); 119 | assert_eq!(window.len(),0); 120 | } 121 | 122 | #[test] 123 | fn test_add_to_window(){ 124 | let mut window : TimeWindow = TimeWindow::new(5, 2); 125 | window.add(100,0); 126 | assert_eq!(window.content.front(),Some(&(0,Rc::from(100)))); 127 | } 128 | 129 | #[test] 130 | fn test_window_shift(){ 131 | let mut window : TimeWindow = TimeWindow::new(2, 2); 132 | window.add(100, 0); 133 | window.add(101, 1); 134 | window.add(102, 2); 135 | window.add(103, 3); 136 | window.progress_time_and_extract_content(&3); 137 | assert_eq!(window.content.front(),Some(&(2,Rc::from(102)))); 138 | } 139 | #[test] 140 | fn test_window_bound_calculation(){ 141 | let mut window : TimeWindow = TimeWindow::new(3, 2); 142 | assert_eq!(false,window.does_window_trigger(2)); 143 | assert_eq!(false,window.does_window_trigger(3)); 144 | assert_eq!(true,window.does_window_trigger(4)); 145 | assert_eq!(true,window.does_window_trigger(5)); 146 | window.update_window_open_time(5); 147 | assert_eq!(false,window.does_window_trigger(5)); 148 | assert_eq!(true,window.does_window_trigger(6)); 149 | assert_eq!(true,window.does_window_trigger(7)); 150 | window.update_window_open_time(8); 151 | assert_eq!(false,window.does_window_trigger(9)); 152 | assert_eq!(true,window.does_window_trigger(10)); 153 | 154 | let mut window : TimeWindow = TimeWindow::new(5, 3); 155 | assert_eq!(false,window.does_window_trigger(2)); 156 | 
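// With width 5 and slide 3 the window initially opens at time 0, so it only
// triggers once a timestamp exceeds 0 + 5: ts = 2 does not, ts = 6 does.
// update_window_open_time(6) then moves the open time to ceil((6 - 5) / 3) * 3 = 3,
// so the next trigger requires ts > 3 + 5 = 8 (hence 7 is false and 9 is true below).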
assert_eq!(true,window.does_window_trigger(6)); 157 | window.update_window_open_time(6); 158 | assert_eq!(false,window.does_window_trigger(7)); 159 | assert_eq!(true,window.does_window_trigger(9)); 160 | window.update_window_open_time(10); 161 | assert_eq!(false,window.does_window_trigger(11)); 162 | assert_eq!(true,window.does_window_trigger(12)); 163 | } 164 | 165 | #[test] 166 | fn test_consumer(){ 167 | let mut window : TimeWindow = TimeWindow::new(2, 2); 168 | let consumer = Rc::new(RefCell::new(SimpleWindowConsumer::new())); 169 | window.register_consumer(consumer.clone()); 170 | assert_eq!(0,consumer.borrow_mut().data.len()); 171 | window.add(100,0); 172 | window.add(101,1); 173 | window.add(102,2); 174 | window.add(103,3); 175 | 176 | assert_eq!(2,consumer.borrow_mut().data.len()); 177 | } 178 | #[test] 179 | fn test_delete(){ 180 | let mut window : TimeWindow = TimeWindow::new(2, 2); 181 | let consumer = Rc::new(RefCell::new(SimpleWindowConsumer::new())); 182 | window.register_consumer(consumer.clone()); 183 | assert_eq!(0,consumer.borrow_mut().data.len()); 184 | window.add(100,0); 185 | window.add(101,1); 186 | window.add(102,2); 187 | window.add(103,3); 188 | assert_eq!(2,window.content.len()); 189 | 190 | assert_eq!(2,consumer.borrow_mut().data.len()); 191 | } 192 | 193 | #[test] 194 | fn test_update(){ 195 | let mut window : TimeWindow = TimeWindow::new(4, 2); 196 | let consumer = Rc::new(RefCell::new(SimpleWindowConsumer::new())); 197 | window.register_consumer(consumer.clone()); 198 | assert_eq!(0,consumer.borrow_mut().data.len()); 199 | window.add(100,0); 200 | window.add(101,1); 201 | window.add(102,2); 202 | window.add(103,3); 203 | assert_eq!(4,window.content.len()); 204 | window.add(100,4); 205 | window.add(100,5); 206 | 207 | assert_eq!(4,window.content.len()); 208 | } 209 | 210 | 211 | 212 | 213 | 214 | -------------------------------------------------------------------------------- /lib/src/triples.rs: -------------------------------------------------------------------------------- 1 | use crate::Encoder; 2 | 3 | #[derive(Debug, Clone, Eq, PartialEq, Hash)] 4 | pub enum VarOrTerm { 5 | Var(Variable), 6 | Term(TermImpl), 7 | // Literal(Literal), 8 | // BlankNode(BlankNode) 9 | } 10 | impl VarOrTerm { 11 | pub fn new_term(iri: String) -> VarOrTerm { 12 | let encoded = Encoder::add(iri); 13 | VarOrTerm::Term(TermImpl { iri: encoded }) 14 | } 15 | pub fn new_var(name: String) -> VarOrTerm { 16 | let encoded = Encoder::add(name); 17 | VarOrTerm::Var(Variable { name: encoded }) 18 | } 19 | pub fn new_encoded_term(iri: usize) -> VarOrTerm { 20 | VarOrTerm::Term(TermImpl { iri }) 21 | } 22 | pub fn new_encoded_var(name: usize) -> VarOrTerm { 23 | VarOrTerm::Var(Variable { name }) 24 | } 25 | pub(crate) fn as_term(&self) -> &TermImpl { 26 | if let VarOrTerm::Term(t) = self { 27 | t 28 | } else { 29 | panic!("Not a term") 30 | } 31 | } 32 | pub(crate) fn as_var(&self) -> &Variable { 33 | if let VarOrTerm::Var(v) = self { 34 | v 35 | } else { 36 | panic!("Not a Var") 37 | } 38 | } 39 | pub fn is_var(&self) -> bool { 40 | match self { 41 | Self::Var(_) => true, 42 | Self::Term(_) => false, 43 | } 44 | } 45 | pub fn is_term(&self) -> bool { 46 | !self.is_var() 47 | } 48 | pub fn to_encoded(&self) -> usize { 49 | match self { 50 | Self::Var(var) => var.name, 51 | Self::Term(term) => term.iri, 52 | } 53 | } 54 | fn rem_first_and_last(value: &str) -> &str { 55 | let mut chars = value.chars(); 56 | chars.next(); 57 | chars.next_back(); 58 | chars.as_str() 59 | } 60 | pub fn 
74 | #[derive(Debug, Clone, Eq, PartialEq, Hash)]
75 | pub struct Variable {
76 |     pub(crate) name: usize,
77 | }
78 | #[derive(Debug, Clone, Eq, PartialEq, Hash)]
79 | pub struct TermImpl {
80 |     pub(crate) iri: usize,
81 | }
82 | // #[derive(Debug, Clone, Eq, PartialEq, Hash)]
83 | // pub struct Literal{
84 | //     pub value: String
85 | // }
86 | // #[derive(Debug, Clone, Eq, PartialEq, Hash)]
87 | // pub struct BlankNode{
88 | //     pub id: String
89 | // }
90 | #[derive(Debug, Clone, Eq, PartialEq, Hash)]
91 | pub struct Triple {
92 |     pub s: VarOrTerm,
93 |     pub p: VarOrTerm,
94 |     pub o: VarOrTerm,
95 |     pub g: Option<VarOrTerm>
96 | }
97 | 
98 | impl Triple {
99 |     pub fn from(
100 |         subject: String,
101 |         property: String,
102 |         object: String
103 |     ) -> Triple {
104 |         Triple {
105 |             s: VarOrTerm::convert(subject),
106 |             p: VarOrTerm::convert(property),
107 |             o: VarOrTerm::convert(object),
108 |             g: None
109 |         }
110 |     }
111 |     pub fn from_with_graph_name(
112 |         subject: String,
113 |         property: String,
114 |         object: String,
115 |         graph_name: String,
116 |     ) -> Triple {
117 |         let mut triple = Self::from(subject, property, object);
118 |         triple.g = Some(VarOrTerm::convert(graph_name));
119 |         triple
120 |     }
121 | }
122 | 
123 | #[derive(Debug, Clone, Eq, PartialEq, Hash)]
124 | pub struct Rule {
125 |     pub body: Vec<Triple>,
126 |     pub head: Triple,
127 | }
128 | 
--------------------------------------------------------------------------------
/lib/src/utils.rs:
--------------------------------------------------------------------------------
1 | use crate::{Encoder, Rule, Triple};
2 | 
3 | pub struct Utils;
4 | 
5 | impl Utils {
6 |     pub fn decode_triple(triple: &Triple) -> String {
7 |         let s = Encoder::decode(&triple.s.to_encoded()).unwrap();
8 |         let p = Encoder::decode(&triple.p.to_encoded()).unwrap();
9 |         let o = Encoder::decode(&triple.o.to_encoded()).unwrap();
10 |         format!("{} {} {}", s, p, o)
11 |     }
12 |     pub fn decode_rule(rule: &Rule) -> String {
13 |         let body = rule.body.iter().map(|t| Self::decode_triple(t) + ",").collect::<String>();
14 |         let head = Self::decode_triple(&rule.head);
15 |         format!("{{{}}}=>{{{}}}", body, head)
16 |     }
17 |     pub fn remove_literal_tags(literal: &str) -> String {
18 |         if literal.contains("^^") {
19 |             let mut split = literal.split("^^");
20 |             if let Some(val) = split.next() {
21 |                 let new_str = &val[1..val.len() - 1];
22 |                 new_str.to_string()
23 |             } else {
24 |                 literal.to_string()
25 |             }
26 |         } else {
27 |             literal.to_string()
28 |         }
29 |     }
30 | }
31 | 
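// Editor's illustrative sketch (not part of the original source): `remove_literal_tags`
// strips the datatype annotation and the surrounding quotes from a typed literal and
// leaves any other input untouched.
//
//   Utils::remove_literal_tags("\"10\"^^<http://www.w3.org/2001/XMLSchema#integer>"); // -> "10"
//   Utils::remove_literal_tags("plain value");                                        // -> "plain value"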
32 | mod test {
33 |     use crate::utils::Utils;
34 | 
35 |     #[test]
36 |     fn test_remove_literal_tags() {
37 |         let literal = "\"10\"^^<http://www.w3.org/2001/XMLSchema#integer>";
38 |         let expected = "10".to_string();
39 |         assert_eq!(expected, Utils::remove_literal_tags(literal));
40 |     }
41 | }
--------------------------------------------------------------------------------
/scripts/buildWebsiteDocs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | cd ../js/
3 | wasm-pack build
4 | cd web
5 | npm run build
6 | cd dist
7 | cp * ../../../docs
--------------------------------------------------------------------------------
/server/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "server"
3 | version = "0.1.0"
4 | edition = "2021"
5 | 
6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
7 | 
8 | [dependencies]
9 | clap = { version = "3.1.8", features = ["derive"] }
10 | pest = "2.0"
11 | pest_derive = "2.0"
12 | roxi = { version = "0.1.0", path = "../lib" }
13 | 
14 | env_logger = "0.9.0"
15 | profiling = "1.0"
16 | spargebra = "0.2.2"
17 | 
18 | 
19 | [features]
20 | profile-with-puffin = ["profiling/profile-with-puffin"]
21 | 
22 | 
23 | [profile.release]
24 | lto = true
25 | opt-level = 3
26 | 
27 | 
28 | 
--------------------------------------------------------------------------------
/server/server.iml:
--------------------------------------------------------------------------------
1 | 
2 | 
3 | 
4 | 
5 | 
6 | 
7 | 
8 | 
9 | 
10 | 
11 | 
--------------------------------------------------------------------------------
/server/src/main.rs:
--------------------------------------------------------------------------------
1 | extern crate pest;
2 | #[macro_use]
3 | extern crate pest_derive;
4 | 
5 | extern crate roxi;
6 | extern crate env_logger;
7 | 
8 | use std::cell::RefCell;
9 | use std::fs;
10 | use std::rc::Rc;
11 | use roxi::ruleindex::RuleIndex;
12 | use roxi::tripleindex::TripleIndex;
13 | use roxi::TripleStore;
14 | 
15 | 
16 | use std::fs::{File, read_to_string};
17 | use std::io::{BufReader, Read};
18 | use clap::Parser;
19 | use env_logger::Env;
20 | use spargebra::Query;
21 | use roxi::encoding::Encoder;
22 | use roxi::parser::Syntax;
23 | use roxi::parser::Parser as TripleParser;
24 | use roxi::sparql::{eval_query, evaluate_plan_and_debug};
25 | 
26 | #[derive(Parser, Debug)]
27 | #[clap(author, version, about, long_about = None)]
28 | struct Args {
29 |     /// File path to the ABox (in TTL format)
30 |     #[clap(short, long)]
31 |     abox: String,
32 | 
33 |     /// File path to the TBox (in TTL or N3 format)
34 |     #[clap(short, long)]
35 |     tbox: String,
36 | 
37 |     // /// SPARQL query to be executed
38 |     // #[clap(short, long)]
39 |     // query: String,
40 | 
41 |     /// Print a trace of the reasoning process
42 |     #[clap(short, long)]
43 |     trace: Option<bool>,
44 | }
45 | 
46 | 
47 | fn main() {
48 |     let args = Args::parse();
49 | 
50 |     let timer = ::std::time::Instant::now();
51 |     if let Some(true) = args.trace {
52 |         env_logger::Builder::from_env(Env::default().default_filter_or("trace")).init();
53 |     }
54 | 
55 |     println!("Loading ABox data in NTriples");
56 |     let mut store = TripleStore::new();
57 |     let file_content = read_to_string(args.abox).unwrap();
58 |     store.load_triples(&file_content, Syntax::NTriples);
59 | 
60 |     println!("Loading rules in N3");
61 | 
62 |     let rules = read_to_string(args.tbox).unwrap();
63 |     store.load_rules(&rules);
64 |     let elapsed = timer.elapsed();
65 | 
66 |     println!("Data Loaded in: {:.2?}", elapsed);
67 | 
68 |     println!("ABox Size: {}", store.len());
69 | 
70 |     println!("Starting materialization");
71 |     let timer2 = ::std::time::Instant::now();
72 |     store.materialize();
73 |     let elapsed2 = timer2.elapsed();
74 |     println!("Materialization Time: {:.2?}", elapsed2);
75 |     //SPARQL query
76 |     // let q2: String = args.query;
77 | 
78 |     // println!("Results for query: {}:", q2);
79 |     // let query = Query::parse(&q2, None).unwrap();
80 |     // let plan = eval_query(&query, &store.triple_index, &mut encoder);
81 |     // let iterator = evaluate_plan_and_debug(&plan, &store.triple_index, &mut encoder);
82 |     // for result in iterator {
83 |     //     println!("Bindings {:?}:", result);
84 |     // }
85 | 
86 |     println!("Content: \n{:?}", store.content_to_string());
87 |     println!("Size Materialized Store: {}", store.len());
88 | }
89 | 
90 | 
--------------------------------------------------------------------------------